/* petsc/include/petscsftypes.h (revision bfe80ac4a46d58cb7760074b25f5e81b2f541d8a) */
1 #pragma once
2 
3 /* MANSEC = Vec */
4 /* SUBMANSEC = PetscSF */
5 
/*S
   PetscSF - PETSc object for setting up and managing the communication of certain entries of arrays and `Vec` between MPI ranks.

   Level: intermediate

  `PetscSF` uses the concept of star forests to indicate and determine the communication patterns concisely and efficiently.
  A star <https://en.wikipedia.org/wiki/Star_(graph_theory)> forest is simply a collection of trees of height 1. The leaf nodes represent
  "ghost locations" for the root nodes.

.seealso: `PetscSFCreate()`, `VecScatter`, `VecScatterCreate()`
S*/
typedef struct _p_PetscSF *PetscSF;
18 
/*J
    PetscSFType - String with the name of a `PetscSF` type

   Level: beginner

.seealso: `PetscSFSetType()`, `PetscSF`
J*/
typedef const char *PetscSFType;
/* Registered implementation names, usable as the argument to PetscSFSetType() */
#define PETSCSFBASIC      "basic"
#define PETSCSFNEIGHBOR   "neighbor"
#define PETSCSFALLGATHERV "allgatherv"
#define PETSCSFALLGATHER  "allgather"
#define PETSCSFGATHERV    "gatherv"
#define PETSCSFGATHER     "gather"
#define PETSCSFALLTOALL   "alltoall"
#define PETSCSFWINDOW     "window"
35 
/*S
   PetscSFNode - specifier of owner and index

   Level: beginner

  Sample Usage:
.vb
    PetscSFNode    *remote;
    PetscCall(PetscMalloc1(nleaves,&remote));
    for (i=0; i<size; i++) {
      remote[i].rank = i;
      remote[i].index = rank;
    }
.ve

  Sample Fortran Usage:
.vb
    type(PetscSFNode) remote(6)
    remote(1)%rank  = modulo(rank+size-1,size)
    remote(1)%index = 1 * stride
.ve

  Notes:
  Use `MPIU_SF_NODE` when performing MPI operations on arrays of `PetscSFNode`

  Generally the values of `rank` should be in $[0, size)$ and the value of `index` greater than or equal to 0, but there are some situations that violate this.

.seealso: `PetscSF`, `PetscSFSetGraph()`
S*/
typedef struct {
  PetscInt rank;  /* Rank of owner */
  PetscInt index; /* Index of node on rank */
} PetscSFNode;

/* MPI datatype describing a PetscSFNode (two PetscInt fields).
   NOTE(review): assumes MPIU_2INT matches a pair of PetscInt in both 32- and
   64-bit-index builds — confirm against the MPIU_2INT definition in petscsys. */
#define MPIU_SF_NODE MPIU_2INT
71 
/* Direction of data movement through the star forest */
typedef enum {
  PETSCSF_ROOT2LEAF = 0, /* data moves from root locations to leaf locations */
  PETSCSF_LEAF2ROOT = 1  /* data moves from leaf locations to root locations */
} PetscSFDirection;
/* Kind of communication operation being performed over the star forest */
typedef enum {
  PETSCSF_BCAST  = 0, /* broadcast */
  PETSCSF_REDUCE = 1, /* reduction */
  PETSCSF_FETCH  = 2  /* presumably fetch-and-op (fetch value, then combine) — verify against SF implementation */
} PetscSFOperation;
/* When doing device-aware MPI, a backend refers to the SF/device interface */
typedef enum {
  PETSCSF_BACKEND_INVALID = 0, /* no device backend selected */
  PETSCSF_BACKEND_CUDA    = 1,
  PETSCSF_BACKEND_HIP     = 2,
  PETSCSF_BACKEND_KOKKOS  = 3
} PetscSFBackend;
/* Opaque handle to an SF communication link; the `_n_` prefix suggests this is
   internal to the SF implementations — not part of the public API */
typedef struct _n_PetscSFLink *PetscSFLink;
89 
/*S
     VecScatter - Object used to manage communication of data
     between vectors in parallel or between parallel and sequential vectors. Manages both scatters and gathers

   Level: beginner

   Note:
   This is an alias for `PetscSF`

.seealso: `Vec`, `PetscSF`, `VecScatterCreate()`, `VecScatterBegin()`, `VecScatterEnd()`
S*/
typedef PetscSF VecScatter;
102 
/*J
   VecScatterType - String with the name of a PETSc vector scatter type

   Level: beginner

   Note:
   This is an alias for `PetscSFType`

.seealso: `PetscSFType`, `VecScatterSetType()`, `VecScatter`, `VecScatterCreate()`, `VecScatterDestroy()`
J*/
typedef PetscSFType VecScatterType;
114