xref: /petsc/include/petscsftypes.h (revision a336c15037c72f93cd561f5a5e11e93175f2efd9)
1 #pragma once
2 
3 /* MANSEC = Vec */
4 /* SUBMANSEC = PetscSF */
5 
6 /*S
7    PetscSF - PETSc object for managing the communication of certain entries of arrays and `Vec` between MPI processes.
8 
9    Level: intermediate
10 
11   `PetscSF` uses the concept of star forests to indicate and determine the communication patterns concisely and efficiently.
12   A star forest <https://en.wikipedia.org/wiki/Star_(graph_theory)> is simply a collection of trees of height 1. The leaf nodes represent
13   "ghost locations" for the root nodes.
14 
15   The standard usage paradigm for `PetscSF` is to provide the communication pattern with `PetscSFSetGraph()` or `PetscSFSetGraphWithPattern()` and
16   then perform the communication using `PetscSFBcastBegin()` and `PetscSFBcastEnd()`, `PetscSFReduceBegin()` and `PetscSFReduceEnd()`.
17 
18 .seealso: [](sec_petscsf), `PetscSFCreate()`, `PetscSFSetGraph()`, `PetscSFSetGraphWithPattern()`, `PetscSFBcastBegin()`, `PetscSFBcastEnd()`,
19           `PetscSFReduceBegin()`, `PetscSFReduceEnd()`, `VecScatter`, `VecScatterCreate()`
20 S*/
21 typedef struct _p_PetscSF *PetscSF; /* opaque handle to the SF object documented in the manual page above */
22 
23 /*J
24   PetscSFType - String with the name of a `PetscSF` type. Each `PetscSFType` uses different mechanisms to perform the communication.
25 
26   Level: beginner
27 
28   Available Types:
29 + `PETSCSFBASIC`      - use MPI sends and receives
30 . `PETSCSFNEIGHBOR`   - use MPI_Neighbor operations
31 . `PETSCSFALLGATHERV` - use MPI_Allgatherv operations
32 . `PETSCSFALLGATHER`  - use MPI_Allgather operations
33 . `PETSCSFGATHERV`    - use MPI_Igatherv and MPI_Iscatterv operations
34 . `PETSCSFGATHER`     - use MPI_Igather and MPI_Iscatter operations
35 . `PETSCSFALLTOALL`   - use MPI_Ialltoall operations
36 - `PETSCSFWINDOW`     - use MPI_Win operations
37 
38   Note:
39   Some `PetscSFType` only provide specialized code for a subset of the `PetscSF` operations and use `PETSCSFBASIC` for the others.
40 
41 .seealso: [](sec_petscsf), `PetscSFSetType()`, `PetscSF`
42 J*/
43 typedef const char *PetscSFType; /* registered type name; the available built-in names are listed in the manual page above */
44 #define PETSCSFBASIC      "basic"
45 #define PETSCSFNEIGHBOR   "neighbor"
46 #define PETSCSFALLGATHERV "allgatherv"
47 #define PETSCSFALLGATHER  "allgather"
48 #define PETSCSFGATHERV    "gatherv"
49 #define PETSCSFGATHER     "gather"
50 #define PETSCSFALLTOALL   "alltoall"
51 #define PETSCSFWINDOW     "window"
52 
53 /*S
54    PetscSFNode - specifier of MPI rank owner and local index for array or `Vec` entry locations that are to be communicated with a `PetscSF`
55 
56    Level: beginner
57 
58   Sample Usage:
59 .vb
60     PetscSFNode    *remote;
61     PetscCall(PetscMalloc1(nleaves,&remote));
62     for (i=0; i<nleaves; i++) {
63       remote[i].rank = i;
64       remote[i].index = rank;
65     }
66 .ve
67 
68   Sample Fortran Usage:
69 .vb
70     type(PetscSFNode) remote(6)
71     remote(1)%rank  = modulo(rank+size-1,size)
72     remote(1)%index = 1 * stride
73 .ve
74 
75   Notes:
76   Use `MPIU_SF_NODE` when performing MPI operations on arrays of `PetscSFNode`
77 
78   Generally the values of `rank` should be in $[0, size)$ and the value of `index` should be greater than or equal to 0, but there are some situations that violate this.
79 
80 .seealso: [](sec_petscsf), `PetscSF`, `PetscSFSetGraph()`
81 S*/
82 typedef struct {
83   PetscInt rank;  /* MPI rank of the process that owns the referenced entry */
84   PetscInt index; /* local index of the entry on the owning rank */
85 } PetscSFNode;
86 
87 #define MPIU_SF_NODE MPIU_2INT /* MPI datatype for communicating arrays of PetscSFNode (a pair of PetscInt); see the note in the PetscSFNode manual page above */
88 
89 typedef enum { /* direction of data movement in a PetscSF communication */
90   PETSCSF_ROOT2LEAF = 0, /* data flows from roots to leaves, e.g. PetscSFBcastBegin() */
91   PETSCSF_LEAF2ROOT = 1  /* data flows from leaves to roots, e.g. PetscSFReduceBegin() */
92 } PetscSFDirection;
93 typedef enum { /* kind of communication operation being performed */
94   PETSCSF_BCAST  = 0, /* broadcast root values to leaves */
95   PETSCSF_REDUCE = 1, /* reduce leaf values into roots */
96   PETSCSF_FETCH  = 2  /* fetch root values while combining in leaf updates; presumably backs PetscSFFetchAndOp*() — confirm against callers */
97 } PetscSFOperation;
98 /* When doing device-aware MPI, a backend refers to the SF/device interface */
99 typedef enum {
100   PETSCSF_BACKEND_INVALID = 0, /* no device backend selected */
101   PETSCSF_BACKEND_CUDA    = 1, /* NVIDIA CUDA */
102   PETSCSF_BACKEND_HIP     = 2, /* AMD HIP */
103   PETSCSF_BACKEND_KOKKOS  = 3  /* Kokkos (portable device abstraction) */
104 } PetscSFBackend;
105 typedef struct _n_PetscSFLink *PetscSFLink; /* opaque handle to an internal communication link; the _n_ prefix marks it private to the implementation */
106 
107 /*S
108   VecScatter - Object used to manage communication of data
109   between vectors in parallel or between parallel and sequential vectors. Manages both scatters and gathers
110 
111   Level: beginner
112 
113   Note:
114   This is an alias for `PetscSF`.
115 
116 .seealso: [](sec_petscsf), `Vec`, `PetscSF`, `VecScatterCreate()`, `VecScatterBegin()`, `VecScatterEnd()`
117 S*/
118 typedef PetscSF VecScatter; /* alias for PetscSF, per the note in the manual page above */
119 
120 /*J
121   VecScatterType - String with the name of a PETSc vector scatter type
122 
123   Level: beginner
124 
125   Note:
126   This is an alias for `PetscSFType`
127 
128 .seealso: [](sec_petscsf), `PetscSFType`, `VecScatterSetType()`, `VecScatter`, `VecScatterCreate()`, `VecScatterDestroy()`
129 J*/
130 typedef PetscSFType VecScatterType; /* alias for PetscSFType, per the note in the manual page above */
131