xref: /petsc/include/petscsf.h (revision 59af0bd3658d6c64d35e37f76ad6a8a026fa611f)
1 /*
2    A star forest (SF) describes a communication pattern
3 */
4 #if !defined(PETSCSF_H)
5 #define PETSCSF_H
6 #include <petscsys.h>
7 #include <petscsftypes.h>
8 #include <petscvec.h> /* for Vec, VecScatter etc */
9 
10 PETSC_EXTERN PetscClassId PETSCSF_CLASSID;
11 
12 #define PETSCSFBASIC      "basic"
13 #define PETSCSFNEIGHBOR   "neighbor"
14 #define PETSCSFALLGATHERV "allgatherv"
15 #define PETSCSFALLGATHER  "allgather"
16 #define PETSCSFGATHERV    "gatherv"
17 #define PETSCSFGATHER     "gather"
18 #define PETSCSFALLTOALL   "alltoall"
19 #define PETSCSFWINDOW     "window"
20 
21 /*E
22    PetscSFPattern - Pattern of the PetscSF graph
23 
24 $  PETSCSF_PATTERN_GENERAL   - A general graph. One sets the graph with PetscSFSetGraph() and usually does not use this enum directly.
25 $  PETSCSF_PATTERN_ALLGATHER - A graph in which every rank gathers all roots from all ranks (like MPI_Allgather/v). One sets the graph with PetscSFSetGraphWithPattern().
26 $  PETSCSF_PATTERN_GATHER    - A graph in which rank 0 gathers all roots from all ranks (like MPI_Gather/v with root=0). One sets the graph with PetscSFSetGraphWithPattern().
27 $  PETSCSF_PATTERN_ALLTOALL  - A graph in which every rank gathers different roots from all ranks (like MPI_Alltoall). One sets the graph with PetscSFSetGraphWithPattern().
28                                In an ALLTOALL graph, we assume each process has <size> leaves and <size> roots, with each leaf connecting to a remote root. Here <size> is
29                                the size of the communicator. This does not mean one cannot communicate multiple data items between a pair of processes. One just needs to
30                                create a new MPI datatype for the multiple data items, e.g., by MPI_Type_contiguous.
31    Level: beginner
32 
33 .seealso: PetscSFSetGraph(), PetscSFSetGraphWithPattern()
34 E*/
35 typedef enum {PETSCSF_PATTERN_GENERAL=0,PETSCSF_PATTERN_ALLGATHER,PETSCSF_PATTERN_GATHER,PETSCSF_PATTERN_ALLTOALL} PetscSFPattern;
36 
37 /*E
38     PetscSFWindowSyncType - Type of synchronization for PETSCSFWINDOW
39 
40 $  PETSCSF_WINDOW_SYNC_FENCE - simplest model, synchronizing across the communicator
41 $  PETSCSF_WINDOW_SYNC_LOCK - passive model, less synchronous, requires less setup than PETSCSF_WINDOW_SYNC_ACTIVE, but may require more handshakes
42 $  PETSCSF_WINDOW_SYNC_ACTIVE - active model, provides the most information to the MPI implementation, needs to construct 2-way process groups (more setup than PETSCSF_WINDOW_SYNC_LOCK)
43 
44    Level: advanced
45 
46 .seealso: PetscSFWindowSetSyncType(), PetscSFWindowGetSyncType()
47 E*/
48 typedef enum {PETSCSF_WINDOW_SYNC_FENCE,PETSCSF_WINDOW_SYNC_LOCK,PETSCSF_WINDOW_SYNC_ACTIVE} PetscSFWindowSyncType;
49 PETSC_EXTERN const char *const PetscSFWindowSyncTypes[];
50 
51 /*E
52     PetscSFWindowFlavorType - Flavor for the creation of MPI windows for PETSCSFWINDOW
53 
54 $  PETSCSF_WINDOW_FLAVOR_CREATE - Use MPI_Win_create; the window is not reused
55 $  PETSCSF_WINDOW_FLAVOR_DYNAMIC - Use MPI_Win_create_dynamic and dynamically attach pointers
56 $  PETSCSF_WINDOW_FLAVOR_ALLOCATE - Use MPI_Win_allocate
57 $  PETSCSF_WINDOW_FLAVOR_SHARED - Use MPI_Win_allocate_shared
58 
59    Level: advanced
60 
61 .seealso: PetscSFWindowSetFlavorType(), PetscSFWindowGetFlavorType()
62 E*/
63 typedef enum {PETSCSF_WINDOW_FLAVOR_CREATE,PETSCSF_WINDOW_FLAVOR_DYNAMIC,PETSCSF_WINDOW_FLAVOR_ALLOCATE,PETSCSF_WINDOW_FLAVOR_SHARED} PetscSFWindowFlavorType;
64 PETSC_EXTERN const char *const PetscSFWindowFlavorTypes[];
65 
66 /*E
67     PetscSFDuplicateOption - Aspects to preserve when duplicating a PetscSF
68 
69 $  PETSCSF_DUPLICATE_CONFONLY - duplicate the configuration only; the user must call PetscSFSetGraph() on the new PetscSF
70 $  PETSCSF_DUPLICATE_RANKS - communication ranks preserved, but different graph (allows simpler setup after calling PetscSFSetGraph())
71 $  PETSCSF_DUPLICATE_GRAPH - the entire graph is duplicated
72 
73    Level: beginner
74 
75 .seealso: PetscSFDuplicate()
76 E*/
77 typedef enum {PETSCSF_DUPLICATE_CONFONLY,PETSCSF_DUPLICATE_RANKS,PETSCSF_DUPLICATE_GRAPH} PetscSFDuplicateOption;
78 PETSC_EXTERN const char *const PetscSFDuplicateOptions[];
79 
80 PETSC_EXTERN PetscFunctionList PetscSFList;
81 PETSC_EXTERN PetscErrorCode PetscSFRegister(const char[],PetscErrorCode (*)(PetscSF));
82 
83 PETSC_EXTERN PetscErrorCode PetscSFInitializePackage(void);
84 PETSC_EXTERN PetscErrorCode PetscSFFinalizePackage(void);
85 PETSC_EXTERN PetscErrorCode PetscSFCreate(MPI_Comm,PetscSF*);
86 PETSC_EXTERN PetscErrorCode PetscSFDestroy(PetscSF*);
87 PETSC_EXTERN PetscErrorCode PetscSFSetType(PetscSF,PetscSFType);
88 PETSC_EXTERN PetscErrorCode PetscSFGetType(PetscSF,PetscSFType*);
89 PETSC_EXTERN PetscErrorCode PetscSFView(PetscSF,PetscViewer);
90 PETSC_EXTERN PetscErrorCode PetscSFViewFromOptions(PetscSF,PetscObject,const char[]);
91 PETSC_EXTERN PetscErrorCode PetscSFSetUp(PetscSF);
92 PETSC_EXTERN PetscErrorCode PetscSFSetFromOptions(PetscSF);
93 PETSC_EXTERN PetscErrorCode PetscSFDuplicate(PetscSF,PetscSFDuplicateOption,PetscSF*);
94 PETSC_EXTERN PetscErrorCode PetscSFWindowSetSyncType(PetscSF,PetscSFWindowSyncType);
95 PETSC_EXTERN PetscErrorCode PetscSFWindowGetSyncType(PetscSF,PetscSFWindowSyncType*);
96 PETSC_EXTERN PetscErrorCode PetscSFWindowSetFlavorType(PetscSF,PetscSFWindowFlavorType);
97 PETSC_EXTERN PetscErrorCode PetscSFWindowGetFlavorType(PetscSF,PetscSFWindowFlavorType*);
98 PETSC_EXTERN PetscErrorCode PetscSFWindowSetInfo(PetscSF,MPI_Info);
99 PETSC_EXTERN PetscErrorCode PetscSFWindowGetInfo(PetscSF,MPI_Info*);
100 PETSC_EXTERN PetscErrorCode PetscSFSetRankOrder(PetscSF,PetscBool);
101 PETSC_EXTERN PetscErrorCode PetscSFSetGraph(PetscSF,PetscInt,PetscInt,const PetscInt*,PetscCopyMode,const PetscSFNode*,PetscCopyMode);
102 PETSC_EXTERN PetscErrorCode PetscSFSetGraphWithPattern(PetscSF,PetscLayout,PetscSFPattern);
103 PETSC_EXTERN PetscErrorCode PetscSFGetGraph(PetscSF,PetscInt*,PetscInt*,const PetscInt**,const PetscSFNode**);
104 PETSC_EXTERN PetscErrorCode PetscSFGetLeafRange(PetscSF,PetscInt*,PetscInt*);
105 PETSC_EXTERN PetscErrorCode PetscSFCreateEmbeddedSF(PetscSF,PetscInt,const PetscInt*,PetscSF*);
106 PETSC_EXTERN PetscErrorCode PetscSFCreateEmbeddedLeafSF(PetscSF,PetscInt,const PetscInt *, PetscSF *);
107 PETSC_EXTERN PetscErrorCode PetscSFReset(PetscSF);
108 PETSC_EXTERN PetscErrorCode PetscSFSetUpRanks(PetscSF,MPI_Group);
109 PETSC_EXTERN PetscErrorCode PetscSFGetRootRanks(PetscSF,PetscInt*,const PetscMPIInt**,const PetscInt**,const PetscInt**,const PetscInt**);
110 PETSC_EXTERN PetscErrorCode PetscSFGetLeafRanks(PetscSF,PetscInt*,const PetscMPIInt**,const PetscInt**,const PetscInt**);
111 PETSC_EXTERN PetscErrorCode PetscSFGetGroups(PetscSF,MPI_Group*,MPI_Group*);
112 PETSC_EXTERN PetscErrorCode PetscSFGetMultiSF(PetscSF,PetscSF*);
113 PETSC_EXTERN PetscErrorCode PetscSFCreateInverseSF(PetscSF,PetscSF*);
114 
115 PETSC_EXTERN PetscErrorCode PetscSFSetGraphLayout(PetscSF,PetscLayout,PetscInt,const PetscInt*,PetscCopyMode,const PetscInt*);
116 PETSC_EXTERN PetscErrorCode PetscSFSetGraphSection(PetscSF,PetscSection,PetscSection);
117 PETSC_EXTERN PetscErrorCode PetscSFCreateRemoteOffsets(PetscSF, PetscSection, PetscSection, PetscInt **);
118 PETSC_EXTERN PetscErrorCode PetscSFDistributeSection(PetscSF, PetscSection, PetscInt **, PetscSection);
119 PETSC_EXTERN PetscErrorCode PetscSFCreateSectionSF(PetscSF, PetscSection, PetscInt [], PetscSection, PetscSF *);
120 
121 /* Broadcast rootdata to leafdata, combining incoming root values into leafdata with the provided operation */
122 PETSC_EXTERN PetscErrorCode PetscSFBcastAndOpBegin(PetscSF,MPI_Datatype,const void*,void*,MPI_Op)
123   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
124 PETSC_EXTERN PetscErrorCode PetscSFBcastAndOpEnd(PetscSF,MPI_Datatype,const void*,void*,MPI_Op)
125   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
126 PETSC_EXTERN PetscErrorCode PetscSFBcastAndOpWithMemTypeBegin(PetscSF,MPI_Datatype,PetscMemType,const void*,PetscMemType,void*,MPI_Op)
127   PetscAttrMPIPointerWithType(4,2) PetscAttrMPIPointerWithType(6,2);
128 
129 /* Reduce leafdata into rootdata using provided operation */
130 PETSC_EXTERN PetscErrorCode PetscSFReduceBegin(PetscSF,MPI_Datatype,const void*,void *,MPI_Op)
131   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
132 PETSC_EXTERN PetscErrorCode PetscSFReduceEnd(PetscSF,MPI_Datatype,const void*,void*,MPI_Op)
133   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
134 PETSC_EXTERN PetscErrorCode PetscSFReduceWithMemTypeBegin(PetscSF,MPI_Datatype,PetscMemType,const void*,PetscMemType,void *,MPI_Op)
135   PetscAttrMPIPointerWithType(4,2) PetscAttrMPIPointerWithType(6,2);
136 /* Atomically modifies (using provided operation) rootdata using leafdata from each leaf, value at root at time of modification is returned in leafupdate. */
137 PETSC_EXTERN PetscErrorCode PetscSFFetchAndOpBegin(PetscSF,MPI_Datatype,void*,const void*,void*,MPI_Op)
138   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2) PetscAttrMPIPointerWithType(5,2);
139 PETSC_EXTERN PetscErrorCode PetscSFFetchAndOpEnd(PetscSF,MPI_Datatype,void*,const void*,void*,MPI_Op)
140   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2) PetscAttrMPIPointerWithType(5,2);
141 /* Compute the degree of every root vertex (number of leaves in its star) */
142 PETSC_EXTERN PetscErrorCode PetscSFComputeDegreeBegin(PetscSF,const PetscInt**);
143 PETSC_EXTERN PetscErrorCode PetscSFComputeDegreeEnd(PetscSF,const PetscInt**);
144 PETSC_EXTERN PetscErrorCode PetscSFComputeMultiRootOriginalNumbering(PetscSF,const PetscInt[],PetscInt*,PetscInt*[]);
145 /* Concatenate data from all leaves into roots */
146 PETSC_EXTERN PetscErrorCode PetscSFGatherBegin(PetscSF,MPI_Datatype,const void*,void*)
147   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
148 PETSC_EXTERN PetscErrorCode PetscSFGatherEnd(PetscSF,MPI_Datatype,const void*,void*)
149   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
150 /* Distribute distinct values to each leaf from roots */
151 PETSC_EXTERN PetscErrorCode PetscSFScatterBegin(PetscSF,MPI_Datatype,const void*,void*)
152   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
153 PETSC_EXTERN PetscErrorCode PetscSFScatterEnd(PetscSF,MPI_Datatype,const void*,void*)
154   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
155 
156 PETSC_EXTERN PetscErrorCode PetscSFCompose(PetscSF,PetscSF,PetscSF*);
157 PETSC_EXTERN PetscErrorCode PetscSFComposeInverse(PetscSF,PetscSF,PetscSF*);
158 
159 #if defined(MPI_REPLACE)
160 #  define MPIU_REPLACE MPI_REPLACE
161 #else
162 /* When using an old MPI such that MPI_REPLACE is not defined, we do not pass MPI_REPLACE to MPI at all.  Instead, we
163  * use it as a flag for our own reducer in the PETSCSFBASIC implementation.  This could be any unique value unlikely to
164  * collide with another MPI_Op so we'll just use the value that has been used by every version of MPICH since
165  * MPICH2-1.0.6. */
166 #  define MPIU_REPLACE (MPI_Op)(0x5800000d)
167 #endif
168 
/* Deprecated alias kept for backward compatibility: forwards directly to PetscSFGetRootRanks() */
169 PETSC_DEPRECATED_FUNCTION("Use PetscSFGetRootRanks (since v3.12)")
170 PETSC_STATIC_INLINE PetscErrorCode PetscSFGetRanks(PetscSF sf,PetscInt *nranks,const PetscMPIInt **ranks,const PetscInt **roffset,const PetscInt **rmine,const PetscInt **rremote) {
171   return PetscSFGetRootRanks(sf,nranks,ranks,roffset,rmine,rremote);
172 }
173 
174 /*@C
175    PetscSFBcastBegin - begin pointwise broadcast to be concluded with call to PetscSFBcastEnd()
176 
177    Collective on PetscSF
178 
179    Input Arguments:
180 +  sf - star forest on which to communicate
181 .  unit - data type associated with each node
182 -  rootdata - buffer to broadcast
183 
184    Output Arguments:
185 .  leafdata - buffer to update with values from each leaf's respective root
186 
187    Notes:
188    Equivalent to PetscSFBcastAndOpBegin() with op = MPIU_REPLACE, i.e. each leaf value is overwritten by its root's value.
189 
190    Level: intermediate
191 
192 .seealso: PetscSFCreate(), PetscSFSetGraph(), PetscSFView(), PetscSFBcastEnd(), PetscSFReduceBegin(), PetscSFBcastAndOpBegin()
193 @*/
191 PETSC_STATIC_INLINE PetscErrorCode PetscSFBcastBegin(PetscSF sf,MPI_Datatype unit,const void* rootdata,void* leafdata) {
192   return PetscSFBcastAndOpBegin(sf,unit,rootdata,leafdata,MPIU_REPLACE);
193 }
194 
/* Like PetscSFBcastBegin() (broadcast with MPIU_REPLACE), but the caller explicitly supplies the PetscMemType of the rootdata and leafdata buffers; conclude with PetscSFBcastEnd() */
195 PETSC_STATIC_INLINE PetscErrorCode PetscSFBcastWithMemTypeBegin(PetscSF sf,MPI_Datatype unit,PetscMemType rootmtype,const void* rootdata,PetscMemType leafmtype,void* leafdata) {
196   return PetscSFBcastAndOpWithMemTypeBegin(sf,unit,rootmtype,rootdata,leafmtype,leafdata,MPIU_REPLACE);
197 }
198 
199 /*@C
200    PetscSFBcastEnd - end a broadcast operation started with PetscSFBcastBegin()
201 
202    Collective on PetscSF
203 
204    Input Arguments:
205 +  sf - star forest
206 .  unit - data type
207 -  rootdata - buffer to broadcast
208 
209    Output Arguments:
210 .  leafdata - buffer to update with values from each leaf's respective root
211 
212    Notes:
213    Equivalent to PetscSFBcastAndOpEnd() with op = MPIU_REPLACE; must be paired with a preceding PetscSFBcastBegin() using the same arguments.
214 
215    Level: intermediate
216 
217 .seealso: PetscSFSetGraph(), PetscSFReduceEnd()
218 @*/
216 PETSC_STATIC_INLINE PetscErrorCode PetscSFBcastEnd(PetscSF sf,MPI_Datatype unit,const void* rootdata,void* leafdata) {
217   return PetscSFBcastAndOpEnd(sf,unit,rootdata,leafdata,MPIU_REPLACE);
218 }
219 
220 #endif
221