xref: /petsc/include/petscsf.h (revision 670f3ff9a12f3667ff7d4cebfc50ebc8579c9e33)
1 /*
2    A star forest (SF) describes a communication pattern
3 */
4 #if !defined(__PETSCSF_H)
5 #define __PETSCSF_H
6 #include "petscsys.h"
7 PETSC_EXTERN_CXX_BEGIN
8 
9 extern PetscClassId PETSCSF_CLASSID;
10 
11 /*S
12    PetscSF - PETSc object for communication using star forests
13 
14    Level: intermediate
15 
16   Concepts: star forest
17 
18 .seealso: PetscSFCreate()
19 S*/
20 typedef struct _p_PetscSF* PetscSF;
21 
22 /*S
23    PetscSFNode - specifier of owner and index
24 
25    Level: beginner
26 
27   Concepts: indexing, stride, distribution
28 
29 .seealso: PetscSFSetGraph()
30 S*/
31 typedef struct {
32   PetscInt rank;                /* Rank of the process that owns this node (stored as PetscInt, not PetscMPIInt) */
33   PetscInt index;               /* Index of the node within the owning rank's local space */
34 } PetscSFNode;
35 
36 /*E
37     PetscSFSynchronizationType - Type of synchronization for PetscSF
38 
39 $  PETSCSF_SYNCHRONIZATION_FENCE - simplest model, synchronizing across communicator
40 $  PETSCSF_SYNCHRONIZATION_LOCK - passive model, less synchronous, requires less setup than PETSCSF_SYNCHRONIZATION_ACTIVE, but may require more handshakes
41 $  PETSCSF_SYNCHRONIZATION_ACTIVE - active model, provides most information to MPI implementation, needs to construct 2-way process groups (more setup than PETSCSF_SYNCHRONIZATION_LOCK)
42 
43    Level: beginner
44 
45 .seealso: PetscSFSetSynchronizationType()
46 E*/
47 typedef enum {PETSCSF_SYNCHRONIZATION_FENCE,PETSCSF_SYNCHRONIZATION_LOCK,PETSCSF_SYNCHRONIZATION_ACTIVE} PetscSFSynchronizationType;
/* NOTE(review): presumably the string names for the PetscSFSynchronizationType values above,
   indexed by enum value (defined in the implementation file) -- verify against the .c source */
48 extern const char *const PetscSFSynchronizationTypes[];
49 
50 #if !defined(PETSC_HAVE_MPI_WIN_CREATE) /* The intent here is to be able to compile even without a complete MPI. */
51 typedef struct MPI_Win_MISSING *MPI_Win;  /* opaque stand-in so the MPI_Win prototypes below remain compilable */
52 #endif
53 
54 extern PetscErrorCode PetscSFInitializePackage(const char*);
55 extern PetscErrorCode PetscSFFinalizePackage(void);
56 extern PetscErrorCode PetscSFCreate(MPI_Comm comm,PetscSF*);
57 extern PetscErrorCode PetscSFDestroy(PetscSF*);
58 extern PetscErrorCode PetscSFView(PetscSF,PetscViewer);
59 extern PetscErrorCode PetscSFSetFromOptions(PetscSF);
60 extern PetscErrorCode PetscSFSetSynchronizationType(PetscSF,PetscSFSynchronizationType);
61 extern PetscErrorCode PetscSFSetRankOrder(PetscSF,PetscBool);
62 extern PetscErrorCode PetscSFSetGraph(PetscSF,PetscInt nroots,PetscInt nleaves,const PetscInt *ilocal,PetscCopyMode modelocal,const PetscSFNode *remote,PetscCopyMode moderemote);
63 extern PetscErrorCode PetscSFGetGraph(PetscSF,PetscInt *nroots,PetscInt *nleaves,const PetscInt **ilocal,const PetscSFNode **iremote);
64 extern PetscErrorCode PetscSFCreateEmbeddedSF(PetscSF,PetscInt nroots,const PetscInt *selected,PetscSF *newsf);
65 extern PetscErrorCode PetscSFCreateArray(PetscSF,MPI_Datatype,void*,void*);
66 extern PetscErrorCode PetscSFDestroyArray(PetscSF,MPI_Datatype,void*,void*);
67 extern PetscErrorCode PetscSFReset(PetscSF);
68 extern PetscErrorCode PetscSFGetRanks(PetscSF,PetscInt*,const PetscMPIInt**,const PetscInt**,const PetscMPIInt**,const PetscMPIInt**);
69 extern PetscErrorCode PetscSFGetDataTypes(PetscSF,MPI_Datatype,const MPI_Datatype**,const MPI_Datatype**);
70 extern PetscErrorCode PetscSFGetWindow(PetscSF,MPI_Datatype,void*,PetscBool,PetscMPIInt,PetscMPIInt,PetscMPIInt,MPI_Win*);
71 extern PetscErrorCode PetscSFFindWindow(PetscSF,MPI_Datatype,const void*,MPI_Win*);
72 extern PetscErrorCode PetscSFRestoreWindow(PetscSF,MPI_Datatype,const void*,PetscBool,PetscMPIInt,MPI_Win*);
73 extern PetscErrorCode PetscSFGetGroups(PetscSF,MPI_Group*,MPI_Group*);
74 extern PetscErrorCode PetscSFGetMultiSF(PetscSF,PetscSF*);
75 extern PetscErrorCode PetscSFCreateInverseSF(PetscSF,PetscSF*);
76 
77 /* Broadcast rootdata to leafdata */
78 extern PetscErrorCode PetscSFBcastBegin(PetscSF,MPI_Datatype,const void *rootdata,void *leafdata);
79 extern PetscErrorCode PetscSFBcastEnd(PetscSF,MPI_Datatype,const void *rootdata,void *leafdata);
80 /* Reduce leafdata into rootdata using provided operation */
81 extern PetscErrorCode PetscSFReduceBegin(PetscSF,MPI_Datatype,const void *leafdata,void *rootdata,MPI_Op);
82 extern PetscErrorCode PetscSFReduceEnd(PetscSF,MPI_Datatype,const void *leafdata,void *rootdata,MPI_Op);
83 /* Atomically modifies (using the provided operation) rootdata using leafdata from each leaf; the value at the root at the time of modification is returned in leafupdate. */
84 extern PetscErrorCode PetscSFFetchAndOpBegin(PetscSF,MPI_Datatype,void *rootdata,const void *leafdata,void *leafupdate,MPI_Op);
85 extern PetscErrorCode PetscSFFetchAndOpEnd(PetscSF,MPI_Datatype,void *rootdata,const void *leafdata,void *leafupdate,MPI_Op);
86 /* Compute the degree of every root vertex (number of leaves in its star) */
87 extern PetscErrorCode PetscSFComputeDegreeBegin(PetscSF,const PetscInt **degree);
88 extern PetscErrorCode PetscSFComputeDegreeEnd(PetscSF,const PetscInt **degree);
89 /* Concatenate data from all leaves into roots */
90 extern PetscErrorCode PetscSFGatherBegin(PetscSF,MPI_Datatype,const void *leafdata,void *multirootdata);
91 extern PetscErrorCode PetscSFGatherEnd(PetscSF,MPI_Datatype,const void *leafdata,void *multirootdata);
92 /* Distribute distinct values to each leaf from roots */
93 extern PetscErrorCode PetscSFScatterBegin(PetscSF,MPI_Datatype,const void *multirootdata,void *leafdata);
94 extern PetscErrorCode PetscSFScatterEnd(PetscSF,MPI_Datatype,const void *multirootdata,void *leafdata);
95 
96 PETSC_EXTERN_CXX_END
97 
98 #endif
99