/*
   A star forest (SF) describes a communication pattern
*/
#if !defined(__PETSCSF_H)
#define __PETSCSF_H
#include <petscsys.h>

/* Class id registered for PetscSF objects (used for logging/type checking) */
PETSC_EXTERN PetscClassId PETSCSF_CLASSID;

/*S
   PetscSF - PETSc object for setting up and managing the communication of certain entries of arrays and Vecs between MPI processes.

   Level: intermediate

   Concepts: star forest

   PetscSF uses the concept of star forests to indicate and determine the communication patterns concisely and efficiently.
   A star  http://en.wikipedia.org/wiki/Star_(graph_theory)  forest is simply a collection of trees of height 1. The leaf nodes represent
   "ghost locations" for the root nodes.

.seealso: PetscSFCreate(), VecScatter, VecScatterCreate()
S*/
typedef struct _p_PetscSF* PetscSF;

/*J
    PetscSFType - String with the name of a PetscSF method or the creation function
       with an optional dynamic library name, for example
       http://www.mcs.anl.gov/petsc/lib.so:mysfcreate()

   Level: beginner

   Notes: The two approaches provided are
$     PETSCSFBASIC which uses MPI 1 message passing to perform the communication and
$     PETSCSFWINDOW which uses MPI 2 one-sided operations to perform the communication, this may be more efficient,
$        but may not be available for all MPI distributions. In particular OpenMPI has bugs in its one-sided
$        operations that prevent its use.

.seealso: PetscSFSetType(), PetscSF
J*/
typedef const char *PetscSFType;
#define PETSCSFBASIC  "basic"
#define PETSCSFWINDOW "window"

/*S
   PetscSFNode - specifier of owner and index

   Level: beginner

   Concepts: indexing, stride, distribution

.seealso: PetscSFSetGraph()
S*/
typedef struct {
  PetscInt rank;                /* Rank of owner */
  PetscInt index;               /* Index of node on rank */
} PetscSFNode;

/*E
    PetscSFWindowSyncType - Type of synchronization for PETSCSFWINDOW

$  PETSCSF_WINDOW_SYNC_FENCE - simplest model, synchronizing across communicator
$  PETSCSF_WINDOW_SYNC_LOCK - passive model, less synchronous, requires less setup than PETSCSF_WINDOW_SYNC_ACTIVE, but may require more handshakes
$  PETSCSF_WINDOW_SYNC_ACTIVE - active model, provides most information to MPI implementation, needs to construct 2-way process groups (more setup than PETSCSF_WINDOW_SYNC_LOCK)

   Level: advanced

.seealso: PetscSFWindowSetSyncType(), PetscSFWindowGetSyncType()
E*/
typedef enum {PETSCSF_WINDOW_SYNC_FENCE,PETSCSF_WINDOW_SYNC_LOCK,PETSCSF_WINDOW_SYNC_ACTIVE} PetscSFWindowSyncType;
/* Human-readable names for PetscSFWindowSyncType values (indexed by enum value) */
PETSC_EXTERN const char *const PetscSFWindowSyncTypes[];

/*E
    PetscSFDuplicateOption - Aspects to preserve when duplicating a PetscSF

$  PETSCSF_DUPLICATE_CONFONLY - configuration only, user must call PetscSFSetGraph()
$  PETSCSF_DUPLICATE_RANKS - communication ranks preserved, but different graph (allows simpler setup after calling PetscSFSetGraph())
$  PETSCSF_DUPLICATE_GRAPH - entire graph duplicated

   Level: beginner

.seealso: PetscSFDuplicate()
E*/
typedef enum {PETSCSF_DUPLICATE_CONFONLY,PETSCSF_DUPLICATE_RANKS,PETSCSF_DUPLICATE_GRAPH} PetscSFDuplicateOption;
/* Human-readable names for PetscSFDuplicateOption values (indexed by enum value) */
PETSC_EXTERN const char *const PetscSFDuplicateOptions[];

/* NOTE(review): this identifier looks like a mangled product of the PetscFList -> PetscFunctionList
   rename (possibly intended to be PetscSFList) -- confirm against the defining .c file before use */
PETSC_EXTERN PetscFunctionList PetscSFunctionList;
PETSC_EXTERN PetscErrorCode PetscSFRegisterDestroy(void);
PETSC_EXTERN PetscErrorCode PetscSFRegisterAll(const char[]);
PETSC_EXTERN PetscErrorCode PetscSFRegister(const char[],const char[],const char[],PetscErrorCode (*)(PetscSF));

/*MC
   PetscSFRegisterDynamic - Adds an implementation of the PetscSF communication protocol.

   Synopsis:
   #include "petscsf.h"
   PetscErrorCode PetscSFRegisterDynamic(const char *name_impl,const char *path,const char *name_create,PetscErrorCode (*routine_create)(PetscSF))

   Not collective

   Input Parameters:
+  name_impl - name of a new user-defined implementation
.  path - path (either absolute or relative) to the library containing this solver
.  name_create - name of routine to create method context
-  routine_create - routine to create method context

   Notes:
   PetscSFRegisterDynamic() may be called multiple times to add several user-defined implementations.

   If dynamic libraries are used, then the fourth input argument (routine_create)
   is ignored.

   Environmental variables such as ${PETSC_ARCH}, ${PETSC_DIR}, ${PETSC_LIB_DIR},
   and others of the form ${any_environmental_variable} occurring in pathname will be
   replaced with appropriate values.

   Sample usage:
.vb
   PetscSFRegisterDynamic("my_impl","/home/username/my_lib/lib/libg/solaris/mylib.a",
                          "MyImplCreate",MyImplCreate);
.ve

   Then, this implementation can be chosen with the procedural interface via
$     PetscSFSetType(sf,"my_impl")
   or at runtime via the option
$     -sf_type my_impl

   Level: advanced

   Note: If your function is not being put into a shared library then use PetscSFRegister() instead

.keywords: PetscSF, register

.seealso: PetscSFRegisterAll(), PetscSFRegisterDestroy()
M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
/* With dynamic libraries the creation routine is looked up by name, so the function pointer is dropped */
#define PetscSFRegisterDynamic(a,b,c,d) PetscSFRegister(a,b,c,0)
#else
#define PetscSFRegisterDynamic(a,b,c,d) PetscSFRegister(a,b,c,d)
#endif

/* Package initialization/finalization and basic object lifecycle */
PETSC_EXTERN PetscErrorCode PetscSFInitializePackage(const char*);
PETSC_EXTERN PetscErrorCode PetscSFFinalizePackage(void);
PETSC_EXTERN PetscErrorCode PetscSFCreate(MPI_Comm comm,PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFDestroy(PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFSetType(PetscSF,PetscSFType);
PETSC_EXTERN PetscErrorCode PetscSFView(PetscSF,PetscViewer);
PETSC_EXTERN PetscErrorCode PetscSFSetUp(PetscSF);
PETSC_EXTERN PetscErrorCode PetscSFSetFromOptions(PetscSF);
PETSC_EXTERN PetscErrorCode PetscSFDuplicate(PetscSF,PetscSFDuplicateOption,PetscSF*);
/* PETSCSFWINDOW-specific configuration */
PETSC_EXTERN PetscErrorCode PetscSFWindowSetSyncType(PetscSF,PetscSFWindowSyncType);
PETSC_EXTERN PetscErrorCode PetscSFWindowGetSyncType(PetscSF,PetscSFWindowSyncType*);
PETSC_EXTERN PetscErrorCode PetscSFSetRankOrder(PetscSF,PetscBool);
/* Graph definition and query */
PETSC_EXTERN PetscErrorCode PetscSFSetGraph(PetscSF,PetscInt,PetscInt,const PetscInt*,PetscCopyMode,const PetscSFNode*,PetscCopyMode);
PETSC_EXTERN PetscErrorCode PetscSFGetGraph(PetscSF,PetscInt *nroots,PetscInt *nleaves,const PetscInt **ilocal,const PetscSFNode **iremote);
PETSC_EXTERN PetscErrorCode PetscSFGetLeafRange(PetscSF,PetscInt*,PetscInt*);
PETSC_EXTERN PetscErrorCode PetscSFCreateEmbeddedSF(PetscSF,PetscInt nroots,const PetscInt *selected,PetscSF *newsf);
PETSC_EXTERN PetscErrorCode PetscSFReset(PetscSF);
PETSC_EXTERN PetscErrorCode PetscSFGetRanks(PetscSF,PetscInt*,const PetscMPIInt**,const PetscInt**,const PetscInt**,const PetscInt**);
PETSC_EXTERN PetscErrorCode PetscSFGetGroups(PetscSF,MPI_Group*,MPI_Group*);
PETSC_EXTERN PetscErrorCode PetscSFGetMultiSF(PetscSF,PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFCreateInverseSF(PetscSF,PetscSF*);

/* broadcasts rootdata to leafdata */
PETSC_EXTERN PetscErrorCode PetscSFBcastBegin(PetscSF,MPI_Datatype,const void *rootdata,void *leafdata)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
PETSC_EXTERN PetscErrorCode PetscSFBcastEnd(PetscSF,MPI_Datatype,const void *rootdata,void *leafdata)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
/* Reduce leafdata into rootdata using provided operation */
PETSC_EXTERN PetscErrorCode PetscSFReduceBegin(PetscSF,MPI_Datatype,const void *leafdata,void *rootdata,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
PETSC_EXTERN PetscErrorCode PetscSFReduceEnd(PetscSF,MPI_Datatype,const void *leafdata,void *rootdata,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
/* Atomically modifies (using provided operation) rootdata using leafdata from each leaf, value at root at time of modification is returned in leafupdate. */
PETSC_EXTERN PetscErrorCode PetscSFFetchAndOpBegin(PetscSF,MPI_Datatype,void *rootdata,const void *leafdata,void *leafupdate,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2) PetscAttrMPIPointerWithType(5,2);
PETSC_EXTERN PetscErrorCode PetscSFFetchAndOpEnd(PetscSF,MPI_Datatype,void *rootdata,const void *leafdata,void *leafupdate,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2) PetscAttrMPIPointerWithType(5,2);
/* Compute the degree of every root vertex (number of leaves in its star) */
PETSC_EXTERN PetscErrorCode PetscSFComputeDegreeBegin(PetscSF,const PetscInt **degree);
PETSC_EXTERN PetscErrorCode PetscSFComputeDegreeEnd(PetscSF,const PetscInt **degree);
/* Concatenate data from all leaves into roots */
PETSC_EXTERN PetscErrorCode PetscSFGatherBegin(PetscSF,MPI_Datatype,const void *leafdata,void *multirootdata)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
PETSC_EXTERN PetscErrorCode PetscSFGatherEnd(PetscSF,MPI_Datatype,const void *leafdata,void *multirootdata)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
/* Distribute distinct values to each leaf from roots */
PETSC_EXTERN PetscErrorCode PetscSFScatterBegin(PetscSF,MPI_Datatype,const void *multirootdata,void *leafdata)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
PETSC_EXTERN PetscErrorCode PetscSFScatterEnd(PetscSF,MPI_Datatype,const void *multirootdata,void *leafdata)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);

#endif