xref: /petsc/src/sys/objects/subcomm.c (revision 053d1c95a855ffa1f645964a1d8c4de2e4510a38)
1 
2 /*
3      Provides utility routines for split MPI communicator.
4 */
5 #include <petscsys.h>    /*I   "petscsys.h"    I*/
6 #include <petsc-private/threadcommimpl.h> /* Petsc_ThreadComm_keyval */
7 #include <petscviewer.h>
8 
9 const char *const PetscSubcommTypes[] = {"GENERAL","CONTIGUOUS","INTERLACED","PetscSubcommType","PETSC_SUBCOMM_",0};
10 
11 extern PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm);
12 extern PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm);
13 
14 #undef __FUNCT__
15 #define __FUNCT__ "PetscSubcommView"
16 PetscErrorCode PetscSubcommView(PetscSubcomm psubcomm,PetscViewer viewer)
17 {
18   PetscErrorCode    ierr;
19   PetscBool         iascii;
20   PetscViewerFormat format;
21 
22   PetscFunctionBegin;
23   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
24   if (iascii) {
25     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
26     if (format == PETSC_VIEWER_DEFAULT) {
27       MPI_Comm    comm=psubcomm->parent;
28       PetscMPIInt rank,size,subsize,subrank,duprank;
29 
30       ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
31       ierr = PetscViewerASCIIPrintf(viewer,"PetscSubcomm type %s with total %D MPI processes:\n",PetscSubcommTypes[psubcomm->type],size);CHKERRQ(ierr);
32       ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
33       ierr = MPI_Comm_size(psubcomm->comm,&subsize);CHKERRQ(ierr);
34       ierr = MPI_Comm_rank(psubcomm->comm,&subrank);CHKERRQ(ierr);
35       ierr = MPI_Comm_rank(psubcomm->dupparent,&duprank);CHKERRQ(ierr);
36       ierr = PetscSynchronizedPrintf(comm,"  [%D], color %D, sub-size %D, sub-rank %D, duprank %D\n",rank,psubcomm->color,subsize,subrank,duprank);
37       ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
38     }
39   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported yet");
40   PetscFunctionReturn(0);
41 }
42 
43 #undef __FUNCT__
44 #define __FUNCT__ "PetscSubcommSetNumber"
45 /*@C
46   PetscSubcommSetNumber - Set total number of subcommunicators.
47 
48    Collective on MPI_Comm
49 
50    Input Parameter:
51 +  psubcomm - PetscSubcomm context
52 -  nsubcomm - the total number of subcommunicators in psubcomm
53 
54    Level: advanced
55 
56 .keywords: communicator
57 
58 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetType(),PetscSubcommSetTypeGeneral()
59 @*/
60 PetscErrorCode  PetscSubcommSetNumber(PetscSubcomm psubcomm,PetscInt nsubcomm)
61 {
62   PetscErrorCode ierr;
63   MPI_Comm       comm=psubcomm->parent;
64   PetscMPIInt    rank,size;
65 
66   PetscFunctionBegin;
67   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate() first");
68   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
69   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
70   if (nsubcomm < 1 || nsubcomm > size) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE, "Num of subcommunicators %D cannot be < 1 or > input comm size %D",nsubcomm,size);
71 
72   psubcomm->n = nsubcomm;
73   PetscFunctionReturn(0);
74 }
75 
76 #undef __FUNCT__
77 #define __FUNCT__ "PetscSubcommSetType"
78 /*@C
79   PetscSubcommSetType - Set type of subcommunicators.
80 
81    Collective on MPI_Comm
82 
83    Input Parameter:
84 +  psubcomm - PetscSubcomm context
85 -  subcommtype - subcommunicator type, PETSC_SUBCOMM_CONTIGUOUS,PETSC_SUBCOMM_INTERLACED
86 
87    Level: advanced
88 
89 .keywords: communicator
90 
91 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetTypeGeneral()
92 @*/
93 PetscErrorCode  PetscSubcommSetType(PetscSubcomm psubcomm,PetscSubcommType subcommtype)
94 {
95   PetscErrorCode ierr;
96 
97   PetscFunctionBegin;
98   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
99   if (psubcomm->n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %D is incorrect. Call PetscSubcommSetNumber()",psubcomm->n);
100 
101   if (subcommtype == PETSC_SUBCOMM_CONTIGUOUS) {
102     ierr = PetscSubcommCreate_contiguous(psubcomm);CHKERRQ(ierr);
103   } else if (subcommtype == PETSC_SUBCOMM_INTERLACED) {
104     ierr = PetscSubcommCreate_interlaced(psubcomm);CHKERRQ(ierr);
105   } else SETERRQ1(psubcomm->parent,PETSC_ERR_SUP,"PetscSubcommType %D is not supported yet",subcommtype);
106   PetscFunctionReturn(0);
107 }
108 
109 #undef __FUNCT__
110 #define __FUNCT__ "PetscSubcommSetTypeGeneral"
111 /*@C
112   PetscSubcommSetTypeGeneral - Set type of subcommunicators from user's specifications
113 
114    Collective on MPI_Comm
115 
116    Input Parameter:
117 +  psubcomm - PetscSubcomm context
118 .  color   - control of subset assignment (nonnegative integer). Processes with the same color are in the same subcommunicator.
119 .  subrank - rank in the subcommunicator
120 -  duprank - rank in the dupparent (see PetscSubcomm)
121 
122    Level: advanced
123 
124 .keywords: communicator, create
125 
126 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetType()
127 @*/
128 PetscErrorCode  PetscSubcommSetTypeGeneral(PetscSubcomm psubcomm,PetscMPIInt color,PetscMPIInt subrank,PetscMPIInt duprank)
129 {
130   PetscErrorCode ierr;
131   MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;
132   PetscMPIInt    size;
133 
134   PetscFunctionBegin;
135   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
136   if (psubcomm->n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %D is incorrect. Call PetscSubcommSetNumber()",psubcomm->n);
137 
138   ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);
139 
140   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm
141      if duprank is not a valid number, then dupcomm is not created - not all applications require dupcomm! */
142   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
143   if (duprank == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"duprank==PETSC_DECIDE is not supported yet");
144   else if (duprank >= 0 && duprank < size) {
145     ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
146   }
147   ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
148   ierr = PetscCommDuplicate(subcomm,&psubcomm->comm,NULL);CHKERRQ(ierr);
149   ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
150   ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);
151 
152   psubcomm->color = color;
153   PetscFunctionReturn(0);
154 }
155 
#undef __FUNCT__
#define __FUNCT__ "PetscSubcommDestroy"
/*
   PetscSubcommDestroy - Destroys a PetscSubcomm: releases the references on
   the duplicated parent and subcommunicator, then frees the structure.

   Input Parameter:
.  psubcomm - address of the PetscSubcomm to destroy; no-op if *psubcomm is NULL

   NOTE(review): PetscFree() is assumed to zero its argument, so *psubcomm is
   NULL on return for the caller — confirm against the PetscFree macro.
*/
PetscErrorCode  PetscSubcommDestroy(PetscSubcomm *psubcomm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*psubcomm) PetscFunctionReturn(0);
  /* drop the PETSc reference on each duplicated communicator */
  ierr = PetscCommDestroy(&(*psubcomm)->dupparent);CHKERRQ(ierr);
  ierr = PetscCommDestroy(&(*psubcomm)->comm);CHKERRQ(ierr);
  ierr = PetscFree((*psubcomm));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
169 
170 #undef __FUNCT__
171 #define __FUNCT__ "PetscSubcommCreate"
172 /*@C
173   PetscSubcommCreate - Create a PetscSubcomm context.
174 
175    Collective on MPI_Comm
176 
177    Input Parameter:
178 .  comm - MPI communicator
179 
180    Output Parameter:
181 .  psubcomm - location to store the PetscSubcomm context
182 
183    Level: advanced
184 
185 .keywords: communicator, create
186 
187 .seealso: PetscSubcommDestroy()
188 @*/
189 PetscErrorCode  PetscSubcommCreate(MPI_Comm comm,PetscSubcomm *psubcomm)
190 {
191   PetscErrorCode ierr;
192   PetscMPIInt    rank,size;
193 
194   PetscFunctionBegin;
195   ierr = PetscNew(struct _n_PetscSubcomm,psubcomm);CHKERRQ(ierr);
196 
197   /* set defaults */
198   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
199   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
200   (*psubcomm)->parent    = comm;
201   (*psubcomm)->dupparent = comm;
202   (*psubcomm)->comm      = PETSC_COMM_SELF;
203   (*psubcomm)->n         = size;
204   (*psubcomm)->color     = rank;
205   (*psubcomm)->type      = PETSC_SUBCOMM_INTERLACED;
206   PetscFunctionReturn(0);
207 }
208 
#undef __FUNCT__
#define __FUNCT__ "PetscSubcommCreate_contiguous"
/*
   PetscSubcommCreate_contiguous - Splits psubcomm->parent into psubcomm->n
   subcommunicators of (nearly) equal size, assigning consecutive parent
   ranks to the same subcommunicator.  When size is not divisible by
   nsubcomm, the first (size mod nsubcomm) subcommunicators get one extra
   process.  Because ranks are already contiguous per subcommunicator,
   duprank is simply the parent rank.
*/
PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm psubcomm)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,*subsize,duprank=-1,subrank=-1;
  PetscInt       np_subcomm,nleftover,i,color=-1,rankstart,nsubcomm=psubcomm->n;
  MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  /* get size of each subcommunicator */
  ierr = PetscMalloc((1+nsubcomm)*sizeof(PetscMPIInt),&subsize);CHKERRQ(ierr);

  np_subcomm = size/nsubcomm;
  nleftover  = size - nsubcomm*np_subcomm;
  for (i=0; i<nsubcomm; i++) {
    subsize[i] = np_subcomm;
    if (i<nleftover) subsize[i]++; /* distribute the remainder one per subcomm */
  }

  /* get color and subrank of this proc: walk the contiguous rank blocks
     until the one containing this rank is found */
  rankstart = 0;
  for (i=0; i<nsubcomm; i++) {
    if (rank >= rankstart && rank < rankstart+subsize[i]) {
      color   = i;
      subrank = rank - rankstart;
      duprank = rank; /* contiguous layout: parent rank already maps subcomms contiguously */
      break;
    } else rankstart += subsize[i];
  }
  ierr = PetscFree(subsize);CHKERRQ(ierr);

  ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);

  /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
  ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
  {
    /* Attach the parent's thread communicator to both new communicators and
       bump its reference count for each attachment.
       NOTE(review): assumes the attribute's delete callback releases these
       references when the comms are freed — confirm in threadcommimpl. */
    PetscThreadComm tcomm;
    ierr = PetscCommGetThreadComm(comm,&tcomm);CHKERRQ(ierr);
    ierr = MPI_Attr_put(dupcomm,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
    tcomm->refct++;
    ierr = MPI_Attr_put(subcomm,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
    tcomm->refct++;
  }
  /* keep PETSc-managed duplicates, then release the raw split comms */
  ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
  ierr = PetscCommDuplicate(subcomm,&psubcomm->comm,NULL);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);

  psubcomm->color = color;
  psubcomm->type  = PETSC_SUBCOMM_CONTIGUOUS;
  PetscFunctionReturn(0);
}
265 
#undef __FUNCT__
#define __FUNCT__ "PetscSubcommCreate_interlaced"
/*
   Note:
   In PCREDUNDANT, to avoid data scattering from subcomm back to original comm, we create subcommunicators
   by iteratively taking a process into a subcommunicator.
   Example: size=4, nsubcomm=(*psubcomm)->n=3
     comm=(*psubcomm)->parent:
      rank:     [0]  [1]  [2]  [3]
      color:     0    1    2    0

     subcomm=(*psubcomm)->comm:
      subrank:  [0]  [0]  [0]  [1]

     dupcomm=(*psubcomm)->dupparent:
      duprank:  [0]  [2]  [3]  [1]

     Here, subcomm[color = 0] has subsize=2, owns process [0] and [3]
           subcomm[color = 1] has subsize=1, owns process [1]
           subcomm[color = 2] has subsize=1, owns process [2]
           dupcomm has same number of processes as comm, and its duprank maps
           processes in subcomm contiguously into a 1d array:
            duprank: [0] [1]      [2]         [3]
            rank:    [0] [3]      [1]         [2]
                    subcomm[0] subcomm[1]  subcomm[2]
*/

/*
   PetscSubcommCreate_interlaced - Splits psubcomm->parent into psubcomm->n
   subcommunicators by dealing parent ranks round-robin (color = rank mod
   nsubcomm), then computes duprank so that each subcommunicator's processes
   occupy a contiguous range of dupcomm (see the worked example above).
*/
PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm psubcomm)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,*subsize,duprank,subrank;
  PetscInt       np_subcomm,nleftover,i,j,color,nsubcomm=psubcomm->n;
  MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  /* get size of each subcommunicator */
  ierr = PetscMalloc((1+nsubcomm)*sizeof(PetscMPIInt),&subsize);CHKERRQ(ierr);

  np_subcomm = size/nsubcomm;
  nleftover  = size - nsubcomm*np_subcomm;
  for (i=0; i<nsubcomm; i++) {
    subsize[i] = np_subcomm;
    if (i<nleftover) subsize[i]++; /* distribute the remainder one per subcomm */
  }

  /* find color for this proc: round-robin (interlaced) assignment */
  color   = rank%nsubcomm;
  subrank = rank/nsubcomm;

  ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);

  /* duprank = (sum of sizes of the subcomms before this one) + subrank,
     so each subcomm's processes are contiguous in dupcomm */
  j = 0; duprank = 0;
  for (i=0; i<nsubcomm; i++) {
    if (j == color) {
      duprank += subrank;
      break;
    }
    duprank += subsize[i]; j++;
  }
  ierr = PetscFree(subsize);CHKERRQ(ierr);

  /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
  ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
  {
    /* Attach the parent's thread communicator to both new communicators and
       bump its reference count for each attachment.
       NOTE(review): assumes the attribute's delete callback releases these
       references when the comms are freed — confirm in threadcommimpl. */
    PetscThreadComm tcomm;
    ierr = PetscCommGetThreadComm(comm,&tcomm);CHKERRQ(ierr);
    ierr = MPI_Attr_put(dupcomm,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
    tcomm->refct++;
    ierr = MPI_Attr_put(subcomm,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
    tcomm->refct++;
  }
  /* keep PETSc-managed duplicates, then release the raw split comms */
  ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
  ierr = PetscCommDuplicate(subcomm,&psubcomm->comm,NULL);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);

  psubcomm->color = color;
  psubcomm->type  = PETSC_SUBCOMM_INTERLACED;
  PetscFunctionReturn(0);
}
349 
350 
351