xref: /petsc/src/sys/objects/subcomm.c (revision 1b581b665160bc053f1db6f623a6eaab6b98fcae)

/*
     Provides utility routines for split MPI communicators.
*/
#include <petscsys.h>    /*I   "petscsys.h"    I*/
#include <petscviewer.h>

const char *const PetscSubcommTypes[] = {"GENERAL","CONTIGUOUS","INTERLACED","PetscSubcommType","PETSC_SUBCOMM_",0};

extern PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm);
extern PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm);

#undef __FUNCT__
#define __FUNCT__ "PetscSubcommSetFromOptions"
/* Sets the PetscSubcomm type from the options database (-psubcomm_type) and optionally views the result (-psubcomm_view) */
PetscErrorCode PetscSubcommSetFromOptions(PetscSubcomm psubcomm)
{
  PetscErrorCode   ierr;
  PetscSubcommType type;
  PetscBool        flg;

  PetscFunctionBegin;
  if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Must call PetscSubcommCreate() first");
  type = psubcomm->type;
  ierr = PetscOptionsGetEnum(NULL,NULL,"-psubcomm_type",PetscSubcommTypes,(PetscEnum*)&type,&flg);CHKERRQ(ierr);
  if (flg && psubcomm->type != type) {
    /* free old structures */
    ierr = PetscCommDestroy(&(psubcomm)->dupparent);CHKERRQ(ierr);
    ierr = PetscCommDestroy(&(psubcomm)->child);CHKERRQ(ierr);
    ierr = PetscFree((psubcomm)->subsize);CHKERRQ(ierr);
    switch (type) {
    case PETSC_SUBCOMM_GENERAL:
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Runtime option PETSC_SUBCOMM_GENERAL is not supported, use PetscSubcommSetTypeGeneral()");
    case PETSC_SUBCOMM_CONTIGUOUS:
      ierr = PetscSubcommCreate_contiguous(psubcomm);CHKERRQ(ierr);
      break;
    case PETSC_SUBCOMM_INTERLACED:
      ierr = PetscSubcommCreate_interlaced(psubcomm);CHKERRQ(ierr);
      break;
    default:
      SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"PetscSubcommType %s is not supported yet",PetscSubcommTypes[type]);
    }
  }

  ierr = PetscOptionsHasName(NULL,NULL,"-psubcomm_view",&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscSubcommView(psubcomm,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
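
/*
   A minimal usage sketch (not part of this file; the executable name and process count below are
   illustrative). Assuming psubcomm came from PetscSubcommCreate() and PetscSubcommSetNumber()/
   PetscSubcommSetType() have already been called, PetscSubcommSetFromOptions() lets the splitting
   be overridden and inspected at runtime:

     ierr = PetscSubcommSetFromOptions(psubcomm);CHKERRQ(ierr);

   run with, e.g.,

     mpiexec -n 8 ./ex -psubcomm_type contiguous -psubcomm_view
*/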

#undef __FUNCT__
#define __FUNCT__ "PetscSubcommView"
/* Views the parent/child layout of the PetscSubcomm on an ASCII viewer */
PetscErrorCode PetscSubcommView(PetscSubcomm psubcomm,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_DEFAULT) {
      MPI_Comm    comm=psubcomm->parent;
      PetscMPIInt rank,size,subsize,subrank,duprank;

      ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"PetscSubcomm type %s with total %d MPI processes:\n",PetscSubcommTypes[psubcomm->type],size);CHKERRQ(ierr);
      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
      ierr = MPI_Comm_size(psubcomm->child,&subsize);CHKERRQ(ierr);
      ierr = MPI_Comm_rank(psubcomm->child,&subrank);CHKERRQ(ierr);
      ierr = MPI_Comm_rank(psubcomm->dupparent,&duprank);CHKERRQ(ierr);
      ierr = PetscSynchronizedPrintf(comm,"  [%d], color %d, sub-size %d, sub-rank %d, duprank %d\n",rank,psubcomm->color,subsize,subrank,duprank);CHKERRQ(ierr);
      ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
    }
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported yet");
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSubcommSetNumber"
/*@C
  PetscSubcommSetNumber - Set the total number of subcommunicators.

   Collective on MPI_Comm

   Input Parameters:
+  psubcomm - PetscSubcomm context
-  nsubcomm - the total number of subcommunicators in psubcomm

   Level: advanced

.keywords: communicator

.seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetType(),PetscSubcommSetTypeGeneral()
@*/
PetscErrorCode  PetscSubcommSetNumber(PetscSubcomm psubcomm,PetscInt nsubcomm)
{
  PetscErrorCode ierr;
  MPI_Comm       comm;
  PetscMPIInt    msub,size;

  PetscFunctionBegin;
  if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate() first");
  comm = psubcomm->parent;   /* only dereference psubcomm after the NULL check */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(nsubcomm,&msub);CHKERRQ(ierr);
  if (msub < 1 || msub > size) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of subcommunicators %d cannot be < 1 or > input comm size %d",msub,size);

  psubcomm->n = msub;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSubcommSetType"
/*@C
  PetscSubcommSetType - Set the type of subcommunicators.

   Collective on MPI_Comm

   Input Parameters:
+  psubcomm - PetscSubcomm context
-  subcommtype - subcommunicator type, PETSC_SUBCOMM_CONTIGUOUS or PETSC_SUBCOMM_INTERLACED

   Level: advanced

.keywords: communicator

.seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetTypeGeneral()
@*/
PetscErrorCode  PetscSubcommSetType(PetscSubcomm psubcomm,PetscSubcommType subcommtype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
  if (psubcomm->n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Number of subcommunicators %d is incorrect. Call PetscSubcommSetNumber()",psubcomm->n);

  if (subcommtype == PETSC_SUBCOMM_CONTIGUOUS) {
    ierr = PetscSubcommCreate_contiguous(psubcomm);CHKERRQ(ierr);
  } else if (subcommtype == PETSC_SUBCOMM_INTERLACED) {
    ierr = PetscSubcommCreate_interlaced(psubcomm);CHKERRQ(ierr);
  } else SETERRQ1(psubcomm->parent,PETSC_ERR_SUP,"PetscSubcommType %s is not supported yet",PetscSubcommTypes[subcommtype]);
  PetscFunctionReturn(0);
}
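
/*
   A usage sketch for the programmatic interface above (a minimal sketch, not taken from this file;
   the number of subcommunicators, 4, is an arbitrary illustrative choice):

     PetscSubcomm psubcomm;
     MPI_Comm     child;

     ierr  = PetscSubcommCreate(PETSC_COMM_WORLD,&psubcomm);CHKERRQ(ierr);
     ierr  = PetscSubcommSetNumber(psubcomm,4);CHKERRQ(ierr);
     ierr  = PetscSubcommSetType(psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);
     child = psubcomm->child;
     ierr  = PetscSubcommDestroy(&psubcomm);CHKERRQ(ierr);

   where child is the subcommunicator this process belongs to (objects would be created on it
   before the PetscSubcommDestroy() call, since that call releases the child communicator).
*/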

#undef __FUNCT__
#define __FUNCT__ "PetscSubcommSetTypeGeneral"
/*@C
  PetscSubcommSetTypeGeneral - Set a PetscSubcomm from the user's specifications

   Collective on MPI_Comm

   Input Parameters:
+  psubcomm - PetscSubcomm context
.  color   - control of subset assignment (nonnegative integer). Processes with the same color are in the same subcommunicator.
-  subrank - rank in the subcommunicator

   Level: advanced

.keywords: communicator, create

.seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetType()
@*/
PetscErrorCode PetscSubcommSetTypeGeneral(PetscSubcomm psubcomm,PetscMPIInt color,PetscMPIInt subrank)
{
  PetscErrorCode ierr;
  MPI_Comm       subcomm=0,dupcomm=0,comm;
  PetscMPIInt    size,icolor,duprank,*recvbuf,sendbuf[2],mysubsize,rank,*subsize;
  PetscMPIInt    i,nsubcomm;

  PetscFunctionBegin;
  if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
  nsubcomm = psubcomm->n;   /* only dereference psubcomm after the NULL check */
  if (nsubcomm < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Number of subcommunicators %d is incorrect. Call PetscSubcommSetNumber()",nsubcomm);
  comm = psubcomm->parent;

  ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);

  /* create dupcomm with the same size as comm, whose rank, duprank, maps the subcommunicators contiguously into dupcomm */
  /* TODO: this can be done in an ostensibly scalable way (i.e., without allocating an array of size 'size') as is done in PetscObjectsCreateGlobalOrdering(). */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscMalloc1(2*size,&recvbuf);CHKERRQ(ierr);

  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(subcomm,&mysubsize);CHKERRQ(ierr);

  /* gather (color, subcommunicator size) from every process so each one knows the size of every subcommunicator */
  sendbuf[0] = color;
  sendbuf[1] = mysubsize;
  ierr = MPI_Allgather(sendbuf,2,MPI_INT,recvbuf,2,MPI_INT,comm);CHKERRQ(ierr);

  ierr = PetscCalloc1(nsubcomm,&subsize);CHKERRQ(ierr);
  for (i=0; i<2*size; i+=2) {
    subsize[recvbuf[i]] = recvbuf[i+1];
  }
  ierr = PetscFree(recvbuf);CHKERRQ(ierr);

  /* duprank = offset of this subcommunicator in the contiguous ordering + rank within the subcommunicator */
  duprank = 0;
  for (icolor=0; icolor<nsubcomm; icolor++) {
    if (icolor != color) { /* not color of this process */
      duprank += subsize[icolor];
    } else {
      duprank += subrank;
      break;
    }
  }
  ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);

  ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
  ierr = PetscCommDuplicate(subcomm,&psubcomm->child,NULL);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);

  psubcomm->color   = color;
  psubcomm->subsize = subsize;
  psubcomm->type    = PETSC_SUBCOMM_GENERAL;
  PetscFunctionReturn(0);
}
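
/*
   An illustrative sketch of the general splitting (assumptions: an even communicator size and a
   psubcomm created on PETSC_COMM_WORLD; pairing neighbouring ranks is just one possible choice of
   color/subrank):

     PetscMPIInt rank,size;

     ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
     ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
     ierr = PetscSubcommSetNumber(psubcomm,size/2);CHKERRQ(ierr);
     ierr = PetscSubcommSetTypeGeneral(psubcomm,rank/2,rank%2);CHKERRQ(ierr);

   Ranks 0 and 1 form subcommunicator 0, ranks 2 and 3 form subcommunicator 1, and so on.
*/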

#undef __FUNCT__
#define __FUNCT__ "PetscSubcommDestroy"
/* Frees the communicators and work arrays held by the PetscSubcomm, then the PetscSubcomm itself */
PetscErrorCode  PetscSubcommDestroy(PetscSubcomm *psubcomm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*psubcomm) PetscFunctionReturn(0);
  ierr = PetscCommDestroy(&(*psubcomm)->dupparent);CHKERRQ(ierr);
  ierr = PetscCommDestroy(&(*psubcomm)->child);CHKERRQ(ierr);
  ierr = PetscFree((*psubcomm)->subsize);CHKERRQ(ierr);
  ierr = PetscFree((*psubcomm));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSubcommCreate"
/*@C
  PetscSubcommCreate - Create a PetscSubcomm context.

   Collective on MPI_Comm

   Input Parameter:
.  comm - MPI communicator

   Output Parameter:
.  psubcomm - location to store the PetscSubcomm context

   Level: advanced

.keywords: communicator, create

.seealso: PetscSubcommDestroy()
@*/
PetscErrorCode  PetscSubcommCreate(MPI_Comm comm,PetscSubcomm *psubcomm)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;

  PetscFunctionBegin;
  ierr = PetscNew(psubcomm);CHKERRQ(ierr);

  /* set defaults: each process forms its own subcommunicator until the number and type are set */
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  (*psubcomm)->parent    = comm;
  (*psubcomm)->dupparent = comm;
  (*psubcomm)->child     = PETSC_COMM_SELF;
  (*psubcomm)->n         = size;
  (*psubcomm)->color     = rank;
  (*psubcomm)->subsize   = NULL;
  (*psubcomm)->type      = PETSC_SUBCOMM_INTERLACED;
  PetscFunctionReturn(0);
}

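/*
   Note:
   PETSC_SUBCOMM_CONTIGUOUS assigns consecutive ranks of the parent communicator to the same
   subcommunicator. An illustrative example: size=5, nsubcomm=2 gives subsize={3,2}, hence
      rank:     [0]  [1]  [2]  [3]  [4]
      color:     0    0    0    1    1
      subrank:   0    1    2    0    1
   and duprank equals rank, since a contiguous split is already ordered contiguously.
*/
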
#undef __FUNCT__
#define __FUNCT__ "PetscSubcommCreate_contiguous"
PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm psubcomm)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,*subsize,duprank=-1,subrank=-1;
  PetscMPIInt    np_subcomm,nleftover,i,color=-1,rankstart,nsubcomm=psubcomm->n;
  MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  /* get size of each subcommunicator */
  ierr = PetscMalloc1(1+nsubcomm,&subsize);CHKERRQ(ierr);

  np_subcomm = size/nsubcomm;
  nleftover  = size - nsubcomm*np_subcomm;
  for (i=0; i<nsubcomm; i++) {
    subsize[i] = np_subcomm;
    if (i<nleftover) subsize[i]++;
  }

  /* get color and subrank of this proc */
  rankstart = 0;
  for (i=0; i<nsubcomm; i++) {
    if (rank >= rankstart && rank < rankstart+subsize[i]) {
      color   = i;
      subrank = rank - rankstart;
      duprank = rank;
      break;
    } else rankstart += subsize[i];
  }

  ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);

  /* create dupcomm with the same size as comm, whose rank, duprank, maps the subcommunicators contiguously into dupcomm */
  ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
  ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
  ierr = PetscCommDuplicate(subcomm,&psubcomm->child,NULL);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);

  psubcomm->color   = color;
  psubcomm->subsize = subsize;
  psubcomm->type    = PETSC_SUBCOMM_CONTIGUOUS;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSubcommCreate_interlaced"
/*
   Note:
   In PCREDUNDANT, to avoid scattering data from the subcommunicators back to the original communicator,
   we create the subcommunicators by assigning processes to them in an interlaced (round-robin) fashion.
   Example: size=4, nsubcomm=(*psubcomm)->n=3
     comm=(*psubcomm)->parent:
      rank:     [0]  [1]  [2]  [3]
      color:     0    1    2    0

     subcomm=(*psubcomm)->child:
      subrank:  [0]  [0]  [0]  [1]

     dupcomm=(*psubcomm)->dupparent:
      duprank:  [0]  [2]  [3]  [1]

     Here, subcomm[color = 0] has subsize=2, owns processes [0] and [3]
           subcomm[color = 1] has subsize=1, owns process [1]
           subcomm[color = 2] has subsize=1, owns process [2]
           dupcomm has the same number of processes as comm, and its duprank maps
           processes in subcomm contiguously into a 1d array:
            duprank: [0] [1]      [2]         [3]
            rank:    [0] [3]      [1]         [2]
                    subcomm[0] subcomm[1]  subcomm[2]
*/
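
/*
   Equivalently, in formulas (matching the example above): color = rank % nsubcomm,
   subrank = rank / nsubcomm, and duprank = subsize[0] + ... + subsize[color-1] + subrank,
   i.e. the offset of this process's subcommunicator in the contiguous ordering plus its rank
   within that subcommunicator; this is what the duprank loop below computes.
*/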

PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm psubcomm)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,*subsize,duprank,subrank;
  PetscMPIInt    np_subcomm,nleftover,i,j,color,nsubcomm=psubcomm->n;
  MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  /* get size of each subcommunicator */
  ierr = PetscMalloc1(1+nsubcomm,&subsize);CHKERRQ(ierr);

  np_subcomm = size/nsubcomm;
  nleftover  = size - nsubcomm*np_subcomm;
  for (i=0; i<nsubcomm; i++) {
    subsize[i] = np_subcomm;
    if (i<nleftover) subsize[i]++;
  }

  /* find color and subrank of this proc */
  color   = rank%nsubcomm;
  subrank = rank/nsubcomm;

  ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);

  j = 0; duprank = 0;
  for (i=0; i<nsubcomm; i++) {
    if (j == color) {
      duprank += subrank;
      break;
    }
    duprank += subsize[i]; j++;
  }

  /* create dupcomm with the same size as comm, whose rank, duprank, maps the subcommunicators contiguously into dupcomm */
  ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
  ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
  ierr = PetscCommDuplicate(subcomm,&psubcomm->child,NULL);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);

  psubcomm->color   = color;
  psubcomm->subsize = subsize;
  psubcomm->type    = PETSC_SUBCOMM_INTERLACED;
  PetscFunctionReturn(0);
}