xref: /petsc/src/sys/objects/subcomm.c (revision 750b007cd8d816cecd9de99077bb0a703b4cf61a)
1 
2 /*
3      Provides utility routines for split MPI communicator.
4 */
5 #include <petscsys.h> /*I   "petscsys.h"    I*/
6 #include <petscviewer.h>
7 
8 const char *const PetscSubcommTypes[] = {"GENERAL", "CONTIGUOUS", "INTERLACED", "PetscSubcommType", "PETSC_SUBCOMM_", NULL};
9 
10 static PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm);
11 static PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm);
12 
13 /*@
14    PetscSubcommSetFromOptions - Allows setting options for a `PetscSubcomm`
15 
16    Collective on psubcomm
17 
18    Input Parameter:
19 .  psubcomm - `PetscSubcomm` context
20 
21    Level: beginner
22 
23 .seealso: `PetscSubcomm`, `PetscSubcommCreate()`
24 @*/
25 PetscErrorCode PetscSubcommSetFromOptions(PetscSubcomm psubcomm) {
26   PetscSubcommType type;
27   PetscBool        flg;
28 
29   PetscFunctionBegin;
30   PetscCheck(psubcomm, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Must call PetscSubcommCreate firt");
31 
32   PetscOptionsBegin(psubcomm->parent, psubcomm->subcommprefix, "Options for PetscSubcomm", NULL);
33   PetscCall(PetscOptionsEnum("-psubcomm_type", NULL, NULL, PetscSubcommTypes, (PetscEnum)psubcomm->type, (PetscEnum *)&type, &flg));
34   if (flg && psubcomm->type != type) {
35     /* free old structures */
36     PetscCall(PetscCommDestroy(&(psubcomm)->dupparent));
37     PetscCall(PetscCommDestroy(&(psubcomm)->child));
38     PetscCall(PetscFree((psubcomm)->subsize));
39     switch (type) {
40     case PETSC_SUBCOMM_GENERAL: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Runtime option PETSC_SUBCOMM_GENERAL is not supported, use PetscSubcommSetTypeGeneral()");
41     case PETSC_SUBCOMM_CONTIGUOUS: PetscCall(PetscSubcommCreate_contiguous(psubcomm)); break;
42     case PETSC_SUBCOMM_INTERLACED: PetscCall(PetscSubcommCreate_interlaced(psubcomm)); break;
43     default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "PetscSubcommType %s is not supported yet", PetscSubcommTypes[type]);
44     }
45   }
46 
47   PetscCall(PetscOptionsName("-psubcomm_view", "Triggers display of PetscSubcomm context", "PetscSubcommView", &flg));
48   if (flg) PetscCall(PetscSubcommView(psubcomm, PETSC_VIEWER_STDOUT_(psubcomm->parent)));
49   PetscOptionsEnd();
50   PetscFunctionReturn(0);
51 }
52 
53 /*@C
54   PetscSubcommSetOptionsPrefix - Sets the prefix used for searching for options in the options database for this object
55 
56   Logically collective on psubcomm
57 
58   Level: Intermediate
59 
60   Input Parameters:
61 +   psubcomm - `PetscSubcomm` context
62 -   prefix - the prefix to prepend all PetscSubcomm item names with.
63 
64 .seealso: `PetscSubcomm`, `PetscSubcommCreate()`
65 @*/
66 PetscErrorCode PetscSubcommSetOptionsPrefix(PetscSubcomm psubcomm, const char pre[]) {
67   PetscFunctionBegin;
68   if (!pre) {
69     PetscCall(PetscFree(psubcomm->subcommprefix));
70   } else {
71     PetscCheck(pre[0] != '-', PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
72     PetscCall(PetscFree(psubcomm->subcommprefix));
73     PetscCall(PetscStrallocpy(pre, &(psubcomm->subcommprefix)));
74   }
75   PetscFunctionReturn(0);
76 }
77 
78 /*@C
79    PetscSubcommView - Views a `PetscSubcomm`
80 
81    Collective on psubcomm
82 
83    Input Parameters:
84 +  psubcomm - `PetscSubcomm` context
85 -  viewer - `PetscViewer` to display the information
86 
87    Level: beginner
88 
89 .seealso: `PetscSubcomm`, `PetscSubcommCreate()`, `PetscViewer`
90 @*/
91 PetscErrorCode PetscSubcommView(PetscSubcomm psubcomm, PetscViewer viewer) {
92   PetscBool         iascii;
93   PetscViewerFormat format;
94 
95   PetscFunctionBegin;
96   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
97   if (iascii) {
98     PetscCall(PetscViewerGetFormat(viewer, &format));
99     if (format == PETSC_VIEWER_DEFAULT) {
100       MPI_Comm    comm = psubcomm->parent;
101       PetscMPIInt rank, size, subsize, subrank, duprank;
102 
103       PetscCallMPI(MPI_Comm_size(comm, &size));
104       PetscCall(PetscViewerASCIIPrintf(viewer, "PetscSubcomm type %s with total %d MPI processes:\n", PetscSubcommTypes[psubcomm->type], size));
105       PetscCallMPI(MPI_Comm_rank(comm, &rank));
106       PetscCallMPI(MPI_Comm_size(psubcomm->child, &subsize));
107       PetscCallMPI(MPI_Comm_rank(psubcomm->child, &subrank));
108       PetscCallMPI(MPI_Comm_rank(psubcomm->dupparent, &duprank));
109       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
110       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d], color %d, sub-size %d, sub-rank %d, duprank %d\n", rank, psubcomm->color, subsize, subrank, duprank));
111       PetscCall(PetscViewerFlush(viewer));
112       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
113     }
114   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Not supported yet");
115   PetscFunctionReturn(0);
116 }
117 
118 /*@
119   PetscSubcommSetNumber - Set total number of subcommunicators desired in the given `PetscSubcomm`
120 
121    Collective
122 
123    Input Parameters:
124 +  psubcomm - `PetscSubcomm` context
125 -  nsubcomm - the total number of subcommunicators in psubcomm
126 
127    Level: advanced
128 
129 .seealso: `PetscSubcomm`, `PetscSubcommCreate()`, `PetscSubcommDestroy()`, `PetscSubcommSetType()`, `PetscSubcommSetTypeGeneral()`
130 @*/
131 PetscErrorCode PetscSubcommSetNumber(PetscSubcomm psubcomm, PetscInt nsubcomm) {
132   MPI_Comm    comm = psubcomm->parent;
133   PetscMPIInt msub, size;
134 
135   PetscFunctionBegin;
136   PetscCheck(psubcomm, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "PetscSubcomm is not created. Call PetscSubcommCreate() first");
137   PetscCallMPI(MPI_Comm_size(comm, &size));
138   PetscCall(PetscMPIIntCast(nsubcomm, &msub));
139   PetscCheck(msub >= 1 && msub <= size, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Num of subcommunicators %d cannot be < 1 or > input comm size %d", msub, size);
140 
141   psubcomm->n = msub;
142   PetscFunctionReturn(0);
143 }
144 
145 /*@
146   PetscSubcommSetType - Set the way the original MPI communicator is divided up in the `PetscSubcomm`
147 
148    Collective
149 
150    Input Parameters:
151 +  psubcomm - `PetscSubcomm` context
152 -  subcommtype - `PetscSubcommType` `PETSC_SUBCOMM_CONTIGUOUS` or `PETSC_SUBCOMM_INTERLACED`
153 
154    Level: advanced
155 
156 .seealso: `PetscSubcommType`, `PETSC_SUBCOMM_CONTIGUOUS`, `PETSC_SUBCOMM_INTERLACED`,
157           `PetscSubcommCreate()`, `PetscSubcommDestroy()`, `PetscSubcommSetNumber()`, `PetscSubcommSetTypeGeneral()`, `PetscSubcommType`
158 @*/
159 PetscErrorCode PetscSubcommSetType(PetscSubcomm psubcomm, PetscSubcommType subcommtype) {
160   PetscFunctionBegin;
161   PetscCheck(psubcomm, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "PetscSubcomm is not created. Call PetscSubcommCreate()");
162   PetscCheck(psubcomm->n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of subcommunicators %d is incorrect. Call PetscSubcommSetNumber()", psubcomm->n);
163 
164   if (subcommtype == PETSC_SUBCOMM_CONTIGUOUS) {
165     PetscCall(PetscSubcommCreate_contiguous(psubcomm));
166   } else if (subcommtype == PETSC_SUBCOMM_INTERLACED) {
167     PetscCall(PetscSubcommCreate_interlaced(psubcomm));
168   } else SETERRQ(psubcomm->parent, PETSC_ERR_SUP, "PetscSubcommType %s is not supported yet", PetscSubcommTypes[subcommtype]);
169   PetscFunctionReturn(0);
170 }
171 
172 /*@
173   PetscSubcommSetTypeGeneral - Divides up a communicator based on a specific user's specification
174 
175    Collective
176 
177    Input Parameters:
178 +  psubcomm - `PetscSubcomm` context
179 .  color   - control of subset assignment (nonnegative integer). Processes with the same color are in the same subcommunicator.
180 -  subrank - rank in the subcommunicator
181 
182    Level: advanced
183 
184 .seealso: `PetscSubcommType`, `PETSC_SUBCOMM_CONTIGUOUS`, `PETSC_SUBCOMM_INTERLACED`, `PetscSubcommCreate()`, `PetscSubcommDestroy()`, `PetscSubcommSetNumber()`, `PetscSubcommSetType()`
185 @*/
186 PetscErrorCode PetscSubcommSetTypeGeneral(PetscSubcomm psubcomm, PetscMPIInt color, PetscMPIInt subrank) {
187   MPI_Comm    subcomm = 0, dupcomm = 0, comm = psubcomm->parent;
188   PetscMPIInt size, icolor, duprank, *recvbuf, sendbuf[3], mysubsize, rank, *subsize;
189   PetscMPIInt i, nsubcomm = psubcomm->n;
190 
191   PetscFunctionBegin;
192   PetscCheck(psubcomm, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "PetscSubcomm is not created. Call PetscSubcommCreate()");
193   PetscCheck(nsubcomm >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of subcommunicators %d is incorrect. Call PetscSubcommSetNumber()", nsubcomm);
194 
195   PetscCallMPI(MPI_Comm_split(comm, color, subrank, &subcomm));
196 
197   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
198   /* TODO: this can be done in an ostensibly scalale way (i.e., without allocating an array of size 'size') as is done in PetscObjectsCreateGlobalOrdering(). */
199   PetscCallMPI(MPI_Comm_size(comm, &size));
200   PetscCall(PetscMalloc1(2 * size, &recvbuf));
201 
202   PetscCallMPI(MPI_Comm_rank(comm, &rank));
203   PetscCallMPI(MPI_Comm_size(subcomm, &mysubsize));
204 
205   sendbuf[0] = color;
206   sendbuf[1] = mysubsize;
207   PetscCallMPI(MPI_Allgather(sendbuf, 2, MPI_INT, recvbuf, 2, MPI_INT, comm));
208 
209   PetscCall(PetscCalloc1(nsubcomm, &subsize));
210   for (i = 0; i < 2 * size; i += 2) subsize[recvbuf[i]] = recvbuf[i + 1];
211   PetscCall(PetscFree(recvbuf));
212 
213   duprank = 0;
214   for (icolor = 0; icolor < nsubcomm; icolor++) {
215     if (icolor != color) { /* not color of this process */
216       duprank += subsize[icolor];
217     } else {
218       duprank += subrank;
219       break;
220     }
221   }
222   PetscCallMPI(MPI_Comm_split(comm, 0, duprank, &dupcomm));
223 
224   PetscCall(PetscCommDuplicate(dupcomm, &psubcomm->dupparent, NULL));
225   PetscCall(PetscCommDuplicate(subcomm, &psubcomm->child, NULL));
226   PetscCallMPI(MPI_Comm_free(&dupcomm));
227   PetscCallMPI(MPI_Comm_free(&subcomm));
228 
229   psubcomm->color   = color;
230   psubcomm->subsize = subsize;
231   psubcomm->type    = PETSC_SUBCOMM_GENERAL;
232   PetscFunctionReturn(0);
233 }
234 
235 /*@
236   PetscSubcommDestroy - Destroys a `PetscSubcomm` object
237 
238    Collective on psubcomm
239 
240    Input Parameter:
241    .  psubcomm - the `PetscSubcomm` context
242 
243    Level: advanced
244 
245 .seealso: `PetscSubcommCreate()`, `PetscSubcommSetType()`
246 @*/
247 PetscErrorCode PetscSubcommDestroy(PetscSubcomm *psubcomm) {
248   PetscFunctionBegin;
249   if (!*psubcomm) PetscFunctionReturn(0);
250   PetscCall(PetscCommDestroy(&(*psubcomm)->dupparent));
251   PetscCall(PetscCommDestroy(&(*psubcomm)->child));
252   PetscCall(PetscFree((*psubcomm)->subsize));
253   if ((*psubcomm)->subcommprefix) PetscCall(PetscFree((*psubcomm)->subcommprefix));
254   PetscCall(PetscFree((*psubcomm)));
255   PetscFunctionReturn(0);
256 }
257 
258 /*@
259   PetscSubcommCreate - Create a `PetscSubcomm` context. This object is used to manage the division of a `MPI_Comm` into subcommunicators
260 
261    Collective
262 
263    Input Parameter:
264 .  comm - MPI communicator
265 
266    Output Parameter:
267 .  psubcomm - location to store the `PetscSubcomm` context
268 
269    Level: advanced
270 
271 .seealso: `PetscSubcomm`, `PetscSubcommDestroy()`, `PetscSubcommSetTypeGeneral()`, `PetscSubcommSetFromOptions()`, `PetscSubcommSetType()`,
272           `PetscSubcommSetNumber()`
273 @*/
274 PetscErrorCode PetscSubcommCreate(MPI_Comm comm, PetscSubcomm *psubcomm) {
275   PetscMPIInt rank, size;
276 
277   PetscFunctionBegin;
278   PetscCall(PetscNew(psubcomm));
279 
280   /* set defaults */
281   PetscCallMPI(MPI_Comm_rank(comm, &rank));
282   PetscCallMPI(MPI_Comm_size(comm, &size));
283 
284   (*psubcomm)->parent    = comm;
285   (*psubcomm)->dupparent = comm;
286   (*psubcomm)->child     = PETSC_COMM_SELF;
287   (*psubcomm)->n         = size;
288   (*psubcomm)->color     = rank;
289   (*psubcomm)->subsize   = NULL;
290   (*psubcomm)->type      = PETSC_SUBCOMM_INTERLACED;
291   PetscFunctionReturn(0);
292 }
293 
294 /*@C
295   PetscSubcommGetParent - Gets the communicator that was used to create the `PetscSubcomm`
296 
297    Collective
298 
299    Input Parameter:
300 .  scomm - the `PetscSubcomm`
301 
302    Output Parameter:
303 .  pcomm - location to store the parent communicator
304 
305    Level: intermediate
306 
307 .seealso: `PetscSubcommDestroy()`, `PetscSubcommSetTypeGeneral()`, `PetscSubcommSetFromOptions()`, `PetscSubcommSetType()`,
308           `PetscSubcommSetNumber()`, `PetscSubcommGetChild()`, `PetscSubcommContiguousParent()`
309 @*/
310 PetscErrorCode PetscSubcommGetParent(PetscSubcomm scomm, MPI_Comm *pcomm) {
311   *pcomm = PetscSubcommParent(scomm);
312   return 0;
313 }
314 
315 /*@C
316   PetscSubcommGetContiguousParent - Gets a communicator that that is a duplicate of the parent but has the ranks
317                                     reordered by the order they are in the children
318 
319    Collective
320 
321    Input Parameter:
322 .  scomm - the `PetscSubcomm`
323 
324    Output Parameter:
325 .  pcomm - location to store the parent communicator
326 
327    Level: intermediate
328 
329 .seealso: `PetscSubcommDestroy()`, `PetscSubcommSetTypeGeneral()`, `PetscSubcommSetFromOptions()`, `PetscSubcommSetType()`,
330           `PetscSubcommSetNumber()`, `PetscSubcommGetChild()`, `PetscSubcommContiguousParent()`
331 @*/
332 PetscErrorCode PetscSubcommGetContiguousParent(PetscSubcomm scomm, MPI_Comm *pcomm) {
333   *pcomm = PetscSubcommContiguousParent(scomm);
334   return 0;
335 }
336 
337 /*@C
338   PetscSubcommGetChild - Gets the communicator created by the `PetscSubcomm`. This is part of one of the subcommunicators created by the `PetscSubcomm`
339 
340    Collective
341 
342    Input Parameter:
343 .  scomm - the `PetscSubcomm`
344 
345    Output Parameter:
346 .  ccomm - location to store the child communicator
347 
348    Level: intermediate
349 
350 .seealso: `PetscSubcommDestroy()`, `PetscSubcommSetTypeGeneral()`, `PetscSubcommSetFromOptions()`, `PetscSubcommSetType()`,
351           `PetscSubcommSetNumber()`, `PetscSubcommGetParent()`, `PetscSubcommContiguousParent()`
352 @*/
353 PetscErrorCode PetscSubcommGetChild(PetscSubcomm scomm, MPI_Comm *ccomm) {
354   *ccomm = PetscSubcommChild(scomm);
355   return 0;
356 }
357 
358 static PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm psubcomm) {
359   PetscMPIInt rank, size, *subsize, duprank = -1, subrank = -1;
360   PetscMPIInt np_subcomm, nleftover, i, color = -1, rankstart, nsubcomm = psubcomm->n;
361   MPI_Comm    subcomm = 0, dupcomm = 0, comm = psubcomm->parent;
362 
363   PetscFunctionBegin;
364   PetscCallMPI(MPI_Comm_rank(comm, &rank));
365   PetscCallMPI(MPI_Comm_size(comm, &size));
366 
367   /* get size of each subcommunicator */
368   PetscCall(PetscMalloc1(1 + nsubcomm, &subsize));
369 
370   np_subcomm = size / nsubcomm;
371   nleftover  = size - nsubcomm * np_subcomm;
372   for (i = 0; i < nsubcomm; i++) {
373     subsize[i] = np_subcomm;
374     if (i < nleftover) subsize[i]++;
375   }
376 
377   /* get color and subrank of this proc */
378   rankstart = 0;
379   for (i = 0; i < nsubcomm; i++) {
380     if (rank >= rankstart && rank < rankstart + subsize[i]) {
381       color   = i;
382       subrank = rank - rankstart;
383       duprank = rank;
384       break;
385     } else rankstart += subsize[i];
386   }
387 
388   PetscCallMPI(MPI_Comm_split(comm, color, subrank, &subcomm));
389 
390   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
391   PetscCallMPI(MPI_Comm_split(comm, 0, duprank, &dupcomm));
392   PetscCall(PetscCommDuplicate(dupcomm, &psubcomm->dupparent, NULL));
393   PetscCall(PetscCommDuplicate(subcomm, &psubcomm->child, NULL));
394   PetscCallMPI(MPI_Comm_free(&dupcomm));
395   PetscCallMPI(MPI_Comm_free(&subcomm));
396 
397   psubcomm->color   = color;
398   psubcomm->subsize = subsize;
399   psubcomm->type    = PETSC_SUBCOMM_CONTIGUOUS;
400   PetscFunctionReturn(0);
401 }
402 
403 /*
404    Note:
405    In PCREDUNDANT, to avoid data scattering from subcomm back to original comm, we create subcommunicators
406    by iteratively taking a process into a subcommunicator.
407    Example: size=4, nsubcomm=(*psubcomm)->n=3
408      comm=(*psubcomm)->parent:
409       rank:     [0]  [1]  [2]  [3]
410       color:     0    1    2    0
411 
412      subcomm=(*psubcomm)->child:
413       subrank:  [0]  [0]  [0]  [1]
414 
415      dupcomm=(*psubcomm)->dupparent:
416       duprank:  [0]  [2]  [3]  [1]
417 
418      Here, subcomm[color = 0] has subsize=2, owns process [0] and [3]
419            subcomm[color = 1] has subsize=1, owns process [1]
420            subcomm[color = 2] has subsize=1, owns process [2]
421            dupcomm has same number of processes as comm, and its duprank maps
422            processes in subcomm contiguously into a 1d array:
423             duprank: [0] [1]      [2]         [3]
424             rank:    [0] [3]      [1]         [2]
425                     subcomm[0] subcomm[1]  subcomm[2]
426 */
427 
428 static PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm psubcomm) {
429   PetscMPIInt rank, size, *subsize, duprank, subrank;
430   PetscMPIInt np_subcomm, nleftover, i, j, color, nsubcomm = psubcomm->n;
431   MPI_Comm    subcomm = 0, dupcomm = 0, comm = psubcomm->parent;
432 
433   PetscFunctionBegin;
434   PetscCallMPI(MPI_Comm_rank(comm, &rank));
435   PetscCallMPI(MPI_Comm_size(comm, &size));
436 
437   /* get size of each subcommunicator */
438   PetscCall(PetscMalloc1(1 + nsubcomm, &subsize));
439 
440   np_subcomm = size / nsubcomm;
441   nleftover  = size - nsubcomm * np_subcomm;
442   for (i = 0; i < nsubcomm; i++) {
443     subsize[i] = np_subcomm;
444     if (i < nleftover) subsize[i]++;
445   }
446 
447   /* find color for this proc */
448   color   = rank % nsubcomm;
449   subrank = rank / nsubcomm;
450 
451   PetscCallMPI(MPI_Comm_split(comm, color, subrank, &subcomm));
452 
453   j       = 0;
454   duprank = 0;
455   for (i = 0; i < nsubcomm; i++) {
456     if (j == color) {
457       duprank += subrank;
458       break;
459     }
460     duprank += subsize[i];
461     j++;
462   }
463 
464   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
465   PetscCallMPI(MPI_Comm_split(comm, 0, duprank, &dupcomm));
466   PetscCall(PetscCommDuplicate(dupcomm, &psubcomm->dupparent, NULL));
467   PetscCall(PetscCommDuplicate(subcomm, &psubcomm->child, NULL));
468   PetscCallMPI(MPI_Comm_free(&dupcomm));
469   PetscCallMPI(MPI_Comm_free(&subcomm));
470 
471   psubcomm->color   = color;
472   psubcomm->subsize = subsize;
473   psubcomm->type    = PETSC_SUBCOMM_INTERLACED;
474   PetscFunctionReturn(0);
475 }
476