xref: /petsc/src/sys/objects/subcomm.c (revision 60f5f21c9986be7e96bc5f6a72a0920da4c6def8)
1 
2 /*
     Provides utility routines for splitting an MPI communicator.
4 */
5 #include <petscsys.h>    /*I   "petscsys.h"    I*/
6 #include <petscviewer.h>
7 
/* String names for PetscSubcommType, consumed by PetscOptionsEnum() in
   PetscSubcommSetFromOptions(); the trailing entries (enum type name,
   option prefix, NULL sentinel) follow the standard PETSc enum-table layout. */
const char *const PetscSubcommTypes[] = {"GENERAL","CONTIGUOUS","INTERLACED","PetscSubcommType","PETSC_SUBCOMM_",0};

/* Constructors for the two built-in subcommunicator layouts (defined later in this file) */
extern PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm);
extern PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm);
12 #undef __FUNCT__
13 #define __FUNCT__ "PetscSubcommSetFromOptions"
14 PetscErrorCode PetscSubcommSetFromOptions(PetscSubcomm psubcomm)
15 {
16   PetscErrorCode   ierr;
17   PetscSubcommType type;
18   PetscBool        flg;
19 
20   PetscFunctionBegin;
21   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Must call PetscSubcommCreate firt");
22 
23   ierr = PetscOptionsBegin(psubcomm->parent,NULL,"Options for PetscSubcomm",NULL);CHKERRQ(ierr);
24   ierr = PetscOptionsEnum("-psubcomm_type",NULL,NULL,PetscSubcommTypes,(PetscEnum)psubcomm->type,(PetscEnum*)&type,&flg);CHKERRQ(ierr);
25   if (flg && psubcomm->type != type) {
26     /* free old structures */
27     ierr = PetscCommDestroy(&(psubcomm)->dupparent);CHKERRQ(ierr);
28     ierr = PetscCommDestroy(&(psubcomm)->child);CHKERRQ(ierr);
29     ierr = PetscFree((psubcomm)->subsize);CHKERRQ(ierr);
30     switch (type) {
31     case PETSC_SUBCOMM_GENERAL:
32       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Runtime option PETSC_SUBCOMM_GENERAL is not supported, use PetscSubcommSetTypeGeneral()");
33     case PETSC_SUBCOMM_CONTIGUOUS:
34       ierr = PetscSubcommCreate_contiguous(psubcomm);CHKERRQ(ierr);
35       break;
36     case PETSC_SUBCOMM_INTERLACED:
37       ierr = PetscSubcommCreate_interlaced(psubcomm);CHKERRQ(ierr);
38       break;
39     default:
40       SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"PetscSubcommType %s is not supported yet",PetscSubcommTypes[type]);
41     }
42   }
43 
44   ierr = PetscOptionsName("-psubcomm_view","Triggers display of PetscSubcomm context","PetscSubcommView",&flg);CHKERRQ(ierr);
45   if (flg) {
46     ierr = PetscSubcommView(psubcomm,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
47   }
48   ierr = PetscOptionsEnd();CHKERRQ(ierr);
49   PetscFunctionReturn(0);
50 }
51 
52 #undef __FUNCT__
53 #define __FUNCT__ "PetscSubcommView"
54 PetscErrorCode PetscSubcommView(PetscSubcomm psubcomm,PetscViewer viewer)
55 {
56   PetscErrorCode    ierr;
57   PetscBool         iascii;
58   PetscViewerFormat format;
59 
60   PetscFunctionBegin;
61   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
62   if (iascii) {
63     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
64     if (format == PETSC_VIEWER_DEFAULT) {
65       MPI_Comm    comm=psubcomm->parent;
66       PetscMPIInt rank,size,subsize,subrank,duprank;
67 
68       ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
69       ierr = PetscViewerASCIIPrintf(viewer,"PetscSubcomm type %s with total %d MPI processes:\n",PetscSubcommTypes[psubcomm->type],size);CHKERRQ(ierr);
70       ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
71       ierr = MPI_Comm_size(psubcomm->child,&subsize);CHKERRQ(ierr);
72       ierr = MPI_Comm_rank(psubcomm->child,&subrank);CHKERRQ(ierr);
73       ierr = MPI_Comm_rank(psubcomm->dupparent,&duprank);CHKERRQ(ierr);
74       ierr = PetscSynchronizedPrintf(comm,"  [%d], color %d, sub-size %d, sub-rank %d, duprank %d\n",rank,psubcomm->color,subsize,subrank,duprank);CHKERRQ(ierr);
75       ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
76     }
77   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported yet");
78   PetscFunctionReturn(0);
79 }
80 
81 #undef __FUNCT__
82 #define __FUNCT__ "PetscSubcommSetNumber"
83 /*@C
84   PetscSubcommSetNumber - Set total number of subcommunicators.
85 
86    Collective on MPI_Comm
87 
88    Input Parameter:
89 +  psubcomm - PetscSubcomm context
90 -  nsubcomm - the total number of subcommunicators in psubcomm
91 
92    Level: advanced
93 
94 .keywords: communicator
95 
96 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetType(),PetscSubcommSetTypeGeneral()
97 @*/
98 PetscErrorCode  PetscSubcommSetNumber(PetscSubcomm psubcomm,PetscInt nsubcomm)
99 {
100   PetscErrorCode ierr;
101   MPI_Comm       comm=psubcomm->parent;
102   PetscMPIInt    msub,size;
103 
104   PetscFunctionBegin;
105   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate() first");
106   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
107   ierr = PetscMPIIntCast(nsubcomm,&msub);CHKERRQ(ierr);
108   if (msub < 1 || msub > size) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE, "Num of subcommunicators %d cannot be < 1 or > input comm size %d",msub,size);
109 
110   psubcomm->n = msub;
111   PetscFunctionReturn(0);
112 }
113 
114 #undef __FUNCT__
115 #define __FUNCT__ "PetscSubcommSetType"
116 /*@C
117   PetscSubcommSetType - Set type of subcommunicators.
118 
119    Collective on MPI_Comm
120 
121    Input Parameter:
122 +  psubcomm - PetscSubcomm context
123 -  subcommtype - subcommunicator type, PETSC_SUBCOMM_CONTIGUOUS,PETSC_SUBCOMM_INTERLACED
124 
125    Level: advanced
126 
127 .keywords: communicator
128 
129 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetTypeGeneral()
130 @*/
131 PetscErrorCode  PetscSubcommSetType(PetscSubcomm psubcomm,PetscSubcommType subcommtype)
132 {
133   PetscErrorCode ierr;
134 
135   PetscFunctionBegin;
136   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
137   if (psubcomm->n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %d is incorrect. Call PetscSubcommSetNumber()",psubcomm->n);
138 
139   if (subcommtype == PETSC_SUBCOMM_CONTIGUOUS) {
140     ierr = PetscSubcommCreate_contiguous(psubcomm);CHKERRQ(ierr);
141   } else if (subcommtype == PETSC_SUBCOMM_INTERLACED) {
142     ierr = PetscSubcommCreate_interlaced(psubcomm);CHKERRQ(ierr);
143   } else SETERRQ1(psubcomm->parent,PETSC_ERR_SUP,"PetscSubcommType %s is not supported yet",PetscSubcommTypes[subcommtype]);
144   PetscFunctionReturn(0);
145 }
146 
147 #undef __FUNCT__
148 #define __FUNCT__ "PetscSubcommSetTypeGeneral"
149 /*@C
150   PetscSubcommSetTypeGeneral - Set a PetscSubcomm from user's specifications
151 
152    Collective on MPI_Comm
153 
154    Input Parameter:
155 +  psubcomm - PetscSubcomm context
156 .  color   - control of subset assignment (nonnegative integer). Processes with the same color are in the same subcommunicator.
157 -  subrank - rank in the subcommunicator
158 
159    Level: advanced
160 
161 .keywords: communicator, create
162 
163 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetType()
164 @*/
165 PetscErrorCode PetscSubcommSetTypeGeneral(PetscSubcomm psubcomm,PetscMPIInt color,PetscMPIInt subrank)
166 {
167   PetscErrorCode ierr;
168   MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;
169   PetscMPIInt    size,icolor,duprank,*recvbuf,sendbuf[3],mysubsize,rank,*subsize;
170   PetscMPIInt    i,nsubcomm=psubcomm->n;
171 
172   PetscFunctionBegin;
173   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
174   if (nsubcomm < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %d is incorrect. Call PetscSubcommSetNumber()",nsubcomm);
175 
176   ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);
177 
178   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
179   /* TODO: this can be done in an ostensibly scalale way (i.e., without allocating an array of size 'size') as is done in PetscObjectsCreateGlobalOrdering(). */
180   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
181   ierr = PetscMalloc1(2*size,&recvbuf);CHKERRQ(ierr);
182 
183   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
184   ierr = MPI_Comm_size(subcomm,&mysubsize);CHKERRQ(ierr);
185 
186   sendbuf[0] = color;
187   sendbuf[1] = mysubsize;
188   ierr = MPI_Allgather(sendbuf,2,MPI_INT,recvbuf,2,MPI_INT,comm);CHKERRQ(ierr);
189 
190   ierr = PetscCalloc1(nsubcomm,&subsize);CHKERRQ(ierr);
191   for (i=0; i<2*size; i+=2) {
192     subsize[recvbuf[i]] = recvbuf[i+1];
193   }
194   ierr = PetscFree(recvbuf);CHKERRQ(ierr);
195 
196   duprank = 0;
197   for (icolor=0; icolor<nsubcomm; icolor++) {
198     if (icolor != color) { /* not color of this process */
199       duprank += subsize[icolor];
200     } else {
201       duprank += subrank;
202       break;
203     }
204   }
205   ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
206 
207   ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
208   ierr = PetscCommDuplicate(subcomm,&psubcomm->child,NULL);CHKERRQ(ierr);
209   ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
210   ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);
211 
212   psubcomm->color   = color;
213   psubcomm->subsize = subsize;
214   psubcomm->type    = PETSC_SUBCOMM_GENERAL;
215   PetscFunctionReturn(0);
216 }
217 
#undef __FUNCT__
#define __FUNCT__ "PetscSubcommDestroy"
/*
   PetscSubcommDestroy - Frees a PetscSubcomm created with PetscSubcommCreate().

   Input Parameter:
.  psubcomm - address of the PetscSubcomm context to destroy

   NOTE(review): PetscFree() appears to zero the lvalue it is handed, which
   would make *psubcomm NULL on return and a repeated call a no-op — confirm
   against the PetscFree macro definition.
*/
PetscErrorCode  PetscSubcommDestroy(PetscSubcomm *psubcomm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*psubcomm) PetscFunctionReturn(0);  /* nothing to do: never created or already destroyed */
  ierr = PetscCommDestroy(&(*psubcomm)->dupparent);CHKERRQ(ierr);  /* drop reference to duplicated parent comm */
  ierr = PetscCommDestroy(&(*psubcomm)->child);CHKERRQ(ierr);      /* drop reference to child (sub) comm */
  ierr = PetscFree((*psubcomm)->subsize);CHKERRQ(ierr);            /* per-color size array; may be NULL */
  ierr = PetscFree((*psubcomm));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
232 
233 #undef __FUNCT__
234 #define __FUNCT__ "PetscSubcommCreate"
235 /*@C
236   PetscSubcommCreate - Create a PetscSubcomm context.
237 
238    Collective on MPI_Comm
239 
240    Input Parameter:
241 .  comm - MPI communicator
242 
243    Output Parameter:
244 .  psubcomm - location to store the PetscSubcomm context
245 
246    Level: advanced
247 
248 .keywords: communicator, create
249 
250 .seealso: PetscSubcommDestroy()
251 @*/
252 PetscErrorCode  PetscSubcommCreate(MPI_Comm comm,PetscSubcomm *psubcomm)
253 {
254   PetscErrorCode ierr;
255   PetscMPIInt    rank,size;
256 
257   PetscFunctionBegin;
258   ierr = PetscNew(psubcomm);CHKERRQ(ierr);
259 
260   /* set defaults */
261   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
262   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
263 
264   (*psubcomm)->parent    = comm;
265   (*psubcomm)->dupparent = comm;
266   (*psubcomm)->child     = PETSC_COMM_SELF;
267   (*psubcomm)->n         = size;
268   (*psubcomm)->color     = rank;
269   (*psubcomm)->subsize   = NULL;
270   (*psubcomm)->type      = PETSC_SUBCOMM_INTERLACED;
271   PetscFunctionReturn(0);
272 }
273 
274 #undef __FUNCT__
275 #define __FUNCT__ "PetscSubcommCreate_contiguous"
276 PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm psubcomm)
277 {
278   PetscErrorCode ierr;
279   PetscMPIInt    rank,size,*subsize,duprank=-1,subrank=-1;
280   PetscMPIInt    np_subcomm,nleftover,i,color=-1,rankstart,nsubcomm=psubcomm->n;
281   MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;
282 
283   PetscFunctionBegin;
284   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
285   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
286 
287   /* get size of each subcommunicator */
288   ierr = PetscMalloc1(1+nsubcomm,&subsize);CHKERRQ(ierr);
289 
290   np_subcomm = size/nsubcomm;
291   nleftover  = size - nsubcomm*np_subcomm;
292   for (i=0; i<nsubcomm; i++) {
293     subsize[i] = np_subcomm;
294     if (i<nleftover) subsize[i]++;
295   }
296 
297   /* get color and subrank of this proc */
298   rankstart = 0;
299   for (i=0; i<nsubcomm; i++) {
300     if (rank >= rankstart && rank < rankstart+subsize[i]) {
301       color   = i;
302       subrank = rank - rankstart;
303       duprank = rank;
304       break;
305     } else rankstart += subsize[i];
306   }
307 
308   ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);
309 
310   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
311   ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
312   ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
313   ierr = PetscCommDuplicate(subcomm,&psubcomm->child,NULL);CHKERRQ(ierr);
314   ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
315   ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);
316 
317   psubcomm->color   = color;
318   psubcomm->subsize = subsize;
319   psubcomm->type    = PETSC_SUBCOMM_CONTIGUOUS;
320   PetscFunctionReturn(0);
321 }
322 
323 #undef __FUNCT__
324 #define __FUNCT__ "PetscSubcommCreate_interlaced"
325 /*
326    Note:
327    In PCREDUNDANT, to avoid data scattering from subcomm back to original comm, we create subcommunicators
328    by iteratively taking a process into a subcommunicator.
329    Example: size=4, nsubcomm=(*psubcomm)->n=3
330      comm=(*psubcomm)->parent:
331       rank:     [0]  [1]  [2]  [3]
332       color:     0    1    2    0
333 
334      subcomm=(*psubcomm)->comm:
335       subrank:  [0]  [0]  [0]  [1]
336 
337      dupcomm=(*psubcomm)->dupparent:
338       duprank:  [0]  [2]  [3]  [1]
339 
340      Here, subcomm[color = 0] has subsize=2, owns process [0] and [3]
341            subcomm[color = 1] has subsize=1, owns process [1]
342            subcomm[color = 2] has subsize=1, owns process [2]
343            dupcomm has same number of processes as comm, and its duprank maps
344            processes in subcomm contiguously into a 1d array:
345             duprank: [0] [1]      [2]         [3]
346             rank:    [0] [3]      [1]         [2]
347                     subcomm[0] subcomm[1]  subcomm[2]
348 */
349 
350 PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm psubcomm)
351 {
352   PetscErrorCode ierr;
353   PetscMPIInt    rank,size,*subsize,duprank,subrank;
354   PetscMPIInt    np_subcomm,nleftover,i,j,color,nsubcomm=psubcomm->n;
355   MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;
356 
357   PetscFunctionBegin;
358   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
359   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
360 
361   /* get size of each subcommunicator */
362   ierr = PetscMalloc1(1+nsubcomm,&subsize);CHKERRQ(ierr);
363 
364   np_subcomm = size/nsubcomm;
365   nleftover  = size - nsubcomm*np_subcomm;
366   for (i=0; i<nsubcomm; i++) {
367     subsize[i] = np_subcomm;
368     if (i<nleftover) subsize[i]++;
369   }
370 
371   /* find color for this proc */
372   color   = rank%nsubcomm;
373   subrank = rank/nsubcomm;
374 
375   ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);
376 
377   j = 0; duprank = 0;
378   for (i=0; i<nsubcomm; i++) {
379     if (j == color) {
380       duprank += subrank;
381       break;
382     }
383     duprank += subsize[i]; j++;
384   }
385 
386   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
387   ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
388   ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
389   ierr = PetscCommDuplicate(subcomm,&psubcomm->child,NULL);CHKERRQ(ierr);
390   ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
391   ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);
392 
393   psubcomm->color   = color;
394   psubcomm->subsize = subsize;
395   psubcomm->type    = PETSC_SUBCOMM_INTERLACED;
396   PetscFunctionReturn(0);
397 }
398 
399 
400