xref: /petsc/src/sys/objects/subcomm.c (revision e06cc25f388e7e78aff4d13e8bc6525d54dd772a)
1 
2 /*
3      Provides utility routines for splitting an MPI communicator into subcommunicators.
4 */
5 #include <petscsys.h>    /*I   "petscsys.h"    I*/
6 #include <petscviewer.h>
7 
8 const char *const PetscSubcommTypes[] = {"GENERAL","CONTIGUOUS","INTERLACED","PetscSubcommType","PETSC_SUBCOMM_",0};
9 
10 extern PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm);
11 extern PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm);
12 #undef __FUNCT__
13 #define __FUNCT__ "PetscSubcommSetFromOptions"
14 PetscErrorCode PetscSubcommSetFromOptions(PetscSubcomm psubcomm)
15 {
16   PetscErrorCode ierr;
17   PetscSubcommType type;
18   PetscBool      flg;
19 
20   PetscFunctionBegin;
21   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Must call PetscSubcommCreate firt");
22   type = psubcomm->type;
23   ierr = PetscOptionsGetEnum(NULL,"-psubcomm_type",PetscSubcommTypes,(PetscEnum*)&type,&flg);CHKERRQ(ierr);
24   if (flg && psubcomm->type != type) {
25     /* free old structures */
26     ierr = PetscCommDestroy(&(psubcomm)->dupparent);CHKERRQ(ierr);
27     ierr = PetscCommDestroy(&(psubcomm)->child);CHKERRQ(ierr);
28     ierr = PetscFree((psubcomm)->subsize);CHKERRQ(ierr);
29     switch (type) {
30     case PETSC_SUBCOMM_GENERAL:
31       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Runtime option PETSC_SUBCOMM_GENERAL is not supported, use PetscSubcommSetTypeGeneral()");
32     case PETSC_SUBCOMM_CONTIGUOUS:
33       ierr = PetscSubcommCreate_contiguous(psubcomm);CHKERRQ(ierr);
34       break;
35     case PETSC_SUBCOMM_INTERLACED:
36       ierr = PetscSubcommCreate_interlaced(psubcomm);CHKERRQ(ierr);
37       break;
38     default:
39       SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"PetscSubcommType %s is not supported yet",PetscSubcommTypes[type]);
40     }
41   }
42 
43   ierr = PetscOptionsHasName(NULL, "-psubcomm_view", &flg);CHKERRQ(ierr);
44   if (flg) {
45     ierr = PetscSubcommView(psubcomm,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
46   }
47   PetscFunctionReturn(0);
48 }
49 
50 #undef __FUNCT__
51 #define __FUNCT__ "PetscSubcommView"
52 PetscErrorCode PetscSubcommView(PetscSubcomm psubcomm,PetscViewer viewer)
53 {
54   PetscErrorCode    ierr;
55   PetscBool         iascii;
56   PetscViewerFormat format;
57 
58   PetscFunctionBegin;
59   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
60   if (iascii) {
61     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
62     if (format == PETSC_VIEWER_DEFAULT) {
63       MPI_Comm    comm=psubcomm->parent;
64       PetscMPIInt rank,size,subsize,subrank,duprank;
65 
66       ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
67       ierr = PetscViewerASCIIPrintf(viewer,"PetscSubcomm type %s with total %d MPI processes:\n",PetscSubcommTypes[psubcomm->type],size);CHKERRQ(ierr);
68       ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
69       ierr = MPI_Comm_size(psubcomm->child,&subsize);CHKERRQ(ierr);
70       ierr = MPI_Comm_rank(psubcomm->child,&subrank);CHKERRQ(ierr);
71       ierr = MPI_Comm_rank(psubcomm->dupparent,&duprank);CHKERRQ(ierr);
72       ierr = PetscSynchronizedPrintf(comm,"  [%d], color %d, sub-size %d, sub-rank %d, duprank %d\n",rank,psubcomm->color,subsize,subrank,duprank);CHKERRQ(ierr);
73       ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
74     }
75   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported yet");
76   PetscFunctionReturn(0);
77 }
78 
79 #undef __FUNCT__
80 #define __FUNCT__ "PetscSubcommSetNumber"
81 /*@C
82   PetscSubcommSetNumber - Set total number of subcommunicators.
83 
84    Collective on MPI_Comm
85 
86    Input Parameter:
87 +  psubcomm - PetscSubcomm context
88 -  nsubcomm - the total number of subcommunicators in psubcomm
89 
90    Level: advanced
91 
92 .keywords: communicator
93 
94 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetType(),PetscSubcommSetTypeGeneral()
95 @*/
96 PetscErrorCode  PetscSubcommSetNumber(PetscSubcomm psubcomm,PetscInt nsubcomm)
97 {
98   PetscErrorCode ierr;
99   MPI_Comm       comm=psubcomm->parent;
100   PetscMPIInt    msub,size;
101 
102   PetscFunctionBegin;
103   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate() first");
104   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
105   ierr = PetscMPIIntCast(nsubcomm,&msub);CHKERRQ(ierr);
106   if (msub < 1 || msub > size) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE, "Num of subcommunicators %d cannot be < 1 or > input comm size %d",msub,size);
107 
108   psubcomm->n = msub;
109   PetscFunctionReturn(0);
110 }
111 
112 #undef __FUNCT__
113 #define __FUNCT__ "PetscSubcommSetType"
114 /*@C
115   PetscSubcommSetType - Set type of subcommunicators.
116 
117    Collective on MPI_Comm
118 
119    Input Parameter:
120 +  psubcomm - PetscSubcomm context
121 -  subcommtype - subcommunicator type, PETSC_SUBCOMM_CONTIGUOUS,PETSC_SUBCOMM_INTERLACED
122 
123    Level: advanced
124 
125 .keywords: communicator
126 
127 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetTypeGeneral()
128 @*/
129 PetscErrorCode  PetscSubcommSetType(PetscSubcomm psubcomm,PetscSubcommType subcommtype)
130 {
131   PetscErrorCode ierr;
132 
133   PetscFunctionBegin;
134   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
135   if (psubcomm->n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %d is incorrect. Call PetscSubcommSetNumber()",psubcomm->n);
136 
137   if (subcommtype == PETSC_SUBCOMM_CONTIGUOUS) {
138     ierr = PetscSubcommCreate_contiguous(psubcomm);CHKERRQ(ierr);
139   } else if (subcommtype == PETSC_SUBCOMM_INTERLACED) {
140     ierr = PetscSubcommCreate_interlaced(psubcomm);CHKERRQ(ierr);
141   } else SETERRQ1(psubcomm->parent,PETSC_ERR_SUP,"PetscSubcommType %s is not supported yet",PetscSubcommTypes[subcommtype]);
142   PetscFunctionReturn(0);
143 }
144 
145 #undef __FUNCT__
146 #define __FUNCT__ "PetscSubcommSetTypeGeneral"
147 /*@C
148   PetscSubcommSetTypeGeneral - Set a PetscSubcomm from user's specifications
149 
150    Collective on MPI_Comm
151 
152    Input Parameter:
153 +  psubcomm - PetscSubcomm context
154 .  color   - control of subset assignment (nonnegative integer). Processes with the same color are in the same subcommunicator.
155 -  subrank - rank in the subcommunicator
156 
157    Level: advanced
158 
159 .keywords: communicator, create
160 
161 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetType()
162 @*/
163 PetscErrorCode PetscSubcommSetTypeGeneral(PetscSubcomm psubcomm,PetscMPIInt color,PetscMPIInt subrank)
164 {
165   PetscErrorCode ierr;
166   MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;
167   PetscMPIInt    size,icolor,duprank,*recvbuf,sendbuf[3],mysubsize,rank,*subsize;
168   PetscMPIInt    i,nsubcomm=psubcomm->n;
169 
170   PetscFunctionBegin;
171   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
172   if (nsubcomm < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %d is incorrect. Call PetscSubcommSetNumber()",nsubcomm);
173 
174   ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);
175 
176   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
177   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
178   ierr = PetscMalloc1(2*size,&recvbuf);CHKERRQ(ierr);
179 
180   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
181   ierr = MPI_Comm_size(subcomm,&mysubsize);CHKERRQ(ierr);
182 
183   sendbuf[0] = color;
184   sendbuf[1] = mysubsize;
185   ierr = MPI_Allgather(sendbuf,2,MPI_INT,recvbuf,2,MPI_INT,comm);CHKERRQ(ierr);
186 
187   ierr = PetscCalloc1(nsubcomm,&subsize);CHKERRQ(ierr);
188   for (i=0; i<2*size; i+=2) {
189     subsize[recvbuf[i]] = recvbuf[i+1];
190   }
191   ierr = PetscFree(recvbuf);CHKERRQ(ierr);
192 
193   duprank = 0;
194   for (icolor=0; icolor<nsubcomm; icolor++) {
195     if (icolor != color) { /* not color of this process */
196       duprank += subsize[icolor];
197     } else {
198       duprank += subrank;
199       break;
200     }
201   }
202   ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
203 
204   ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
205   ierr = PetscCommDuplicate(subcomm,&psubcomm->child,NULL);CHKERRQ(ierr);
206   ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
207   ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);
208 
209   psubcomm->color   = color;
210   psubcomm->subsize = subsize;
211   psubcomm->type    = PETSC_SUBCOMM_GENERAL;
212   PetscFunctionReturn(0);
213 }
214 
215 #undef __FUNCT__
216 #define __FUNCT__ "PetscSubcommDestroy"
217 PetscErrorCode  PetscSubcommDestroy(PetscSubcomm *psubcomm)
218 {
219   PetscErrorCode ierr;
220 
221   PetscFunctionBegin;
222   if (!*psubcomm) PetscFunctionReturn(0);
223   ierr = PetscCommDestroy(&(*psubcomm)->dupparent);CHKERRQ(ierr);
224   ierr = PetscCommDestroy(&(*psubcomm)->child);CHKERRQ(ierr);
225   ierr = PetscFree((*psubcomm)->subsize);CHKERRQ(ierr);
226   ierr = PetscFree((*psubcomm));CHKERRQ(ierr);
227   PetscFunctionReturn(0);
228 }
229 
230 #undef __FUNCT__
231 #define __FUNCT__ "PetscSubcommCreate"
232 /*@C
233   PetscSubcommCreate - Create a PetscSubcomm context.
234 
235    Collective on MPI_Comm
236 
237    Input Parameter:
238 .  comm - MPI communicator
239 
240    Output Parameter:
241 .  psubcomm - location to store the PetscSubcomm context
242 
243    Level: advanced
244 
245 .keywords: communicator, create
246 
247 .seealso: PetscSubcommDestroy()
248 @*/
249 PetscErrorCode  PetscSubcommCreate(MPI_Comm comm,PetscSubcomm *psubcomm)
250 {
251   PetscErrorCode ierr;
252   PetscMPIInt    rank,size;
253 
254   PetscFunctionBegin;
255   ierr = PetscNew(psubcomm);CHKERRQ(ierr);
256 
257   /* set defaults */
258   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
259   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
260 
261   (*psubcomm)->parent    = comm;
262   (*psubcomm)->dupparent = comm;
263   (*psubcomm)->child     = PETSC_COMM_SELF;
264   (*psubcomm)->n         = size;
265   (*psubcomm)->color     = rank;
266   (*psubcomm)->subsize   = NULL;
267   (*psubcomm)->type      = PETSC_SUBCOMM_INTERLACED;
268   PetscFunctionReturn(0);
269 }
270 
271 #undef __FUNCT__
272 #define __FUNCT__ "PetscSubcommCreate_contiguous"
273 PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm psubcomm)
274 {
275   PetscErrorCode ierr;
276   PetscMPIInt    rank,size,*subsize,duprank=-1,subrank=-1;
277   PetscMPIInt    np_subcomm,nleftover,i,color=-1,rankstart,nsubcomm=psubcomm->n;
278   MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;
279 
280   PetscFunctionBegin;
281   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
282   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
283 
284   /* get size of each subcommunicator */
285   ierr = PetscMalloc1(1+nsubcomm,&subsize);CHKERRQ(ierr);
286 
287   np_subcomm = size/nsubcomm;
288   nleftover  = size - nsubcomm*np_subcomm;
289   for (i=0; i<nsubcomm; i++) {
290     subsize[i] = np_subcomm;
291     if (i<nleftover) subsize[i]++;
292   }
293 
294   /* get color and subrank of this proc */
295   rankstart = 0;
296   for (i=0; i<nsubcomm; i++) {
297     if (rank >= rankstart && rank < rankstart+subsize[i]) {
298       color   = i;
299       subrank = rank - rankstart;
300       duprank = rank;
301       break;
302     } else rankstart += subsize[i];
303   }
304 
305   ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);
306 
307   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
308   ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
309   ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
310   ierr = PetscCommDuplicate(subcomm,&psubcomm->child,NULL);CHKERRQ(ierr);
311   ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
312   ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);
313 
314   psubcomm->color   = color;
315   psubcomm->subsize = subsize;
316   psubcomm->type    = PETSC_SUBCOMM_CONTIGUOUS;
317   PetscFunctionReturn(0);
318 }
319 
320 #undef __FUNCT__
321 #define __FUNCT__ "PetscSubcommCreate_interlaced"
322 /*
323    Note:
324    In PCREDUNDANT, to avoid data scattering from subcomm back to original comm, we create subcommunicators
325    by iteratively taking a process into a subcommunicator.
326    Example: size=4, nsubcomm=(*psubcomm)->n=3
327      comm=(*psubcomm)->parent:
328       rank:     [0]  [1]  [2]  [3]
329       color:     0    1    2    0
330 
331      subcomm=(*psubcomm)->comm:
332       subrank:  [0]  [0]  [0]  [1]
333 
334      dupcomm=(*psubcomm)->dupparent:
335       duprank:  [0]  [2]  [3]  [1]
336 
337      Here, subcomm[color = 0] has subsize=2, owns process [0] and [3]
338            subcomm[color = 1] has subsize=1, owns process [1]
339            subcomm[color = 2] has subsize=1, owns process [2]
340            dupcomm has same number of processes as comm, and its duprank maps
341            processes in subcomm contiguously into a 1d array:
342             duprank: [0] [1]      [2]         [3]
343             rank:    [0] [3]      [1]         [2]
344                     subcomm[0] subcomm[1]  subcomm[2]
345 */
346 
347 PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm psubcomm)
348 {
349   PetscErrorCode ierr;
350   PetscMPIInt    rank,size,*subsize,duprank,subrank;
351   PetscMPIInt    np_subcomm,nleftover,i,j,color,nsubcomm=psubcomm->n;
352   MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;
353 
354   PetscFunctionBegin;
355   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
356   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
357 
358   /* get size of each subcommunicator */
359   ierr = PetscMalloc1(1+nsubcomm,&subsize);CHKERRQ(ierr);
360 
361   np_subcomm = size/nsubcomm;
362   nleftover  = size - nsubcomm*np_subcomm;
363   for (i=0; i<nsubcomm; i++) {
364     subsize[i] = np_subcomm;
365     if (i<nleftover) subsize[i]++;
366   }
367 
368   /* find color for this proc */
369   color   = rank%nsubcomm;
370   subrank = rank/nsubcomm;
371 
372   ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);
373 
374   j = 0; duprank = 0;
375   for (i=0; i<nsubcomm; i++) {
376     if (j == color) {
377       duprank += subrank;
378       break;
379     }
380     duprank += subsize[i]; j++;
381   }
382 
383   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
384   ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
385   ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
386   ierr = PetscCommDuplicate(subcomm,&psubcomm->child,NULL);CHKERRQ(ierr);
387   ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
388   ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);
389 
390   psubcomm->color   = color;
391   psubcomm->subsize = subsize;
392   psubcomm->type    = PETSC_SUBCOMM_INTERLACED;
393   PetscFunctionReturn(0);
394 }
395 
396 
397