xref: /petsc/src/sys/objects/subcomm.c (revision e37c6257ed0f98c39d2d110f3ff733a26fbdb7cb)
1 
2 /*
3      Provides utility routines for split MPI communicator.
4 */
5 #include <petscsys.h>    /*I   "petscsys.h"    I*/
6 #include <petsc-private/threadcommimpl.h> /* Petsc_ThreadComm_keyval */
7 #include <petscviewer.h>
8 
9 const char *const PetscSubcommTypes[] = {"GENERAL","CONTIGUOUS","INTERLACED","PetscSubcommType","PETSC_SUBCOMM_",0};
10 
11 extern PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm);
12 extern PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm);
13 
14 #undef __FUNCT__
15 #define __FUNCT__ "PetscSubcommView"
16 PetscErrorCode PetscSubcommView(PetscSubcomm psubcomm,PetscViewer viewer)
17 {
18   PetscErrorCode    ierr;
19   PetscBool         iascii;
20   PetscViewerFormat format;
21 
22   PetscFunctionBegin;
23   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
24   if (iascii) {
25     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
26     if (format == PETSC_VIEWER_DEFAULT) {
27       MPI_Comm    comm=psubcomm->parent;
28       PetscMPIInt rank,size,subsize,subrank,duprank;
29 
30       ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
31       ierr = PetscViewerASCIIPrintf(viewer,"PetscSubcomm type %s with total %D MPI processes:\n",PetscSubcommTypes[psubcomm->type],size);CHKERRQ(ierr);
32       ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
33       ierr = MPI_Comm_size(psubcomm->comm,&subsize);CHKERRQ(ierr);
34       ierr = MPI_Comm_rank(psubcomm->comm,&subrank);CHKERRQ(ierr);
35       ierr = MPI_Comm_rank(psubcomm->dupparent,&duprank);CHKERRQ(ierr);
36       ierr = PetscSynchronizedPrintf(comm,"  [%D], color %D, sub-size %D, sub-rank %D, duprank %D\n",rank,psubcomm->color,subsize,subrank,duprank);
37       ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
38     }
39   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported yet");
40   PetscFunctionReturn(0);
41 }
42 
43 #undef __FUNCT__
44 #define __FUNCT__ "PetscSubcommSetNumber"
45 /*@C
46   PetscSubcommSetNumber - Set total number of subcommunicators.
47 
48    Collective on MPI_Comm
49 
50    Input Parameter:
51 +  psubcomm - PetscSubcomm context
52 -  nsubcomm - the total number of subcommunicators in psubcomm
53 
54    Level: advanced
55 
56 .keywords: communicator
57 
58 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetType(),PetscSubcommSetTypeGeneral()
59 @*/
60 PetscErrorCode  PetscSubcommSetNumber(PetscSubcomm psubcomm,PetscInt nsubcomm)
61 {
62   PetscErrorCode ierr;
63   MPI_Comm       comm=psubcomm->parent;
64   PetscMPIInt    rank,size;
65 
66   PetscFunctionBegin;
67   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate() first");
68   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
69   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
70   if (nsubcomm < 1 || nsubcomm > size) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE, "Num of subcommunicators %D cannot be < 1 or > input comm size %D",nsubcomm,size);
71 
72   psubcomm->n = nsubcomm;
73   PetscFunctionReturn(0);
74 }
75 
76 #undef __FUNCT__
77 #define __FUNCT__ "PetscSubcommSetType"
78 /*@C
79   PetscSubcommSetType - Set type of subcommunicators.
80 
81    Collective on MPI_Comm
82 
83    Input Parameter:
84 +  psubcomm - PetscSubcomm context
85 -  subcommtype - subcommunicator type, PETSC_SUBCOMM_CONTIGUOUS,PETSC_SUBCOMM_INTERLACED
86 
87    Level: advanced
88 
89 .keywords: communicator
90 
91 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetTypeGeneral()
92 @*/
93 PetscErrorCode  PetscSubcommSetType(PetscSubcomm psubcomm,PetscSubcommType subcommtype)
94 {
95   PetscErrorCode ierr;
96 
97   PetscFunctionBegin;
98   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
99   if (psubcomm->n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %D is incorrect. Call PetscSubcommSetNumber()",psubcomm->n);
100 
101   if (subcommtype == PETSC_SUBCOMM_CONTIGUOUS) {
102     ierr = PetscSubcommCreate_contiguous(psubcomm);CHKERRQ(ierr);
103   } else if (subcommtype == PETSC_SUBCOMM_INTERLACED) {
104     ierr = PetscSubcommCreate_interlaced(psubcomm);CHKERRQ(ierr);
105   } else SETERRQ1(psubcomm->parent,PETSC_ERR_SUP,"PetscSubcommType %D is not supported yet",subcommtype);
106   PetscFunctionReturn(0);
107 }
108 
109 #undef __FUNCT__
110 #define __FUNCT__ "PetscSubcommSetTypeGeneral"
111 /*@C
112   PetscSubcommSetTypeGeneral - Set type of subcommunicators from user's specifications
113 
114    Collective on MPI_Comm
115 
116    Input Parameter:
117 +  psubcomm - PetscSubcomm context
118 .  color   - control of subset assignment (nonnegative integer). Processes with the same color are in the same subcommunicator.
119 .  subrank - rank in the subcommunicator
120 -  duprank - rank in the dupparent (see PetscSubcomm)
121 
122    Level: advanced
123 
124 .keywords: communicator, create
125 
126 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetType()
127 @*/
128 PetscErrorCode  PetscSubcommSetTypeGeneral(PetscSubcomm psubcomm,PetscMPIInt color,PetscMPIInt subrank,PetscMPIInt duprank)
129 {
130   PetscErrorCode ierr;
131   MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;
132   PetscMPIInt    size;
133 
134   PetscFunctionBegin;
135   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
136   if (psubcomm->n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %D is incorrect. Call PetscSubcommSetNumber()",psubcomm->n);
137 
138   ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);
139 
140   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm
141      if duprank is not a valid number, then dupcomm is not created - not all applications require dupcomm! */
142   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
143   if (duprank == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"duprank==PETSC_DECIDE is not supported yet");
144   else if (duprank >= 0 && duprank < size) {
145     ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
146   }
147   ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
148   ierr = PetscCommDuplicate(subcomm,&psubcomm->comm,NULL);CHKERRQ(ierr);
149   ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
150   ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);
151 
152   psubcomm->color = color;
153   PetscFunctionReturn(0);
154 }
155 
#undef __FUNCT__
#define __FUNCT__ "PetscSubcommDestroy"
/*
   PetscSubcommDestroy - Destroys a PetscSubcomm created with PetscSubcommCreate():
   releases the duplicated communicators, the subsize array, and the context itself.
   Safe to call on a pointer holding NULL (no-op).
*/
PetscErrorCode  PetscSubcommDestroy(PetscSubcomm *psubcomm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*psubcomm) PetscFunctionReturn(0); /* never created or already destroyed */
  ierr = PetscCommDestroy(&(*psubcomm)->dupparent);CHKERRQ(ierr);
  ierr = PetscCommDestroy(&(*psubcomm)->comm);CHKERRQ(ierr);
  ierr = PetscFree((*psubcomm)->subsize);CHKERRQ(ierr); /* NULL until a Create_xxx() fills it; PetscFree handles NULL */
  /* PetscFree() is a macro that also zeroes its argument, so *psubcomm becomes NULL here */
  ierr = PetscFree((*psubcomm));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
170 
171 #undef __FUNCT__
172 #define __FUNCT__ "PetscSubcommCreate"
173 /*@C
174   PetscSubcommCreate - Create a PetscSubcomm context.
175 
176    Collective on MPI_Comm
177 
178    Input Parameter:
179 .  comm - MPI communicator
180 
181    Output Parameter:
182 .  psubcomm - location to store the PetscSubcomm context
183 
184    Level: advanced
185 
186 .keywords: communicator, create
187 
188 .seealso: PetscSubcommDestroy()
189 @*/
190 PetscErrorCode  PetscSubcommCreate(MPI_Comm comm,PetscSubcomm *psubcomm)
191 {
192   PetscErrorCode ierr;
193   PetscMPIInt    rank,size;
194 
195   PetscFunctionBegin;
196   ierr = PetscNew(struct _n_PetscSubcomm,psubcomm);CHKERRQ(ierr);
197 
198   /* set defaults */
199   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
200   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
201   (*psubcomm)->parent    = comm;
202   (*psubcomm)->dupparent = comm;
203   (*psubcomm)->comm      = PETSC_COMM_SELF;
204   (*psubcomm)->n         = size;
205   (*psubcomm)->color     = rank;
206   (*psubcomm)->subsize   = NULL;
207   (*psubcomm)->type      = PETSC_SUBCOMM_INTERLACED;
208   PetscFunctionReturn(0);
209 }
210 
#undef __FUNCT__
#define __FUNCT__ "PetscSubcommCreate_contiguous"
/*
   PetscSubcommCreate_contiguous - Splits the parent communicator into
   psubcomm->n subcommunicators of (near) equal size, each owning a
   contiguous range of ranks; when size is not divisible by nsubcomm the
   first (size mod nsubcomm) subcommunicators get one extra process.
   Collective on psubcomm->parent (calls MPI_Comm_split twice).
*/
PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm psubcomm)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,*subsize,duprank=-1,subrank=-1;
  PetscInt       np_subcomm,nleftover,i,color=-1,rankstart,nsubcomm=psubcomm->n;
  MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  /* get size of each subcommunicator; ownership of subsize passes to psubcomm (freed in PetscSubcommDestroy) */
  ierr = PetscMalloc((1+nsubcomm)*sizeof(PetscMPIInt),&subsize);CHKERRQ(ierr);

  np_subcomm = size/nsubcomm;
  nleftover  = size - nsubcomm*np_subcomm;
  for (i=0; i<nsubcomm; i++) {
    subsize[i] = np_subcomm;
    if (i<nleftover) subsize[i]++; /* first nleftover subcomms absorb the remainder */
  }

  /* get color and subrank of this proc: walk the contiguous rank ranges until ours is found */
  rankstart = 0;
  for (i=0; i<nsubcomm; i++) {
    if (rank >= rankstart && rank < rankstart+subsize[i]) {
      color   = i;
      subrank = rank - rankstart;
      duprank = rank; /* contiguous layout keeps the parent's rank ordering, so duprank == rank */
      break;
    } else rankstart += subsize[i];
  }

  ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);

  /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
  ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
  {
    PetscThreadComm tcomm;
    /* cache the parent's thread communicator on both raw comms, bumping its
       reference count once per comm so later destruction balances */
    ierr = PetscCommGetThreadComm(comm,&tcomm);CHKERRQ(ierr);
    ierr = MPI_Attr_put(dupcomm,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
    tcomm->refct++;
    ierr = MPI_Attr_put(subcomm,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
    tcomm->refct++;
  }
  /* hand PETSc-managed duplicates to psubcomm, then drop the raw MPI handles */
  ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
  ierr = PetscCommDuplicate(subcomm,&psubcomm->comm,NULL);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);

  psubcomm->color   = color;
  psubcomm->subsize = subsize;
  psubcomm->type    = PETSC_SUBCOMM_CONTIGUOUS;
  PetscFunctionReturn(0);
}
267 
#undef __FUNCT__
#define __FUNCT__ "PetscSubcommCreate_interlaced"
/*
   Note:
   In PCREDUNDANT, to avoid data scattering from subcomm back to original comm, we create subcommunicators
   by iteratively taking a process into a subcommunicator.
   Example: size=4, nsubcomm=(*psubcomm)->n=3
     comm=(*psubcomm)->parent:
      rank:     [0]  [1]  [2]  [3]
      color:     0    1    2    0

     subcomm=(*psubcomm)->comm:
      subrank:  [0]  [0]  [0]  [1]

     dupcomm=(*psubcomm)->dupparent:
      duprank:  [0]  [2]  [3]  [1]

     Here, subcomm[color = 0] has subsize=2, owns process [0] and [3]
           subcomm[color = 1] has subsize=1, owns process [1]
           subcomm[color = 2] has subsize=1, owns process [2]
           dupcomm has same number of processes as comm, and its duprank maps
           processes in subcomm contiguously into a 1d array:
            duprank: [0] [1]      [2]         [3]
            rank:    [0] [3]      [1]         [2]
                    subcomm[0] subcomm[1]  subcomm[2]
*/

PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm psubcomm)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,*subsize,duprank,subrank;
  PetscInt       np_subcomm,nleftover,i,j,color,nsubcomm=psubcomm->n;
  MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  /* get size of each subcommunicator; ownership of subsize passes to psubcomm (freed in PetscSubcommDestroy) */
  ierr = PetscMalloc((1+nsubcomm)*sizeof(PetscMPIInt),&subsize);CHKERRQ(ierr);

  np_subcomm = size/nsubcomm;
  nleftover  = size - nsubcomm*np_subcomm;
  for (i=0; i<nsubcomm; i++) {
    subsize[i] = np_subcomm;
    if (i<nleftover) subsize[i]++; /* first nleftover subcomms absorb the remainder */
  }

  /* find color for this proc: ranks are dealt round-robin across the subcomms */
  color   = rank%nsubcomm;
  subrank = rank/nsubcomm;

  ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);

  /* duprank = (sum of sizes of the subcomms before ours) + our rank within ours,
     so each subcomm occupies a contiguous duprank range (see Note above) */
  j = 0; duprank = 0;
  for (i=0; i<nsubcomm; i++) {
    if (j == color) {
      duprank += subrank;
      break;
    }
    duprank += subsize[i]; j++;
  }

  /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
  ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
  {
    PetscThreadComm tcomm;
    /* cache the parent's thread communicator on both raw comms, bumping its
       reference count once per comm so later destruction balances */
    ierr = PetscCommGetThreadComm(comm,&tcomm);CHKERRQ(ierr);
    ierr = MPI_Attr_put(dupcomm,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
    tcomm->refct++;
    ierr = MPI_Attr_put(subcomm,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
    tcomm->refct++;
  }
  /* hand PETSc-managed duplicates to psubcomm, then drop the raw MPI handles */
  ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
  ierr = PetscCommDuplicate(subcomm,&psubcomm->comm,NULL);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);

  psubcomm->color   = color;
  psubcomm->subsize = subsize;
  psubcomm->type    = PETSC_SUBCOMM_INTERLACED;
  PetscFunctionReturn(0);
}
351 
352 
353