xref: /petsc/src/sys/objects/subcomm.c (revision d3b23db5a7ee1f155504da2761e4a9fbf8c1ff2c)
1 
2 /*
3      Provides utility routines for split MPI communicator.
4 */
5 #include <petscsys.h>    /*I   "petscsys.h"    I*/
6 #include <petsc-private/threadcommimpl.h> /* Petsc_ThreadComm_keyval */
7 
/* Printable names for PetscSubcommType; trailing entries appear to follow PETSc's
   enum-string convention (enum name, value prefix, NULL sentinel) — used by option processing. */
const char *const PetscSubcommTypes[] = {"GENERAL","CONTIGUOUS","INTERLACED","PetscSubcommType","PETSC_SUBCOMM_",0};
9 
/* Type-specific constructors, defined later in this file; selected by PetscSubcommSetType(). */
extern PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm);
extern PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm);
12 
13 #undef __FUNCT__
14 #define __FUNCT__ "PetscSubcommSetNumber"
15 /*@C
16   PetscSubcommSetNumber - Set total number of subcommunicators.
17 
18    Collective on MPI_Comm
19 
20    Input Parameter:
21 +  psubcomm - PetscSubcomm context
22 -  nsubcomm - the total number of subcommunicators in psubcomm
23 
24    Level: advanced
25 
26 .keywords: communicator
27 
28 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetType(),PetscSubcommSetTypeGeneral()
29 @*/
30 PetscErrorCode  PetscSubcommSetNumber(PetscSubcomm psubcomm,PetscInt nsubcomm)
31 {
32   PetscErrorCode ierr;
33   MPI_Comm       comm=psubcomm->parent;
34   PetscMPIInt    rank,size;
35 
36   PetscFunctionBegin;
37   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate() first");
38   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
39   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
40   if (nsubcomm < 1 || nsubcomm > size) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE, "Num of subcommunicators %D cannot be < 1 or > input comm size %D",nsubcomm,size);
41 
42   psubcomm->n = nsubcomm;
43   PetscFunctionReturn(0);
44 }
45 
46 #undef __FUNCT__
47 #define __FUNCT__ "PetscSubcommSetType"
48 /*@C
49   PetscSubcommSetType - Set type of subcommunicators.
50 
51    Collective on MPI_Comm
52 
53    Input Parameter:
54 +  psubcomm - PetscSubcomm context
55 -  subcommtype - subcommunicator type, PETSC_SUBCOMM_CONTIGUOUS,PETSC_SUBCOMM_INTERLACED
56 
57    Level: advanced
58 
59 .keywords: communicator
60 
61 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetTypeGeneral()
62 @*/
63 PetscErrorCode  PetscSubcommSetType(PetscSubcomm psubcomm,PetscSubcommType subcommtype)
64 {
65   PetscErrorCode ierr;
66 
67   PetscFunctionBegin;
68   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
69   if (psubcomm->n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %D is incorrect. Call PetscSubcommSetNumber()",psubcomm->n);
70 
71   if (subcommtype == PETSC_SUBCOMM_CONTIGUOUS) {
72     ierr = PetscSubcommCreate_contiguous(psubcomm);CHKERRQ(ierr);
73   } else if (subcommtype == PETSC_SUBCOMM_INTERLACED) {
74     ierr = PetscSubcommCreate_interlaced(psubcomm);CHKERRQ(ierr);
75   } else SETERRQ1(psubcomm->parent,PETSC_ERR_SUP,"PetscSubcommType %D is not supported yet",subcommtype);
76   PetscFunctionReturn(0);
77 }
78 
79 #undef __FUNCT__
80 #define __FUNCT__ "PetscSubcommSetTypeGeneral"
81 /*@C
82   PetscSubcommSetTypeGeneral - Set type of subcommunicators from user's specifications
83 
84    Collective on MPI_Comm
85 
86    Input Parameter:
87 +  psubcomm - PetscSubcomm context
88 .  color   - control of subset assignment (nonnegative integer). Processes with the same color are in the same subcommunicator.
89 .  subrank - rank in the subcommunicator
90 -  duprank - rank in the dupparent (see PetscSubcomm)
91 
92    Level: advanced
93 
94 .keywords: communicator, create
95 
96 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetType()
97 @*/
98 PetscErrorCode  PetscSubcommSetTypeGeneral(PetscSubcomm psubcomm,PetscMPIInt color,PetscMPIInt subrank,PetscMPIInt duprank)
99 {
100   PetscErrorCode ierr;
101   MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;
102   PetscMPIInt    size;
103 
104   PetscFunctionBegin;
105   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
106   if (psubcomm->n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %D is incorrect. Call PetscSubcommSetNumber()",psubcomm->n);
107 
108   ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);
109 
110   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm
111      if duprank is not a valid number, then dupcomm is not created - not all applications require dupcomm! */
112   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
113   if (duprank == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"duprank==PETSC_DECIDE is not supported yet");
114   else if (duprank >= 0 && duprank < size) {
115     ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
116   }
117   ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
118   ierr = PetscCommDuplicate(subcomm,&psubcomm->comm,NULL);CHKERRQ(ierr);
119   ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
120   ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);
121 
122   psubcomm->color = color;
123   PetscFunctionReturn(0);
124 }
125 
126 #undef __FUNCT__
127 #define __FUNCT__ "PetscSubcommDestroy"
128 PetscErrorCode  PetscSubcommDestroy(PetscSubcomm *psubcomm)
129 {
130   PetscErrorCode ierr;
131 
132   PetscFunctionBegin;
133   if (!*psubcomm) PetscFunctionReturn(0);
134   ierr = PetscCommDestroy(&(*psubcomm)->dupparent);CHKERRQ(ierr);
135   ierr = PetscCommDestroy(&(*psubcomm)->comm);CHKERRQ(ierr);
136   ierr = PetscFree((*psubcomm));CHKERRQ(ierr);
137   PetscFunctionReturn(0);
138 }
139 
140 #undef __FUNCT__
141 #define __FUNCT__ "PetscSubcommCreate"
142 /*@C
143   PetscSubcommCreate - Create a PetscSubcomm context.
144 
145    Collective on MPI_Comm
146 
147    Input Parameter:
148 .  comm - MPI communicator
149 
150    Output Parameter:
151 .  psubcomm - location to store the PetscSubcomm context
152 
153    Level: advanced
154 
155 .keywords: communicator, create
156 
157 .seealso: PetscSubcommDestroy()
158 @*/
159 PetscErrorCode  PetscSubcommCreate(MPI_Comm comm,PetscSubcomm *psubcomm)
160 {
161   PetscErrorCode ierr;
162   PetscMPIInt    rank,size;
163 
164   PetscFunctionBegin;
165   ierr = PetscNew(struct _n_PetscSubcomm,psubcomm);CHKERRQ(ierr);
166 
167   /* set defaults */
168   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
169   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
170   (*psubcomm)->parent    = comm;
171   (*psubcomm)->dupparent = comm;
172   (*psubcomm)->comm      = PETSC_COMM_SELF;
173   (*psubcomm)->n         = size;
174   (*psubcomm)->color     = rank;
175   (*psubcomm)->type      = PETSC_SUBCOMM_INTERLACED;
176   PetscFunctionReturn(0);
177 }
178 
#undef __FUNCT__
#define __FUNCT__ "PetscSubcommCreate_contiguous"
/*
   Splits psubcomm->parent into psubcomm->n subcommunicators of (nearly) equal
   size, assigning parent ranks in contiguous chunks: the first subsize[0]
   ranks get color 0, the next subsize[1] ranks color 1, and so on. Since the
   chunks are already contiguous in the parent, duprank == rank and dupcomm
   orders processes identically to comm.
*/
PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm psubcomm)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,*subsize,duprank=-1,subrank=-1;
  PetscInt       np_subcomm,nleftover,i,color=-1,rankstart,nsubcomm=psubcomm->n;
  MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  /* get size of each subcommunicator */
  ierr = PetscMalloc((1+nsubcomm)*sizeof(PetscMPIInt),&subsize);CHKERRQ(ierr);

  np_subcomm = size/nsubcomm;
  nleftover  = size - nsubcomm*np_subcomm;
  for (i=0; i<nsubcomm; i++) {
    subsize[i] = np_subcomm;
    if (i<nleftover) subsize[i]++; /* spread the remainder over the first nleftover subcomms */
  }

  /* get color and subrank of this proc */
  rankstart = 0;
  for (i=0; i<nsubcomm; i++) {
    if (rank >= rankstart && rank < rankstart+subsize[i]) {
      color   = i;
      subrank = rank - rankstart;
      duprank = rank; /* contiguous layout: dup ordering coincides with parent ordering */
      break;
    } else rankstart += subsize[i];
  }
  ierr = PetscFree(subsize);CHKERRQ(ierr);

  ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);

  /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
  ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
  {
    /* Attach the parent's thread communicator to both new comms, bumping its
       reference count once per attachment so later destruction balances. */
    PetscThreadComm tcomm;
    ierr = PetscCommGetThreadComm(comm,&tcomm);CHKERRQ(ierr);
    ierr = MPI_Attr_put(dupcomm,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
    tcomm->refct++;
    ierr = MPI_Attr_put(subcomm,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
    tcomm->refct++;
  }
  /* Take PETSc references on the raw MPI comms, then release the raw handles. */
  ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
  ierr = PetscCommDuplicate(subcomm,&psubcomm->comm,NULL);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);

  psubcomm->color = color;
  psubcomm->type  = PETSC_SUBCOMM_CONTIGUOUS;
  PetscFunctionReturn(0);
}
235 
#undef __FUNCT__
#define __FUNCT__ "PetscSubcommCreate_interlaced"
/*
   Note:
   In PCREDUNDANT, to avoid data scattering from subcomm back to original comm, we create subcommunicators
   by iteratively taking a process into a subcommunicator.
   Example: size=4, nsubcomm=(*psubcomm)->n=3
     comm=(*psubcomm)->parent:
      rank:     [0]  [1]  [2]  [3]
      color:     0    1    2    0

     subcomm=(*psubcomm)->comm:
      subrank:  [0]  [0]  [0]  [1]

     dupcomm=(*psubcomm)->dupparent:
      duprank:  [0]  [2]  [3]  [1]

     Here, subcomm[color = 0] has subsize=2, owns process [0] and [3]
           subcomm[color = 1] has subsize=1, owns process [1]
           subcomm[color = 2] has subsize=1, owns process [2]
           dupcomm has same number of processes as comm, and its duprank maps
           processes in subcomm contiguously into a 1d array:
            duprank: [0] [1]      [2]         [3]
            rank:    [0] [3]      [1]         [2]
                    subcomm[0] subcomm[1]  subcomm[2]
*/

PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm psubcomm)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,*subsize,duprank,subrank;
  PetscInt       np_subcomm,nleftover,i,j,color,nsubcomm=psubcomm->n;
  MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  /* get size of each subcommunicator */
  ierr = PetscMalloc((1+nsubcomm)*sizeof(PetscMPIInt),&subsize);CHKERRQ(ierr);

  np_subcomm = size/nsubcomm;
  nleftover  = size - nsubcomm*np_subcomm;
  for (i=0; i<nsubcomm; i++) {
    subsize[i] = np_subcomm;
    if (i<nleftover) subsize[i]++; /* spread the remainder over the first nleftover subcomms */
  }

  /* find color for this proc: ranks are dealt round-robin across the subcomms */
  color   = rank%nsubcomm;
  subrank = rank/nsubcomm;

  ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);

  /* duprank = (sum of sizes of the subcomms preceding mine) + subrank,
     so that each subcomm occupies a contiguous range of dupcomm ranks */
  j = 0; duprank = 0;
  for (i=0; i<nsubcomm; i++) {
    if (j == color) {
      duprank += subrank;
      break;
    }
    duprank += subsize[i]; j++;
  }
  ierr = PetscFree(subsize);CHKERRQ(ierr);

  /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
  ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
  {
    /* Attach the parent's thread communicator to both new comms, bumping its
       reference count once per attachment so later destruction balances. */
    PetscThreadComm tcomm;
    ierr = PetscCommGetThreadComm(comm,&tcomm);CHKERRQ(ierr);
    ierr = MPI_Attr_put(dupcomm,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
    tcomm->refct++;
    ierr = MPI_Attr_put(subcomm,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
    tcomm->refct++;
  }
  /* Take PETSc references on the raw MPI comms, then release the raw handles. */
  ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
  ierr = PetscCommDuplicate(subcomm,&psubcomm->comm,NULL);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);

  psubcomm->color = color;
  psubcomm->type  = PETSC_SUBCOMM_INTERLACED;
  PetscFunctionReturn(0);
}
319 
320 
321