xref: /petsc/src/sys/objects/subcomm.c (revision f38d543f014061d7d9693584ad5578cc37639c60)
1 
2 /*
3      Provides utility routines for split MPI communicator.
4 */
5 #include <petscsys.h>    /*I   "petscsys.h"    I*/
6 #include <petsc-private/threadcommimpl.h> /* Petsc_ThreadComm_keyval */
7 
/* Printable names for the PetscSubcommType enum.
   NOTE(review): the trailing "PetscSubcommType","PETSC_SUBCOMM_",0 entries appear to follow
   PETSc's enum-name-table convention (type name, option prefix, NULL sentinel) — confirm
   against the consumers of this table (e.g. options/enum printing routines). */
const char *const PetscSubcommTypes[] = {"GENERAL","CONTIGUOUS","INTERLACED","PetscSubcommType","PETSC_SUBCOMM_",0};
9 
10 extern PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm);
11 extern PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm);
12 
13 #undef __FUNCT__
14 #define __FUNCT__ "PetscSubcommSetNumber"
15 /*@C
16   PetscSubcommSetNumber - Set total number of subcommunicators.
17 
18    Collective on MPI_Comm
19 
20    Input Parameter:
21 +  psubcomm - PetscSubcomm context
22 -  nsubcomm - the total number of subcommunicators in psubcomm
23 
24    Level: advanced
25 
26 .keywords: communicator
27 
28 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetType(),PetscSubcommSetTypeGeneral()
29 @*/
30 PetscErrorCode  PetscSubcommSetNumber(PetscSubcomm psubcomm,PetscInt nsubcomm)
31 {
32   PetscErrorCode ierr;
33   MPI_Comm       comm=psubcomm->parent;
34   PetscMPIInt    rank,size;
35 
36   PetscFunctionBegin;
37   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate() first");
38   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
39   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
40   if (nsubcomm < 1 || nsubcomm > size) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE, "Num of subcommunicators %D cannot be < 1 or > input comm size %D",nsubcomm,size);
41 
42   psubcomm->n = nsubcomm;
43   PetscFunctionReturn(0);
44 }
45 
46 #undef __FUNCT__
47 #define __FUNCT__ "PetscSubcommSetType"
48 /*@C
49   PetscSubcommSetType - Set type of subcommunicators.
50 
51    Collective on MPI_Comm
52 
53    Input Parameter:
54 +  psubcomm - PetscSubcomm context
55 -  subcommtype - subcommunicator type, PETSC_SUBCOMM_CONTIGUOUS,PETSC_SUBCOMM_INTERLACED
56 
57    Level: advanced
58 
59 .keywords: communicator
60 
61 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetTypeGeneral()
62 @*/
63 PetscErrorCode  PetscSubcommSetType(PetscSubcomm psubcomm,PetscSubcommType subcommtype)
64 {
65   PetscErrorCode ierr;
66 
67   PetscFunctionBegin;
68   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
69   if (psubcomm->n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %D is incorrect. Call PetscSubcommSetNumber()",psubcomm->n);
70 
71   if (subcommtype == PETSC_SUBCOMM_CONTIGUOUS) {
72     ierr = PetscSubcommCreate_contiguous(psubcomm);CHKERRQ(ierr);
73   } else if (subcommtype == PETSC_SUBCOMM_INTERLACED) {
74     ierr = PetscSubcommCreate_interlaced(psubcomm);CHKERRQ(ierr);
75   } else SETERRQ1(psubcomm->parent,PETSC_ERR_SUP,"PetscSubcommType %D is not supported yet",subcommtype);
76   PetscFunctionReturn(0);
77 }
78 
79 #undef __FUNCT__
80 #define __FUNCT__ "PetscSubcommSetTypeGeneral"
81 /*@C
82   PetscSubcommSetTypeGeneral - Set type of subcommunicators from user's specifications
83 
84    Collective on MPI_Comm
85 
86    Input Parameter:
87 +  psubcomm - PetscSubcomm context
88 .  color   - control of subset assignment (nonnegative integer). Processes with the same color are in the same subcommunicator.
89 .  subrank - rank in the subcommunicator
90 -  duprank - rank in the dupparent (see PetscSubcomm)
91 
92    Level: advanced
93 
94 .keywords: communicator, create
95 
96 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetType()
97 @*/
98 PetscErrorCode  PetscSubcommSetTypeGeneral(PetscSubcomm psubcomm,PetscMPIInt color,PetscMPIInt subrank,PetscMPIInt duprank)
99 {
100   PetscErrorCode ierr;
101   MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;
102   PetscMPIInt    size;
103 
104   PetscFunctionBegin;
105   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
106   if (psubcomm->n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %D is incorrect. Call PetscSubcommSetNumber()",psubcomm->n);
107 
108   ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);
109 
110   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm
111      if duprank is not a valid number, then dupcomm is not created - not all applications require dupcomm! */
112   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
113   if (duprank == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"duprank==PETSC_DECIDE is not supported yet");
114   else if (duprank >= 0 && duprank < size) {
115     ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
116   }
117   ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
118   ierr = PetscCommDuplicate(subcomm,&psubcomm->comm,NULL);CHKERRQ(ierr);
119   ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
120   ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);
121 
122   psubcomm->color = color;
123   PetscFunctionReturn(0);
124 }
125 
126 #undef __FUNCT__
127 #define __FUNCT__ "PetscSubcommDestroy"
128 PetscErrorCode  PetscSubcommDestroy(PetscSubcomm *psubcomm)
129 {
130   PetscErrorCode ierr;
131 
132   PetscFunctionBegin;
133   if (!*psubcomm) PetscFunctionReturn(0);
134   ierr = PetscCommDestroy(&(*psubcomm)->dupparent);CHKERRQ(ierr);
135   ierr = PetscCommDestroy(&(*psubcomm)->comm);CHKERRQ(ierr);
136   ierr = PetscFree((*psubcomm));CHKERRQ(ierr);
137   PetscFunctionReturn(0);
138 }
139 
140 #undef __FUNCT__
141 #define __FUNCT__ "PetscSubcommCreate"
142 /*@C
143   PetscSubcommCreate - Create a PetscSubcomm context.
144 
145    Collective on MPI_Comm
146 
147    Input Parameter:
148 .  comm - MPI communicator
149 
150    Output Parameter:
151 .  psubcomm - location to store the PetscSubcomm context
152 
153    Level: advanced
154 
155 .keywords: communicator, create
156 
157 .seealso: PetscSubcommDestroy()
158 @*/
159 PetscErrorCode  PetscSubcommCreate(MPI_Comm comm,PetscSubcomm *psubcomm)
160 {
161   PetscErrorCode ierr;
162 
163   PetscFunctionBegin;
164   ierr = PetscNew(struct _n_PetscSubcomm,psubcomm);CHKERRQ(ierr);
165 
166   (*psubcomm)->parent = comm;
167   PetscFunctionReturn(0);
168 }
169 
#undef __FUNCT__
#define __FUNCT__ "PetscSubcommCreate_contiguous"
/* Split psubcomm->parent into psubcomm->n subcommunicators holding contiguous
   rank ranges (ranks [0..s0-1] form subcomm 0, the next s1 ranks form subcomm 1, ...).
   Also builds dupcomm, a same-size communicator whose rank order lays the
   subcommunicators out contiguously; for the contiguous split duprank == rank. */
PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm psubcomm)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,*subsize,duprank=-1,subrank=-1;
  PetscInt       np_subcomm,nleftover,i,color=-1,rankstart,nsubcomm=psubcomm->n;
  MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  /* get size of each subcommunicator */
  ierr = PetscMalloc((1+nsubcomm)*sizeof(PetscMPIInt),&subsize);CHKERRQ(ierr);

  /* each subcomm gets floor(size/nsubcomm) processes; the first nleftover get one extra */
  np_subcomm = size/nsubcomm;
  nleftover  = size - nsubcomm*np_subcomm;
  for (i=0; i<nsubcomm; i++) {
    subsize[i] = np_subcomm;
    if (i<nleftover) subsize[i]++;
  }

  /* get color and subrank of this proc: walk the contiguous ranges until this
     rank falls inside one; subsize sums to size so the loop always matches */
  rankstart = 0;
  for (i=0; i<nsubcomm; i++) {
    if (rank >= rankstart && rank < rankstart+subsize[i]) {
      color   = i;
      subrank = rank - rankstart;
      duprank = rank; /* contiguous layout: dupcomm rank equals parent rank */
      break;
    } else rankstart += subsize[i];
  }
  ierr = PetscFree(subsize);CHKERRQ(ierr);

  ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);

  /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
  ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
  {
    /* Attach the parent's thread communicator to the two fresh comms and bump its
       reference count once per attachment, so later comm destruction is balanced.
       NOTE(review): refct++ is not atomic -- assumes this runs outside any threaded
       region; confirm with threadcomm usage. */
    PetscThreadComm tcomm;
    ierr = PetscCommGetThreadComm(comm,&tcomm);CHKERRQ(ierr);
    ierr = MPI_Attr_put(dupcomm,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
    tcomm->refct++;
    ierr = MPI_Attr_put(subcomm,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
    tcomm->refct++;
  }
  /* keep PETSc-managed duplicates, then release the raw MPI communicators */
  ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
  ierr = PetscCommDuplicate(subcomm,&psubcomm->comm,NULL);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);

  psubcomm->color = color;
  psubcomm->type  = PETSC_SUBCOMM_CONTIGUOUS;
  PetscFunctionReturn(0);
}
226 
227 #undef __FUNCT__
228 #define __FUNCT__ "PetscSubcommCreate_interlaced"
229 /*
230    Note:
231    In PCREDUNDANT, to avoid data scattering from subcomm back to original comm, we create subcommunicators
232    by iteratively taking a process into a subcommunicator.
233    Example: size=4, nsubcomm=(*psubcomm)->n=3
234      comm=(*psubcomm)->parent:
235       rank:     [0]  [1]  [2]  [3]
236       color:     0    1    2    0
237 
238      subcomm=(*psubcomm)->comm:
239       subrank:  [0]  [0]  [0]  [1]
240 
241      dupcomm=(*psubcomm)->dupparent:
242       duprank:  [0]  [2]  [3]  [1]
243 
244      Here, subcomm[color = 0] has subsize=2, owns process [0] and [3]
245            subcomm[color = 1] has subsize=1, owns process [1]
246            subcomm[color = 2] has subsize=1, owns process [2]
247            dupcomm has same number of processes as comm, and its duprank maps
248            processes in subcomm contiguously into a 1d array:
249             duprank: [0] [1]      [2]         [3]
250             rank:    [0] [3]      [1]         [2]
251                     subcomm[0] subcomm[1]  subcomm[2]
252 */
253 
254 PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm psubcomm)
255 {
256   PetscErrorCode ierr;
257   PetscMPIInt    rank,size,*subsize,duprank,subrank;
258   PetscInt       np_subcomm,nleftover,i,j,color,nsubcomm=psubcomm->n;
259   MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;
260 
261   PetscFunctionBegin;
262   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
263   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
264 
265   /* get size of each subcommunicator */
266   ierr = PetscMalloc((1+nsubcomm)*sizeof(PetscMPIInt),&subsize);CHKERRQ(ierr);
267 
268   np_subcomm = size/nsubcomm;
269   nleftover  = size - nsubcomm*np_subcomm;
270   for (i=0; i<nsubcomm; i++) {
271     subsize[i] = np_subcomm;
272     if (i<nleftover) subsize[i]++;
273   }
274 
275   /* find color for this proc */
276   color   = rank%nsubcomm;
277   subrank = rank/nsubcomm;
278 
279   ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);
280 
281   j = 0; duprank = 0;
282   for (i=0; i<nsubcomm; i++) {
283     if (j == color) {
284       duprank += subrank;
285       break;
286     }
287     duprank += subsize[i]; j++;
288   }
289   ierr = PetscFree(subsize);CHKERRQ(ierr);
290 
291   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
292   ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
293   {
294     PetscThreadComm tcomm;
295     ierr = PetscCommGetThreadComm(comm,&tcomm);CHKERRQ(ierr);
296     ierr = MPI_Attr_put(dupcomm,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
297     tcomm->refct++;
298     ierr = MPI_Attr_put(subcomm,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
299     tcomm->refct++;
300   }
301   ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
302   ierr = PetscCommDuplicate(subcomm,&psubcomm->comm,NULL);CHKERRQ(ierr);
303   ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
304   ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);
305 
306   psubcomm->color = color;
307   psubcomm->type  = PETSC_SUBCOMM_INTERLACED;
308   PetscFunctionReturn(0);
309 }
310 
311 
312