xref: /petsc/src/sys/objects/subcomm.c (revision 9895aa37ac365bac650f6bd8bf977519f7222510)
1 
2 /*
3      Provides utility routines for split MPI communicator.
4 */
5 #include <petscsys.h>    /*I   "petscsys.h"    I*/
6 #include <petsc-private/threadcommimpl.h> /* Petsc_ThreadComm_keyval */
7 
8 const char *const PetscSubcommTypes[] = {"GENERAL","CONTIGUOUS","INTERLACED","PetscSubcommType","PETSC_SUBCOMM_",0};
9 
10 extern PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm);
11 extern PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm);
12 
13 #undef __FUNCT__
14 #define __FUNCT__ "PetscSubcommSetNumber"
15 /*@C
16   PetscSubcommSetNumber - Set total number of subcommunicators.
17 
18    Collective on MPI_Comm
19 
20    Input Parameter:
21 +  psubcomm - PetscSubcomm context
22 -  nsubcomm - the total number of subcommunicators in psubcomm
23 
24    Level: advanced
25 
26 .keywords: communicator
27 
28 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetType(),PetscSubcommSetTypeGeneral()
29 @*/
30 PetscErrorCode  PetscSubcommSetNumber(PetscSubcomm psubcomm,PetscInt nsubcomm)
31 {
32   PetscErrorCode ierr;
33   MPI_Comm       comm=psubcomm->parent;
34   PetscMPIInt    rank,size;
35 
36   PetscFunctionBegin;
37   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate() first");
38   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
39   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
40   if (nsubcomm < 1 || nsubcomm > size) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE, "Num of subcommunicators %D cannot be < 1 or > input comm size %D",nsubcomm,size);
41 
42   psubcomm->n = nsubcomm;
43   PetscFunctionReturn(0);
44 }
45 
46 #undef __FUNCT__
47 #define __FUNCT__ "PetscSubcommSetType"
48 /*@C
49   PetscSubcommSetType - Set type of subcommunicators.
50 
51    Collective on MPI_Comm
52 
53    Input Parameter:
54 +  psubcomm - PetscSubcomm context
55 -  subcommtype - subcommunicator type, PETSC_SUBCOMM_CONTIGUOUS,PETSC_SUBCOMM_INTERLACED
56 
57    Level: advanced
58 
59 .keywords: communicator
60 
61 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetTypeGeneral()
62 @*/
63 PetscErrorCode  PetscSubcommSetType(PetscSubcomm psubcomm,PetscSubcommType subcommtype)
64 {
65   PetscErrorCode ierr;
66 
67   PetscFunctionBegin;
68   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
69   if (psubcomm->n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %D is incorrect. Call PetscSubcommSetNumber()",psubcomm->n);
70 
71   if (subcommtype == PETSC_SUBCOMM_CONTIGUOUS) {
72     ierr = PetscSubcommCreate_contiguous(psubcomm);CHKERRQ(ierr);
73   } else if (subcommtype == PETSC_SUBCOMM_INTERLACED) {
74     ierr = PetscSubcommCreate_interlaced(psubcomm);CHKERRQ(ierr);
75   } else SETERRQ1(psubcomm->parent,PETSC_ERR_SUP,"PetscSubcommType %D is not supported yet",subcommtype);
76   PetscFunctionReturn(0);
77 }
78 
79 #undef __FUNCT__
80 #define __FUNCT__ "PetscSubcommSetTypeGeneral"
81 /*@C
82   PetscSubcommSetTypeGeneral - Set type of subcommunicators from user's specifications
83 
84    Collective on MPI_Comm
85 
86    Input Parameter:
87 +  psubcomm - PetscSubcomm context
88 .  color   - control of subset assignment (nonnegative integer). Processes with the same color are in the same subcommunicator.
89 .  subrank - rank in the subcommunicator
90 -  duprank - rank in the dupparent (see PetscSubcomm)
91 
92    Level: advanced
93 
94 .keywords: communicator, create
95 
96 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetType()
97 @*/
98 PetscErrorCode  PetscSubcommSetTypeGeneral(PetscSubcomm psubcomm,PetscMPIInt color,PetscMPIInt subrank,PetscMPIInt duprank)
99 {
100   PetscErrorCode ierr;
101   MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;
102   PetscMPIInt    size;
103 
104   PetscFunctionBegin;
105   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
106   if (psubcomm->n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %D is incorrect. Call PetscSubcommSetNumber()",psubcomm->n);
107 
108   ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);
109 
110   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm
111      if duprank is not a valid number, then dupcomm is not created - not all applications require dupcomm! */
112   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
113   if (duprank == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"duprank==PETSC_DECIDE is not supported yet");
114   else if (duprank >= 0 && duprank < size) {
115     ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
116   }
117   ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
118   ierr = PetscCommDuplicate(subcomm,&psubcomm->comm,NULL);CHKERRQ(ierr);
119   ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
120   ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);
121 
122   psubcomm->color = color;
123   PetscFunctionReturn(0);
124 }
125 
#undef __FUNCT__
#define __FUNCT__ "PetscSubcommDestroy"
/*
   PetscSubcommDestroy - Releases the communicators held by a PetscSubcomm and
   frees the structure itself.  Safe to call on an already-destroyed handle.

   Input Parameter:
.  psubcomm - address of the PetscSubcomm to destroy

   Note: psubcomm->parent is NOT destroyed here; it is owned by the caller
   (PetscSubcommCreate() only stored it, it did not duplicate it).
*/
PetscErrorCode  PetscSubcommDestroy(PetscSubcomm *psubcomm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*psubcomm) PetscFunctionReturn(0); /* never created or already destroyed: no-op */
  ierr = PetscCommDestroy(&(*psubcomm)->dupparent);CHKERRQ(ierr);
  ierr = PetscCommDestroy(&(*psubcomm)->comm);CHKERRQ(ierr);
  /* NOTE(review): PetscFree() is presumed to zero *psubcomm through this exact
     lvalue (standard PETSc macro behavior), leaving the caller's handle NULL */
  ierr = PetscFree((*psubcomm));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
139 
140 #undef __FUNCT__
141 #define __FUNCT__ "PetscSubcommCreate"
142 /*@C
143   PetscSubcommCreate - Create a PetscSubcomm context.
144 
145    Collective on MPI_Comm
146 
147    Input Parameter:
148 .  comm - MPI communicator
149 
150    Output Parameter:
151 .  psubcomm - location to store the PetscSubcomm context
152 
153    Level: advanced
154 
155 .keywords: communicator, create
156 
157 .seealso: PetscSubcommDestroy()
158 @*/
159 PetscErrorCode  PetscSubcommCreate(MPI_Comm comm,PetscSubcomm *psubcomm)
160 {
161   PetscErrorCode ierr;
162 
163   PetscFunctionBegin;
164   ierr = PetscNew(struct _n_PetscSubcomm,psubcomm);CHKERRQ(ierr);
165 
166   (*psubcomm)->parent = comm;
167   PetscFunctionReturn(0);
168 }
169 
#undef __FUNCT__
#define __FUNCT__ "PetscSubcommCreate_contiguous"
/*
   PetscSubcommCreate_contiguous - Splits psubcomm->parent into psubcomm->n
   subcommunicators of (nearly) equal size, each owning a contiguous range of
   parent ranks.  Because ranks stay in parent order, duprank equals the parent
   rank and dupcomm is just an ordered duplicate of the parent layout.
*/
PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm psubcomm)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,*subsize,duprank=-1,subrank=-1;
  PetscInt       np_subcomm,nleftover,i,color=-1,rankstart,nsubcomm=psubcomm->n;
  MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  /* get size of each subcommunicator: size/nsubcomm processes each, with the
     first (size mod nsubcomm) subcommunicators carrying one extra process */
  ierr = PetscMalloc((1+nsubcomm)*sizeof(PetscMPIInt),&subsize);CHKERRQ(ierr);

  np_subcomm = size/nsubcomm;
  nleftover  = size - nsubcomm*np_subcomm;
  for (i=0; i<nsubcomm; i++) {
    subsize[i] = np_subcomm;
    if (i<nleftover) subsize[i]++;
  }

  /* get color and subrank of this proc by walking the contiguous rank ranges;
     every rank falls in exactly one range, so the -1 initializers above are
     overwritten before use */
  rankstart = 0;
  for (i=0; i<nsubcomm; i++) {
    if (rank >= rankstart && rank < rankstart+subsize[i]) {
      color   = i;
      subrank = rank - rankstart;
      duprank = rank; /* contiguous layout: the concatenation of subcomms IS parent order */
      break;
    } else rankstart += subsize[i];
  }
  ierr = PetscFree(subsize);CHKERRQ(ierr);

  ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);

  /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
  ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
  {
    /* attach the parent's thread communicator to the raw split comms and bump its
       reference count once per attachment.  NOTE(review): assumes the keyval's
       delete callback performs the matching decrement when these comms go away
       -- verify against Petsc_ThreadComm_keyval's registration */
    PetscThreadComm tcomm;
    ierr = PetscCommGetThreadComm(comm,&tcomm);CHKERRQ(ierr);
    ierr = MPI_Attr_put(dupcomm,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
    tcomm->refct++;
    ierr = MPI_Attr_put(subcomm,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
    tcomm->refct++;
  }
  ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
  ierr = PetscCommDuplicate(subcomm,&psubcomm->comm,NULL);CHKERRQ(ierr);
  /* PETSc now holds its own duplicates, so the raw split comms can be freed */
  ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);

  psubcomm->color = color;
  PetscFunctionReturn(0);
}
225 
#undef __FUNCT__
#define __FUNCT__ "PetscSubcommCreate_interlaced"
/*
   PetscSubcommCreate_interlaced - Splits psubcomm->parent into psubcomm->n
   subcommunicators by dealing parent ranks out round-robin (rank mod n).

   Note:
   In PCREDUNDANT, to avoid data scattering from subcomm back to original comm, we create subcommunicators
   by iteratively taking a process into a subcommunicator.
   Example: size=4, nsubcomm=(*psubcomm)->n=3
     comm=(*psubcomm)->parent:
      rank:     [0]  [1]  [2]  [3]
      color:     0    1    2    0

     subcomm=(*psubcomm)->comm:
      subrank:  [0]  [0]  [0]  [1]

     dupcomm=(*psubcomm)->dupparent:
      duprank:  [0]  [2]  [3]  [1]

     Here, subcomm[color = 0] has subsize=2, owns process [0] and [3]
           subcomm[color = 1] has subsize=1, owns process [1]
           subcomm[color = 2] has subsize=1, owns process [2]
           dupcomm has same number of processes as comm, and its duprank maps
           processes in subcomm contiguously into a 1d array:
            duprank: [0] [1]      [2]         [3]
            rank:    [0] [3]      [1]         [2]
                    subcomm[0] subcomm[1]  subcomm[2]
*/

PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm psubcomm)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,*subsize,duprank,subrank;
  PetscInt       np_subcomm,nleftover,i,j,color,nsubcomm=psubcomm->n;
  MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  /* get size of each subcommunicator: size/nsubcomm each; the first
     (size mod nsubcomm) subcommunicators get one leftover process apiece */
  ierr = PetscMalloc((1+nsubcomm)*sizeof(PetscMPIInt),&subsize);CHKERRQ(ierr);

  np_subcomm = size/nsubcomm;
  nleftover  = size - nsubcomm*np_subcomm;
  for (i=0; i<nsubcomm; i++) {
    subsize[i] = np_subcomm;
    if (i<nleftover) subsize[i]++;
  }

  /* find color for this proc: round-robin assignment, so consecutive parent
     ranks land in different subcommunicators (the interlacing) */
  color   = rank%nsubcomm;
  subrank = rank/nsubcomm;

  ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);

  /* duprank = (total size of all subcomms with smaller color) + subrank, which
     lays the subcomms out contiguously in dupcomm (see the example above) */
  j = 0; duprank = 0;
  for (i=0; i<nsubcomm; i++) {
    if (j == color) {
      duprank += subrank;
      break;
    }
    duprank += subsize[i]; j++;
  }
  ierr = PetscFree(subsize);CHKERRQ(ierr);

  /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
  ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
  {
    /* attach the parent's thread communicator to the raw split comms and bump its
       reference count once per attachment.  NOTE(review): assumes the keyval's
       delete callback performs the matching decrement -- verify against
       Petsc_ThreadComm_keyval's registration */
    PetscThreadComm tcomm;
    ierr = PetscCommGetThreadComm(comm,&tcomm);CHKERRQ(ierr);
    ierr = MPI_Attr_put(dupcomm,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
    tcomm->refct++;
    ierr = MPI_Attr_put(subcomm,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
    tcomm->refct++;
  }
  ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
  ierr = PetscCommDuplicate(subcomm,&psubcomm->comm,NULL);CHKERRQ(ierr);
  /* PETSc now holds its own duplicates, so the raw split comms can be freed */
  ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);

  psubcomm->color = color;
  PetscFunctionReturn(0);
}
308 
309 
310