xref: /petsc/src/sys/objects/subcomm.c (revision c0174eb74783e1ccfa817272a5782dd72b4cfcef)
1 
2 /*
3      Provides utility routines for split MPI communicator.
4 */
5 #include <petscsys.h>    /*I   "petscsys.h"    I*/
6 #include <petsc-private/threadcommimpl.h> /* Petsc_ThreadComm_keyval */
7 
8 extern PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm);
9 extern PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm);
10 
11 #undef __FUNCT__
12 #define __FUNCT__ "PetscSubcommSetNumber"
13 /*@C
14   PetscSubcommSetNumber - Set total number of subcommunicators.
15 
16    Collective on MPI_Comm
17 
18    Input Parameter:
19 +  psubcomm - PetscSubcomm context
20 -  nsubcomm - the total number of subcommunicators in psubcomm
21 
22    Level: advanced
23 
24 .keywords: communicator
25 
26 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetType(),PetscSubcommSetTypeGeneral()
27 @*/
28 PetscErrorCode  PetscSubcommSetNumber(PetscSubcomm psubcomm,PetscInt nsubcomm)
29 {
30   PetscErrorCode ierr;
31   MPI_Comm       comm=psubcomm->parent;
32   PetscMPIInt    rank,size;
33 
34   PetscFunctionBegin;
35   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate() first");
36   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
37   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
38   if (nsubcomm < 1 || nsubcomm > size) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE, "Num of subcommunicators %D cannot be < 1 or > input comm size %D",nsubcomm,size);
39 
40   psubcomm->n = nsubcomm;
41   PetscFunctionReturn(0);
42 }
43 
44 #undef __FUNCT__
45 #define __FUNCT__ "PetscSubcommSetType"
46 /*@C
47   PetscSubcommSetType - Set type of subcommunicators.
48 
49    Collective on MPI_Comm
50 
51    Input Parameter:
52 +  psubcomm - PetscSubcomm context
53 -  subcommtype - subcommunicator type, PETSC_SUBCOMM_CONTIGUOUS,PETSC_SUBCOMM_INTERLACED
54 
55    Level: advanced
56 
57 .keywords: communicator
58 
59 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetTypeGeneral()
60 @*/
61 PetscErrorCode  PetscSubcommSetType(PetscSubcomm psubcomm,PetscSubcommType subcommtype)
62 {
63   PetscErrorCode ierr;
64 
65   PetscFunctionBegin;
66   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
67   if (psubcomm->n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %D is incorrect. Call PetscSubcommSetNumber()",psubcomm->n);
68 
69   if (subcommtype == PETSC_SUBCOMM_CONTIGUOUS){
70     ierr = PetscSubcommCreate_contiguous(psubcomm);CHKERRQ(ierr);
71   } else if (subcommtype == PETSC_SUBCOMM_INTERLACED){
72     ierr = PetscSubcommCreate_interlaced(psubcomm);CHKERRQ(ierr);
73   } else SETERRQ1(psubcomm->parent,PETSC_ERR_SUP,"PetscSubcommType %D is not supported yet",subcommtype);
74   PetscFunctionReturn(0);
75 }
76 
77 #undef __FUNCT__
78 #define __FUNCT__ "PetscSubcommSetTypeGeneral"
79 /*@C
80   PetscSubcommSetTypeGeneral - Set type of subcommunicators from user's specifications
81 
82    Collective on MPI_Comm
83 
84    Input Parameter:
85 +  psubcomm - PetscSubcomm context
86 .  color   - control of subset assignment (nonnegative integer). Processes with the same color are in the same subcommunicator.
87 .  subrank - rank in the subcommunicator
88 -  duprank - rank in the dupparent (see PetscSubcomm)
89 
90    Level: advanced
91 
92 .keywords: communicator, create
93 
94 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetType()
95 @*/
96 PetscErrorCode  PetscSubcommSetTypeGeneral(PetscSubcomm psubcomm,PetscMPIInt color,PetscMPIInt subrank,PetscMPIInt duprank)
97 {
98   PetscErrorCode ierr;
99   MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;
100   PetscMPIInt    size;
101 
102   PetscFunctionBegin;
103   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
104   if (psubcomm->n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %D is incorrect. Call PetscSubcommSetNumber()",psubcomm->n);
105 
106   ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);
107 
108   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm
109      if duprank is not a valid number, then dupcomm is not created - not all applications require dupcomm! */
110   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
111   if (duprank == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"duprank==PETSC_DECIDE is not supported yet");
112   else if (duprank >= 0 && duprank < size){
113     ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
114   }
115   ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,PETSC_NULL);CHKERRQ(ierr);
116   ierr = PetscCommDuplicate(subcomm,&psubcomm->comm,PETSC_NULL);CHKERRQ(ierr);
117   ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
118   ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);
119   psubcomm->color     = color;
120   PetscFunctionReturn(0);
121 }
122 
123 #undef __FUNCT__
124 #define __FUNCT__ "PetscSubcommDestroy"
125 PetscErrorCode  PetscSubcommDestroy(PetscSubcomm *psubcomm)
126 {
127   PetscErrorCode ierr;
128 
129   PetscFunctionBegin;
130   if (!*psubcomm) PetscFunctionReturn(0);
131   ierr = PetscCommDestroy(&(*psubcomm)->dupparent);CHKERRQ(ierr);
132   ierr = PetscCommDestroy(&(*psubcomm)->comm);CHKERRQ(ierr);
133   ierr = PetscFree((*psubcomm));CHKERRQ(ierr);
134   PetscFunctionReturn(0);
135 }
136 
137 #undef __FUNCT__
138 #define __FUNCT__ "PetscSubcommCreate"
139 /*@C
140   PetscSubcommCreate - Create a PetscSubcomm context.
141 
142    Collective on MPI_Comm
143 
144    Input Parameter:
145 .  comm - MPI communicator
146 
147    Output Parameter:
148 .  psubcomm - location to store the PetscSubcomm context
149 
150    Level: advanced
151 
152 .keywords: communicator, create
153 
154 .seealso: PetscSubcommDestroy()
155 @*/
156 PetscErrorCode  PetscSubcommCreate(MPI_Comm comm,PetscSubcomm *psubcomm)
157 {
158   PetscErrorCode ierr;
159 
160   PetscFunctionBegin;
161   ierr = PetscNew(struct _n_PetscSubcomm,psubcomm);CHKERRQ(ierr);
162   (*psubcomm)->parent = comm;
163   PetscFunctionReturn(0);
164 }
165 
#undef __FUNCT__
#define __FUNCT__ "PetscSubcommCreate_contiguous"
/*
   PetscSubcommCreate_contiguous - split psubcomm->parent into psubcomm->n
   subcommunicators, each owning a contiguous range of parent ranks.
   Because the ranges are contiguous, duprank == rank and dupcomm orders
   processes the same way as the parent.

   Collective on psubcomm->parent (calls MPI_Comm_split twice).
*/
PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm psubcomm)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,*subsize,duprank=-1,subrank=-1;
  PetscInt       np_subcomm,nleftover,i,color=-1,rankstart,nsubcomm=psubcomm->n;
  MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  /* get size of each subcommunicator: size/nsubcomm each, with the first
     nleftover subcomms absorbing one extra process */
  ierr = PetscMalloc((1+nsubcomm)*sizeof(PetscMPIInt),&subsize);CHKERRQ(ierr);
  np_subcomm = size/nsubcomm;
  nleftover  = size - nsubcomm*np_subcomm;
  for (i=0; i<nsubcomm; i++){
    subsize[i] = np_subcomm;
    if (i<nleftover) subsize[i]++;
  }

  /* get color and subrank of this proc: walk the contiguous ranges until the
     one containing this rank is found; subsizes sum to size so the loop
     always matches (color/subrank/duprank never stay -1) */
  rankstart = 0;
  for (i=0; i<nsubcomm; i++){
    if ( rank >= rankstart && rank < rankstart+subsize[i]) {
      color   = i;
      subrank = rank - rankstart;
      duprank = rank;        /* contiguous layout: dup ordering equals parent ordering */
      break;
    } else {
      rankstart += subsize[i];
    }
  }
  ierr = PetscFree(subsize);CHKERRQ(ierr);

  ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);

  /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
  ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);

  /* take PETSc-managed references, then release the raw split communicators */
  ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscCommDuplicate(subcomm,&psubcomm->comm,PETSC_NULL);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);
  psubcomm->color     = color;

#if defined(PETSC_THREADCOMM_ACTIVE)
  /* propagate the parent's thread communicator to both new communicators,
     bumping its reference count once per attached communicator */
  {
    PetscThreadComm tcomm;
    ierr = PetscCommGetThreadComm(comm,&tcomm);CHKERRQ(ierr);
    ierr = MPI_Attr_put(psubcomm->dupparent,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
    tcomm->refct++;
    ierr = MPI_Attr_put(psubcomm->comm,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
    tcomm->refct++;
  }
#endif
  PetscFunctionReturn(0);
}
225 
#undef __FUNCT__
#define __FUNCT__ "PetscSubcommCreate_interlaced"
/*
   PetscSubcommCreate_interlaced - split psubcomm->parent into psubcomm->n
   subcommunicators by dealing parent ranks out round-robin (rank%nsubcomm).

   Collective on psubcomm->parent (calls MPI_Comm_split twice).

   Note:
   In PCREDUNDANT, to avoid data scattering from subcomm back to original comm, we create subcommunicators
   by iteratively taking a process into a subcommunicator.
   Example: size=4, nsubcomm=(*psubcomm)->n=3
     comm=(*psubcomm)->parent:
      rank:     [0]  [1]  [2]  [3]
      color:     0    1    2    0

     subcomm=(*psubcomm)->comm:
      subrank:  [0]  [0]  [0]  [1]

     dupcomm=(*psubcomm)->dupparent:
      duprank:  [0]  [2]  [3]  [1]

     Here, subcomm[color = 0] has subsize=2, owns process [0] and [3]
           subcomm[color = 1] has subsize=1, owns process [1]
           subcomm[color = 2] has subsize=1, owns process [2]
           dupcomm has same number of processes as comm, and its duprank maps
           processes in subcomm contiguously into a 1d array:
            duprank: [0] [1]      [2]         [3]
            rank:    [0] [3]      [1]         [2]
                    subcomm[0] subcomm[1]  subcomm[2]
*/

PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm psubcomm)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,*subsize,duprank,subrank;
  PetscInt       np_subcomm,nleftover,i,j,color,nsubcomm=psubcomm->n;
  MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  /* get size of each subcommunicator: size/nsubcomm each, with the first
     nleftover subcomms absorbing one extra process */
  ierr = PetscMalloc((1+nsubcomm)*sizeof(PetscMPIInt),&subsize);CHKERRQ(ierr);
  np_subcomm = size/nsubcomm;
  nleftover  = size - nsubcomm*np_subcomm;
  for (i=0; i<nsubcomm; i++){
    subsize[i] = np_subcomm;
    if (i<nleftover) subsize[i]++;
  }

  /* find color for this proc: round-robin assignment */
  color   = rank%nsubcomm;
  subrank = rank/nsubcomm;

  ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);

  /* duprank = subrank + sum of the sizes of all subcomms with smaller color,
     so each subcomm occupies a contiguous range of dupcomm ranks */
  j = 0; duprank = 0;
  for (i=0; i<nsubcomm; i++){
    if (j == color){
      duprank += subrank;
      break;
    }
    duprank += subsize[i]; j++;
  }
  ierr = PetscFree(subsize);CHKERRQ(ierr);

  /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
  ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);

  /* take PETSc-managed references, then release the raw split communicators */
  ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscCommDuplicate(subcomm,&psubcomm->comm,PETSC_NULL);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);
  psubcomm->color     = color;

#if defined(PETSC_THREADCOMM_ACTIVE)
  /* propagate the parent's thread communicator to both new communicators,
     bumping its reference count once per attached communicator */
  {
    PetscThreadComm tcomm;
    ierr = PetscCommGetThreadComm(comm,&tcomm);CHKERRQ(ierr);
    ierr = MPI_Attr_put(psubcomm->dupparent,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
    tcomm->refct++;
    ierr = MPI_Attr_put(psubcomm->comm,Petsc_ThreadComm_keyval,tcomm);CHKERRQ(ierr);
    tcomm->refct++;
  }
#endif
  PetscFunctionReturn(0);
}
310 
311 
312