xref: /petsc/src/sys/objects/subcomm.c (revision cd05a4c019503193975def50f0ad7dec844973eb)
1 #define PETSC_DLL
2 /*
3      Provides utility routines for split MPI communicator.
4 */
5 #include "petsc.h"  /*I   "petsc.h"    I*/
6 #include "petscsys.h"
7 
8 #undef __FUNCT__
9 #define __FUNCT__ "PetscSubcommDestroy"
10 PetscErrorCode PETSCMAT_DLLEXPORT PetscSubcommDestroy(PetscSubcomm *psubcomm)
11 {
12   PetscErrorCode ierr;
13 
14   PetscFunctionBegin;
15   ierr = PetscFree(psubcomm);CHKERRQ(ierr);
16   PetscFunctionReturn(0);
17 }
18 
19 #undef __FUNCT__
20 #define __FUNCT__ "PetscSubcommCreate"
21 /*@
22   PetscSubcommCreate - Create a PetscSubcomm context.
23 
24    Collective on MPI_Comm
25 
26    Input Parameter:
27 +  comm - MPI communicator
28 -  nsubcomm - the number of subcommunicators to be created
29 
30    Output Parameter:
31 .  psubcomm - location to store the PetscSubcomm context
32 
33    Options Database Keys:
34 
35    Notes:
36    To avoid data scattering from subcomm back to original comm, we create subcommunicators
   by iteratively taking one process at a time into a subcommunicator.
38    Example: size=4, nsubcomm=(*psubcomm)->n=3
39      comm=(*psubcomm)->parent:
40       rank:     [0]  [1]  [2]  [3]
41       color:     0    1    2    0
42 
43      subcomm=(*psubcomm)->comm:
44       subrank:  [0]  [0]  [0]  [1]
45 
46      dupcomm=(*psubcomm)->dupparent:
47       duprank:  [0]  [2]  [3]  [1]
48 
49      Here, subcomm[color = 0] has subsize=2, owns process [0] and [3]
50            subcomm[color = 1] has subsize=1, owns process [1]
51            subcomm[color = 2] has subsize=1, owns process [2]
52            dupcomm has same number of processes as comm, and its duprank maps
53            processes in subcomm contiguously into a 1d array:
54             duprank: [0] [1]      [2]         [3]
55             rank:    [0] [3]      [1]         [2]
56                     subcomm[0] subcomm[1]  subcomm[2]
57 
58    Level: advanced
59 
60 .keywords: communicator, create
61 
62 .seealso: PetscSubcommDestroy()
63 @*/
64 PetscErrorCode PETSCMAT_DLLEXPORT PetscSubcommCreate(MPI_Comm comm,PetscInt nsubcomm,PetscSubcomm **psubcomm)
65 {
66   PetscErrorCode ierr;
67   PetscMPIInt    rank,size,*subsize,duprank,subrank;
68   PetscInt       np_subcomm,nleftover,i,j,color;
69   MPI_Comm       subcomm=0,dupcomm=0;
70   const char     *prefix;
71   PetscSubcomm   *psubcomm_tmp;
72 
73   PetscFunctionBegin;
74   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
75   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
76   if (nsubcomm < 1 || nsubcomm > size) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE, "Num of subcommunicators %D cannot be < 1 or > input comm size %D",nsubcomm,size);
77 
78   /* get size of each subcommunicator */
79   ierr = PetscMalloc((1+nsubcomm)*sizeof(PetscMPIInt),&subsize);CHKERRQ(ierr);
80   np_subcomm = size/nsubcomm;
81   nleftover  = size - nsubcomm*np_subcomm;
82   for (i=0; i<nsubcomm; i++){
83     subsize[i] = np_subcomm;
84     if (i<nleftover) subsize[i]++;
85   }
86 
87   /* find color for this proc */
88   color   = rank%nsubcomm;
89   subrank = rank/nsubcomm;
90 
91   ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);
92 
93   j = 0; duprank = 0;
94   for (i=0; i<nsubcomm; i++){
95     if (j == color){
96       duprank += subrank;
97       break;
98     }
99     duprank += subsize[i]; j++;
100   }
101 
102   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
103   ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
104   ierr = PetscFree(subsize);CHKERRQ(ierr);
105 
106   ierr = PetscNew(PetscSubcomm,&psubcomm_tmp);CHKERRQ(ierr);
107   psubcomm_tmp->parent    = comm;
108   psubcomm_tmp->dupparent = dupcomm;
109   psubcomm_tmp->comm      = subcomm;
110   psubcomm_tmp->n         = nsubcomm;
111   psubcomm_tmp->color     = color;
112   *psubcomm = psubcomm_tmp;
113   PetscFunctionReturn(0);
114 }
115