xref: /petsc/src/sys/objects/subcomm.c (revision f3fe499b4cc4d64bf04aa4f5e4963dcc4eb56541)
#define PETSC_DLL
/*
     Provides utility routines for splitting an MPI communicator into subcommunicators.
*/
#include "petscsys.h"    /*I   "petscsys.h"    I*/

#undef __FUNCT__
#define __FUNCT__ "PetscSubcommDestroy"
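/*@C
   PetscSubcommDestroy - Destroys a PetscSubcomm context created with
   PetscSubcommCreate(), freeing the duplicated parent communicator and
   the subcommunicator it holds.

   Collective on MPI_Comm

   Input Parameter:
.  psubcomm - the PetscSubcomm context

   Level: advanced

.seealso: PetscSubcommCreate()
@*/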
PetscErrorCode PETSC_DLLEXPORT PetscSubcommDestroy(PetscSubcomm psubcomm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_free(&psubcomm->dupparent);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&psubcomm->comm);CHKERRQ(ierr);
  ierr = PetscFree(psubcomm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSubcommCreate"
/*@C
  PetscSubcommCreate - Create a PetscSubcomm context.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
-  nsubcomm - the number of subcommunicators to be created

   Output Parameter:
.  psubcomm - location to store the PetscSubcomm context

   Notes:
   To avoid scattering data from the subcommunicators back to the original comm,
   processes are assigned to subcommunicators cyclically: the process with rank r
   in comm joins the subcommunicator with color = r % nsubcomm.
   Example: size=4, nsubcomm=(*psubcomm)->n=3
     comm=(*psubcomm)->parent:
      rank:     [0]  [1]  [2]  [3]
      color:     0    1    2    0

     subcomm=(*psubcomm)->comm:
      subrank:  [0]  [0]  [0]  [1]

     dupcomm=(*psubcomm)->dupparent:
      duprank:  [0]  [2]  [3]  [1]

     Here, subcomm[color = 0] has subsize=2 and owns processes [0] and [3],
           subcomm[color = 1] has subsize=1 and owns process [1],
           subcomm[color = 2] has subsize=1 and owns process [2];
           dupcomm has the same number of processes as comm, and its rank, duprank,
           maps the processes of the subcommunicators contiguously into a 1d array:
            duprank: [0] [1]      [2]         [3]
            rank:    [0] [3]      [1]         [2]
                    subcomm[0] subcomm[1]  subcomm[2]
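
   Sample Usage (a minimal sketch; error codes are checked with CHKERRQ as
   elsewhere in PETSc, and the struct fields referenced are those set by
   this routine):
.vb
      PetscErrorCode ierr;
      PetscSubcomm   sub;
      ierr = PetscSubcommCreate(PETSC_COMM_WORLD,3,&sub);CHKERRQ(ierr);
      /* do work on sub->comm, the subcommunicator owning this process */
      ierr = PetscSubcommDestroy(sub);CHKERRQ(ierr);
.ve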

   Level: advanced

.keywords: communicator, create

.seealso: PetscSubcommDestroy()
@*/
PetscErrorCode PETSC_DLLEXPORT PetscSubcommCreate(MPI_Comm comm,PetscInt nsubcomm,PetscSubcomm *psubcomm)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,*subsize,duprank,subrank;
  PetscInt       np_subcomm,nleftover,i,j,color;
  MPI_Comm       subcomm=0,dupcomm=0;
  PetscSubcomm   psubcomm_tmp;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (nsubcomm < 1 || nsubcomm > size) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Num of subcommunicators %D cannot be < 1 or > input comm size %D",nsubcomm,size);

  /* get size of each subcommunicator */
  ierr = PetscMalloc((1+nsubcomm)*sizeof(PetscMPIInt),&subsize);CHKERRQ(ierr);
  np_subcomm = size/nsubcomm;
  nleftover  = size - nsubcomm*np_subcomm;
  for (i=0; i<nsubcomm; i++){
    subsize[i] = np_subcomm;
    if (i<nleftover) subsize[i]++;
  }
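  /* e.g., for the manual-page example above (size=4, nsubcomm=3):
     np_subcomm = 1, nleftover = 1, so subsize = {2,1,1} */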

  /* find color for this proc */
  color   = rank%nsubcomm;
  subrank = rank/nsubcomm;

  ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);
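  /* the cyclic assignment above places rank r in the subcommunicator of color
     r % nsubcomm; for the manual-page example (size=4, nsubcomm=3) this gives
     colors {0,1,2,0} and subranks {0,0,0,1} for ranks 0..3 */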

  /* duprank = (sum of the sizes of the subcommunicators with color smaller than
     this process's color) + subrank, so that each subcommunicator occupies a
     contiguous block of ranks in dupcomm */
  j = 0; duprank = 0;
  for (i=0; i<nsubcomm; i++){
    if (j == color){
      duprank += subrank;
      break;
    }
    duprank += subsize[i]; j++;
  }
  ierr = PetscFree(subsize);CHKERRQ(ierr);
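  /* in the manual-page example, rank 3 (color 0, subrank 1) gets duprank 0+1 = 1,
     while rank 2 (color 2, subrank 0) gets duprank subsize[0]+subsize[1] = 2+1 = 3 */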

  /* create dupcomm with the same size as comm, but whose ranks (dupranks) map the
     processes of each subcommunicator contiguously into dupcomm */
  ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);

  ierr = PetscNew(struct _n_PetscSubcomm,&psubcomm_tmp);CHKERRQ(ierr);
  psubcomm_tmp->parent    = comm;
  psubcomm_tmp->dupparent = dupcomm;
  psubcomm_tmp->comm      = subcomm;
  psubcomm_tmp->n         = nsubcomm;
  psubcomm_tmp->color     = color;
  *psubcomm = psubcomm_tmp;
  PetscFunctionReturn(0);
}