xref: /petsc/src/sys/objects/subcomm.c (revision f98cea2c07897992b5b1217b65e95b08e1a8f673)
1 #define PETSC_DLL
2 /*
     Provides utility routines for splitting an MPI communicator into subcommunicators.
4 */
5 #include "petsc.h"    /*I   "petsc.h"    I*/
6 #include "petscsys.h" /*I   "petscsys.h"    I*/
7 
8 #undef __FUNCT__
9 #define __FUNCT__ "PetscSubcommDestroy"
10 PetscErrorCode PETSCMAT_DLLEXPORT PetscSubcommDestroy(PetscSubcomm psubcomm)
11 {
12   PetscErrorCode ierr;
13 
14   PetscFunctionBegin;
15   ierr = PetscFree(psubcomm);CHKERRQ(ierr);
16   PetscFunctionReturn(0);
17 }
18 
19 #undef __FUNCT__
20 #define __FUNCT__ "PetscSubcommCreate"
21 /*@C
22   PetscSubcommCreate - Create a PetscSubcomm context.
23 
24    Collective on MPI_Comm
25 
26    Input Parameter:
27 +  comm - MPI communicator
28 -  nsubcomm - the number of subcommunicators to be created
29 
30    Output Parameter:
31 .  psubcomm - location to store the PetscSubcomm context
32 
33 
34    Notes:
35    To avoid data scattering from subcomm back to original comm, we create subcommunicators
36    by iteratively taking a process into a subcommunicator.
37    Example: size=4, nsubcomm=(*psubcomm)->n=3
38      comm=(*psubcomm)->parent:
39       rank:     [0]  [1]  [2]  [3]
40       color:     0    1    2    0
41 
42      subcomm=(*psubcomm)->comm:
43       subrank:  [0]  [0]  [0]  [1]
44 
45      dupcomm=(*psubcomm)->dupparent:
46       duprank:  [0]  [2]  [3]  [1]
47 
48      Here, subcomm[color = 0] has subsize=2, owns process [0] and [3]
49            subcomm[color = 1] has subsize=1, owns process [1]
50            subcomm[color = 2] has subsize=1, owns process [2]
51            dupcomm has same number of processes as comm, and its duprank maps
52            processes in subcomm contiguously into a 1d array:
53             duprank: [0] [1]      [2]         [3]
54             rank:    [0] [3]      [1]         [2]
55                     subcomm[0] subcomm[1]  subcomm[2]
56 
57    Level: advanced
58 
59 .keywords: communicator, create
60 
61 .seealso: PetscSubcommDestroy()
62 @*/
63 PetscErrorCode PETSCMAT_DLLEXPORT PetscSubcommCreate(MPI_Comm comm,PetscInt nsubcomm,PetscSubcomm *psubcomm)
64 {
65   PetscErrorCode ierr;
66   PetscMPIInt    rank,size,*subsize,duprank,subrank;
67   PetscInt       np_subcomm,nleftover,i,j,color;
68   MPI_Comm       subcomm=0,dupcomm=0;
69   PetscSubcomm   psubcomm_tmp;
70 
71   PetscFunctionBegin;
72   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
73   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
74   if (nsubcomm < 1 || nsubcomm > size) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE, "Num of subcommunicators %D cannot be < 1 or > input comm size %D",nsubcomm,size);
75 
76   /* get size of each subcommunicator */
77   ierr = PetscMalloc((1+nsubcomm)*sizeof(PetscMPIInt),&subsize);CHKERRQ(ierr);
78   np_subcomm = size/nsubcomm;
79   nleftover  = size - nsubcomm*np_subcomm;
80   for (i=0; i<nsubcomm; i++){
81     subsize[i] = np_subcomm;
82     if (i<nleftover) subsize[i]++;
83   }
84 
85   /* find color for this proc */
86   color   = rank%nsubcomm;
87   subrank = rank/nsubcomm;
88 
89   ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);
90 
91   j = 0; duprank = 0;
92   for (i=0; i<nsubcomm; i++){
93     if (j == color){
94       duprank += subrank;
95       break;
96     }
97     duprank += subsize[i]; j++;
98   }
99   ierr = PetscFree(subsize);CHKERRQ(ierr);
100 
101   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
102   ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
103 
104   ierr = PetscNew(struct _n_PetscSubcomm,&psubcomm_tmp);CHKERRQ(ierr);
105   psubcomm_tmp->parent    = comm;
106   psubcomm_tmp->dupparent = dupcomm;
107   psubcomm_tmp->comm      = subcomm;
108   psubcomm_tmp->n         = nsubcomm;
109   psubcomm_tmp->color     = color;
110   *psubcomm = psubcomm_tmp;
111   PetscFunctionReturn(0);
112 }
113