xref: /petsc/src/mat/utils/overlapsplit.c (revision c094ef4021e955ef5f85f7d8a1bbc6ed64ba7621)
/*
 * overlapsplit.c: increase the overlap of a 'big' subdomain across several processor cores
 *
 * Author: Fande Kong <fdkong.jd@gmail.com>
 */

#include <petscsf.h>
#include <petsc/private/matimpl.h>

#undef __FUNCT__
#define __FUNCT__ "MatIncreaseOverlapSplit_Single"

/*
 * Increase the overlap of a sub-matrix across a sub-communicator;
 * the sub-matrix could be a graph or a numerical matrix.
 * */
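/*
 * A minimal usage sketch (illustrative only, not taken from the PETSc sources): 'mat' lives
 * on a global communicator (say PETSC_COMM_WORLD), while the index set describing one 'big'
 * subdomain lives on a sub-communicator shared by the ranks that own that subdomain.
 *
 *   MPI_Comm scomm;
 *   IS       is;
 *   MPI_Comm_split(PETSC_COMM_WORLD,color,0,&scomm); // 'color' groups the ranks of one subdomain
 *   ISCreateGeneral(scomm,nlocal,rows,PETSC_COPY_VALUES,&is);
 *   MatIncreaseOverlapSplit_Single(mat,&is,1);       // 'is' is replaced by the overlapped, repartitioned rows
 *   ISDestroy(&is);
 *   MPI_Comm_free(&scomm);
 *
 * Here 'color', 'nlocal', and 'rows' are assumed to be provided by the caller.
 */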
PetscErrorCode  MatIncreaseOverlapSplit_Single(Mat mat,IS *is,PetscInt ov)
{
  PetscInt         i,nindx,*indices_sc,*indices_ov,localsize,*localsizes_sc,localsize_tmp;
  PetscInt         *indices_ov_rd,nroots,nleaves,*localoffsets,*indices_recv,*sources_sc,*sources_sc_rd;
  const PetscInt   *indices;
  PetscMPIInt      srank,ssize,issamecomm,k,grank;
  IS               is_sc,allis_sc,partitioning;
  MPI_Comm         gcomm,dcomm,scomm;
  PetscSF          sf;
  PetscSFNode      *remote;
  Mat              *smat;
  MatPartitioning  part;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  /* get the sub-communicator before calling the individual MatIncreaseOverlap,
   * since the sub-communicator may be changed
   * */
  ierr = PetscObjectGetComm((PetscObject)(*is),&dcomm);CHKERRQ(ierr);
  /* make a copy before the original one is deleted */
  ierr = PetscCommDuplicate(dcomm,&scomm,NULL);CHKERRQ(ierr);
  /* get the global communicator; mat is expected to be a global (parallel) matrix */
  ierr = PetscObjectGetComm((PetscObject)mat,&gcomm);CHKERRQ(ierr);
  ierr = (*mat->ops->increaseoverlap)(mat,1,is,ov);CHKERRQ(ierr);
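  /* the call above increases the overlap of the index set by ov using the global matrix;
   * the rest of this routine only redistributes the resulting 'big' subdomain across the
   * ranks of the sub-communicator */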
  ierr = MPI_Comm_compare(gcomm,scomm,&issamecomm);CHKERRQ(ierr);
  /* if the sub-communicator is the same as the global communicator,
   * the user does not want to use a sub-communicator
   * */
  if (issamecomm == MPI_IDENT || issamecomm == MPI_CONGRUENT) {
    ierr = PetscCommDestroy(&scomm);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  /* if the sub-communicator is PETSC_COMM_SELF,
   * the user also does not care about the sub-communicator
   * */
  ierr = MPI_Comm_compare(scomm,PETSC_COMM_SELF,&issamecomm);CHKERRQ(ierr);
  if (issamecomm == MPI_IDENT || issamecomm == MPI_CONGRUENT) {
    ierr = PetscCommDestroy(&scomm);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  ierr = MPI_Comm_rank(scomm,&srank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(scomm,&ssize);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(gcomm,&grank);CHKERRQ(ierr);
  /* create a new IS based on the sub-communicator,
   * since the old IS is often based on PETSC_COMM_SELF
   * */
  ierr = ISGetLocalSize(*is,&nindx);CHKERRQ(ierr);
  ierr = PetscCalloc1(nindx,&indices_sc);CHKERRQ(ierr);
  ierr = ISGetIndices(*is,&indices);CHKERRQ(ierr);
  ierr = PetscMemcpy(indices_sc,indices,sizeof(PetscInt)*nindx);CHKERRQ(ierr);
  ierr = ISRestoreIndices(*is,&indices);CHKERRQ(ierr);
  /* the old IS is not needed any more */
  ierr = ISDestroy(is);CHKERRQ(ierr);
  /* create an index set based on the sub-communicator */
  ierr = ISCreateGeneral(scomm,nindx,indices_sc,PETSC_OWN_POINTER,&is_sc);CHKERRQ(ierr);
  /* gather all indices within the sub-communicator */
  ierr = ISAllGather(is_sc,&allis_sc);CHKERRQ(ierr);
  ierr = ISDestroy(&is_sc);CHKERRQ(ierr);
  /* gather local sizes */
  ierr = PetscMalloc1(ssize,&localsizes_sc);CHKERRQ(ierr);
  /* get the individual local sizes of all index sets */
  ierr = MPI_Gather(&nindx,1,MPIU_INT,localsizes_sc,1,MPIU_INT,0,scomm);CHKERRQ(ierr);
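  /* after the gather only the root's localsizes_sc is meaningful: entry k holds the
   * (possibly duplicated) number of indices contributed by rank k of the sub-communicator */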
  /* only the root performs the following computations */
  if (!srank) {
    /* get the local size of the big index set */
    ierr = ISGetLocalSize(allis_sc,&localsize);CHKERRQ(ierr);
    ierr = PetscCalloc2(localsize,&indices_ov,localsize,&sources_sc);CHKERRQ(ierr);
    ierr = PetscCalloc2(localsize,&indices_ov_rd,localsize,&sources_sc_rd);CHKERRQ(ierr);
    ierr = ISGetIndices(allis_sc,&indices);CHKERRQ(ierr);
    ierr = PetscMemcpy(indices_ov,indices,sizeof(PetscInt)*localsize);CHKERRQ(ierr);
    ierr = ISRestoreIndices(allis_sc,&indices);CHKERRQ(ierr);
    ierr = ISDestroy(&allis_sc);CHKERRQ(ierr);
    /* assign the corresponding source rank to each gathered index */
    localsize_tmp = 0;
    for (k=0; k<ssize; k++) {
      for (i=0; i<localsizes_sc[k]; i++) {
        sources_sc[localsize_tmp++] = k;
      }
    }
    /* sort the indices, carrying along the rank each index came from */
    ierr = PetscSortIntWithArray(localsize,indices_ov,sources_sc);CHKERRQ(ierr);
    /* count the local sizes of the reduced (deduplicated) indices */
    ierr = PetscMemzero(localsizes_sc,sizeof(PetscInt)*ssize);CHKERRQ(ierr);
    /* initialize the first entry */
    if (localsize) {
      indices_ov_rd[0] = indices_ov[0];
      sources_sc_rd[0] = sources_sc[0];
      localsizes_sc[sources_sc[0]]++;
    }
    localsize_tmp = 1;
    /* remove duplicate integers */
    for (i=1; i<localsize; i++) {
      if (indices_ov[i] != indices_ov[i-1]) {
        indices_ov_rd[localsize_tmp]   = indices_ov[i];
        sources_sc_rd[localsize_tmp++] = sources_sc[i];
        localsizes_sc[sources_sc[i]]++;
      }
    }
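    /* at this point indices_ov_rd holds the sorted, unique indices of the whole subdomain,
     * sources_sc_rd[i] records the rank index i is attributed to (its first contributor in
     * sorted order), and localsizes_sc[k] counts how many unique indices go back to rank k */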
    ierr = PetscFree2(indices_ov,sources_sc);CHKERRQ(ierr);
    ierr = PetscCalloc1(ssize+1,&localoffsets);CHKERRQ(ierr);
    for (k=0; k<ssize; k++) {
      localoffsets[k+1] = localoffsets[k] + localsizes_sc[k];
    }
    nleaves = localoffsets[ssize];
    ierr = PetscMemzero(localoffsets,(ssize+1)*sizeof(PetscInt));CHKERRQ(ierr);
    nroots  = localsizes_sc[srank];
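    /* build the leaf side of the star forest: all leaves live on the root of the
     * sub-communicator, and leaf i points at (owner rank, slot), where the owner is the
     * rank index i is attributed to; localoffsets is reused as a running cursor into
     * each owner's slots */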
    ierr = PetscCalloc1(nleaves,&remote);CHKERRQ(ierr);
    for (i=0; i<nleaves; i++) {
      remote[i].rank  = sources_sc_rd[i];
      remote[i].index = localoffsets[sources_sc_rd[i]]++;
    }
    ierr = PetscFree(localoffsets);CHKERRQ(ierr);
  } else {
    ierr = ISDestroy(&allis_sc);CHKERRQ(ierr);
    /* allocate a zero-length array so that the variables are not used uninitialized */
    ierr = PetscCalloc1(0,&remote);CHKERRQ(ierr);
    nleaves       = 0;
    indices_ov_rd = NULL;
    sources_sc_rd = NULL;
  }
  /* scatter the local sizes to everybody so that each rank knows its number of roots */
  ierr = MPI_Scatter(localsizes_sc,1,MPIU_INT,&nroots,1,MPIU_INT,0,scomm);CHKERRQ(ierr);
  ierr = PetscFree(localsizes_sc);CHKERRQ(ierr);
  ierr = PetscCalloc1(nroots,&indices_recv);CHKERRQ(ierr);
  /* send the data back to everybody */
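  /* the star forest pushes the deduplicated indices from the root's leaf array into
   * indices_recv on the rank each index is attributed to; MPIU_REPLACE suffices because
   * exactly one leaf targets each root slot */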
  ierr = PetscSFCreate(scomm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFReduceBegin(sf,MPIU_INT,indices_ov_rd,indices_recv,MPIU_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_INT,indices_ov_rd,indices_recv,MPIU_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  ierr = PetscFree2(indices_ov_rd,sources_sc_rd);CHKERRQ(ierr);
  ierr = ISCreateGeneral(scomm,nroots,indices_recv,PETSC_OWN_POINTER,&is_sc);CHKERRQ(ierr);
  ierr = MatGetSubMatricesMPI(mat,1,&is_sc,&is_sc,MAT_INITIAL_MATRIX,&smat);CHKERRQ(ierr);
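  /* smat[0] is the parallel sub-matrix on the sub-communicator whose rows/columns form the
   * overlapped subdomain; repartition it so the work is balanced across the sub-communicator */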
  ierr = ISDestroy(&allis_sc);CHKERRQ(ierr);
  /* create a partitioner to repartition the sub-matrix */
  ierr = MatPartitioningCreate(scomm,&part);CHKERRQ(ierr);
  ierr = MatPartitioningSetAdjacency(part,smat[0]);CHKERRQ(ierr);
#if defined(PETSC_HAVE_PARMETIS)
  /* if a ParMETIS installation exists, we try to use ParMETIS
   * because its repartitioning routine usually works better
   * */
  ierr = MatPartitioningSetType(part,MATPARTITIONINGPARMETIS);CHKERRQ(ierr);
  /* try to use the repartition function instead of the partition function */
  ierr = MatPartitioningParmetisSetRepartition(part);CHKERRQ(ierr);
#else
  /* we at least provide a default partitioner to rebalance the computation */
  ierr = MatPartitioningSetType(part,MATPARTITIONINGAVERAGE);CHKERRQ(ierr);
#endif
  /* the user can pick any partitioner via run-time options */
  ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
  ierr = MatPartitioningApply(part,&partitioning);CHKERRQ(ierr);
  ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);
  ierr = MatDestroy(&(smat[0]));CHKERRQ(ierr);
  ierr = PetscFree(smat);CHKERRQ(ierr);
  /* get the local rows, including the overlap */
  ierr = ISBuildTwoSided(partitioning,is_sc,is);CHKERRQ(ierr);
  ierr = ISDestroy(&is_sc);CHKERRQ(ierr);
  ierr = ISDestroy(&partitioning);CHKERRQ(ierr);
  ierr = PetscCommDestroy(&scomm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
183