xref: /petsc/src/mat/utils/overlapsplit.c (revision bdeb5f4eacf5dccc3fa35d5507b443449c4fb7e7)
#include <petscsf.h>
#include <petsc/private/matimpl.h>

#undef __FUNCT__
#define __FUNCT__ "MatIncreaseOverlapSplit_Single"

/*
 * Increase the overlap of a sub-matrix across a sub-communicator;
 * the sub-matrix may be a graph or a numerical matrix.
 */
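/*
 * Outline of the steps below:
 *   1) call the matrix type's increaseoverlap routine to grow the local index set;
 *   2) rebuild the IS on the sub-communicator and gather all indices on its root;
 *   3) on the root, sort the gathered indices, drop duplicates, and record which
 *      rank contributed each surviving index;
 *   4) scatter the de-duplicated indices back to their contributing ranks with a PetscSF;
 *   5) extract the corresponding parallel sub-matrix, repartition it, and use
 *      ISBuildTwoSided() to return the overlapped rows owned by each rank.
 */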
PetscErrorCode  MatIncreaseOverlapSplit_Single(Mat mat,IS *is,PetscInt ov)
{
  PetscInt         i,nindx,*indices_sc,*indices_ov,localsize,*localsizes_sc,localsize_tmp;
  PetscInt         *indices_ov_rd,nroots,nleaves,*localoffsets,*indices_recv,*sources_sc,*sources_sc_rd;
  const PetscInt   *indices;
  PetscMPIInt      srank,ssize,issamecomm,k,grank;
  IS               is_sc,allis_sc,allis_sc_tmp,partitioning;
  MPI_Comm         gcomm,dcomm,scomm;
  PetscSF          sf;
  PetscSFNode      *remote;
  Mat              *smat;
  MatPartitioning  part;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  /* get the sub-communicator before calling the individual MatIncreaseOverlap(),
   * since the sub-communicator may be changed
   */
  ierr = PetscObjectGetComm((PetscObject)(*is),&dcomm);CHKERRQ(ierr);
  /* make a copy before the original one is destroyed */
  ierr = PetscCommDuplicate(dcomm,&scomm,NULL);CHKERRQ(ierr);
  /* get the global communicator; mat is expected to be a global matrix */
  ierr = PetscObjectGetComm((PetscObject)mat,&gcomm);CHKERRQ(ierr);
#if 1
  ierr = PetscPrintf(gcomm,"before mat->ops->increaseoverlap\n");CHKERRQ(ierr);
#endif
  /* increase overlap on each individual subdomain */
  ierr = (*mat->ops->increaseoverlap)(mat,1,is,ov);CHKERRQ(ierr);
#if 1
  ierr = PetscPrintf(gcomm,"after mat->ops->increaseoverlap\n");CHKERRQ(ierr);
  ierr = ISView(*is,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
#endif
  /* compare communicators */
  ierr = MPI_Comm_compare(gcomm,scomm,&issamecomm);CHKERRQ(ierr);
  /* if the sub-communicator is the same as the global communicator,
   * the user does not want to use a sub-communicator
   */
  if(issamecomm == MPI_IDENT || issamecomm == MPI_CONGRUENT){
    ierr = PetscCommDestroy(&scomm);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  /* if the sub-communicator is petsc_comm_self,
   * the user does not care about the sub-communicator either
   */
  ierr = MPI_Comm_compare(scomm,PETSC_COMM_SELF,&issamecomm);CHKERRQ(ierr);
  if(issamecomm == MPI_IDENT || issamecomm == MPI_CONGRUENT){
    ierr = PetscCommDestroy(&scomm);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  /* local rank and size in the sub-communicator */
  ierr = MPI_Comm_rank(scomm,&srank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(scomm,&ssize);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(gcomm,&grank);CHKERRQ(ierr);
  /* create a new IS based on the sub-communicator,
   * since the old IS is often based on petsc_comm_self
   */
  ierr = ISGetLocalSize(*is,&nindx);CHKERRQ(ierr);
  ierr = PetscCalloc1(nindx,&indices_sc);CHKERRQ(ierr);
  ierr = ISGetIndices(*is,&indices);CHKERRQ(ierr);
  ierr = PetscMemcpy(indices_sc,indices,sizeof(PetscInt)*nindx);CHKERRQ(ierr);
  ierr = ISRestoreIndices(*is,&indices);CHKERRQ(ierr);
  /* we do not need the old IS any more */
  ierr = ISDestroy(is);CHKERRQ(ierr);
  /* create an index set based on the sub-communicator */
  ierr = ISCreateGeneral(scomm,nindx,indices_sc,PETSC_OWN_POINTER,&is_sc);CHKERRQ(ierr);
  /* gather all indices within the sub-communicator */
  ierr = ISAllGather(is_sc,&allis_sc);CHKERRQ(ierr);
#if 1
  ierr = ISView(allis_sc,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
  ierr = MPI_Barrier(gcomm);CHKERRQ(ierr);
#endif
  ierr = ISDestroy(&is_sc);CHKERRQ(ierr);
  /* gather local sizes */
  ierr = PetscMalloc1(ssize,&localsizes_sc);CHKERRQ(ierr);
  /* get the individual local size of every index set */
  ierr = MPI_Gather(&nindx,1,MPIU_INT,localsizes_sc,1,MPIU_INT,0,scomm);CHKERRQ(ierr);
#if 1
  if(!srank){
    for(i=0; i<ssize; i++){
      ierr = PetscPrintf(PETSC_COMM_SELF," localsize[%D]: %D \n",i,localsizes_sc[i]);CHKERRQ(ierr);
    }
  }
  ierr = MPI_Barrier(gcomm);CHKERRQ(ierr);
#endif
  /* only the root does these computations */
  if(!srank){
    /* get the local size of the big index set */
    ierr = ISGetLocalSize(allis_sc,&localsize);CHKERRQ(ierr);
    ierr = PetscCalloc2(localsize,&indices_ov,localsize,&sources_sc);CHKERRQ(ierr);
    ierr = PetscCalloc2(localsize,&indices_ov_rd,localsize,&sources_sc_rd);CHKERRQ(ierr);
    ierr = ISGetIndices(allis_sc,&indices);CHKERRQ(ierr);
    ierr = PetscMemcpy(indices_ov,indices,sizeof(PetscInt)*localsize);CHKERRQ(ierr);
    ierr = ISRestoreIndices(allis_sc,&indices);CHKERRQ(ierr);
    /* we do not need it any more */
    ierr = ISDestroy(&allis_sc);CHKERRQ(ierr);
    /* assign the corresponding source rank to each gathered index */
    localsize_tmp = 0;
    for(k=0; k<ssize; k++){
      for(i=0; i<localsizes_sc[k]; i++){
        sources_sc[localsize_tmp++] = k;
      }
    }
    /* record where the indices come from */
    ierr = PetscSortIntWithArray(localsize,indices_ov,sources_sc);CHKERRQ(ierr);
#if 0
    ierr = PetscIntView(localsize,indices_ov,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
    ierr = PetscIntView(localsize,sources_sc,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
#endif
    /* count the local sizes of the reduced (de-duplicated) indices */
    ierr = PetscMemzero(localsizes_sc,sizeof(PetscInt)*ssize);CHKERRQ(ierr);
    /* initialize the first entry */
    if(localsize){
      indices_ov_rd[0] = indices_ov[0];
      sources_sc_rd[0] = sources_sc[0];
      localsizes_sc[sources_sc[0]]++;
    }
    localsize_tmp = 1;
    /* remove duplicate integers */
    for(i=1; i<localsize; i++){
      if(indices_ov[i] != indices_ov[i-1]){
        indices_ov_rd[localsize_tmp]   = indices_ov[i];
        sources_sc_rd[localsize_tmp++] = sources_sc[i];
        localsizes_sc[sources_sc[i]]++;
      }
    }
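    /* A small illustration of the reduction above (hypothetical values): with
     * ssize = 2 and gathered indices {2,5,7,5,9} contributed by ranks {0,0,0,1,1},
     * sorting yields indices {2,5,5,7,9} with sources (one possible tie order)
     * {0,0,1,0,1}; dropping duplicates leaves indices_ov_rd = {2,5,7,9},
     * sources_sc_rd = {0,0,0,1}, and per-rank counts localsizes_sc = {3,1}.
     */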
    ierr = PetscFree2(indices_ov,sources_sc);CHKERRQ(ierr);
    ierr = PetscCalloc1(ssize+1,&localoffsets);CHKERRQ(ierr);
    for(k=0; k<ssize; k++){
      localoffsets[k+1] = localoffsets[k] + localsizes_sc[k];
    }
    /* construct a star forest to send the data back */
    nleaves = localoffsets[ssize];
    ierr = PetscMemzero(localoffsets,(ssize+1)*sizeof(PetscInt));CHKERRQ(ierr);
    nroots  = localsizes_sc[srank];
    ierr = PetscCalloc1(nleaves,&remote);CHKERRQ(ierr);
    for(i=0; i<nleaves; i++){
      remote[i].rank  = sources_sc_rd[i];
      remote[i].index = localoffsets[sources_sc_rd[i]]++;
    }
    ierr = PetscFree(localoffsets);CHKERRQ(ierr);
#if 0
    if(grank==2){
      ierr = PetscIntView(localsize_tmp,indices_ov_rd,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
    }
#endif
  }else{
    /* non-root ranks contribute no leaves to the star forest */
    ierr = ISDestroy(&allis_sc);CHKERRQ(ierr);
    nleaves       = 0;
    indices_ov_rd = NULL;
    sources_sc_rd = NULL;
  }
  /* scatter the sizes to everybody */
  ierr = MPI_Scatter(localsizes_sc,1,MPIU_INT,&nroots,1,MPIU_INT,0,scomm);CHKERRQ(ierr);
  /* free memory */
  ierr = PetscFree(localsizes_sc);CHKERRQ(ierr);
  ierr = PetscCalloc1(nroots,&indices_recv);CHKERRQ(ierr);
  /*ierr = MPI_Comm_dup(scomm,&dcomm);CHKERRQ(ierr);*/
  /* send the data back to everybody */
  ierr = PetscSFCreate(scomm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,PETSC_NULL,PETSC_OWN_POINTER,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
#if 0
  ierr = PetscSFView(sf,PETSC_NULL);CHKERRQ(ierr);
  ierr = MPI_Barrier(gcomm);CHKERRQ(ierr);
#endif
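  /* In this star forest the leaves live only on the root of scomm (the nleaves
   * de-duplicated indices gathered there), while every rank owns nroots root
   * slots; the reduce below therefore pushes each rank's share of the
   * overlapped indices back to its owner. MPIU_REPLACE suffices because each
   * root slot receives exactly one leaf.
   */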
  ierr = PetscSFReduceBegin(sf,MPIU_INT,indices_ov_rd,indices_recv,MPIU_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_INT,indices_ov_rd,indices_recv,MPIU_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  /* free memory */
  ierr = PetscFree2(indices_ov_rd,sources_sc_rd);CHKERRQ(ierr);
  /* create an index set of the overlapped rows owned by this rank */
  ierr = ISCreateGeneral(scomm,nroots,indices_recv,PETSC_OWN_POINTER,&is_sc);CHKERRQ(ierr);
#if 1
  ierr = ISView(is_sc,PETSC_NULL);CHKERRQ(ierr);
  ierr = MPI_Barrier(gcomm);CHKERRQ(ierr);
#endif
  /* create an index set for the columns */
  ierr = ISAllGather(is_sc,&allis_sc_tmp);CHKERRQ(ierr);
  ierr = ISGetLocalSize(allis_sc_tmp,&localsize_tmp);CHKERRQ(ierr);
  ierr = ISGetIndices(allis_sc_tmp,&indices);CHKERRQ(ierr);
  ierr = ISCreateGeneral(scomm,localsize_tmp,indices,PETSC_COPY_VALUES,&allis_sc);CHKERRQ(ierr);
  ierr = ISRestoreIndices(allis_sc_tmp,&indices);CHKERRQ(ierr);
  ierr = ISDestroy(&allis_sc_tmp);CHKERRQ(ierr);
  /* construct a parallel submatrix */
  ierr = PetscCalloc1(1,&smat);CHKERRQ(ierr);
#if 0
  ierr = ISView(allis_sc,PETSC_NULL);CHKERRQ(ierr);
  ierr = MPI_Barrier(gcomm);CHKERRQ(ierr);
  /*ierr = ISView(is_sc,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);*/
  MPI_Comm   comm1, comm2;
  ierr = PetscObjectGetComm((PetscObject)is_sc,&comm1);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)allis_sc,&comm2);CHKERRQ(ierr);
  /*ierr = PetscCommDuplicate(comm1,&comm2,NULL);CHKERRQ(ierr);*/
  /*ierr = MPI_Comm_dup(comm1,&comm2);CHKERRQ(ierr);*/
  ierr = MPI_Comm_compare(comm2,comm1,&issamecomm);CHKERRQ(ierr);
  if(issamecomm == MPI_IDENT){
    ierr = PetscPrintf(gcomm,"the same communicator \n");CHKERRQ(ierr);
  }else{
    ierr = PetscPrintf(gcomm,"different communicator \n");CHKERRQ(ierr);
  }
#endif
#if 1
  ierr = ISView(allis_sc,PETSC_NULL);CHKERRQ(ierr);
  ierr = ISView(is_sc,PETSC_NULL);CHKERRQ(ierr);
#endif
  ierr = MatGetSubMatricesMPI(mat,1,&is_sc,&allis_sc,MAT_INITIAL_MATRIX,&smat);CHKERRQ(ierr);
  /* we do not need it any more */
  ierr = ISDestroy(&allis_sc);CHKERRQ(ierr);
#if 1
  ierr = MatView(smat[0],PETSC_NULL);CHKERRQ(ierr);
#endif
  /* create a partitioner to repartition the sub-matrix */
  ierr = MatPartitioningCreate(scomm,&part);CHKERRQ(ierr);
  ierr = MatPartitioningSetAdjacency(part,smat[0]);CHKERRQ(ierr);
#if defined(PETSC_HAVE_PARMETIS)
  /* if a ParMETIS installation exists, we try to use ParMETIS
   * because its repartitioning routine may work better
   */
  ierr = MatPartitioningSetType(part,MATPARTITIONINGPARMETIS);CHKERRQ(ierr);
  /* try to use the repartition function instead of the partition function */
  ierr = MatPartitioningParmetisSetRepartition(part);CHKERRQ(ierr);
#else
  /* otherwise we provide at least a default partitioner to rebalance the computation */
  ierr = MatPartitioningSetType(part,MATPARTITIONINGAVERAGE);CHKERRQ(ierr);
#endif
  /* the user can pick any partitioner via the options database (e.g. -mat_partitioning_type) */
  ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
  /* apply the partitioning */
  ierr = MatPartitioningApply(part,&partitioning);CHKERRQ(ierr);
  ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);
  ierr = MatDestroy(&(smat[0]));CHKERRQ(ierr);
  ierr = PetscFree(smat);CHKERRQ(ierr);
#if 1
  ierr = ISView(partitioning,PETSC_NULL);CHKERRQ(ierr);
#endif
  /* get local rows including overlap */
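  /* ISBuildTwoSided() sends, for each local row, its global number (taken from
   * is_sc) to the rank chosen by the partitioning, so every rank of scomm ends
   * up with the overlapped rows that were assigned to it.
   */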
  ierr = ISBuildTwoSided(partitioning,is_sc,is);CHKERRQ(ierr);
  /* clean up */
  ierr = ISDestroy(&is_sc);CHKERRQ(ierr);
  ierr = ISDestroy(&partitioning);CHKERRQ(ierr);
  ierr = PetscCommDestroy(&scomm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}