xref: /petsc/src/mat/utils/overlapsplit.c (revision 2452736b626c5516aa3e80c9b2dbe3345798995e)
1 #include <petscsf.h>
2 #include <petsc/private/matimpl.h>
3 
4 
5 /*
6  * Increase the overlap of a sub-matrix across a sub-communicator.
7  * The sub-matrix may be a graph or a numerical matrix.
8  * */
9 PetscErrorCode  MatIncreaseOverlapSplit_Single(Mat mat,IS *is,PetscInt ov)
10 {
11   PetscInt         i,nindx,*indices_sc,*indices_ov,localsize,*localsizes_sc,localsize_tmp;
12   PetscInt         *indices_ov_rd,nroots,nleaves,*localoffsets,*indices_recv,*sources_sc,*sources_sc_rd;
13   const PetscInt   *indices;
14   PetscMPIInt      srank,ssize,issamecomm,k;
15   IS               is_sc,allis_sc,partitioning;
16   MPI_Comm         gcomm,scomm;
17   PetscSF          sf;
18   PetscSFNode      *remote;
19   Mat              *smat;
20   MatPartitioning  part;
21   PetscErrorCode   ierr;
22 
23   PetscFunctionBegin;
24   /*get a sub communicator before call individual MatIncreaseOverlap
25    * since the sub communicator may be changed.
26    * */
27   ierr = PetscObjectGetComm((PetscObject)*is,&scomm);CHKERRQ(ierr);
28   /*increase overlap on each individual subdomain*/
29   ierr = (*mat->ops->increaseoverlap)(mat,1,is,ov);CHKERRQ(ierr);
30   /*get a global communicator  */
31   ierr = PetscObjectGetComm((PetscObject)mat,&gcomm);CHKERRQ(ierr);
32   /*compare communicators */
33   ierr = MPI_Comm_compare(gcomm,scomm,&issamecomm);CHKERRQ(ierr);
34   /* if the sub-communicator is the same as the global communicator,
35    * user does not want to use a sub-communicator
36    * */
37   if(issamecomm == MPI_IDENT) PetscFunctionReturn(0);
38   /* if the sub-communicator is petsc_comm_self,
39    * user also does not care the sub-communicator
40    * */
41   ierr = MPI_Comm_compare(scomm,PETSC_COMM_SELF,&issamecomm);CHKERRQ(ierr);
42   if(issamecomm == MPI_IDENT) PetscFunctionReturn(0);
43   /*local rank, size in a subcomm */
44   ierr = MPI_Comm_rank(scomm,&srank);CHKERRQ(ierr);
45   ierr = MPI_Comm_size(scomm,&ssize);CHKERRQ(ierr);
46   /*create a new IS based on subcomm
47    * since the old IS is often petsc_comm_self
48    * */
49   ierr = ISGetLocalSize(*is,&nindx);CHKERRQ(ierr);
50   ierr = PetscCalloc1(nindx,&indices_sc);CHKERRQ(ierr);
51   ierr = ISGetIndices(*is,&indices);CHKERRQ(ierr);
52   ierr = PetscMemcpy(indices_sc,indices,sizeof(PetscInt)*nindx);CHKERRQ(ierr);
53   ierr = ISRestoreIndices(*is,&indices);CHKERRQ(ierr);
54   /*we do not need any more*/
55   ierr = ISDestroy(is);CHKERRQ(ierr);
56   /*create a index set based on the sub communicator  */
57   ierr = ISCreateGeneral(scomm,nindx,indices_sc,PETSC_OWN_POINTER,&is_sc);CHKERRQ(ierr);
58   /*gather all indices within  the sub communicator*/
59   ierr = ISAllGather(is_sc,&allis_sc);CHKERRQ(ierr);
60   ierr = ISDestroy(&is_sc);CHKERRQ(ierr);
61   /* gather local sizes */
62   ierr = PetscMalloc1(ssize,&localsizes_sc);CHKERRQ(ierr);
63   ierr = MPI_Gather(&nindx,1,MPIU_INT,localsizes_sc,1,MPIU_INT,0,scomm);CHKERRQ(ierr);
64   /*only root does these computation */
65   if(!srank){
66    /*get local size for the big index set*/
67    ierr = ISGetLocalSize(allis_sc,&localsize);CHKERRQ(ierr);
68    ierr = PetscCalloc2(localsize,&indices_ov,localsize,&sources_sc);CHKERRQ(ierr);
69    ierr = PetscCalloc2(localsize,&indices_ov_rd,localsize,&sources_sc_rd);CHKERRQ(ierr);
70    ierr = ISGetIndices(allis_sc,&indices);CHKERRQ(ierr);
71    ierr = PetscMemcpy(indices_ov,indices,sizeof(PetscInt)*localsize);CHKERRQ(ierr);
72    ierr = ISRestoreIndices(allis_sc,&indices);CHKERRQ(ierr);
73 
74    ierr = ISDestroy(&allis_sc);CHKERRQ(ierr);
75    /*assign corresponding sources */
76    localsize_tmp = 0;
77    for(k=0; k<ssize; k++){
78      for(i=0; i<localsizes_sc[k]; i++){
79        sources_sc[localsize_tmp++] = k;
80      }
81    }
82    /*record where indices come from */
83    ierr = PetscSortIntWithArray(localsize,indices_ov,sources_sc);CHKERRQ(ierr);
84    localsize_tmp = 1;
85    ierr = PetscMemzero(localsizes_sc,sizeof(PetscInt)*ssize);CHKERRQ(ierr);
86    /*initialize the first entities*/
87    if(localsize){
88 	 indices_ov_rd[0] = indices_ov[0];
89 	 sources_sc_rd[0] = sources_sc[0];
90 	 localsizes_sc[sources_sc[0]]++;
91    }
92    /*remove duplicate integers */
93    for(i=1; i<localsize; i++){
94 	 if(indices_ov[i] != indices_ov[i-1]){
95 	   indices_ov_rd[localsize_tmp]   = indices_ov[i];
96 	   sources_sc_rd[localsize_tmp++] = sources_sc[i];
97 	   localsizes_sc[sources_sc[i]]++;
98 	 }
99    }
100    ierr = PetscFree2(indices_ov,sources_sc);CHKERRQ(ierr);
101    ierr = PetscCalloc1(ssize+1,&localoffsets);CHKERRQ(ierr);
102    for(k=0; k<ssize; k++){
103 	 localoffsets[k+1] = localoffsets[k] + localsizes_sc[i];
104    }
105    /*build a star forest to send data back */
106    nleaves = localoffsets[ssize];
107    ierr = PetscMemzero(localoffsets,(ssize+1)*sizeof(PetscInt));CHKERRQ(ierr);
108    nroots  = localsizes_sc[srank];
109    ierr = PetscCalloc1(nleaves,&remote);CHKERRQ(ierr);
110    for(i=0; i<nleaves; i++){
111 	 remote[i].rank  = sources_sc[i];
112 	 remote[i].index = localoffsets[sources_sc[i]]++;
113    }
114   }else{
115    ierr = ISDestroy(&allis_sc);CHKERRQ(ierr);
116    nleaves = 0;
117    indices_ov_rd = 0;
118    sources_sc_rd = 0;
119   }
120   /*scatter sizes to everybody */
121   ierr = MPI_Scatter(localsizes_sc,1, MPIU_INT,&nroots,1, MPIU_INT,0,scomm);CHKERRQ(ierr);
122   ierr = PetscCalloc1(nroots,&indices_recv);CHKERRQ(ierr);
123   /*set data back to every body */
124   ierr = PetscSFCreate(scomm,&sf);CHKERRQ(ierr);
125   ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
126   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
127   ierr = PetscSFSetGraph(sf,nroots,nleaves,PETSC_NULL,PETSC_OWN_POINTER,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
128   ierr = PetscSFReduceBegin(sf,MPIU_INT,&indices_ov_rd,indices_recv,MPI_REPLACE);CHKERRQ(ierr);
129   ierr = PetscSFReduceEnd(sf,MPIU_INT,&indices_ov_rd,indices_recv,MPI_REPLACE);CHKERRQ(ierr);
130   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
131   /* free memory */
132   ierr = PetscFree2(indices_ov_rd,sources_sc_rd);CHKERRQ(ierr);
133   /*create a index set*/
134   ierr = ISCreateGeneral(scomm,nroots,indices_recv,PETSC_OWN_POINTER,&is_sc);CHKERRQ(ierr);
135   /*create a index set for cols */
136   ierr = ISAllGather(is_sc,&allis_sc);CHKERRQ(ierr);
137   /*reparition */
138   /*construct a parallel submatrix */
139   ierr = PetscCalloc1(1,&smat);CHKERRQ(ierr);
140   ierr = MatGetSubMatricesMPI(mat,1,&is_sc,&allis_sc,MAT_INITIAL_MATRIX,&smat);CHKERRQ(ierr);
141   /* we do not need them any more */
142   ierr = ISDestroy(&is_sc);CHKERRQ(ierr);
143   ierr = ISDestroy(&allis_sc);CHKERRQ(ierr);
144   /*create a partitioner to repartition the sub-matrix*/
145   ierr = MatPartitioningCreate(scomm,&part);CHKERRQ(ierr);
146   ierr = MatPartitioningSetAdjacency(part,smat[0]);CHKERRQ(ierr);
147 #if PETSC_HAVE_PARMETIS
148   /* if there exists a ParMETIS installation, we try to use ParMETIS
149    * because a repartition routine possibly work better
150    * */
151   ierr = MatPartitioningSetType(part,MATPARTITIONINGPARMETIS);CHKERRQ(ierr);
152   /*try to use reparition function, instead of partition function */
153   ierr = MatPartitioningParmetisSetRepartition(part);CHKERRQ(ierr);
154 #else
155   /*we at least provide a default partitioner to rebalance the computation  */
156   ierr = MatPartitioningSetType(part,MATPARTITIONINGAVERAGE);CHKERRQ(ierr);
157 #endif
158   /*user can pick up any partitioner by using an option*/
159   ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
160   /* apply partition */
161   ierr = MatPartitioningApply(part,&partitioning);CHKERRQ(ierr);
162   ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);
163   ierr = PetscFree(smat);CHKERRQ(ierr);
164   /* get local rows including  overlap */
165   ierr = ISBuildTwoSided(partitioning,is);CHKERRQ(ierr);
166   PetscFunctionReturn(0);
167 }
168 
169 
170