static char help[] = "Example of extracting an array of MPI submatrices from a given MPI matrix.\n"
  "This test can only be run in parallel.\n"
  "\n";

#include <petscmat.h>

int main(int argc, char **args)
{
  Mat             A,*submats;
  MPI_Comm        subcomm;
  PetscMPIInt     rank,size,subrank,subsize,color;
  PetscInt        m,n,N,bs,rstart,rend,i,j,k,total_subdomains,hash,nsubdomains=1;
  PetscInt        nis,*cols,gnsubdomains,gsubdomainnums[1],gsubdomainperm[1],s,gs;
  PetscInt        *rowindices,*colindices,idx,rep;
  PetscScalar     *vals;
  IS              rowis[1],colis[1];
  PetscViewer     viewer;
  PetscBool       permute_indices,flg;

  PetscCall(PetscInitialize(&argc,&args,(char*)0,help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));

  PetscOptionsBegin(PETSC_COMM_WORLD,NULL,"ex183","Mat");
  m = 5;
  PetscCall(PetscOptionsInt("-m","Local matrix size","MatSetSizes",m,&m,&flg));
  total_subdomains = size-1;
  PetscCall(PetscOptionsInt("-total_subdomains","Number of submatrices to extract (0 < total_subdomains <= comm size)","MatCreateSubMatricesMPI",total_subdomains,&total_subdomains,&flg));
  permute_indices = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-permute_indices","Whether to permute indices before breaking them into subdomains","ISCreateGeneral",permute_indices,&permute_indices,&flg));
  hash = 7;
  PetscCall(PetscOptionsInt("-hash","Permutation factor, which has to be relatively prime to M = size*m (total matrix size)","ISCreateGeneral",hash,&hash,&flg));
  rep = 2;
  PetscCall(PetscOptionsInt("-rep","Number of times to carry out submatrix extractions; currently only 1 & 2 are supported",NULL,rep,&rep,&flg));
  PetscOptionsEnd();

  PetscCheck(total_subdomains >= 1 && total_subdomains <= size,PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Number of subdomains must be > 0 and <= %d (comm size), got total_subdomains = %" PetscInt_FMT,size,total_subdomains);
  PetscCheck(rep == 1 || rep == 2,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid number of test repetitions: %" PetscInt_FMT "; must be 1 or 2",rep);

  viewer = PETSC_VIEWER_STDOUT_WORLD;
  /* Create logically sparse, but effectively dense matrix for easy verification of submatrix extraction correctness. */
  PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
  PetscCall(MatSetSizes(A,m,m,PETSC_DECIDE,PETSC_DECIDE));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(MatGetSize(A,NULL,&N));
  PetscCall(MatGetLocalSize(A,NULL,&n));
  PetscCall(MatGetBlockSize(A,&bs));
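  /*
     The matrix type is runtime-selectable (-mat_type), so preallocate for every supported format;
     the preallocation calls that do not match the actual type of A are no-ops.
  */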
  PetscCall(MatSeqAIJSetPreallocation(A,n,NULL));
  PetscCall(MatMPIAIJSetPreallocation(A,n,NULL,N-n,NULL));
  PetscCall(MatSeqBAIJSetPreallocation(A,bs,n/bs,NULL));
  PetscCall(MatMPIBAIJSetPreallocation(A,bs,n/bs,NULL,(N-n)/bs,NULL));
  PetscCall(MatSeqSBAIJSetPreallocation(A,bs,n/bs,NULL));
  PetscCall(MatMPISBAIJSetPreallocation(A,bs,n/bs,NULL,(N-n)/bs,NULL));

  PetscCall(PetscMalloc2(N,&cols,N,&vals));
  PetscCall(MatGetOwnershipRange(A,&rstart,&rend));
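  /* Set entry (i,j) to 10000*i + j so every value encodes its global position, which makes the extracted submatrices easy to verify by inspection. */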
  for (j = 0; j < N; ++j) cols[j] = j;
  for (i=rstart; i<rend; i++) {
    for (j=0;j<N;++j) {
      vals[j] = i*10000+j;
    }
    PetscCall(MatSetValues(A,1,&i,N,cols,vals,INSERT_VALUES));
  }
  PetscCall(PetscFree2(cols,vals));
  PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));

  PetscCall(PetscViewerASCIIPrintf(viewer,"Initial matrix:\n"));
  PetscCall(MatView(A,viewer));

  /*
     Create subcomms and ISs so that each rank participates in one IS.
     The IS either coalesces adjacent rank indices (contiguous),
     or selects indices by scrambling them using a hash.
  */
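  /* MPI_Comm_split below builds one subcommunicator per color; each subcomm contributes one (row IS, col IS) pair and will receive one submatrix. */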
  k = size/total_subdomains + (size%total_subdomains>0); /* There are up to k ranks per color */
  color = rank/k;
  PetscCallMPI(MPI_Comm_split(PETSC_COMM_WORLD,color,rank,&subcomm));
  PetscCallMPI(MPI_Comm_size(subcomm,&subsize));
  PetscCallMPI(MPI_Comm_rank(subcomm,&subrank));
  PetscCall(MatGetOwnershipRange(A,&rstart,&rend));
  nis = 1;
  PetscCall(PetscMalloc2(rend-rstart,&rowindices,rend-rstart,&colindices));

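  /*
     Each rank contributes its rend-rstart locally owned row numbers to its subdomain's ISs.
     With -permute_indices the indices are scrambled as (j*hash) mod N; because hash is relatively
     prime to N this is a permutation of 0..N-1.  Column indices are the (permuted) row indices
     shifted by m modulo N.
  */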
  for (j = rstart; j < rend; ++j) {
    if (permute_indices) {
      idx = (j*hash);
    } else {
      idx = j;
    }
    rowindices[j-rstart] = idx%N;
    colindices[j-rstart] = (idx+m)%N;
  }
  PetscCall(ISCreateGeneral(subcomm,rend-rstart,rowindices,PETSC_COPY_VALUES,&rowis[0]));
  PetscCall(ISCreateGeneral(subcomm,rend-rstart,colindices,PETSC_COPY_VALUES,&colis[0]));
  PetscCall(ISSort(rowis[0]));
  PetscCall(ISSort(colis[0]));
  PetscCall(PetscFree2(rowindices,colindices));
  /*
    Now view the ISs.  To avoid deadlock when viewing a list of objects that live on different subcomms,
    we obtain a global numbering of our local objects and view them in that global order, so that all
    ranks request subviewers in the same sequence.
  */
  PetscCall(PetscViewerASCIIPrintf(viewer,"Subdomains"));
  if (permute_indices) {
    PetscCall(PetscViewerASCIIPrintf(viewer," (hash=%" PetscInt_FMT ")",hash));
  }
  PetscCall(PetscViewerASCIIPrintf(viewer,":\n"));
  PetscCall(PetscViewerFlush(viewer));

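  /*
     gnsubdomains is the number of distinct subdomains across PETSC_COMM_WORLD, and gsubdomainnums[]
     holds the global numbers of this rank's subdomains (one here).  The loop walks the global numbers
     in order; a rank takes its turn when its own subdomain comes up, and the barrier keeps all ranks
     in lockstep so subviewers are obtained and restored in the same order everywhere.
  */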
  nsubdomains = 1;
  for (s = 0; s < nsubdomains; ++s) gsubdomainperm[s] = s;
  PetscCall(PetscObjectsListGetGlobalNumbering(PETSC_COMM_WORLD,1,(PetscObject*)rowis,&gnsubdomains,gsubdomainnums));
  PetscCall(PetscSortIntWithPermutation(nsubdomains,gsubdomainnums,gsubdomainperm));
  for (gs=0,s=0; gs < gnsubdomains;++gs) {
    if (s < nsubdomains) {
      PetscInt ss;
      ss = gsubdomainperm[s];
      if (gs == gsubdomainnums[ss]) { /* Global subdomain gs being viewed is my subdomain with local number ss. */
        PetscViewer subviewer = NULL;
        PetscCall(PetscViewerGetSubViewer(viewer,PetscObjectComm((PetscObject)rowis[ss]),&subviewer));
        PetscCall(PetscViewerASCIIPrintf(subviewer,"Row IS %" PetscInt_FMT "\n",gs));
        PetscCall(ISView(rowis[ss],subviewer));
        PetscCall(PetscViewerFlush(subviewer));
        PetscCall(PetscViewerASCIIPrintf(subviewer,"Col IS %" PetscInt_FMT "\n",gs));
        PetscCall(ISView(colis[ss],subviewer));
        PetscCall(PetscViewerRestoreSubViewer(viewer,PetscObjectComm((PetscObject)rowis[ss]),&subviewer));
        ++s;
      }
    }
    PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
  }
  PetscCall(PetscViewerFlush(viewer));
  PetscCall(ISSort(rowis[0]));
  PetscCall(ISSort(colis[0]));
  nsubdomains = 1;
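  /*
     Extract one MPI submatrix per subdomain.  Each submatrix is created on the communicator of its
     row/col ISs (the subcomm), so different subdomains are extracted onto different subcommunicators
     in a single collective call on PETSC_COMM_WORLD.
  */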
  PetscCall(MatCreateSubMatricesMPI(A,nsubdomains,rowis,colis,MAT_INITIAL_MATRIX,&submats));
  /*
    Now view the matrices.  To avoid deadlock when viewing a list of objects that live on different subcomms,
    we obtain a global numbering of our local objects and view them in that global order, so that all
    ranks request subviewers in the same sequence.
  */
  PetscCall(PetscViewerASCIIPrintf(viewer,"Submatrices (repetition 1):\n"));
  for (s = 0; s < nsubdomains; ++s) gsubdomainperm[s] = s;
  PetscCall(PetscObjectsListGetGlobalNumbering(PETSC_COMM_WORLD,1,(PetscObject*)submats,&gnsubdomains,gsubdomainnums));
  PetscCall(PetscSortIntWithPermutation(nsubdomains,gsubdomainnums,gsubdomainperm));
  for (gs=0,s=0; gs < gnsubdomains;++gs) {
    if (s < nsubdomains) {
      PetscInt ss;
      ss = gsubdomainperm[s];
      if (gs == gsubdomainnums[ss]) { /* Global subdomain gs being viewed is my subdomain with local number ss. */
        PetscViewer subviewer = NULL;
        PetscCall(PetscViewerGetSubViewer(viewer,PetscObjectComm((PetscObject)submats[ss]),&subviewer));
        PetscCall(MatView(submats[ss],subviewer));
        PetscCall(PetscViewerRestoreSubViewer(viewer,PetscObjectComm((PetscObject)submats[ss]),&subviewer));
        ++s;
      }
    }
    PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
  }
  PetscCall(PetscViewerFlush(viewer));
  if (rep == 1) goto cleanup;
  nsubdomains = 1;
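  /*
     Repeat the extraction with MAT_REUSE_MATRIX: the submatrices created above are filled in place,
     exercising the reuse code path of MatCreateSubMatricesMPI.
  */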
  PetscCall(MatCreateSubMatricesMPI(A,nsubdomains,rowis,colis,MAT_REUSE_MATRIX,&submats));
  /*
    Now view the matrices.  To avoid deadlock when viewing a list of objects that live on different subcomms,
    we obtain a global numbering of our local objects and view them in that global order, so that all
    ranks request subviewers in the same sequence.
  */
  PetscCall(PetscViewerASCIIPrintf(viewer,"Submatrices (repetition 2):\n"));
  for (s = 0; s < nsubdomains; ++s) gsubdomainperm[s] = s;
  PetscCall(PetscObjectsListGetGlobalNumbering(PETSC_COMM_WORLD,1,(PetscObject*)submats,&gnsubdomains,gsubdomainnums));
  PetscCall(PetscSortIntWithPermutation(nsubdomains,gsubdomainnums,gsubdomainperm));
  for (gs=0,s=0; gs < gnsubdomains;++gs) {
    if (s < nsubdomains) {
      PetscInt ss;
      ss = gsubdomainperm[s];
      if (gs == gsubdomainnums[ss]) { /* Global subdomain gs being viewed is my subdomain with local number ss. */
        PetscViewer subviewer = NULL;
        PetscCall(PetscViewerGetSubViewer(viewer,PetscObjectComm((PetscObject)submats[ss]),&subviewer));
        PetscCall(MatView(submats[ss],subviewer));
        PetscCall(PetscViewerRestoreSubViewer(viewer,PetscObjectComm((PetscObject)submats[ss]),&subviewer));
        ++s;
      }
    }
    PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
  }
  PetscCall(PetscViewerFlush(viewer));
  cleanup:
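  /* Destroy each extracted submatrix and free the array allocated by MatCreateSubMatricesMPI, then the ISs, the original matrix, and the subcommunicator. */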
  for (k=0;k<nsubdomains;++k) {
    PetscCall(MatDestroy(submats+k));
  }
  PetscCall(PetscFree(submats));
  for (k=0;k<nis;++k) {
    PetscCall(ISDestroy(rowis+k));
    PetscCall(ISDestroy(colis+k));
  }
  PetscCall(MatDestroy(&A));
  PetscCallMPI(MPI_Comm_free(&subcomm));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 2
      args: -total_subdomains 1
      output_file: output/ex183_2_1.out

   test:
      suffix: 2
      nsize: 3
      args: -total_subdomains 2
      output_file: output/ex183_3_2.out

   test:
      suffix: 3
      nsize: 4
      args: -total_subdomains 2
      output_file: output/ex183_4_2.out

   test:
      suffix: 4
      nsize: 6
      args: -total_subdomains 2
      output_file: output/ex183_6_2.out

TEST*/