static char help[] = "Example of extracting an array of MPI submatrices from a given MPI matrix.\n"
  "This test can only be run in parallel.\n"
  "\n";

/*T
   Concepts: Mat^mat submatrix, parallel
   Processors: n
T*/

#include <petscmat.h>

int main(int argc, char **args)
{
  Mat             A,*submats;
  MPI_Comm        subcomm;
  PetscMPIInt     rank,size,subrank,subsize,color;
  PetscInt        m,n,N,bs,rstart,rend,i,j,k,total_subdomains,hash,nsubdomains=1;
  PetscInt        nis,*cols,gnsubdomains,gsubdomainnums[1],gsubdomainperm[1],s,gs;
  PetscInt        *rowindices,*colindices,idx,rep;
  PetscScalar     *vals;
  IS              rowis[1],colis[1];
  PetscViewer     viewer;
  PetscBool       permute_indices,flg;
  PetscErrorCode  ierr;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRMPI(ierr);

  ierr = PetscOptionsBegin(PETSC_COMM_WORLD,NULL,"ex183","Mat");CHKERRQ(ierr);
  m = 5;
  ierr = PetscOptionsInt("-m","Local matrix size","MatSetSizes",m,&m,&flg);CHKERRQ(ierr);
  total_subdomains = size-1;
  ierr = PetscOptionsInt("-total_subdomains","Number of submatrices where 0 < n <= comm size","MatCreateSubMatricesMPI",total_subdomains,&total_subdomains,&flg);CHKERRQ(ierr);
  permute_indices = PETSC_FALSE;
  ierr = PetscOptionsBool("-permute_indices","Whether to permute indices before breaking them into subdomains","ISCreateGeneral",permute_indices,&permute_indices,&flg);CHKERRQ(ierr);
  hash = 7;
  ierr = PetscOptionsInt("-hash","Permutation factor, which has to be relatively prime to M = size*m (total matrix size)","ISCreateGeneral",hash,&hash,&flg);CHKERRQ(ierr);
  rep = 2;
  ierr = PetscOptionsInt("-rep","Number of times to carry out submatrix extractions; currently only 1 & 2 are supported",NULL,rep,&rep,&flg);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);

  if (total_subdomains < 1 || total_subdomains > size) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Number of subdomains must be > 0 and <= %d (comm size), got total_subdomains = %" PetscInt_FMT,size,total_subdomains);
  if (rep != 1 && rep != 2) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid number of test repetitions: %" PetscInt_FMT "; must be 1 or 2",rep);

  viewer = PETSC_VIEWER_STDOUT_WORLD;
  /* Create logically sparse, but effectively dense matrix for easy verification of submatrix extraction correctness. */
  ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
  ierr = MatSetSizes(A,m,m,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = MatSetFromOptions(A);CHKERRQ(ierr);
  ierr = MatSetUp(A);CHKERRQ(ierr);
  ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,NULL,&n);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(A,n,NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(A,n,NULL,N-n,NULL);CHKERRQ(ierr);
  ierr = MatSeqBAIJSetPreallocation(A,bs,n/bs,NULL);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(A,bs,n/bs,NULL,(N-n)/bs,NULL);CHKERRQ(ierr);
  ierr = MatSeqSBAIJSetPreallocation(A,bs,n/bs,NULL);CHKERRQ(ierr);
  ierr = MatMPISBAIJSetPreallocation(A,bs,n/bs,NULL,(N-n)/bs,NULL);CHKERRQ(ierr);
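  /* All of the preallocation calls above are safe to make unconditionally: only the one matching
     the runtime type of A (settable with -mat_type) takes effect, and the rest are ignored. */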

  ierr = PetscMalloc2(N,&cols,N,&vals);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  for (j = 0; j < N; ++j) cols[j] = j;
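  /* Each entry encodes its own global location: entry (i,j) is set to i*10000+j below, e.g. row 3,
     column 7 holds 30007, so the correctness of an extracted submatrix can be verified by inspection. */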
  for (i=rstart; i<rend; i++) {
    for (j=0;j<N;++j) {
      vals[j] = i*10000+j;
    }
    ierr = MatSetValues(A,1,&i,N,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscFree2(cols,vals);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  ierr = PetscViewerASCIIPrintf(viewer,"Initial matrix:\n");CHKERRQ(ierr);
  ierr = MatView(A,viewer);CHKERRQ(ierr);

  /*
     Create subcomms and ISs so that each rank participates in one IS.
     The IS either coalesces adjacent rank indices (contiguous),
     or selects indices by scrambling them using a hash.
  */
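  /*
     k below is the ceiling of size/total_subdomains, so each color groups at most k consecutive ranks.
     For example, size=3 and total_subdomains=2 give k=2: ranks {0,1} get color 0, rank {2} gets color 1.
  */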
  k = size/total_subdomains + (size%total_subdomains>0); /* k = ceil(size/total_subdomains); up to k ranks per color */
  color = rank/k;
  ierr = MPI_Comm_split(PETSC_COMM_WORLD,color,rank,&subcomm);CHKERRMPI(ierr);
  ierr = MPI_Comm_size(subcomm,&subsize);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRMPI(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  nis = 1;
  ierr = PetscMalloc2(rend-rstart,&rowindices,rend-rstart,&colindices);CHKERRQ(ierr);

  for (j = rstart; j < rend; ++j) {
    if (permute_indices) {
      idx = (j*hash);
    } else {
      idx = j;
    }
    rowindices[j-rstart] = idx%N;
    colindices[j-rstart] = (idx+m)%N;
  }
  ierr = ISCreateGeneral(subcomm,rend-rstart,rowindices,PETSC_COPY_VALUES,&rowis[0]);CHKERRQ(ierr);
  ierr = ISCreateGeneral(subcomm,rend-rstart,colindices,PETSC_COPY_VALUES,&colis[0]);CHKERRQ(ierr);
  ierr = ISSort(rowis[0]);CHKERRQ(ierr);
  ierr = ISSort(colis[0]);CHKERRQ(ierr);
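  /*
     A worked example of the scrambled case: with -permute_indices and the defaults -m 5 -hash 7 on
     2 ranks, N=10 and rank 0 owns rows 0..4, so idx = j*7 takes the values 0,7,14,21,28; thus
     rowindices = idx%10 = {0,7,4,1,8} and colindices = (idx+5)%10 = {5,2,9,6,3}, which ISSort() turns
     into {0,1,4,7,8} and {2,3,5,6,9}.  Since hash is relatively prime to N, j -> (j*hash)%N is a
     bijection on 0..N-1, so no row or column is picked twice across the communicator.
  */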
  ierr = PetscFree2(rowindices,colindices);CHKERRQ(ierr);
  /*
    Now view the ISs.  To avoid deadlock when viewing a list of objects on different subcomms,
    we need to obtain the global numbers of our local objects and wait for the corresponding global
    number to be viewed.
  */
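  /*
     Concretely: every rank sweeps gs over all gnsubdomains global numbers in the same order, and only
     the subcomm whose object carries the number gs acquires a subviewer and prints; the MPI_Barrier()
     at the bottom of each iteration keeps the output ordered by global number.
  */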
  ierr = PetscViewerASCIIPrintf(viewer,"Subdomains");CHKERRQ(ierr);
  if (permute_indices) {
    ierr = PetscViewerASCIIPrintf(viewer," (hash=%" PetscInt_FMT ")",hash);CHKERRQ(ierr);
  }
  ierr = PetscViewerASCIIPrintf(viewer,":\n");CHKERRQ(ierr);
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);

  nsubdomains = 1;
  for (s = 0; s < nsubdomains; ++s) gsubdomainperm[s] = s;
  ierr = PetscObjectsListGetGlobalNumbering(PETSC_COMM_WORLD,1,(PetscObject*)rowis,&gnsubdomains,gsubdomainnums);CHKERRQ(ierr);
  ierr = PetscSortIntWithPermutation(nsubdomains,gsubdomainnums,gsubdomainperm);CHKERRQ(ierr);
  for (gs=0,s=0; gs < gnsubdomains;++gs) {
    if (s < nsubdomains) {
      PetscInt ss;
      ss = gsubdomainperm[s];
      if (gs == gsubdomainnums[ss]) { /* Global subdomain gs being viewed is my subdomain with local number ss. */
        PetscViewer subviewer = NULL;
        ierr = PetscViewerGetSubViewer(viewer,PetscObjectComm((PetscObject)rowis[ss]),&subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPrintf(subviewer,"Row IS %" PetscInt_FMT "\n",gs);CHKERRQ(ierr);
        ierr = ISView(rowis[ss],subviewer);CHKERRQ(ierr);
        ierr = PetscViewerFlush(subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPrintf(subviewer,"Col IS %" PetscInt_FMT "\n",gs);CHKERRQ(ierr);
        ierr = ISView(colis[ss],subviewer);CHKERRQ(ierr);
        ierr = PetscViewerRestoreSubViewer(viewer,PetscObjectComm((PetscObject)rowis[ss]),&subviewer);CHKERRQ(ierr);
        ++s;
      }
    }
    ierr = MPI_Barrier(PETSC_COMM_WORLD);CHKERRMPI(ierr);
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  ierr = ISSort(rowis[0]);CHKERRQ(ierr);
  ierr = ISSort(colis[0]);CHKERRQ(ierr);
  nsubdomains = 1;
  ierr = MatCreateSubMatricesMPI(A,nsubdomains,rowis,colis,MAT_INITIAL_MATRIX,&submats);CHKERRQ(ierr);
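  /*
     Unlike MatCreateSubMatrices(), which returns sequential matrices, MatCreateSubMatricesMPI() returns
     parallel submatrices: submats[i] lives on the communicator of rowis[i]/colis[i] (here, subcomm).
  */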
  /*
    Now view the matrices.  To avoid deadlock when viewing a list of objects on different subcomms,
    we need to obtain the global numbers of our local objects and wait for the corresponding global
    number to be viewed.
  */
  ierr = PetscViewerASCIIPrintf(viewer,"Submatrices (repetition 1):\n");CHKERRQ(ierr);
  for (s = 0; s < nsubdomains; ++s) gsubdomainperm[s] = s;
  ierr = PetscObjectsListGetGlobalNumbering(PETSC_COMM_WORLD,1,(PetscObject*)submats,&gnsubdomains,gsubdomainnums);CHKERRQ(ierr);
  ierr = PetscSortIntWithPermutation(nsubdomains,gsubdomainnums,gsubdomainperm);CHKERRQ(ierr);
  for (gs=0,s=0; gs < gnsubdomains;++gs) {
    if (s < nsubdomains) {
      PetscInt ss;
      ss = gsubdomainperm[s];
      if (gs == gsubdomainnums[ss]) { /* Global subdomain gs being viewed is my subdomain with local number ss. */
        PetscViewer subviewer = NULL;
        ierr = PetscViewerGetSubViewer(viewer,PetscObjectComm((PetscObject)submats[ss]),&subviewer);CHKERRQ(ierr);
        ierr = MatView(submats[ss],subviewer);CHKERRQ(ierr);
        ierr = PetscViewerRestoreSubViewer(viewer,PetscObjectComm((PetscObject)submats[ss]),&subviewer);CHKERRQ(ierr);
        ++s;
      }
    }
    ierr = MPI_Barrier(PETSC_COMM_WORLD);CHKERRMPI(ierr);
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  if (rep == 1) goto cleanup;
  nsubdomains = 1;
  ierr = MatCreateSubMatricesMPI(A,nsubdomains,rowis,colis,MAT_REUSE_MATRIX,&submats);CHKERRQ(ierr);
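  /* The second repetition passes MAT_REUSE_MATRIX, so the extraction reuses the matrices allocated by
     the MAT_INITIAL_MATRIX call above, exercising the in-place reuse code path with the same index sets. */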
  /*
    Now view the matrices.  To avoid deadlock when viewing a list of objects on different subcomms,
    we need to obtain the global numbers of our local objects and wait for the corresponding global
    number to be viewed.
  */
  ierr = PetscViewerASCIIPrintf(viewer,"Submatrices (repetition 2):\n");CHKERRQ(ierr);
  for (s = 0; s < nsubdomains; ++s) gsubdomainperm[s] = s;
  ierr = PetscObjectsListGetGlobalNumbering(PETSC_COMM_WORLD,1,(PetscObject*)submats,&gnsubdomains,gsubdomainnums);CHKERRQ(ierr);
  ierr = PetscSortIntWithPermutation(nsubdomains,gsubdomainnums,gsubdomainperm);CHKERRQ(ierr);
  for (gs=0,s=0; gs < gnsubdomains;++gs) {
    if (s < nsubdomains) {
      PetscInt ss;
      ss = gsubdomainperm[s];
      if (gs == gsubdomainnums[ss]) { /* Global subdomain gs being viewed is my subdomain with local number ss. */
        PetscViewer subviewer = NULL;
        ierr = PetscViewerGetSubViewer(viewer,PetscObjectComm((PetscObject)submats[ss]),&subviewer);CHKERRQ(ierr);
        ierr = MatView(submats[ss],subviewer);CHKERRQ(ierr);
        ierr = PetscViewerRestoreSubViewer(viewer,PetscObjectComm((PetscObject)submats[ss]),&subviewer);CHKERRQ(ierr);
        ++s;
      }
    }
    ierr = MPI_Barrier(PETSC_COMM_WORLD);CHKERRMPI(ierr);
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  cleanup:
  for (k=0;k<nsubdomains;++k) {
    ierr = MatDestroy(submats+k);CHKERRQ(ierr);
  }
  ierr = PetscFree(submats);CHKERRQ(ierr);
  for (k=0;k<nis;++k) {
    ierr = ISDestroy(rowis+k);CHKERRQ(ierr);
    ierr = ISDestroy(colis+k);CHKERRQ(ierr);
  }
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&subcomm);CHKERRMPI(ierr);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   test:
      nsize: 2
      args: -total_subdomains 1
      output_file: output/ex183_2_1.out

   test:
      suffix: 2
      nsize: 3
      args: -total_subdomains 2
      output_file: output/ex183_3_2.out

   test:
      suffix: 3
      nsize: 4
      args: -total_subdomains 2
      output_file: output/ex183_4_2.out

   test:
      suffix: 4
      nsize: 6
      args: -total_subdomains 2
      output_file: output/ex183_6_2.out

TEST*/