static char help[] = "Reads a PETSc matrix from a file and partitions it\n\n";

/*T
   Concepts: partitioning
   Processors: n
T*/

/*
  Include "petscmat.h" so that we can use matrices.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscis.h     - index sets
     petscviewer.h - viewers

  Example of usage:
    mpiexec -n 3 ex73 -f <matfile> -mat_partitioning_type parmetis|ptscotch -viewer_binary_skip_info -nox
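
  Another example, printing the partitioning index sets and using a random
  vector instead of reading one from the file (<matfile> is a placeholder):
    mpiexec -n 3 ex73 -f <matfile> -mat_partitioning_type parmetis -view_is -novec_load -nox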
*/
#include <petscmat.h>

int main(int argc,char **args)
{
  MatType         mtype = MATMPIAIJ;        /* matrix format */
  Mat             A,B;                      /* matrix */
  PetscViewer     fd;                       /* viewer */
  char            file[PETSC_MAX_PATH_LEN]; /* input file name */
  PetscBool       flg,viewMats,viewIS,viewVecs,useND,noVecLoad = PETSC_FALSE;
  PetscInt        *nlocal,m,n;
  PetscMPIInt     rank,size;
  MatPartitioning part;
  IS              is,isn;
  Vec             xin,xout;
  VecScatter      scat;

  PetscCall(PetscInitialize(&argc,&args,(char*)0,help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
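  /*
     Runtime flags (checked for presence only):
       -view_mats   draw the original and partitioned matrices
       -view_is     print the partitioning index sets
       -view_vecs   print the original and mapped vectors
       -use_nd      use a nested-dissection ordering instead of a plain partitioning
       -novec_load  do not read a vector from the file; use a random one instead
  */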
  PetscCall(PetscOptionsHasName(NULL,NULL,"-view_mats",&viewMats));
  PetscCall(PetscOptionsHasName(NULL,NULL,"-view_is",&viewIS));
  PetscCall(PetscOptionsHasName(NULL,NULL,"-view_vecs",&viewVecs));
  PetscCall(PetscOptionsHasName(NULL,NULL,"-use_nd",&useND));
  PetscCall(PetscOptionsHasName(NULL,NULL,"-novec_load",&noVecLoad));

  /*
     Determine the file from which we read the matrix
  */
  PetscCall(PetscOptionsGetString(NULL,NULL,"-f",file,sizeof(file),&flg));
  PetscCheck(flg,PETSC_COMM_WORLD,PETSC_ERR_USER,"Must indicate a binary matrix file with the -f option");

  /*
     Open the binary file.  Note that we use FILE_MODE_READ to indicate
     reading from this file.
  */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd));
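  /* the file must contain the matrix, followed by a vector unless -novec_load
     is given */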

  /*
     Load the matrix; load the vector too unless -novec_load was given, in
     which case create a random vector with a layout matching the matrix.
     Then destroy the viewer.
  */
  PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
  PetscCall(MatSetType(A,mtype));
  PetscCall(MatLoad(A,fd));
  if (!noVecLoad) {
    PetscCall(VecCreate(PETSC_COMM_WORLD,&xin));
    PetscCall(VecLoad(xin,fd));
  } else {
    PetscCall(MatCreateVecs(A,&xin,NULL));
    PetscCall(VecSetRandom(xin,NULL));
  }
  PetscCall(PetscViewerDestroy(&fd));
  if (viewMats) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Original matrix:\n"));
    PetscCall(MatView(A,PETSC_VIEWER_DRAW_WORLD));
  }
  if (viewVecs) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Original vector:\n"));
    PetscCall(VecView(xin,PETSC_VIEWER_STDOUT_WORLD));
  }

  /* Partition the graph of the matrix */
  PetscCall(MatPartitioningCreate(PETSC_COMM_WORLD,&part));
  PetscCall(MatPartitioningSetAdjacency(part,A));
  PetscCall(MatPartitioningSetFromOptions(part));
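  /* the actual partitioner (e.g. parmetis or ptscotch) is chosen at runtime
     with -mat_partitioning_type, picked up by MatPartitioningSetFromOptions() */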

  /* get the new owning process of each vertex */
  if (useND) {
    PetscCall(MatPartitioningApplyND(part,&is));
  } else {
    PetscCall(MatPartitioningApply(part,&is));
  }
  if (viewIS) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"IS1 - new processor ownership:\n"));
    PetscCall(ISView(is,PETSC_VIEWER_STDOUT_WORLD));
  }

  /* convert the ownership IS into a new global numbering: entry i is the new
     global index of old global vertex i */
  PetscCall(ISPartitioningToNumbering(is,&isn));
  if (viewIS) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"IS2 - new global numbering:\n"));
    PetscCall(ISView(isn,PETSC_VIEWER_STDOUT_WORLD));
  }

  /* get the number of new vertices on each process */
  PetscCall(PetscMalloc1(size,&nlocal));
  PetscCall(ISPartitioningCount(is,size,nlocal));
  PetscCall(ISDestroy(&is));

  /* get the old global number of each new global number */
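  /* nlocal[rank] tells ISInvertPermutation() how many entries of the inverse
     this process should own, so that the inverse follows the new partitioning;
     with nested dissection we let PETSc decide */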
  PetscCall(ISInvertPermutation(isn,useND ? PETSC_DECIDE : nlocal[rank],&is));
  if (viewIS) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"IS3=inv(IS2) - old global number of each new global number:\n"));
    PetscCall(ISView(is,PETSC_VIEWER_STDOUT_WORLD));
  }

  /* move the matrix rows to the new processes they have been assigned to by the permutation */
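  /* using the same index set for rows and columns applies the symmetric
     permutation; the parallel layout of 'is' determines which process ends up
     with which rows of B */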
  PetscCall(MatCreateSubMatrix(A,is,is,MAT_INITIAL_MATRIX,&B));
  PetscCall(PetscFree(nlocal));
  PetscCall(ISDestroy(&isn));
  PetscCall(MatDestroy(&A));
  PetscCall(MatPartitioningDestroy(&part));
  if (viewMats) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Partitioned matrix:\n"));
    PetscCall(MatView(B,PETSC_VIEWER_DRAW_WORLD));
  }

  /* move the vector entries to the new processes they have been assigned to */
  PetscCall(MatGetLocalSize(B,&m,&n));
  PetscCall(VecCreateMPI(PETSC_COMM_WORLD,m,PETSC_DECIDE,&xout));
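  /* xout receives xin[is[j]] at global position j; passing NULL as the 'to'
     index set means all of xout in its natural order */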
  PetscCall(VecScatterCreate(xin,is,xout,NULL,&scat));
  PetscCall(VecScatterBegin(scat,xin,xout,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(scat,xin,xout,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterDestroy(&scat));
  if (viewVecs) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Mapped vector:\n"));
    PetscCall(VecView(xout,PETSC_VIEWER_STDOUT_WORLD));
  }
  PetscCall(VecDestroy(&xout));
  PetscCall(ISDestroy(&is));

  {
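    /*
       Build a "Jacobian" J with the same parallel layout as B but with two
       interlaced degrees of freedom per vertex: each nonzero (i,j) of B
       becomes the 2x2 block with rows 2i,2i+1 and columns 2j,2j+1.  First
       count the nonzeros of each new row for preallocation, then insert
       the (zero) values.
    */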
    PetscInt          rstart,i,*nzd,*nzo,nzl,nzmax = 0,*ncols,nrow,j;
    Mat               J;
    const PetscInt    *cols;
    const PetscScalar *vals;
    PetscScalar       *nvals;

    PetscCall(MatGetOwnershipRange(B,&rstart,NULL));
    PetscCall(PetscCalloc2(2*m,&nzd,2*m,&nzo));
    for (i=0; i<m; i++) {
      PetscCall(MatGetRow(B,i+rstart,&nzl,&cols,NULL));
      for (j=0; j<nzl; j++) {
        if (cols[j] >= rstart && cols[j] < rstart+n) {
          nzd[2*i]   += 2;
          nzd[2*i+1] += 2;
        } else {
          nzo[2*i]   += 2;
          nzo[2*i+1] += 2;
        }
      }
      nzmax = PetscMax(nzmax,nzd[2*i]+nzo[2*i]);
      PetscCall(MatRestoreRow(B,i+rstart,&nzl,&cols,NULL));
    }
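    /* nzd/nzo now hold the per-row nonzero counts of the diagonal and
       off-diagonal blocks that MatCreateAIJ() uses for preallocation */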
    PetscCall(MatCreateAIJ(PETSC_COMM_WORLD,2*m,2*m,PETSC_DECIDE,PETSC_DECIDE,0,nzd,0,nzo,&J));
    PetscCall(PetscInfo(NULL,"Created empty Jacobian matrix\n"));
    PetscCall(PetscFree2(nzd,nzo));
    PetscCall(PetscMalloc2(nzmax,&ncols,nzmax,&nvals));
    PetscCall(PetscArrayzero(nvals,nzmax));
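    /* the inserted values are all zero; only the nonzero structure of J is of
       interest here */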
    for (i=0; i<m; i++) {
      PetscCall(MatGetRow(B,i+rstart,&nzl,&cols,&vals));
      for (j=0; j<nzl; j++) {
        ncols[2*j]   = 2*cols[j];
        ncols[2*j+1] = 2*cols[j]+1;
      }
      nrow = 2*(i+rstart);
      PetscCall(MatSetValues(J,1,&nrow,2*nzl,ncols,nvals,INSERT_VALUES));
      nrow = 2*(i+rstart) + 1;
      PetscCall(MatSetValues(J,1,&nrow,2*nzl,ncols,nvals,INSERT_VALUES));
      PetscCall(MatRestoreRow(B,i+rstart,&nzl,&cols,&vals));
    }
    PetscCall(MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY));
    if (viewMats) {
      PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Jacobian matrix structure:\n"));
      PetscCall(MatView(J,PETSC_VIEWER_DRAW_WORLD));
    }
    PetscCall(MatDestroy(&J));
    PetscCall(PetscFree2(ncols,nvals));
  }

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  PetscCall(MatDestroy(&B));
  PetscCall(VecDestroy(&xin));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 3
      requires: parmetis datafilespath !complex double !defined(PETSC_USE_64BIT_INDICES)
      args: -nox -f ${DATAFILESPATH}/matrices/arco1 -mat_partitioning_type parmetis -viewer_binary_skip_info -novec_load

   test:
      requires: parmetis !complex double !defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex73_1.out
      suffix: parmetis_nd_32
      nsize: 3
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int32-float64 -mat_partitioning_type parmetis -viewer_binary_skip_info -use_nd -novec_load

   test:
      requires: parmetis !complex double defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex73_1.out
      suffix: parmetis_nd_64
      nsize: 3
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int64-float64 -mat_partitioning_type parmetis -viewer_binary_skip_info -use_nd -novec_load

   test:
      requires: ptscotch !complex double !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
      output_file: output/ex73_1.out
      suffix: ptscotch_nd_32
      nsize: 4
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int32-float64 -mat_partitioning_type ptscotch -viewer_binary_skip_info -use_nd -novec_load

   test:
      requires: ptscotch !complex double defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
      output_file: output/ex73_1.out
      suffix: ptscotch_nd_64
      nsize: 4
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int64-float64 -mat_partitioning_type ptscotch -viewer_binary_skip_info -use_nd -novec_load

TEST*/