xref: /petsc/src/mat/tests/ex111.c (revision 8fb5bd83c3955fefcf33a54e3bb66920a9fa884b)
1 
/* Usage summary printed by -help; the listed options are read in main(). */
static char help[] ="Tests sequential and parallel MatMatMatMult() and MatPtAP(). Modified from ex96.c \n\
  -Mx <xg>, where <xg> = number of coarse grid points in the x-direction\n\
  -My <yg>, where <yg> = number of coarse grid points in the y-direction\n\
  -Mz <zg>, where <zg> = number of coarse grid points in the z-direction\n\
  -Npx <npx>, where <npx> = number of processors in the x-direction\n\
  -Npy <npy>, where <npy> = number of processors in the y-direction\n\
  -Npz <npz>, where <npz> = number of processors in the z-direction\n\n";
9 
/*
    Example of usage: mpiexec -n 3 ./ex111 -Mx 10 -My 10 -Mz 10
*/
13 
14 #include <petscdm.h>
15 #include <petscdmda.h>
16 
/* User-defined application contexts */
/* Per-level grid description, carried over from ex96.c.  Only mx/my/mz and
   da are used in this test; the vectors and Jacobian are unused here. */
typedef struct {
  PetscInt mx,my,mz;            /* number of grid points in x, y and z direction */
  Vec      localX,localF;       /* local vectors with ghost region (unused in this test) */
  DM       da;                  /* distributed array describing this grid level */
  Vec      x,b,r;               /* global vectors (unused in this test) */
  Mat      J;                   /* Jacobian on grid (unused in this test) */
} GridCtx;
/* Pair of grid levels plus the refinement ratio between them. */
typedef struct {
  GridCtx  fine;                /* fine-level grid */
  GridCtx  coarse;              /* coarse-level grid */
  PetscInt ratio;               /* refinement ratio between coarse and fine grids */
  Mat      Ii;                  /* interpolation from coarse to fine (unused in this test) */
} AppCtx;
31 
/* Level indices carried over from ex96.c; not referenced in this test. */
#define COARSE_LEVEL 0
#define FINE_LEVEL   1
34 
/*
      Mm_ratio - ratio of grid lines between fine and coarse grids.
*/
38 int main(int argc,char **argv)
39 {
40   AppCtx         user;
41   PetscMPIInt    size,rank;
42   PetscInt       m,n,M,N,i,nrows;
43   PetscScalar    one = 1.0;
44   PetscReal      fill=2.0;
45   Mat            A,P,R,C,PtAP,D;
46   PetscScalar    *array;
47   PetscRandom    rdm;
48   PetscBool      Test_3D=PETSC_FALSE,flg;
49   const PetscInt *ia,*ja;
50 
51   PetscCall(PetscInitialize(&argc,&argv,NULL,help));
52   PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
53   PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
54 
55   /* Get size of fine grids and coarse grids */
56   user.ratio     = 2;
57   user.coarse.mx = 4; user.coarse.my = 4; user.coarse.mz = 4;
58 
59   PetscCall(PetscOptionsGetInt(NULL,NULL,"-Mx",&user.coarse.mx,NULL));
60   PetscCall(PetscOptionsGetInt(NULL,NULL,"-My",&user.coarse.my,NULL));
61   PetscCall(PetscOptionsGetInt(NULL,NULL,"-Mz",&user.coarse.mz,NULL));
62   PetscCall(PetscOptionsGetInt(NULL,NULL,"-ratio",&user.ratio,NULL));
63   if (user.coarse.mz) Test_3D = PETSC_TRUE;
64 
65   user.fine.mx = user.ratio*(user.coarse.mx-1)+1;
66   user.fine.my = user.ratio*(user.coarse.my-1)+1;
67   user.fine.mz = user.ratio*(user.coarse.mz-1)+1;
68 
69   if (rank == 0) {
70     if (!Test_3D) {
71       PetscCall(PetscPrintf(PETSC_COMM_SELF,"coarse grids: %" PetscInt_FMT " %" PetscInt_FMT "; fine grids: %" PetscInt_FMT " %" PetscInt_FMT "\n",user.coarse.mx,user.coarse.my,user.fine.mx,user.fine.my));
72     } else {
73       PetscCall(PetscPrintf(PETSC_COMM_SELF,"coarse grids: %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "; fine grids: %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n",user.coarse.mx,user.coarse.my,user.coarse.mz,user.fine.mx,user.fine.my,user.fine.mz));
74     }
75   }
76 
77   /* Set up distributed array for fine grid */
78   if (!Test_3D) {
79     PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.fine.mx,user.fine.my,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&user.fine.da));
80   } else {
81     PetscCall(DMDACreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.fine.mx,user.fine.my,user.fine.mz,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,
82                            1,1,NULL,NULL,NULL,&user.fine.da));
83   }
84   PetscCall(DMSetFromOptions(user.fine.da));
85   PetscCall(DMSetUp(user.fine.da));
86 
87   /* Create and set A at fine grids */
88   PetscCall(DMSetMatType(user.fine.da,MATAIJ));
89   PetscCall(DMCreateMatrix(user.fine.da,&A));
90   PetscCall(MatGetLocalSize(A,&m,&n));
91   PetscCall(MatGetSize(A,&M,&N));
92 
93   /* set val=one to A (replace with random values!) */
94   PetscCall(PetscRandomCreate(PETSC_COMM_WORLD,&rdm));
95   PetscCall(PetscRandomSetFromOptions(rdm));
96   if (size == 1) {
97     PetscCall(MatGetRowIJ(A,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg));
98     if (flg) {
99       PetscCall(MatSeqAIJGetArray(A,&array));
100       for (i=0; i<ia[nrows]; i++) array[i] = one;
101       PetscCall(MatSeqAIJRestoreArray(A,&array));
102     }
103     PetscCall(MatRestoreRowIJ(A,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg));
104   } else {
105     Mat AA,AB;
106     PetscCall(MatMPIAIJGetSeqAIJ(A,&AA,&AB,NULL));
107     PetscCall(MatGetRowIJ(AA,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg));
108     if (flg) {
109       PetscCall(MatSeqAIJGetArray(AA,&array));
110       for (i=0; i<ia[nrows]; i++) array[i] = one;
111       PetscCall(MatSeqAIJRestoreArray(AA,&array));
112     }
113     PetscCall(MatRestoreRowIJ(AA,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg));
114     PetscCall(MatGetRowIJ(AB,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg));
115     if (flg) {
116       PetscCall(MatSeqAIJGetArray(AB,&array));
117       for (i=0; i<ia[nrows]; i++) array[i] = one;
118       PetscCall(MatSeqAIJRestoreArray(AB,&array));
119     }
120     PetscCall(MatRestoreRowIJ(AB,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg));
121   }
122   /* Set up distributed array for coarse grid */
123   if (!Test_3D) {
124     PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.coarse.mx,user.coarse.my,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&user.coarse.da));
125   } else {
126     PetscCall(DMDACreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.coarse.mx,user.coarse.my,user.coarse.mz,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,NULL,&user.coarse.da));
127   }
128   PetscCall(DMSetFromOptions(user.coarse.da));
129   PetscCall(DMSetUp(user.coarse.da));
130 
131   /* Create interpolation between the fine and coarse grids */
132   PetscCall(DMCreateInterpolation(user.coarse.da,user.fine.da,&P,NULL));
133 
134   /* Get R = P^T */
135   PetscCall(MatTranspose(P,MAT_INITIAL_MATRIX,&R));
136 
137   /* C = R*A*P */
138   /* Developer's API */
139   PetscCall(MatProductCreate(R,A,P,&D));
140   PetscCall(MatProductSetType(D,MATPRODUCT_ABC));
141   PetscCall(MatProductSetFromOptions(D));
142   PetscCall(MatProductSymbolic(D));
143   PetscCall(MatProductNumeric(D));
144   PetscCall(MatProductNumeric(D)); /* Test reuse symbolic D */
145 
146   /* User's API */
147   { /* Test MatMatMatMult_Basic() */
148     Mat Adense,Cdense;
149     PetscCall(MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense));
150     PetscCall(MatMatMatMult(R,Adense,P,MAT_INITIAL_MATRIX,fill,&Cdense));
151     PetscCall(MatMatMatMult(R,Adense,P,MAT_REUSE_MATRIX,fill,&Cdense));
152 
153     PetscCall(MatMultEqual(D,Cdense,10,&flg));
154     PetscCheck(flg,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"D*v != Cdense*v");
155     PetscCall(MatDestroy(&Adense));
156     PetscCall(MatDestroy(&Cdense));
157   }
158 
159   PetscCall(MatMatMatMult(R,A,P,MAT_INITIAL_MATRIX,fill,&C));
160   PetscCall(MatMatMatMult(R,A,P,MAT_REUSE_MATRIX,fill,&C));
161   PetscCall(MatProductClear(C));
162 
163   /* Test D == C */
164   PetscCall(MatEqual(D,C,&flg));
165   PetscCheck(flg,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"D != C");
166 
167   /* Test C == PtAP */
168   PetscCall(MatPtAP(A,P,MAT_INITIAL_MATRIX,fill,&PtAP));
169   PetscCall(MatPtAP(A,P,MAT_REUSE_MATRIX,fill,&PtAP));
170   PetscCall(MatEqual(C,PtAP,&flg));
171   PetscCheck(flg,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"C != PtAP");
172   PetscCall(MatDestroy(&PtAP));
173 
174   /* Clean up */
175   PetscCall(MatDestroy(&A));
176   PetscCall(PetscRandomDestroy(&rdm));
177   PetscCall(DMDestroy(&user.fine.da));
178   PetscCall(DMDestroy(&user.coarse.da));
179   PetscCall(MatDestroy(&P));
180   PetscCall(MatDestroy(&R));
181   PetscCall(MatDestroy(&C));
182   PetscCall(MatDestroy(&D));
183   PetscCall(PetscFinalize());
184   return 0;
185 }
186 
187 /*TEST
188 
189    test:
190 
191    test:
192       suffix: 2
193       nsize: 2
194       args: -matmatmatmult_via scalable
195 
196    test:
197       suffix: 3
198       nsize: 2
199       args: -matmatmatmult_via nonscalable
200       output_file: output/ex111_1.out
201 
202 TEST*/
203