xref: /petsc/src/mat/tests/ex111.c (revision ebead697dbf761eb322f829370bbe90b3bd93fa3)
1 
/* Help text printed for -help: this test exercises sequential and parallel
   MatMatMatMult() and MatPtAP() on DMDA-generated operators (see main below). */
static char help[] ="Tests sequential and parallel MatMatMatMult() and MatPtAP(). Modified from ex96.c \n\
  -Mx <xg>, where <xg> = number of coarse grid points in the x-direction\n\
  -My <yg>, where <yg> = number of coarse grid points in the y-direction\n\
  -Mz <zg>, where <zg> = number of coarse grid points in the z-direction\n\
  -Npx <npx>, where <npx> = number of processors in the x-direction\n\
  -Npy <npy>, where <npy> = number of processors in the y-direction\n\
  -Npz <npz>, where <npz> = number of processors in the z-direction\n\n";
9 
10 /*
11     Example of usage: mpiexec -n 3 ./ex41 -Mx 10 -My 10 -Mz 10
12 */
13 
14 #include <petscdm.h>
15 #include <petscdmda.h>
16 
17 /* User-defined application contexts */
/* Per-grid context: holds the DMDA and associated vectors/matrix for one level.
   NOTE(review): only mx/my/mz and da are used by this test; localX/localF, x/b/r,
   and J are carried over from ex96.c and appear unused here. */
typedef struct {
  PetscInt mx,my,mz;            /* number grid points in x, y and z direction */
  Vec      localX,localF;       /* local vectors with ghost region */
  DM       da;
  Vec      x,b,r;               /* global vectors */
  Mat      J;                   /* Jacobian on grid */
} GridCtx;
/* Two-level application context: a fine grid, a coarse grid, their refinement
   ratio, and the coarse-to-fine interpolation. */
typedef struct {
  GridCtx  fine;
  GridCtx  coarse;
  PetscInt ratio;               /* grid-line refinement ratio between levels */
  Mat      Ii;                  /* interpolation from coarse to fine */
} AppCtx;

/* Level indices inherited from ex96.c; NOTE(review): unused in this test. */
#define COARSE_LEVEL 0
#define FINE_LEVEL   1
34 
35 /*
36       Mm_ratio - ration of grid lines between fine and coarse grids.
37 */
38 int main(int argc,char **argv)
39 {
40   AppCtx         user;
41   PetscMPIInt    size,rank;
42   PetscInt       m,n,M,N,i,nrows;
43   PetscScalar    one = 1.0;
44   PetscReal      fill=2.0;
45   Mat            A,P,R,C,PtAP,D;
46   PetscScalar    *array;
47   PetscRandom    rdm;
48   PetscBool      Test_3D=PETSC_FALSE,flg;
49   const PetscInt *ia,*ja;
50 
51   PetscFunctionBeginUser;
52   PetscCall(PetscInitialize(&argc,&argv,NULL,help));
53   PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
54   PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
55 
56   /* Get size of fine grids and coarse grids */
57   user.ratio     = 2;
58   user.coarse.mx = 4; user.coarse.my = 4; user.coarse.mz = 4;
59 
60   PetscCall(PetscOptionsGetInt(NULL,NULL,"-Mx",&user.coarse.mx,NULL));
61   PetscCall(PetscOptionsGetInt(NULL,NULL,"-My",&user.coarse.my,NULL));
62   PetscCall(PetscOptionsGetInt(NULL,NULL,"-Mz",&user.coarse.mz,NULL));
63   PetscCall(PetscOptionsGetInt(NULL,NULL,"-ratio",&user.ratio,NULL));
64   if (user.coarse.mz) Test_3D = PETSC_TRUE;
65 
66   user.fine.mx = user.ratio*(user.coarse.mx-1)+1;
67   user.fine.my = user.ratio*(user.coarse.my-1)+1;
68   user.fine.mz = user.ratio*(user.coarse.mz-1)+1;
69 
70   if (rank == 0) {
71     if (!Test_3D) {
72       PetscCall(PetscPrintf(PETSC_COMM_SELF,"coarse grids: %" PetscInt_FMT " %" PetscInt_FMT "; fine grids: %" PetscInt_FMT " %" PetscInt_FMT "\n",user.coarse.mx,user.coarse.my,user.fine.mx,user.fine.my));
73     } else {
74       PetscCall(PetscPrintf(PETSC_COMM_SELF,"coarse grids: %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "; fine grids: %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n",user.coarse.mx,user.coarse.my,user.coarse.mz,user.fine.mx,user.fine.my,user.fine.mz));
75     }
76   }
77 
78   /* Set up distributed array for fine grid */
79   if (!Test_3D) {
80     PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.fine.mx,user.fine.my,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&user.fine.da));
81   } else {
82     PetscCall(DMDACreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.fine.mx,user.fine.my,user.fine.mz,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,
83                            1,1,NULL,NULL,NULL,&user.fine.da));
84   }
85   PetscCall(DMSetFromOptions(user.fine.da));
86   PetscCall(DMSetUp(user.fine.da));
87 
88   /* Create and set A at fine grids */
89   PetscCall(DMSetMatType(user.fine.da,MATAIJ));
90   PetscCall(DMCreateMatrix(user.fine.da,&A));
91   PetscCall(MatGetLocalSize(A,&m,&n));
92   PetscCall(MatGetSize(A,&M,&N));
93 
94   /* set val=one to A (replace with random values!) */
95   PetscCall(PetscRandomCreate(PETSC_COMM_WORLD,&rdm));
96   PetscCall(PetscRandomSetFromOptions(rdm));
97   if (size == 1) {
98     PetscCall(MatGetRowIJ(A,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg));
99     if (flg) {
100       PetscCall(MatSeqAIJGetArray(A,&array));
101       for (i=0; i<ia[nrows]; i++) array[i] = one;
102       PetscCall(MatSeqAIJRestoreArray(A,&array));
103     }
104     PetscCall(MatRestoreRowIJ(A,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg));
105   } else {
106     Mat AA,AB;
107     PetscCall(MatMPIAIJGetSeqAIJ(A,&AA,&AB,NULL));
108     PetscCall(MatGetRowIJ(AA,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg));
109     if (flg) {
110       PetscCall(MatSeqAIJGetArray(AA,&array));
111       for (i=0; i<ia[nrows]; i++) array[i] = one;
112       PetscCall(MatSeqAIJRestoreArray(AA,&array));
113     }
114     PetscCall(MatRestoreRowIJ(AA,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg));
115     PetscCall(MatGetRowIJ(AB,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg));
116     if (flg) {
117       PetscCall(MatSeqAIJGetArray(AB,&array));
118       for (i=0; i<ia[nrows]; i++) array[i] = one;
119       PetscCall(MatSeqAIJRestoreArray(AB,&array));
120     }
121     PetscCall(MatRestoreRowIJ(AB,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg));
122   }
123   /* Set up distributed array for coarse grid */
124   if (!Test_3D) {
125     PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.coarse.mx,user.coarse.my,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&user.coarse.da));
126   } else {
127     PetscCall(DMDACreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.coarse.mx,user.coarse.my,user.coarse.mz,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,NULL,&user.coarse.da));
128   }
129   PetscCall(DMSetFromOptions(user.coarse.da));
130   PetscCall(DMSetUp(user.coarse.da));
131 
132   /* Create interpolation between the fine and coarse grids */
133   PetscCall(DMCreateInterpolation(user.coarse.da,user.fine.da,&P,NULL));
134 
135   /* Get R = P^T */
136   PetscCall(MatTranspose(P,MAT_INITIAL_MATRIX,&R));
137 
138   /* C = R*A*P */
139   /* Developer's API */
140   PetscCall(MatProductCreate(R,A,P,&D));
141   PetscCall(MatProductSetType(D,MATPRODUCT_ABC));
142   PetscCall(MatProductSetFromOptions(D));
143   PetscCall(MatProductSymbolic(D));
144   PetscCall(MatProductNumeric(D));
145   PetscCall(MatProductNumeric(D)); /* Test reuse symbolic D */
146 
147   /* User's API */
148   { /* Test MatMatMatMult_Basic() */
149     Mat Adense,Cdense;
150     PetscCall(MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense));
151     PetscCall(MatMatMatMult(R,Adense,P,MAT_INITIAL_MATRIX,fill,&Cdense));
152     PetscCall(MatMatMatMult(R,Adense,P,MAT_REUSE_MATRIX,fill,&Cdense));
153 
154     PetscCall(MatMultEqual(D,Cdense,10,&flg));
155     PetscCheck(flg,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"D*v != Cdense*v");
156     PetscCall(MatDestroy(&Adense));
157     PetscCall(MatDestroy(&Cdense));
158   }
159 
160   PetscCall(MatMatMatMult(R,A,P,MAT_INITIAL_MATRIX,fill,&C));
161   PetscCall(MatMatMatMult(R,A,P,MAT_REUSE_MATRIX,fill,&C));
162   PetscCall(MatProductClear(C));
163 
164   /* Test D == C */
165   PetscCall(MatEqual(D,C,&flg));
166   PetscCheck(flg,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"D != C");
167 
168   /* Test C == PtAP */
169   PetscCall(MatPtAP(A,P,MAT_INITIAL_MATRIX,fill,&PtAP));
170   PetscCall(MatPtAP(A,P,MAT_REUSE_MATRIX,fill,&PtAP));
171   PetscCall(MatEqual(C,PtAP,&flg));
172   PetscCheck(flg,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"C != PtAP");
173   PetscCall(MatDestroy(&PtAP));
174 
175   /* Clean up */
176   PetscCall(MatDestroy(&A));
177   PetscCall(PetscRandomDestroy(&rdm));
178   PetscCall(DMDestroy(&user.fine.da));
179   PetscCall(DMDestroy(&user.coarse.da));
180   PetscCall(MatDestroy(&P));
181   PetscCall(MatDestroy(&R));
182   PetscCall(MatDestroy(&C));
183   PetscCall(MatDestroy(&D));
184   PetscCall(PetscFinalize());
185   return 0;
186 }
187 
188 /*TEST
189 
190    test:
191 
192    test:
193       suffix: 2
194       nsize: 2
195       args: -matmatmatmult_via scalable
196 
197    test:
198       suffix: 3
199       nsize: 2
200       args: -matmatmatmult_via nonscalable
201       output_file: output/ex111_1.out
202 
203 TEST*/
204