xref: /petsc/src/mat/tests/ex89.c (revision ebead697dbf761eb322f829370bbe90b3bd93fa3)
/* Usage string printed by the -help option; describes what this test exercises. */
static char help[] ="Tests MatPtAP() for MPIMAIJ and MPIAIJ \n ";
2 
3 #include <petscdmda.h>
4 
5 int main(int argc,char **argv)
6 {
7   DM             coarsedm,finedm;
8   PetscMPIInt    size,rank;
9   PetscInt       M,N,Z,i,nrows;
10   PetscScalar    one = 1.0;
11   PetscReal      fill=2.0;
12   Mat            A,P,C;
13   PetscScalar    *array,alpha;
14   PetscBool      Test_3D=PETSC_FALSE,flg;
15   const PetscInt *ia,*ja;
16   PetscInt       dof;
17   MPI_Comm       comm;
18 
19   PetscFunctionBeginUser;
20   PetscCall(PetscInitialize(&argc,&argv,NULL,help));
21   comm = PETSC_COMM_WORLD;
22   PetscCallMPI(MPI_Comm_rank(comm,&rank));
23   PetscCallMPI(MPI_Comm_size(comm,&size));
24   M = 10; N = 10; Z = 10;
25   dof  = 10;
26 
27   PetscCall(PetscOptionsGetBool(NULL,NULL,"-test_3D",&Test_3D,NULL));
28   PetscCall(PetscOptionsGetInt(NULL,NULL,"-M",&M,NULL));
29   PetscCall(PetscOptionsGetInt(NULL,NULL,"-N",&N,NULL));
30   PetscCall(PetscOptionsGetInt(NULL,NULL,"-Z",&Z,NULL));
31   /* Set up distributed array for fine grid */
32   if (!Test_3D) {
33     PetscCall(DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,M,N,PETSC_DECIDE,PETSC_DECIDE,dof,1,NULL,NULL,&coarsedm));
34   } else {
35     PetscCall(DMDACreate3d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,M,N,Z,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,dof,1,NULL,NULL,NULL,&coarsedm));
36   }
37   PetscCall(DMSetFromOptions(coarsedm));
38   PetscCall(DMSetUp(coarsedm));
39 
40   /* This makes sure the coarse DMDA has the same partition as the fine DMDA */
41   PetscCall(DMRefine(coarsedm,PetscObjectComm((PetscObject)coarsedm),&finedm));
42 
43   /*------------------------------------------------------------*/
44   PetscCall(DMSetMatType(finedm,MATAIJ));
45   PetscCall(DMCreateMatrix(finedm,&A));
46 
47   /* set val=one to A */
48   if (size == 1) {
49     PetscCall(MatGetRowIJ(A,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg));
50     if (flg) {
51       PetscCall(MatSeqAIJGetArray(A,&array));
52       for (i=0; i<ia[nrows]; i++) array[i] = one;
53       PetscCall(MatSeqAIJRestoreArray(A,&array));
54     }
55     PetscCall(MatRestoreRowIJ(A,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg));
56   } else {
57     Mat AA,AB;
58     PetscCall(MatMPIAIJGetSeqAIJ(A,&AA,&AB,NULL));
59     PetscCall(MatGetRowIJ(AA,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg));
60     if (flg) {
61       PetscCall(MatSeqAIJGetArray(AA,&array));
62       for (i=0; i<ia[nrows]; i++) array[i] = one;
63       PetscCall(MatSeqAIJRestoreArray(AA,&array));
64     }
65     PetscCall(MatRestoreRowIJ(AA,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg));
66     PetscCall(MatGetRowIJ(AB,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg));
67     if (flg) {
68       PetscCall(MatSeqAIJGetArray(AB,&array));
69       for (i=0; i<ia[nrows]; i++) array[i] = one;
70       PetscCall(MatSeqAIJRestoreArray(AB,&array));
71     }
72     PetscCall(MatRestoreRowIJ(AB,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg));
73   }
74   /* Create interpolation between the fine and coarse grids */
75   PetscCall(DMCreateInterpolation(coarsedm,finedm,&P,NULL));
76 
77   /* Test P^T * A * P - MatPtAP() */
78   /*------------------------------*/
79   /* (1) Developer API */
80   PetscCall(MatProductCreate(A,P,NULL,&C));
81   PetscCall(MatProductSetType(C,MATPRODUCT_PtAP));
82   PetscCall(MatProductSetAlgorithm(C,"allatonce"));
83   PetscCall(MatProductSetFill(C,PETSC_DEFAULT));
84   PetscCall(MatProductSetFromOptions(C));
85   PetscCall(MatProductSymbolic(C));
86   PetscCall(MatProductNumeric(C));
87   PetscCall(MatProductNumeric(C)); /* Test reuse of symbolic C */
88 
89   { /* Test MatProductView() */
90     PetscViewer viewer;
91     PetscCall(PetscViewerASCIIOpen(comm,NULL, &viewer));
92     PetscCall(PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO));
93     PetscCall(MatProductView(C,viewer));
94     PetscCall(PetscViewerPopFormat(viewer));
95     PetscCall(PetscViewerDestroy(&viewer));
96   }
97 
98   PetscCall(MatPtAPMultEqual(A,P,C,10,&flg));
99   PetscCheck(flg,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"Error in MatProduct_PtAP");
100   PetscCall(MatDestroy(&C));
101 
102   /* (2) User API */
103   PetscCall(MatPtAP(A,P,MAT_INITIAL_MATRIX,fill,&C));
104   /* Test MAT_REUSE_MATRIX - reuse symbolic C */
105   alpha=1.0;
106   for (i=0; i<1; i++) {
107     alpha -= 0.1;
108     PetscCall(MatScale(A,alpha));
109     PetscCall(MatPtAP(A,P,MAT_REUSE_MATRIX,fill,&C));
110   }
111 
112   /* Free intermediate data structures created for reuse of C=Pt*A*P */
113   PetscCall(MatProductClear(C));
114 
115   PetscCall(MatPtAPMultEqual(A,P,C,10,&flg));
116   PetscCheck(flg,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"Error in MatPtAP");
117 
118   PetscCall(MatDestroy(&C));
119   PetscCall(MatDestroy(&A));
120   PetscCall(MatDestroy(&P));
121   PetscCall(DMDestroy(&finedm));
122   PetscCall(DMDestroy(&coarsedm));
123   PetscCall(PetscFinalize());
124   return 0;
125 }
126 
127 /*TEST
128 
129    test:
130       args: -M 10 -N 10 -Z 10
131       output_file: output/ex89_1.out
132 
133    test:
134       suffix: allatonce
135       nsize: 4
136       args: -M 10 -N 10 -Z 10
137       output_file: output/ex89_2.out
138 
139    test:
140       suffix: allatonce_merged
141       nsize: 4
142       args: -M 10 -M 5 -M 10 -mat_product_algorithm allatonce_merged
143       output_file: output/ex89_3.out
144 
145    test:
146       suffix: nonscalable_3D
147       nsize: 4
148       args: -M 10 -M 5 -M 10 -test_3D 1 -mat_product_algorithm nonscalable
149       output_file: output/ex89_4.out
150 
151    test:
152       suffix: allatonce_merged_3D
153       nsize: 4
154       args: -M 10 -M 5 -M 10 -test_3D 1 -mat_product_algorithm allatonce_merged
155       output_file: output/ex89_3.out
156 
157    test:
158       suffix: nonscalable
159       nsize: 4
160       args: -M 10 -N 10 -Z 10 -mat_product_algorithm nonscalable
161       output_file: output/ex89_5.out
162 
163 TEST*/
164