xref: /petsc/src/mat/tests/ex12.c (revision 732aec7a18f2199fb53bb9a2f3aef439a834ce31)
1 static char help[] = "Tests the use of MatZeroRows() for parallel matrices.\n\
2 This example also tests the use of MatDuplicate() for both MPIAIJ and MPIBAIJ matrices";
3 
4 #include <petscmat.h>
5 
6 extern PetscErrorCode TestMatZeroRows_Basic(Mat, IS, PetscScalar);
7 extern PetscErrorCode TestMatZeroRows_with_no_allocation(Mat, IS, PetscScalar);
8 
main(int argc,char ** args)9 int main(int argc, char **args)
10 {
11   Mat         A;
12   PetscInt    i, j, m = 3, n, Ii, J, Imax;
13   PetscMPIInt rank, size;
14   PetscScalar v, diag = -4.0;
15   IS          is;
16 
17   PetscFunctionBeginUser;
18   PetscCall(PetscInitialize(&argc, &args, NULL, help));
19   PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
20   PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
21   n = 2 * size;
22 
23   /* create A Square matrix for the five point stencil,YET AGAIN*/
24   PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
25   PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n));
26   PetscCall(MatSetFromOptions(A));
27   PetscCall(MatSetUp(A));
28   for (i = 0; i < m; i++) {
29     for (j = 2 * rank; j < 2 * rank + 2; j++) {
30       v  = -1.0;
31       Ii = j + n * i;
32       if (i > 0) {
33         J = Ii - n;
34         PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
35       }
36       if (i < m - 1) {
37         J = Ii + n;
38         PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
39       }
40       if (j > 0) {
41         J = Ii - 1;
42         PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
43       }
44       if (j < n - 1) {
45         J = Ii + 1;
46         PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
47       }
48       v = 4.0;
49       PetscCall(MatSetValues(A, 1, &Ii, 1, &Ii, &v, INSERT_VALUES));
50     }
51   }
52   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
53   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
54 
55   /* Create AN IS required by MatZeroRows() */
56   Imax = n * rank;
57   if (Imax >= n * m - m - 1) Imax = m * n - m - 1;
58   PetscCall(ISCreateStride(PETSC_COMM_SELF, m, Imax, 1, &is));
59 
60   PetscCall(TestMatZeroRows_Basic(A, is, 0.0));
61   PetscCall(TestMatZeroRows_Basic(A, is, diag));
62 
63   PetscCall(TestMatZeroRows_with_no_allocation(A, is, 0.0));
64   PetscCall(TestMatZeroRows_with_no_allocation(A, is, diag));
65 
66   PetscCall(MatDestroy(&A));
67 
68   /* Now Create a rectangular matrix with five point stencil (app)
69    n+size is used so that this dimension is always divisible by size.
70    This way, we can always use bs = size for any number of procs */
71   PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
72   PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m * n, m * (n + size)));
73   PetscCall(MatSetFromOptions(A));
74   PetscCall(MatSetUp(A));
75   for (i = 0; i < m; i++) {
76     for (j = 2 * rank; j < 2 * rank + 2; j++) {
77       v  = -1.0;
78       Ii = j + n * i;
79       if (i > 0) {
80         J = Ii - n;
81         PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
82       }
83       if (i < m - 1) {
84         J = Ii + n;
85         PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
86       }
87       if (j > 0) {
88         J = Ii - 1;
89         PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
90       }
91       if (j < n + size - 1) {
92         J = Ii + 1;
93         PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
94       }
95       v = 4.0;
96       PetscCall(MatSetValues(A, 1, &Ii, 1, &Ii, &v, INSERT_VALUES));
97     }
98   }
99   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
100   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
101 
102   PetscCall(TestMatZeroRows_Basic(A, is, 0.0));
103   PetscCall(TestMatZeroRows_Basic(A, is, diag));
104 
105   PetscCall(MatDestroy(&A));
106   PetscCall(ISDestroy(&is));
107   PetscCall(PetscFinalize());
108   return 0;
109 }
110 
TestMatZeroRows_Basic(Mat A,IS is,PetscScalar diag)111 PetscErrorCode TestMatZeroRows_Basic(Mat A, IS is, PetscScalar diag)
112 {
113   Mat       B;
114   PetscBool keepnonzeropattern;
115 
116   /* Now copy A into B, and test it with MatZeroRows() */
117   PetscFunctionBeginUser;
118   PetscCall(MatDuplicate(A, MAT_COPY_VALUES, &B));
119 
120   PetscCall(PetscOptionsHasName(NULL, NULL, "-keep_nonzero_pattern", &keepnonzeropattern));
121   if (keepnonzeropattern) PetscCall(MatSetOption(B, MAT_KEEP_NONZERO_PATTERN, PETSC_TRUE));
122 
123   PetscCall(MatZeroRowsIS(B, is, diag, 0, 0));
124   PetscCall(MatView(B, PETSC_VIEWER_STDOUT_WORLD));
125   PetscCall(MatDestroy(&B));
126   PetscFunctionReturn(PETSC_SUCCESS);
127 }
128 
TestMatZeroRows_with_no_allocation(Mat A,IS is,PetscScalar diag)129 PetscErrorCode TestMatZeroRows_with_no_allocation(Mat A, IS is, PetscScalar diag)
130 {
131   Mat B;
132 
133   /* Now copy A into B, and test it with MatZeroRows() */
134   PetscFunctionBeginUser;
135   PetscCall(MatDuplicate(A, MAT_COPY_VALUES, &B));
136   /* Set this flag after assembly. This way, it affects only MatZeroRows() */
137   PetscCall(MatSetOption(B, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
138 
139   PetscCall(MatZeroRowsIS(B, is, diag, 0, 0));
140   PetscCall(MatView(B, PETSC_VIEWER_STDOUT_WORLD));
141   PetscCall(MatDestroy(&B));
142   PetscFunctionReturn(PETSC_SUCCESS);
143 }
144 
/*TEST

   test:
      nsize: 2
      filter: grep -v " MPI process"

   test:
      suffix: 2
      nsize: 3
      args: -mat_type mpibaij -mat_block_size 3
      filter: grep -v " MPI process"

   test:
      suffix: 3
      nsize: 3
      args: -mat_type mpiaij -keep_nonzero_pattern
      filter: grep -v " MPI process"

   test:
      suffix: 4
      nsize: 3
      args: -keep_nonzero_pattern -mat_type mpibaij -mat_block_size 3
      filter: grep -v " MPI process"

   test:
      requires: !defined(PETSC_HAVE_THREADSAFETY)
      suffix: 5
      nsize: 3
      args: -mat_type mpibaij -mat_block_size 3 -mat_view
      filter: grep -v " MPI process"

TEST*/
177