static char help[] = "Tests MPI parallel matrix creation and MatCreateRedundantMatrix().\n\n";

#include <petscmat.h>

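/*
   Example run (see the TEST harness at the bottom of this file):
     mpiexec -n 3 ./ex9 -nsubcomms 2 -view_mat
*/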
int main(int argc, char **args)
{
  Mat         C, Credundant;
  MatInfo     info;
  PetscMPIInt rank, size, subsize;
  PetscInt    i, j, m = 3, n = 2, low, high, iglobal;
  PetscInt    Ii, J, ldim, nsubcomms;
  PetscBool   flg_info, flg_mat;
  PetscScalar v, one = 1.0;
  Vec         x, y;
  MPI_Comm    subcomm;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, NULL, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
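  /* Each rank owns two grid columns, so the global grid is m x n with n = 2*size */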
  n = 2 * size;

  PetscCall(MatCreate(PETSC_COMM_WORLD, &C));
  PetscCall(MatSetSizes(C, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n));
  PetscCall(MatSetFromOptions(C));
  PetscCall(MatSetUp(C));

  /* Assemble the five-point stencil matrix; each rank inserts the rows for its two grid columns */
  for (i = 0; i < m; i++) {
    for (j = 2 * rank; j < 2 * rank + 2; j++) {
      v  = -1.0;
      Ii = j + n * i; /* global row index of grid point (i,j) */
      if (i > 0) { /* coupling to grid point (i-1,j) */
        J = Ii - n;
        PetscCall(MatSetValues(C, 1, &Ii, 1, &J, &v, INSERT_VALUES));
      }
      if (i < m - 1) { /* coupling to grid point (i+1,j) */
        J = Ii + n;
        PetscCall(MatSetValues(C, 1, &Ii, 1, &J, &v, INSERT_VALUES));
      }
      if (j > 0) { /* coupling to grid point (i,j-1) */
        J = Ii - 1;
        PetscCall(MatSetValues(C, 1, &Ii, 1, &J, &v, INSERT_VALUES));
      }
      if (j < n - 1) { /* coupling to grid point (i,j+1) */
        J = Ii + 1;
        PetscCall(MatSetValues(C, 1, &Ii, 1, &J, &v, INSERT_VALUES));
      }
      v = 4.0; /* diagonal entry */
      PetscCall(MatSetValues(C, 1, &Ii, 1, &Ii, &v, INSERT_VALUES));
    }
  }


  /* Add extra elements (to illustrate variants of MatGetInfo) */
  Ii = n;
  J  = n - 2;
  v  = 100.0;
  PetscCall(MatSetValues(C, 1, &Ii, 1, &J, &v, INSERT_VALUES));
  Ii = n - 2;
  J  = n;
  v  = 100.0;
  PetscCall(MatSetValues(C, 1, &Ii, 1, &J, &v, INSERT_VALUES));

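  /* Assembly communicates any values that were set on ranks that do not own the row */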
  PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));

  /* Form vectors */
  PetscCall(MatCreateVecs(C, &x, &y));
  PetscCall(VecGetLocalSize(x, &ldim));
  PetscCall(VecGetOwnershipRange(x, &low, &high));
  for (i = 0; i < ldim; i++) {
    iglobal = i + low;
    v       = one * ((PetscReal)i) + 100.0 * rank;
    PetscCall(VecSetValues(x, 1, &iglobal, &v, INSERT_VALUES));
  }
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  PetscCall(MatMult(C, x, y));

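  /* -view_info prints the matrix in info format plus global-sum and global-max MatGetInfo() statistics */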
  PetscCall(PetscOptionsHasName(NULL, NULL, "-view_info", &flg_info));
  if (flg_info) {
    PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_ASCII_INFO));
    PetscCall(MatView(C, PETSC_VIEWER_STDOUT_WORLD));

    PetscCall(MatGetInfo(C, MAT_GLOBAL_SUM, &info));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "matrix information (global sums):\nnonzeros = %" PetscInt_FMT ", allocated nonzeros = %" PetscInt_FMT "\n", (PetscInt)info.nz_used, (PetscInt)info.nz_allocated));
    PetscCall(MatGetInfo(C, MAT_GLOBAL_MAX, &info));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "matrix information (global max):\nnonzeros = %" PetscInt_FMT ", allocated nonzeros = %" PetscInt_FMT "\n", (PetscInt)info.nz_used, (PetscInt)info.nz_allocated));
    PetscCall(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD)); /* balance the push above */
  }

  PetscCall(PetscOptionsHasName(NULL, NULL, "-view_mat", &flg_mat));
  if (flg_mat) PetscCall(MatView(C, PETSC_VIEWER_STDOUT_WORLD));

  /* Test MatCreateRedundantMatrix(): create nsubcomms complete copies of C, one per
     subcommunicator; passing MPI_COMM_NULL lets PETSc split PETSC_COMM_WORLD itself */
  nsubcomms = size;
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-nsubcomms", &nsubcomms, NULL));
  PetscCall(MatCreateRedundantMatrix(C, nsubcomms, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &Credundant));
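  /* MAT_REUSE_MATRIX overwrites the previously created Credundant in place */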
  PetscCall(MatCreateRedundantMatrix(C, nsubcomms, MPI_COMM_NULL, MAT_REUSE_MATRIX, &Credundant));

  PetscCall(PetscObjectGetComm((PetscObject)Credundant, &subcomm));
  PetscCallMPI(MPI_Comm_size(subcomm, &subsize));

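  /* View only from subcommunicators of size 2, so the output does not depend on how the ranks were split */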
  if (subsize == 2 && flg_mat) {
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_(subcomm), "\n[%d] Credundant:\n", rank));
    PetscCall(MatView(Credundant, PETSC_VIEWER_STDOUT_(subcomm)));
  }
  PetscCall(MatDestroy(&Credundant));

  /* Test MatCreateRedundantMatrix() with a user-provided subcommunicator */
  {
    PetscSubcomm psubcomm;

    PetscCall(PetscSubcommCreate(PETSC_COMM_WORLD, &psubcomm));
    PetscCall(PetscSubcommSetNumber(psubcomm, nsubcomms));
    PetscCall(PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_CONTIGUOUS));
    /* enable runtime switching of the subcomm type, e.g., '-psubcomm_type interlaced' */
    PetscCall(PetscSubcommSetFromOptions(psubcomm));

    PetscCall(MatCreateRedundantMatrix(C, nsubcomms, PetscSubcommChild(psubcomm), MAT_INITIAL_MATRIX, &Credundant));
    PetscCall(MatCreateRedundantMatrix(C, nsubcomms, PetscSubcommChild(psubcomm), MAT_REUSE_MATRIX, &Credundant));

    PetscCall(PetscSubcommDestroy(&psubcomm));
    PetscCall(MatDestroy(&Credundant));
  }

  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
  PetscCall(MatDestroy(&C));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 3
      args: -view_info

   test:
      suffix: 2
      nsize: 3
      args: -nsubcomms 2 -view_mat -psubcomm_type interlaced

   test:
      suffix: 3
      nsize: 3
      args: -nsubcomms 2 -view_mat -psubcomm_type contiguous

   test:
      suffix: 3_baij
      nsize: 3
      args: -mat_type baij -nsubcomms 2 -view_mat

   test:
      suffix: 3_sbaij
      nsize: 3
      args: -mat_type sbaij -nsubcomms 2 -view_mat

   test:
      suffix: 3_dense
      nsize: 3
      args: -mat_type dense -nsubcomms 2 -view_mat

   test:
      suffix: 4_baij
      nsize: 3
      args: -mat_type baij -nsubcomms 2 -view_mat -psubcomm_type interlaced

   test:
      suffix: 4_sbaij
      nsize: 3
      args: -mat_type sbaij -nsubcomms 2 -view_mat -psubcomm_type interlaced

   test:
      suffix: 4_dense
      nsize: 3
      args: -mat_type dense -nsubcomms 2 -view_mat -psubcomm_type interlaced

TEST*/