/* xref: /petsc/src/mat/tests/ex134.c (revision 9a3a8673b4aea812b2f0c314666d2e7ff14d2577) */
1 static const char help[] = "Test parallel assembly of SBAIJ matrices\n\n";
2 
3 #include <petscmat.h>
4 
Assemble(MPI_Comm comm,PetscInt bs,MatType mtype)5 PetscErrorCode Assemble(MPI_Comm comm, PetscInt bs, MatType mtype)
6 {
7   const PetscInt    rc[]   = {0, 1, 2, 3};
8   const PetscScalar vals[] = {100, 2,  3,  4,  5,  600, 7,  8,  9,  100, 11, 1200, 13, 14, 15, 1600, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 2800, 29, 30, 31, 32,
9                               33,  34, 35, 36, 37, 38,  39, 40, 41, 42,  43, 44,   45, 46, 47, 48,   49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 49, 60,   61, 62, 63, 64};
10   Mat               A;
11 #if defined(PETSC_HAVE_MUMPS) || defined(PETSC_HAVE_MKL_CPARDISO)
12   Mat           F;
13   MatSolverType stype = MATSOLVERPETSC;
14   PetscRandom   rdm;
15   Vec           b, x, y;
16   PetscInt      i, j;
17   PetscReal     norm2, tol = 100 * PETSC_SQRT_MACHINE_EPSILON;
18   PetscBool     issbaij;
19 #endif
20   PetscViewer viewer;
21 
22   PetscFunctionBegin;
23   PetscCall(MatCreate(comm, &A));
24   PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 4 * bs, 4 * bs));
25   PetscCall(MatSetType(A, mtype));
26   PetscCall(MatMPIBAIJSetPreallocation(A, bs, 2, NULL, 2, NULL));
27   PetscCall(MatMPISBAIJSetPreallocation(A, bs, 2, NULL, 2, NULL));
28   PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
29   /* All processes contribute a global matrix */
30   PetscCall(MatSetValuesBlocked(A, 4, rc, 4, rc, vals, ADD_VALUES));
31   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
32   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
33   PetscCall(PetscPrintf(comm, "Matrix %s(%" PetscInt_FMT ")\n", mtype, bs));
34   PetscCall(PetscViewerASCIIGetStdout(comm, &viewer));
35   PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL));
36   PetscCall(MatView(A, viewer));
37   PetscCall(PetscViewerPopFormat(viewer));
38   PetscCall(MatView(A, viewer));
39 #if defined(PETSC_HAVE_MUMPS) || defined(PETSC_HAVE_MKL_CPARDISO)
40   PetscCall(PetscStrcmp(mtype, MATMPISBAIJ, &issbaij));
41   if (!issbaij) PetscCall(MatShift(A, 10));
42   PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rdm));
43   PetscCall(PetscRandomSetFromOptions(rdm));
44   PetscCall(MatCreateVecs(A, &x, &y));
45   PetscCall(VecDuplicate(x, &b));
46   for (j = 0; j < 2; j++) {
47   #if defined(PETSC_HAVE_MUMPS)
48     if (j == 0) stype = MATSOLVERMUMPS;
49     if (PetscDefined(USE_REAL___FLOAT128)) tol = 1e-10;
50   #else
51     if (j == 0) continue;
52   #endif
53   #if defined(PETSC_HAVE_MKL_CPARDISO)
54     if (j == 1) {
55       if (issbaij && PetscDefined(USE_COMPLEX)) continue;
56       stype = MATSOLVERMKL_CPARDISO;
57     }
58   #else
59     if (j == 1) continue;
60   #endif
61     if (issbaij) {
62       PetscCall(MatGetFactor(A, stype, MAT_FACTOR_CHOLESKY, &F));
63       PetscCall(MatCholeskyFactorSymbolic(F, A, NULL, NULL));
64       PetscCall(MatCholeskyFactorNumeric(F, A, NULL));
65     } else {
66       PetscCall(MatGetFactor(A, stype, MAT_FACTOR_LU, &F));
67       PetscCall(MatLUFactorSymbolic(F, A, NULL, NULL, NULL));
68       PetscCall(MatLUFactorNumeric(F, A, NULL));
69     }
70     for (i = 0; i < 10; i++) {
71       PetscCall(VecSetRandom(b, rdm));
72       PetscCall(MatSolve(F, b, y));
73       /* Check the error */
74       PetscCall(MatMult(A, y, x));
75       PetscCall(VecAXPY(x, -1.0, b));
76       PetscCall(VecNorm(x, NORM_2, &norm2));
77       if (norm2 > tol) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error:MatSolve(), norm2: %g\n", (double)norm2));
78     }
79     PetscCall(MatDestroy(&F));
80   }
81   PetscCall(VecDestroy(&x));
82   PetscCall(VecDestroy(&y));
83   PetscCall(VecDestroy(&b));
84   PetscCall(PetscRandomDestroy(&rdm));
85 #endif
86   PetscCall(MatDestroy(&A));
87   PetscFunctionReturn(PETSC_SUCCESS);
88 }
89 
main(int argc,char * argv[])90 int main(int argc, char *argv[])
91 {
92   MPI_Comm    comm;
93   PetscMPIInt size;
94 
95   PetscFunctionBeginUser;
96   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
97   comm = PETSC_COMM_WORLD;
98   PetscCallMPI(MPI_Comm_size(comm, &size));
99   PetscCheck(size == 2, comm, PETSC_ERR_USER, "This example must be run with exactly two processes");
100   PetscCall(Assemble(comm, 2, MATMPIBAIJ));
101   PetscCall(Assemble(comm, 2, MATMPISBAIJ));
102   PetscCall(Assemble(comm, 1, MATMPIBAIJ));
103   PetscCall(Assemble(comm, 1, MATMPISBAIJ));
104   PetscCall(PetscFinalize());
105   return 0;
106 }
107 
/*TEST

   test:
      nsize: 2
      args: -mat_ignore_lower_triangular
      filter: sed -e "s~mem [0-9]*~mem~g"

TEST*/
116