static const char help[] = "Test parallel assembly of SBAIJ matrices\n\n";

#include <petscmat.h>

PetscErrorCode Assemble(MPI_Comm comm, PetscInt bs, MatType mtype) {
  const PetscInt    rc[]   = {0, 1, 2, 3};
  const PetscScalar vals[] = {100, 2, 3, 4, 5, 600, 7, 8, 9, 100, 11, 1200, 13, 14, 15, 1600, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 2800, 29, 30, 31, 32,
                              33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 49, 60, 61, 62, 63, 64};
  Mat A;
#if defined(PETSC_HAVE_MUMPS) || defined(PETSC_HAVE_MKL_CPARDISO)
  Mat           F;
  MatSolverType stype = MATSOLVERPETSC;
  PetscRandom   rdm;
  Vec           b, x, y;
  PetscInt      i, j;
  PetscReal     norm2, tol = 100 * PETSC_SMALL;
  PetscBool     issbaij;
#endif
  PetscViewer viewer;

  PetscFunctionBegin;
  PetscCall(MatCreate(comm, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 4 * bs, 4 * bs));
  PetscCall(MatSetType(A, mtype));
  /* Only the preallocation call matching the actual matrix type takes effect; the other is ignored */
  PetscCall(MatMPIBAIJSetPreallocation(A, bs, 2, NULL, 2, NULL));
  PetscCall(MatMPISBAIJSetPreallocation(A, bs, 2, NULL, 2, NULL));
  PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
  /* All processes contribute the full global matrix */
  PetscCall(MatSetValuesBlocked(A, 4, rc, 4, rc, vals, ADD_VALUES));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscCall(PetscPrintf(comm, "Matrix %s(%" PetscInt_FMT ")\n", mtype, bs));
  PetscCall(PetscViewerASCIIGetStdout(comm, &viewer));
  PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL));
  PetscCall(MatView(A, viewer));
  PetscCall(PetscViewerPopFormat(viewer));
  PetscCall(MatView(A, viewer));
#if defined(PETSC_HAVE_MUMPS) || defined(PETSC_HAVE_MKL_CPARDISO)
  PetscCall(PetscStrcmp(mtype, MATMPISBAIJ, &issbaij));
  /* Shift the diagonal of the nonsymmetric (BAIJ) matrix so it is safely nonsingular for LU */
  if (!issbaij) PetscCall(MatShift(A, 10));
  PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rdm));
  PetscCall(PetscRandomSetFromOptions(rdm));
  PetscCall(MatCreateVecs(A, &x, &y));
  PetscCall(VecDuplicate(x, &b));
  for (j = 0; j < 2; j++) {
#if defined(PETSC_HAVE_MUMPS)
    if (j == 0) stype = MATSOLVERMUMPS;
#else
    if (j == 0) continue;
#endif
#if defined(PETSC_HAVE_MKL_CPARDISO)
    if (j == 1) stype = MATSOLVERMKL_CPARDISO;
#else
    if (j == 1) continue;
#endif
    if (issbaij) {
      PetscCall(MatGetFactor(A, stype, MAT_FACTOR_CHOLESKY, &F));
      PetscCall(MatCholeskyFactorSymbolic(F, A, NULL, NULL));
      PetscCall(MatCholeskyFactorNumeric(F, A, NULL));
    } else {
      PetscCall(MatGetFactor(A, stype, MAT_FACTOR_LU, &F));
      PetscCall(MatLUFactorSymbolic(F, A, NULL, NULL, NULL));
      PetscCall(MatLUFactorNumeric(F, A, NULL));
    }
    for (i = 0; i < 10; i++) {
      PetscCall(VecSetRandom(b, rdm));
      PetscCall(MatSolve(F, b, y));
      /* Check the residual */
      PetscCall(MatMult(A, y, x));
      PetscCall(VecAXPY(x, -1.0, b));
      PetscCall(VecNorm(x, NORM_2, &norm2));
      if (norm2 > tol) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error: MatSolve(), norm2: %g\n", (double)norm2));
    }
    PetscCall(MatDestroy(&F));
  }
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
  PetscCall(VecDestroy(&b));
  PetscCall(PetscRandomDestroy(&rdm));
#endif
  PetscCall(MatDestroy(&A));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char *argv[]) {
  MPI_Comm    comm;
  PetscMPIInt size;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  comm = PETSC_COMM_WORLD;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCheck(size == 2, comm, PETSC_ERR_USER, "This example must be run with exactly two processes");
  PetscCall(Assemble(comm, 2, MATMPIBAIJ));
  PetscCall(Assemble(comm, 2, MATMPISBAIJ));
  PetscCall(Assemble(comm, 1, MATMPIBAIJ));
  PetscCall(Assemble(comm, 1, MATMPISBAIJ));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 2
      args: -mat_ignore_lower_triangular
      filter: sed -e "s~mem [0-9]*~mem~g"

TEST*/
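
/*
   Usage sketch (assumptions: PETSC_DIR and PETSC_ARCH are set, and this file
   is named ex228.c so the makefile target is "ex228"; adjust the name to
   match your checkout). Every rank submits the full dense block of values,
   including the lower triangle, so the SBAIJ runs need
   -mat_ignore_lower_triangular to silently drop entries below the block
   diagonal instead of raising an error:

     make ex228
     mpiexec -n 2 ./ex228 -mat_ignore_lower_triangular
*/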