static const char help[] = "Test parallel assembly of SBAIJ matrices\n\n";

#include <petscmat.h>

/*
  Assemble - Builds a 4x4 block matrix (block size bs, global size 4*bs) of
  type mtype (MATMPIBAIJ or MATMPISBAIJ), where EVERY rank contributes the
  same full matrix with ADD_VALUES, so assembled entries are the per-rank
  values summed across the communicator.  The matrix is viewed (first with
  detailed info, then in the default format), and, when MUMPS and/or
  MKL CPARDISO are available, factored and used to solve random systems
  whose residuals are checked against a tolerance.

  Collective on comm.

  Input Parameters:
+ comm  - the MPI communicator (the caller requires exactly 2 ranks)
. bs    - block size for the (S)BAIJ matrix
- mtype - matrix type, MATMPIBAIJ or MATMPISBAIJ
*/
PetscErrorCode Assemble(MPI_Comm comm,PetscInt bs,MatType mtype)
{
  const PetscInt    rc[]   = {0,1,2,3};   /* block row/column indices: the full 4x4 block grid */
  /* 8x8 table of scalar entries (covers bs=2: 4x4 blocks of 2x2 each; for bs=1
     only the leading 4x4 is consumed by MatSetValuesBlocked).
     NOTE(review): entry "49" at row 8, col 3 breaks the 57,58,59,... pattern --
     possibly a typo for 59, but it is lower-triangular data (ignored by SBAIJ
     under -mat_ignore_lower_triangular) and changing it would alter the BAIJ
     expected output, so it is preserved as-is. */
  const PetscScalar vals[] = {100, 2, 3, 4, 5, 600, 7, 8,
                              9,100,11,1200,13,14,15,1600,
                              17,18,19,20,21,22,23,24,
                              25,26,27,2800,29,30,31,32,
                              33,34,35,36,37,38,39,40,
                              41,42,43,44,45,46,47,48,
                              49,50,51,52,53,54,55,56,
                              57,58,49,60,61,62,63,64};
  Mat A;
#if defined(PETSC_HAVE_MUMPS) || defined(PETSC_HAVE_MKL_CPARDISO)
  Mat           F;                        /* factored matrix */
  MatSolverType stype = MATSOLVERPETSC;   /* overwritten below; placeholder default */
  PetscRandom   rdm;
  Vec           b,x,y;
  PetscInt      i,j;
  PetscReal     norm2,tol=100*PETSC_SMALL;
  PetscBool     issbaij;
#endif
  PetscViewer viewer;

  PetscFunctionBegin;
  PetscCall(MatCreate(comm,&A));
  PetscCall(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,4*bs,4*bs));
  PetscCall(MatSetType(A,mtype));
  /* Both preallocation calls are issued; only the one matching the actual
     matrix type takes effect, the other is a no-op. */
  PetscCall(MatMPIBAIJSetPreallocation(A,bs,2,NULL,2,NULL));
  PetscCall(MatMPISBAIJSetPreallocation(A,bs,2,NULL,2,NULL));
  PetscCall(MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_TRUE));
  /* All processes contribute a global matrix */
  PetscCall(MatSetValuesBlocked(A,4,rc,4,rc,vals,ADD_VALUES));
  PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));
  PetscCall(PetscPrintf(comm,"Matrix %s(%" PetscInt_FMT ")\n",mtype,bs));
  /* Borrowed stdout viewer: not destroyed by us. View twice on purpose --
     once with detailed info, once in the default ASCII format. */
  PetscCall(PetscViewerASCIIGetStdout(comm,&viewer));
  PetscCall(PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO_DETAIL));
  PetscCall(MatView(A,viewer));
  PetscCall(PetscViewerPopFormat(viewer));
  PetscCall(MatView(A,viewer));
#if defined(PETSC_HAVE_MUMPS) || defined(PETSC_HAVE_MKL_CPARDISO)
  PetscCall(PetscStrcmp(mtype,MATMPISBAIJ,&issbaij));
  if (!issbaij) {
    /* presumably shifts the diagonal so the BAIJ matrix is nonsingular for LU
       -- TODO confirm against the expected test output */
    PetscCall(MatShift(A,10));
  }
  PetscCall(PetscRandomCreate(PETSC_COMM_WORLD,&rdm));
  PetscCall(PetscRandomSetFromOptions(rdm));
  PetscCall(MatCreateVecs(A,&x,&y));
  PetscCall(VecDuplicate(x,&b));
  /* j selects the external solver package: 0 -> MUMPS, 1 -> MKL CPARDISO;
     iterations for unavailable packages are skipped. */
  for (j=0; j<2; j++) {
#if defined(PETSC_HAVE_MUMPS)
    if (j==0) stype = MATSOLVERMUMPS;
#else
    if (j==0) continue;
#endif
#if defined(PETSC_HAVE_MKL_CPARDISO)
    if (j==1) stype = MATSOLVERMKL_CPARDISO;
#else
    if (j==1) continue;
#endif
    /* SBAIJ (symmetric storage) takes the Cholesky path; BAIJ takes LU. */
    if (issbaij) {
      PetscCall(MatGetFactor(A,stype,MAT_FACTOR_CHOLESKY,&F));
      PetscCall(MatCholeskyFactorSymbolic(F,A,NULL,NULL));
      PetscCall(MatCholeskyFactorNumeric(F,A,NULL));
    } else {
      PetscCall(MatGetFactor(A,stype,MAT_FACTOR_LU,&F));
      PetscCall(MatLUFactorSymbolic(F,A,NULL,NULL,NULL));
      PetscCall(MatLUFactorNumeric(F,A,NULL));
    }
    /* Solve 10 random systems and check the residual ||A*y - b||_2. */
    for (i=0; i<10; i++) {
      PetscCall(VecSetRandom(b,rdm));
      PetscCall(MatSolve(F,b,y));
      /* Check the error */
      PetscCall(MatMult(A,y,x));
      PetscCall(VecAXPY(x,-1.0,b));
      PetscCall(VecNorm(x,NORM_2,&norm2));
      if (norm2>tol) {
        PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Error:MatSolve(), norm2: %g\n",(double)norm2));
      }
    }
    PetscCall(MatDestroy(&F));
  }
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
  PetscCall(VecDestroy(&b));
  PetscCall(PetscRandomDestroy(&rdm));
#endif
  PetscCall(MatDestroy(&A));
  PetscFunctionReturn(0);
}

/* Runs Assemble for block sizes 2 and 1, for both MPIBAIJ and MPISBAIJ.
   The expected output (see the TEST block) is generated with exactly 2 ranks. */
int main(int argc,char *argv[])
{
  MPI_Comm    comm;
  PetscMPIInt size;

  PetscCall(PetscInitialize(&argc,&argv,NULL,help));
  comm = PETSC_COMM_WORLD;
  PetscCallMPI(MPI_Comm_size(comm,&size));
  PetscCheck(size == 2,comm,PETSC_ERR_USER,"This example must be run with exactly two processes");
  PetscCall(Assemble(comm,2,MATMPIBAIJ));
  PetscCall(Assemble(comm,2,MATMPISBAIJ));
  PetscCall(Assemble(comm,1,MATMPIBAIJ));
  PetscCall(Assemble(comm,1,MATMPISBAIJ));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 2
      args: -mat_ignore_lower_triangular
      filter: sed -e "s~mem [0-9]*~mem~g"

TEST*/