static char help[] = "Test repeated LU factorizations. Used for checking memory leak\n\
  -m <size> : problem size\n\
  -mat_nonsym : use nonsymmetric matrix (default is symmetric)\n\n";

#include <petscmat.h>

int main(int argc,char **args)
{
  Mat           C,F;                     /* matrix and its factor */
  Vec           x,u,b;                   /* approx solution, exact solution, RHS */
  PetscReal     norm;                    /* norm of solution error */
  PetscScalar   v,none = -1.0;
  PetscInt      I,J,ldim,low,high,iglobal,Istart,Iend;
  PetscInt      i,j,m = 3,n = 2;
  PetscMPIInt   size,rank;
  PetscBool     mat_nonsymmetric;
  PetscInt      its_max;
  MatFactorInfo factinfo;
  IS            perm,iperm;

  PetscCall(PetscInitialize(&argc,&args,(char*)0,help));
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  n = 2*size;

  /*
     Set flag if we are doing a nonsymmetric problem; the default is symmetric.
  */
  PetscCall(PetscOptionsHasName(NULL,NULL,"-mat_nonsym",&mat_nonsymmetric));

  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.
  */
  PetscCall(MatCreate(PETSC_COMM_WORLD,&C));
  PetscCall(MatSetSizes(C,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n));
  PetscCall(MatSetFromOptions(C));
  PetscCall(MatSetUp(C));
  PetscCall(MatGetOwnershipRange(C,&Istart,&Iend));

  /*
     Set matrix entries in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
  */
  for (I=Istart; I<Iend; I++) {
    v = -1.0; i = I/n; j = I - i*n;
    if (i>0)   {J = I - n; PetscCall(MatSetValues(C,1,&I,1,&J,&v,ADD_VALUES));}
    if (i<m-1) {J = I + n; PetscCall(MatSetValues(C,1,&I,1,&J,&v,ADD_VALUES));}
    if (j>0)   {J = I - 1; PetscCall(MatSetValues(C,1,&I,1,&J,&v,ADD_VALUES));}
    if (j<n-1) {J = I + 1; PetscCall(MatSetValues(C,1,&I,1,&J,&v,ADD_VALUES));}
    v = 4.0; PetscCall(MatSetValues(C,1,&I,1,&I,&v,ADD_VALUES));
  }

  /*
     Make the matrix nonsymmetric if desired
  */
  if (mat_nonsymmetric) {
    for (I=Istart; I<Iend; I++) {
      v = -1.5; i = I/n;
      if (i>1) {J = I-n-1; PetscCall(MatSetValues(C,1,&I,1,&J,&v,ADD_VALUES));}
    }
  } else {
    PetscCall(MatSetOption(C,MAT_SYMMETRIC,PETSC_TRUE));
    PetscCall(MatSetOption(C,MAT_SYMMETRY_ETERNAL,PETSC_TRUE));
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY));

  its_max = 1000;
  /*
     Create parallel vectors.
      - When using VecSetSizes(), we specify only the vector's global
        dimension; the parallel partitioning is determined at runtime.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  PetscCall(VecCreate(PETSC_COMM_WORLD,&u));
  PetscCall(VecSetSizes(u,PETSC_DECIDE,m*n));
  PetscCall(VecSetFromOptions(u));
  PetscCall(VecDuplicate(u,&b));
  PetscCall(VecDuplicate(b,&x));

  /*
     Currently, all parallel PETSc vectors are partitioned by
     contiguous chunks across the processors. Determine which
     range of entries is locally owned.
  */
  PetscCall(VecGetOwnershipRange(x,&low,&high));

  /*
     Set elements within the exact solution vector in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local entries will be sent to the
        appropriate processor during vector assembly).
      - Always specify global locations of vector entries.
  */
  PetscCall(VecGetLocalSize(x,&ldim));
  for (i=0; i<ldim; i++) {
    iglobal = i + low;
    v       = (PetscScalar)(i + 100*rank);
    PetscCall(VecSetValues(u,1,&iglobal,&v,INSERT_VALUES));
  }

  /*
     Assemble vector, using the 2-step process:
       VecAssemblyBegin(), VecAssemblyEnd()
     Computations can be done while messages are in transition,
     by placing code between these two statements.
  */
  PetscCall(VecAssemblyBegin(u));
  PetscCall(VecAssemblyEnd(u));

  /* Compute right-hand-side vector */
  PetscCall(MatMult(C,u,b));

  /* Initialize the factorization options (e.g. fill estimate) to their defaults */
  PetscCall(MatFactorInfoInitialize(&factinfo));

  PetscCall(MatGetOrdering(C,MATORDERINGNATURAL,&perm,&iperm));
  its_max = 2000;
  for (i=0; i<its_max; i++) {
    PetscCall(MatGetFactor(C,MATSOLVERPETSC,MAT_FACTOR_LU,&F));
    PetscCall(MatLUFactorSymbolic(F,C,perm,iperm,&factinfo));
    for (j=0; j<1; j++) {
      PetscCall(MatLUFactorNumeric(F,C,&factinfo));
    }
    PetscCall(MatSolve(F,b,x));
    PetscCall(MatDestroy(&F));
  }
  PetscCall(ISDestroy(&perm));
  PetscCall(ISDestroy(&iperm));

  /* Check the error */
  PetscCall(VecAXPY(x,none,u));
  PetscCall(VecNorm(x,NORM_2,&norm));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Norm of error %g\n",(double)norm));

  /* Free work space. */
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&C));
  PetscCall(PetscFinalize());
  return 0;
}
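
/*
   Example usage (a sketch, not part of the original source): assuming the file
   is built into an executable named "ex" against an installed PETSc, runs such
   as the following exercise the repeated LU factorizations and, with
   -malloc_dump, report any allocations left unfreed at PetscFinalize():

       mpiexec -n 1 ./ex -m 5 -malloc_dump
       mpiexec -n 1 ./ex -m 5 -mat_nonsym -malloc_dump

   The executable name is hypothetical; -m and -mat_nonsym are the options
   documented in the help string above, and -malloc_dump is PETSc's standard
   option for printing memory that was not freed before exit.
*/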