static char help[] = "Test repeated LU factorizations. Used for checking memory leaks\n\
  -m <size> : problem size\n\
  -mat_nonsym : use nonsymmetric matrix (default is symmetric)\n\n";

#include <petscmat.h>

int main(int argc,char **args)
{
  Mat           C,F;        /* matrix and its LU factor */
  Vec           x,u,b;      /* approx solution, exact solution, RHS */
  PetscReal     norm;       /* norm of solution error */
  PetscScalar   v,none = -1.0;
  PetscInt      I,J,ldim,low,high,iglobal,Istart,Iend;
  PetscInt      i,j,m = 3,n = 2;
  PetscMPIInt   size,rank;
  PetscBool     mat_nonsymmetric;
  PetscInt      its_max;
  MatFactorInfo factinfo;
  IS            perm,iperm;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc,&args,(char*)0,help));
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  n = 2*size;

  /*
     Set a flag if we are doing a nonsymmetric problem; the default is symmetric.
  */
  PetscCall(PetscOptionsHasName(NULL,NULL,"-mat_nonsym",&mat_nonsymmetric));

  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.
  */
  PetscCall(MatCreate(PETSC_COMM_WORLD,&C));
  PetscCall(MatSetSizes(C,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n));
  PetscCall(MatSetFromOptions(C));
  PetscCall(MatSetUp(C));
  PetscCall(MatGetOwnershipRange(C,&Istart,&Iend));

  /*
     Set matrix entries in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
  */
  for (I=Istart; I<Iend; I++) {
    v = -1.0; i = I/n; j = I - i*n;
    if (i>0)   {J = I - n; PetscCall(MatSetValues(C,1,&I,1,&J,&v,ADD_VALUES));}
    if (i<m-1) {J = I + n; PetscCall(MatSetValues(C,1,&I,1,&J,&v,ADD_VALUES));}
    if (j>0)   {J = I - 1; PetscCall(MatSetValues(C,1,&I,1,&J,&v,ADD_VALUES));}
    if (j<n-1) {J = I + 1; PetscCall(MatSetValues(C,1,&I,1,&J,&v,ADD_VALUES));}
    v = 4.0; PetscCall(MatSetValues(C,1,&I,1,&I,&v,ADD_VALUES));
  }

  /*
     Make the matrix nonsymmetric if desired
  */
  if (mat_nonsymmetric) {
    for (I=Istart; I<Iend; I++) {
      v = -1.5; i = I/n;
      if (i>1) {J = I - n - 1; PetscCall(MatSetValues(C,1,&I,1,&J,&v,ADD_VALUES));}
    }
  } else {
    PetscCall(MatSetOption(C,MAT_SYMMETRIC,PETSC_TRUE));
    PetscCall(MatSetOption(C,MAT_SYMMETRY_ETERNAL,PETSC_TRUE));
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY));

  /*
     Create parallel vectors.
      - When using VecSetSizes(), we specify only the vector's global
        dimension; the parallel partitioning is determined at runtime.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  PetscCall(VecCreate(PETSC_COMM_WORLD,&u));
  PetscCall(VecSetSizes(u,PETSC_DECIDE,m*n));
  PetscCall(VecSetFromOptions(u));
  PetscCall(VecDuplicate(u,&b));
  PetscCall(VecDuplicate(b,&x));

  /*
     Currently, all parallel PETSc vectors are partitioned by
     contiguous chunks across the processors.  Determine which
     range of entries is locally owned.
  */
  PetscCall(VecGetOwnershipRange(x,&low,&high));

  /*
     Set elements within the exact solution vector in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local entries will be sent to the
        appropriate processor during vector assembly).
      - Always specify global locations of vector entries.
  */
  PetscCall(VecGetLocalSize(x,&ldim));
  for (i=0; i<ldim; i++) {
    iglobal = i + low;
    v       = (PetscScalar)(i + 100*rank);
    PetscCall(VecSetValues(u,1,&iglobal,&v,INSERT_VALUES));
  }

  /*
     Assemble vector, using the 2-step process:
       VecAssemblyBegin(), VecAssemblyEnd()
     Computations can be done while messages are in transition,
     by placing code between these two statements.
  */
  PetscCall(VecAssemblyBegin(u));
  PetscCall(VecAssemblyEnd(u));

  /* Compute right-hand-side vector */
  PetscCall(MatMult(C,u,b));

  /*
     Factor and solve repeatedly. Since the factor F is created and
     destroyed on every pass through the loop, the memory use of the
     program should not grow with the iteration count.
  */
  PetscCall(MatFactorInfoInitialize(&factinfo));
  factinfo.fill = 5.0;      /* expected fill ratio for the symbolic factorization */
  PetscCall(MatGetOrdering(C,MATORDERINGNATURAL,&perm,&iperm));
  its_max = 2000;
  for (i=0; i<its_max; i++) {
    PetscCall(MatGetFactor(C,MATSOLVERPETSC,MAT_FACTOR_LU,&F));
    PetscCall(MatLUFactorSymbolic(F,C,perm,iperm,&factinfo));
    /* The inner loop runs once; raise its bound to repeat the numeric
       factorization on the same symbolic factor. */
    for (j=0; j<1; j++) {
      PetscCall(MatLUFactorNumeric(F,C,&factinfo));
    }
    PetscCall(MatSolve(F,b,x));
    PetscCall(MatDestroy(&F));
  }
  PetscCall(ISDestroy(&perm));
  PetscCall(ISDestroy(&iperm));

  /* Check the error */
  PetscCall(VecAXPY(x,none,u));
  PetscCall(VecNorm(x,NORM_2,&norm));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Norm of error %g\n",(double)norm));

  /* Free work space. */
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&C));
  PetscCall(PetscFinalize());
  return 0;
}
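/*
   Usage sketch (added for illustration, not part of the original test):
   assuming a standard PETSc + MPI build in which this file compiles to an
   executable named "extest" (the name is hypothetical), a typical
   leak-hunting run could look like

       mpiexec -n 2 valgrind --leak-check=full ./extest -m 5 -mat_nonsym

   Because the factor F is created and destroyed inside the loop above, the
   expectation is that total memory use stays flat across all its_max
   iterations; growth reported by the leak checker would indicate a leak in
   the factorization path.
*/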