xref: /petsc/src/mat/tests/ex106.c (revision 732aec7a18f2199fb53bb9a2f3aef439a834ce31)
/* Usage string printed with -help; also passed to PetscInitialize(). */
static char help[] = "Test repeated LU factorizations. Used for checking memory leak\n\
  -m <size> : problem size\n\
  -mat_nonsym : use nonsymmetric matrix (default is symmetric)\n\n";
4c4762a1bSJed Brown 
5c4762a1bSJed Brown #include <petscmat.h>
main(int argc,char ** args)6d71ae5a4SJacob Faibussowitsch int main(int argc, char **args)
7d71ae5a4SJacob Faibussowitsch {
8c4762a1bSJed Brown   Mat           C, F;    /* matrix */
9c4762a1bSJed Brown   Vec           x, u, b; /* approx solution, RHS, exact solution */
10c4762a1bSJed Brown   PetscReal     norm;    /* norm of solution error */
11c4762a1bSJed Brown   PetscScalar   v, none = -1.0;
12c4762a1bSJed Brown   PetscInt      I, J, ldim, low, high, iglobal, Istart, Iend;
13c4762a1bSJed Brown   PetscInt      i, j, m = 3, n = 2, its;
14c4762a1bSJed Brown   PetscMPIInt   size, rank;
15c4762a1bSJed Brown   PetscBool     mat_nonsymmetric;
16c4762a1bSJed Brown   PetscInt      its_max;
17c4762a1bSJed Brown   MatFactorInfo factinfo;
18c4762a1bSJed Brown   IS            perm, iperm;
19c4762a1bSJed Brown 
20327415f7SBarry Smith   PetscFunctionBeginUser;
21*c8025a54SPierre Jolivet   PetscCall(PetscInitialize(&argc, &args, NULL, help));
229566063dSJacob Faibussowitsch   PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));
239566063dSJacob Faibussowitsch   PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
249566063dSJacob Faibussowitsch   PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
25c4762a1bSJed Brown   n = 2 * size;
26c4762a1bSJed Brown 
27c4762a1bSJed Brown   /*
28c4762a1bSJed Brown      Set flag if we are doing a nonsymmetric problem; the default is symmetric.
29c4762a1bSJed Brown   */
309566063dSJacob Faibussowitsch   PetscCall(PetscOptionsHasName(NULL, NULL, "-mat_nonsym", &mat_nonsymmetric));
31c4762a1bSJed Brown 
32c4762a1bSJed Brown   /*
33c4762a1bSJed Brown      Create parallel matrix, specifying only its global dimensions.
34c4762a1bSJed Brown      When using MatCreate(), the matrix format can be specified at
35c4762a1bSJed Brown      runtime. Also, the parallel partitioning of the matrix is
36c4762a1bSJed Brown      determined by PETSc at runtime.
37c4762a1bSJed Brown   */
389566063dSJacob Faibussowitsch   PetscCall(MatCreate(PETSC_COMM_WORLD, &C));
399566063dSJacob Faibussowitsch   PetscCall(MatSetSizes(C, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n));
409566063dSJacob Faibussowitsch   PetscCall(MatSetFromOptions(C));
419566063dSJacob Faibussowitsch   PetscCall(MatGetOwnershipRange(C, &Istart, &Iend));
42c4762a1bSJed Brown 
43c4762a1bSJed Brown   /*
44c4762a1bSJed Brown      Set matrix entries matrix in parallel.
45c4762a1bSJed Brown       - Each processor needs to insert only elements that it owns
46c4762a1bSJed Brown         locally (but any non-local elements will be sent to the
47c4762a1bSJed Brown         appropriate processor during matrix assembly).
48c4762a1bSJed Brown       - Always specify global row and columns of matrix entries.
49c4762a1bSJed Brown   */
50c4762a1bSJed Brown   for (I = Istart; I < Iend; I++) {
519371c9d4SSatish Balay     v = -1.0;
529371c9d4SSatish Balay     i = I / n;
539371c9d4SSatish Balay     j = I - i * n;
549371c9d4SSatish Balay     if (i > 0) {
559371c9d4SSatish Balay       J = I - n;
569371c9d4SSatish Balay       PetscCall(MatSetValues(C, 1, &I, 1, &J, &v, ADD_VALUES));
579371c9d4SSatish Balay     }
589371c9d4SSatish Balay     if (i < m - 1) {
599371c9d4SSatish Balay       J = I + n;
609371c9d4SSatish Balay       PetscCall(MatSetValues(C, 1, &I, 1, &J, &v, ADD_VALUES));
619371c9d4SSatish Balay     }
629371c9d4SSatish Balay     if (j > 0) {
639371c9d4SSatish Balay       J = I - 1;
649371c9d4SSatish Balay       PetscCall(MatSetValues(C, 1, &I, 1, &J, &v, ADD_VALUES));
659371c9d4SSatish Balay     }
669371c9d4SSatish Balay     if (j < n - 1) {
679371c9d4SSatish Balay       J = I + 1;
689371c9d4SSatish Balay       PetscCall(MatSetValues(C, 1, &I, 1, &J, &v, ADD_VALUES));
699371c9d4SSatish Balay     }
709371c9d4SSatish Balay     v = 4.0;
719371c9d4SSatish Balay     PetscCall(MatSetValues(C, 1, &I, 1, &I, &v, ADD_VALUES));
72c4762a1bSJed Brown   }
73c4762a1bSJed Brown 
74c4762a1bSJed Brown   /*
75c4762a1bSJed Brown      Make the matrix nonsymmetric if desired
76c4762a1bSJed Brown   */
77c4762a1bSJed Brown   if (mat_nonsymmetric) {
78c4762a1bSJed Brown     for (I = Istart; I < Iend; I++) {
799371c9d4SSatish Balay       v = -1.5;
809371c9d4SSatish Balay       i = I / n;
819371c9d4SSatish Balay       if (i > 1) {
829371c9d4SSatish Balay         J = I - n - 1;
839371c9d4SSatish Balay         PetscCall(MatSetValues(C, 1, &I, 1, &J, &v, ADD_VALUES));
849371c9d4SSatish Balay       }
85c4762a1bSJed Brown     }
86c4762a1bSJed Brown   } else {
879566063dSJacob Faibussowitsch     PetscCall(MatSetOption(C, MAT_SYMMETRIC, PETSC_TRUE));
889566063dSJacob Faibussowitsch     PetscCall(MatSetOption(C, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
89c4762a1bSJed Brown   }
90c4762a1bSJed Brown 
91c4762a1bSJed Brown   /*
92c4762a1bSJed Brown      Assemble matrix, using the 2-step process:
93c4762a1bSJed Brown        MatAssemblyBegin(), MatAssemblyEnd()
94c4762a1bSJed Brown      Computations can be done while messages are in transition
95c4762a1bSJed Brown      by placing code between these two statements.
96c4762a1bSJed Brown   */
979566063dSJacob Faibussowitsch   PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
989566063dSJacob Faibussowitsch   PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
99c4762a1bSJed Brown 
100c4762a1bSJed Brown   its_max = 1000;
101c4762a1bSJed Brown   /*
102c4762a1bSJed Brown      Create parallel vectors.
103c4762a1bSJed Brown       - When using VecSetSizes(), we specify only the vector's global
104c4762a1bSJed Brown         dimension; the parallel partitioning is determined at runtime.
105c4762a1bSJed Brown       - Note: We form 1 vector from scratch and then duplicate as needed.
106c4762a1bSJed Brown   */
1079566063dSJacob Faibussowitsch   PetscCall(VecCreate(PETSC_COMM_WORLD, &u));
1089566063dSJacob Faibussowitsch   PetscCall(VecSetSizes(u, PETSC_DECIDE, m * n));
1099566063dSJacob Faibussowitsch   PetscCall(VecSetFromOptions(u));
1109566063dSJacob Faibussowitsch   PetscCall(VecDuplicate(u, &b));
1119566063dSJacob Faibussowitsch   PetscCall(VecDuplicate(b, &x));
112c4762a1bSJed Brown 
113c4762a1bSJed Brown   /*
114c4762a1bSJed Brown      Currently, all parallel PETSc vectors are partitioned by
115c4762a1bSJed Brown      contiguous chunks across the processors.  Determine which
116c4762a1bSJed Brown      range of entries are locally owned.
117c4762a1bSJed Brown   */
1189566063dSJacob Faibussowitsch   PetscCall(VecGetOwnershipRange(x, &low, &high));
119c4762a1bSJed Brown 
120c4762a1bSJed Brown   /*
121c4762a1bSJed Brown     Set elements within the exact solution vector in parallel.
122c4762a1bSJed Brown      - Each processor needs to insert only elements that it owns
123c4762a1bSJed Brown        locally (but any non-local entries will be sent to the
124c4762a1bSJed Brown        appropriate processor during vector assembly).
125c4762a1bSJed Brown      - Always specify global locations of vector entries.
126c4762a1bSJed Brown   */
1279566063dSJacob Faibussowitsch   PetscCall(VecGetLocalSize(x, &ldim));
128c4762a1bSJed Brown   for (i = 0; i < ldim; i++) {
129c4762a1bSJed Brown     iglobal = i + low;
130c4762a1bSJed Brown     v       = (PetscScalar)(i + 100 * rank);
1319566063dSJacob Faibussowitsch     PetscCall(VecSetValues(u, 1, &iglobal, &v, INSERT_VALUES));
132c4762a1bSJed Brown   }
133c4762a1bSJed Brown 
134c4762a1bSJed Brown   /*
135c4762a1bSJed Brown      Assemble vector, using the 2-step process:
136c4762a1bSJed Brown        VecAssemblyBegin(), VecAssemblyEnd()
137c4762a1bSJed Brown      Computations can be done while messages are in transition,
138c4762a1bSJed Brown      by placing code between these two statements.
139c4762a1bSJed Brown   */
1409566063dSJacob Faibussowitsch   PetscCall(VecAssemblyBegin(u));
1419566063dSJacob Faibussowitsch   PetscCall(VecAssemblyEnd(u));
142c4762a1bSJed Brown 
143c4762a1bSJed Brown   /* Compute right-hand-side vector */
1449566063dSJacob Faibussowitsch   PetscCall(MatMult(C, u, b));
145c4762a1bSJed Brown 
1469566063dSJacob Faibussowitsch   PetscCall(MatGetOrdering(C, MATORDERINGNATURAL, &perm, &iperm));
147c4762a1bSJed Brown   its_max = 2000;
148c4762a1bSJed Brown   for (i = 0; i < its_max; i++) {
1499566063dSJacob Faibussowitsch     PetscCall(MatGetFactor(C, MATSOLVERPETSC, MAT_FACTOR_LU, &F));
1509566063dSJacob Faibussowitsch     PetscCall(MatLUFactorSymbolic(F, C, perm, iperm, &factinfo));
15148a46eb9SPierre Jolivet     for (j = 0; j < 1; j++) PetscCall(MatLUFactorNumeric(F, C, &factinfo));
1529566063dSJacob Faibussowitsch     PetscCall(MatSolve(F, b, x));
1539566063dSJacob Faibussowitsch     PetscCall(MatDestroy(&F));
154c4762a1bSJed Brown   }
1559566063dSJacob Faibussowitsch   PetscCall(ISDestroy(&perm));
1569566063dSJacob Faibussowitsch   PetscCall(ISDestroy(&iperm));
157c4762a1bSJed Brown 
158c4762a1bSJed Brown   /* Check the error */
1599566063dSJacob Faibussowitsch   PetscCall(VecAXPY(x, none, u));
1609566063dSJacob Faibussowitsch   PetscCall(VecNorm(x, NORM_2, &norm));
1619566063dSJacob Faibussowitsch   PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error %t\n", (double)norm));
162c4762a1bSJed Brown 
163c4762a1bSJed Brown   /* Free work space. */
1649566063dSJacob Faibussowitsch   PetscCall(VecDestroy(&u));
1659566063dSJacob Faibussowitsch   PetscCall(VecDestroy(&x));
1669566063dSJacob Faibussowitsch   PetscCall(VecDestroy(&b));
1679566063dSJacob Faibussowitsch   PetscCall(MatDestroy(&C));
1689566063dSJacob Faibussowitsch   PetscCall(PetscFinalize());
169b122ec5aSJacob Faibussowitsch   return 0;
170c4762a1bSJed Brown }
171