xref: /petsc/src/mat/impls/aij/mpi/strumpack/strumpack.c (revision 2d30e087755efd99e28fdfe792ffbeb2ee1ea928)
1 #include <../src/mat/impls/aij/seq/aij.h> /*I "petscmat.h" I*/
2 #include <../src/mat/impls/aij/mpi/mpiaij.h>
3 #include <StrumpackSparseSolver.h>
4 
/* A STRUMPACK factor does not expose its entries, so extracting the diagonal is unsupported;
   always raises PETSC_ERR_SUP (v is never written) */
static PetscErrorCode MatGetDiagonal_STRUMPACK(Mat A, Vec v) {
  PetscFunctionBegin;
  SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type: STRUMPACK factor");
  PetscFunctionReturn(0);
}
10 
/* Destroy a STRUMPACK factor matrix: release the external solver object stored in A->spptr,
   then dispatch to the destroy routine of the underlying (Seq/MPI)AIJ type, and finally
   clear the composed MatSTRUMPACKSetXXX_C hooks installed by MatGetFactor_aij_strumpack. */
static PetscErrorCode MatDestroy_STRUMPACK(Mat A) {
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)A->spptr;
  PetscBool               flg;

  PetscFunctionBegin;
  /* Deallocate STRUMPACK storage */
  PetscStackCallExternalVoid("STRUMPACK_destroy", STRUMPACK_destroy(S));
  PetscCall(PetscFree(A->spptr));
  /* The factor was created with the same type as the original matrix (SEQAIJ or MPIAIJ) */
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQAIJ, &flg));
  if (flg) {
    PetscCall(MatDestroy_SeqAIJ(A));
  } else {
    PetscCall(MatDestroy_MPIAIJ(A));
  }

  /* clear composed functions */
  /* NOTE(review): these are cleared after the type-specific destroy above; presumably the
     composed-function list on A is still valid at this point — confirm against PetscHeaderDestroy ordering */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatFactorGetSolverType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetReordering_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetColPerm_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetHSSRelTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetHSSAbsTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetHSSMaxRank_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetHSSLeafSize_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetHSSMinSepSize_C", NULL));

  PetscFunctionReturn(0);
}
38 
39 static PetscErrorCode MatSTRUMPACKSetReordering_STRUMPACK(Mat F, MatSTRUMPACKReordering reordering) {
40   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;
41 
42   PetscFunctionBegin;
43   PetscStackCallExternalVoid("STRUMPACK_reordering_method", STRUMPACK_set_reordering_method(*S, (STRUMPACK_REORDERING_STRATEGY)reordering));
44   PetscFunctionReturn(0);
45 }
46 
47 /*@
  MatSTRUMPACKSetReordering - Set STRUMPACK fill-reducing reordering

   Logically Collective on F

   Input Parameters:
+  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
-  reordering - the code to be used to find the fill-reducing reordering
      Possible values: NATURAL=0 METIS=1 PARMETIS=2 SCOTCH=3 PTSCOTCH=4 RCM=5
53       Possible values: NATURAL=0 METIS=1 PARMETIS=2 SCOTCH=3 PTSCOTCH=4 RCM=5
54 
55   Options Database Key:
56 .   -mat_strumpack_reordering <METIS>  - Sparsity reducing matrix reordering (choose one of) NATURAL METIS PARMETIS SCOTCH PTSCOTCH RCM (None)
57 
58    Level: beginner
59 
60    References:
61 .  * - STRUMPACK manual
62 
63 .seealso: `MatGetFactor()`
64 @*/
PetscErrorCode MatSTRUMPACKSetReordering(Mat F, MatSTRUMPACKReordering reordering) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(F, reordering, 2);
  /* Silently a no-op when F was not produced by the STRUMPACK factor interface */
  PetscTryMethod(F, "MatSTRUMPACKSetReordering_C", (Mat, MatSTRUMPACKReordering), (F, reordering));
  PetscFunctionReturn(0);
}
72 
73 static PetscErrorCode MatSTRUMPACKSetColPerm_STRUMPACK(Mat F, PetscBool cperm) {
74   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;
75 
76   PetscFunctionBegin;
77   PetscStackCallExternalVoid("STRUMPACK_set_mc64job", STRUMPACK_set_mc64job(*S, cperm ? 5 : 0));
78   PetscFunctionReturn(0);
79 }
80 
81 /*@
82   MatSTRUMPACKSetColPerm - Set whether STRUMPACK should try to permute the columns of the matrix in order to get a nonzero diagonal
83 
84    Logically Collective on F
85 
86    Input Parameters:
87 +  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
88 -  cperm - `PETSC_TRUE` to permute (internally) the columns of the matrix
89 
90   Options Database Key:
91 .   -mat_strumpack_colperm <cperm> - true to use the permutation
92 
93    Level: beginner
94 
95    References:
96 .  * - STRUMPACK manual
97 
98 .seealso: `MatGetFactor()`
99 @*/
PetscErrorCode MatSTRUMPACKSetColPerm(Mat F, PetscBool cperm) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveBool(F, cperm, 2);
  /* Silently a no-op when F was not produced by the STRUMPACK factor interface */
  PetscTryMethod(F, "MatSTRUMPACKSetColPerm_C", (Mat, PetscBool), (F, cperm));
  PetscFunctionReturn(0);
}
107 
108 static PetscErrorCode MatSTRUMPACKSetHSSRelTol_STRUMPACK(Mat F, PetscReal rtol) {
109   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;
110 
111   PetscFunctionBegin;
112   PetscStackCallExternalVoid("STRUMPACK_set_HSS_rel_tol", STRUMPACK_set_HSS_rel_tol(*S, rtol));
113   PetscFunctionReturn(0);
114 }
115 
116 /*@
117   MatSTRUMPACKSetHSSRelTol - Set STRUMPACK relative tolerance for HSS compression
118 
119   Logically Collective on F
120 
121    Input Parameters:
122 +  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
123 -  rtol - relative compression tolerance
124 
125   Options Database Key:
126 .   -mat_strumpack_hss_rel_tol <1e-2>         - Relative compression tolerance (None)
127 
128    Level: beginner
129 
130    References:
131 .  * - STRUMPACK manual
132 
133 .seealso: `MatGetFactor()`
134 @*/
PetscErrorCode MatSTRUMPACKSetHSSRelTol(Mat F, PetscReal rtol) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveReal(F, rtol, 2);
  /* Silently a no-op when F was not produced by the STRUMPACK factor interface */
  PetscTryMethod(F, "MatSTRUMPACKSetHSSRelTol_C", (Mat, PetscReal), (F, rtol));
  PetscFunctionReturn(0);
}
142 
143 static PetscErrorCode MatSTRUMPACKSetHSSAbsTol_STRUMPACK(Mat F, PetscReal atol) {
144   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;
145 
146   PetscFunctionBegin;
147   PetscStackCallExternalVoid("STRUMPACK_set_HSS_abs_tol", STRUMPACK_set_HSS_abs_tol(*S, atol));
148   PetscFunctionReturn(0);
149 }
150 
151 /*@
152   MatSTRUMPACKSetHSSAbsTol - Set STRUMPACK absolute tolerance for HSS compression
153 
154    Logically Collective on F
155 
156    Input Parameters:
157 +  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
158 -  atol - absolute compression tolerance
159 
160   Options Database Key:
161 .   -mat_strumpack_hss_abs_tol <1e-8>         - Absolute compression tolerance (None)
162 
163    Level: beginner
164 
165    References:
166 .  * - STRUMPACK manual
167 
168 .seealso: `MatGetFactor()`
169 @*/
PetscErrorCode MatSTRUMPACKSetHSSAbsTol(Mat F, PetscReal atol) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveReal(F, atol, 2);
  /* Silently a no-op when F was not produced by the STRUMPACK factor interface */
  PetscTryMethod(F, "MatSTRUMPACKSetHSSAbsTol_C", (Mat, PetscReal), (F, atol));
  PetscFunctionReturn(0);
}
177 
178 static PetscErrorCode MatSTRUMPACKSetHSSMaxRank_STRUMPACK(Mat F, PetscInt hssmaxrank) {
179   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;
180 
181   PetscFunctionBegin;
182   PetscStackCallExternalVoid("STRUMPACK_set_HSS_max_rank", STRUMPACK_set_HSS_max_rank(*S, hssmaxrank));
183   PetscFunctionReturn(0);
184 }
185 
186 /*@
187   MatSTRUMPACKSetHSSMaxRank - Set STRUMPACK maximum HSS rank
188 
189    Logically Collective on F
190 
191    Input Parameters:
192 +  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
193 -  hssmaxrank - maximum rank used in low-rank approximation
194 
195   Options Database Key:
196 .   -mat_strumpack_max_rank - Maximum rank in HSS compression, when using pctype ilu (None)
197 
198    Level: beginner
199 
200    References:
201 .  * - STRUMPACK manual
202 
203 .seealso: `MatGetFactor()`
204 @*/
PetscErrorCode MatSTRUMPACKSetHSSMaxRank(Mat F, PetscInt hssmaxrank) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveInt(F, hssmaxrank, 2);
  /* Silently a no-op when F was not produced by the STRUMPACK factor interface */
  PetscTryMethod(F, "MatSTRUMPACKSetHSSMaxRank_C", (Mat, PetscInt), (F, hssmaxrank));
  PetscFunctionReturn(0);
}
212 
213 static PetscErrorCode MatSTRUMPACKSetHSSLeafSize_STRUMPACK(Mat F, PetscInt leaf_size) {
214   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;
215 
216   PetscFunctionBegin;
217   PetscStackCallExternalVoid("STRUMPACK_set_HSS_leaf_size", STRUMPACK_set_HSS_leaf_size(*S, leaf_size));
218   PetscFunctionReturn(0);
219 }
220 
221 /*@
222   MatSTRUMPACKSetHSSLeafSize - Set STRUMPACK HSS leaf size
223 
224    Logically Collective on F
225 
226    Input Parameters:
227 +  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
228 -  leaf_size - Size of diagonal blocks in HSS approximation
229 
230   Options Database Key:
231 .   -mat_strumpack_leaf_size - Size of diagonal blocks in HSS approximation, when using pctype ilu (None)
232 
233    Level: beginner
234 
235    References:
236 .  * - STRUMPACK manual
237 
238 .seealso: `MatGetFactor()`
239 @*/
PetscErrorCode MatSTRUMPACKSetHSSLeafSize(Mat F, PetscInt leaf_size) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveInt(F, leaf_size, 2);
  /* Silently a no-op when F was not produced by the STRUMPACK factor interface */
  PetscTryMethod(F, "MatSTRUMPACKSetHSSLeafSize_C", (Mat, PetscInt), (F, leaf_size));
  PetscFunctionReturn(0);
}
247 
248 static PetscErrorCode MatSTRUMPACKSetHSSMinSepSize_STRUMPACK(Mat F, PetscInt hssminsize) {
249   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;
250 
251   PetscFunctionBegin;
252   PetscStackCallExternalVoid("STRUMPACK_set_HSS_min_sep_size", STRUMPACK_set_HSS_min_sep_size(*S, hssminsize));
253   PetscFunctionReturn(0);
254 }
255 
256 /*@
257   MatSTRUMPACKSetHSSMinSepSize - Set STRUMPACK minimum separator size for low-rank approximation
258 
259    Logically Collective on F
260 
261    Input Parameters:
262 +  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
263 -  hssminsize - minimum dense matrix size for low-rank approximation
264 
265   Options Database Key:
266 .   -mat_strumpack_hss_min_sep_size <hssminsize> - set the minimum separator size
267 
268    Level: beginner
269 
270    References:
271 .  * - STRUMPACK manual
272 
273 .seealso: `MatGetFactor()`
274 @*/
PetscErrorCode MatSTRUMPACKSetHSSMinSepSize(Mat F, PetscInt hssminsize) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveInt(F, hssminsize, 2);
  /* Silently a no-op when F was not produced by the STRUMPACK factor interface */
  PetscTryMethod(F, "MatSTRUMPACKSetHSSMinSepSize_C", (Mat, PetscInt), (F, hssminsize));
  PetscFunctionReturn(0);
}
282 
/* Solve A x = b using the STRUMPACK factorization stored in A->spptr.
   Installed as A->ops->solve by MatLUFactorSymbolic_STRUMPACK. */
static PetscErrorCode MatSolve_STRUMPACK(Mat A, Vec b_mpi, Vec x) {
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)A->spptr;
  STRUMPACK_RETURN_CODE   sp_err;
  const PetscScalar      *bptr;
  PetscScalar            *xptr;

  PetscFunctionBegin;
  PetscCall(VecGetArray(x, &xptr));
  PetscCall(VecGetArrayRead(b_mpi, &bptr));

  /* STRUMPACK_solve takes a non-const b pointer, hence the cast; presumably the RHS is
     input-only — confirm against the STRUMPACK C API. The final 0 appears to mean
     "do not use x as initial guess" — TODO confirm with the STRUMPACK manual. */
  PetscStackCallExternalVoid("STRUMPACK_solve", sp_err = STRUMPACK_solve(*S, (PetscScalar *)bptr, xptr, 0));
  /* Translate STRUMPACK return codes into PETSc errors */
  switch (sp_err) {
  case STRUMPACK_SUCCESS: break;
  case STRUMPACK_MATRIX_NOT_SET: {
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix was not set");
    break;
  }
  case STRUMPACK_REORDERING_ERROR: {
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix reordering failed");
    break;
  }
  default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: solve failed");
  }
  PetscCall(VecRestoreArray(x, &xptr));
  PetscCall(VecRestoreArrayRead(b_mpi, &bptr));
  PetscFunctionReturn(0);
}
310 
/* Multi-RHS solve: validates that B and X are dense, then fails — the actual
   multi-RHS path is not implemented; only MatSolve_STRUMPACK is available. */
static PetscErrorCode MatMatSolve_STRUMPACK(Mat A, Mat B_mpi, Mat X) {
  PetscBool flg;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompareAny((PetscObject)B_mpi, &flg, MATSEQDENSE, MATMPIDENSE, NULL));
  PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix B must be MATDENSE matrix");
  PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &flg, MATSEQDENSE, MATMPIDENSE, NULL));
  PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix X must be MATDENSE matrix");
  /* Always raises PETSC_ERR_SUP; the argument checks above give callers a more precise
     error when the inputs are wrong in addition to being unsupported */
  SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "MatMatSolve_STRUMPACK() is not implemented yet");
  PetscFunctionReturn(0);
}
322 
323 static PetscErrorCode MatView_Info_STRUMPACK(Mat A, PetscViewer viewer) {
324   PetscFunctionBegin;
325   /* check if matrix is strumpack type */
326   if (A->ops->solve != MatSolve_STRUMPACK) PetscFunctionReturn(0);
327   PetscCall(PetscViewerASCIIPrintf(viewer, "STRUMPACK sparse solver!\n"));
328   PetscFunctionReturn(0);
329 }
330 
331 static PetscErrorCode MatView_STRUMPACK(Mat A, PetscViewer viewer) {
332   PetscBool         iascii;
333   PetscViewerFormat format;
334 
335   PetscFunctionBegin;
336   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
337   if (iascii) {
338     PetscCall(PetscViewerGetFormat(viewer, &format));
339     if (format == PETSC_VIEWER_ASCII_INFO) PetscCall(MatView_Info_STRUMPACK(A, viewer));
340   }
341   PetscFunctionReturn(0);
342 }
343 
344 static PetscErrorCode MatLUFactorNumeric_STRUMPACK(Mat F, Mat A, const MatFactorInfo *info) {
345   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;
346   STRUMPACK_RETURN_CODE   sp_err;
347   Mat_SeqAIJ             *A_d, *A_o;
348   Mat_MPIAIJ             *mat;
349   PetscInt                M = A->rmap->N, m = A->rmap->n;
350   PetscBool               flg;
351 
352   PetscFunctionBegin;
353   PetscCall(PetscObjectTypeCompare((PetscObject)A, MATMPIAIJ, &flg));
354   if (flg) { /* A is MATMPIAIJ */
355     mat = (Mat_MPIAIJ *)A->data;
356     A_d = (Mat_SeqAIJ *)(mat->A)->data;
357     A_o = (Mat_SeqAIJ *)(mat->B)->data;
358     PetscStackCallExternalVoid("STRUMPACK_set_MPIAIJ_matrix", STRUMPACK_set_MPIAIJ_matrix(*S, &m, A_d->i, A_d->j, A_d->a, A_o->i, A_o->j, A_o->a, mat->garray));
359   } else { /* A is MATSEQAIJ */
360     A_d = (Mat_SeqAIJ *)A->data;
361     PetscStackCallExternalVoid("STRUMPACK_set_csr_matrix", STRUMPACK_set_csr_matrix(*S, &M, A_d->i, A_d->j, A_d->a, 0));
362   }
363 
364   /* Reorder and Factor the matrix. */
365   /* TODO figure out how to avoid reorder if the matrix values changed, but the pattern remains the same. */
366   PetscStackCallExternalVoid("STRUMPACK_reorder", sp_err = STRUMPACK_reorder(*S));
367   PetscStackCallExternalVoid("STRUMPACK_factor", sp_err = STRUMPACK_factor(*S));
368   switch (sp_err) {
369   case STRUMPACK_SUCCESS: break;
370   case STRUMPACK_MATRIX_NOT_SET: {
371     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix was not set");
372     break;
373   }
374   case STRUMPACK_REORDERING_ERROR: {
375     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix reordering failed");
376     break;
377   }
378   default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: factorization failed");
379   }
380   F->assembled    = PETSC_TRUE;
381   F->preallocated = PETSC_TRUE;
382   PetscFunctionReturn(0);
383 }
384 
385 static PetscErrorCode MatLUFactorSymbolic_STRUMPACK(Mat F, Mat A, IS r, IS c, const MatFactorInfo *info) {
386   STRUMPACK_SparseSolver       *S = (STRUMPACK_SparseSolver *)F->spptr;
387   PetscBool                     flg, set;
388   PetscReal                     ctol;
389   PetscInt                      hssminsize, max_rank, leaf_size;
390   STRUMPACK_REORDERING_STRATEGY ndcurrent, ndvalue;
391   STRUMPACK_KRYLOV_SOLVER       itcurrent, itsolver;
392   const char *const             STRUMPACKNDTypes[] = {"NATURAL", "METIS", "PARMETIS", "SCOTCH", "PTSCOTCH", "RCM", "STRUMPACKNDTypes", "", 0};
393   const char *const             SolverTypes[]      = {"AUTO", "NONE", "REFINE", "PREC_GMRES", "GMRES", "PREC_BICGSTAB", "BICGSTAB", "SolverTypes", "", 0};
394 
395   PetscFunctionBegin;
396   /* Set options to F */
397   PetscOptionsBegin(PetscObjectComm((PetscObject)F), ((PetscObject)F)->prefix, "STRUMPACK Options", "Mat");
398   PetscStackCallExternalVoid("STRUMPACK_HSS_rel_tol", ctol = (PetscReal)STRUMPACK_HSS_rel_tol(*S));
399   PetscCall(PetscOptionsReal("-mat_strumpack_hss_rel_tol", "Relative compression tolerance", "None", ctol, &ctol, &set));
400   if (set) PetscStackCallExternalVoid("STRUMPACK_set_HSS_rel_tol", STRUMPACK_set_HSS_rel_tol(*S, (double)ctol));
401 
402   PetscStackCallExternalVoid("STRUMPACK_HSS_abs_tol", ctol = (PetscReal)STRUMPACK_HSS_abs_tol(*S));
403   PetscCall(PetscOptionsReal("-mat_strumpack_hss_abs_tol", "Absolute compression tolerance", "None", ctol, &ctol, &set));
404   if (set) PetscStackCallExternalVoid("STRUMPACK_set_HSS_abs_tol", STRUMPACK_set_HSS_abs_tol(*S, (double)ctol));
405 
406   PetscStackCallExternalVoid("STRUMPACK_mc64job", flg = (STRUMPACK_mc64job(*S) == 0) ? PETSC_FALSE : PETSC_TRUE);
407   PetscCall(PetscOptionsBool("-mat_strumpack_colperm", "Find a col perm to get nonzero diagonal", "None", flg, &flg, &set));
408   if (set) PetscStackCallExternalVoid("STRUMPACK_set_mc64job", STRUMPACK_set_mc64job(*S, flg ? 5 : 0));
409 
410   PetscStackCallExternalVoid("STRUMPACK_HSS_min_sep_size", hssminsize = (PetscInt)STRUMPACK_HSS_min_sep_size(*S));
411   PetscCall(PetscOptionsInt("-mat_strumpack_hss_min_sep_size", "Minimum size of separator for HSS compression", "None", hssminsize, &hssminsize, &set));
412   if (set) PetscStackCallExternalVoid("STRUMPACK_set_HSS_min_sep_size", STRUMPACK_set_HSS_min_sep_size(*S, (int)hssminsize));
413 
414   PetscStackCallExternalVoid("STRUMPACK_HSS_max_rank", max_rank = (PetscInt)STRUMPACK_HSS_max_rank(*S));
415   PetscCall(PetscOptionsInt("-mat_strumpack_max_rank", "Maximum rank in HSS approximation", "None", max_rank, &max_rank, &set));
416   if (set) PetscStackCallExternalVoid("STRUMPACK_set_HSS_max_rank", STRUMPACK_set_HSS_max_rank(*S, (int)max_rank));
417 
418   PetscStackCallExternalVoid("STRUMPACK_HSS_leaf_size", leaf_size = (PetscInt)STRUMPACK_HSS_leaf_size(*S));
419   PetscCall(PetscOptionsInt("-mat_strumpack_leaf_size", "Size of diagonal blocks in HSS approximation", "None", leaf_size, &leaf_size, &set));
420   if (set) PetscStackCallExternalVoid("STRUMPACK_set_HSS_leaf_size", STRUMPACK_set_HSS_leaf_size(*S, (int)leaf_size));
421 
422   PetscStackCallExternalVoid("STRUMPACK_reordering_method", ndcurrent = STRUMPACK_reordering_method(*S));
423   PetscCall(PetscOptionsEnum("-mat_strumpack_reordering", "Sparsity reducing matrix reordering", "None", STRUMPACKNDTypes, (PetscEnum)ndcurrent, (PetscEnum *)&ndvalue, &set));
424   if (set) PetscStackCallExternalVoid("STRUMPACK_set_reordering_method", STRUMPACK_set_reordering_method(*S, ndvalue));
425 
426   /* Disable the outer iterative solver from STRUMPACK.                                       */
427   /* When STRUMPACK is used as a direct solver, it will by default do iterative refinement.   */
428   /* When STRUMPACK is used as an approximate factorization preconditioner (by enabling       */
429   /* low-rank compression), it will use it's own preconditioned GMRES. Here we can disable    */
430   /* the outer iterative solver, as PETSc uses STRUMPACK from within a KSP.                   */
431   PetscStackCallExternalVoid("STRUMPACK_set_Krylov_solver", STRUMPACK_set_Krylov_solver(*S, STRUMPACK_DIRECT));
432 
433   PetscStackCallExternalVoid("STRUMPACK_Krylov_solver", itcurrent = STRUMPACK_Krylov_solver(*S));
434   PetscCall(PetscOptionsEnum("-mat_strumpack_iterative_solver", "Select iterative solver from STRUMPACK", "None", SolverTypes, (PetscEnum)itcurrent, (PetscEnum *)&itsolver, &set));
435   if (set) PetscStackCallExternalVoid("STRUMPACK_set_Krylov_solver", STRUMPACK_set_Krylov_solver(*S, itsolver));
436   PetscOptionsEnd();
437 
438   F->ops->lufactornumeric = MatLUFactorNumeric_STRUMPACK;
439   F->ops->solve           = MatSolve_STRUMPACK;
440   F->ops->matsolve        = MatMatSolve_STRUMPACK;
441   PetscFunctionReturn(0);
442 }
443 
/* Report the solver-package name for a STRUMPACK factor (hook for MatFactorGetSolverType()) */
static PetscErrorCode MatFactorGetSolverType_aij_strumpack(Mat A, MatSolverType *type) {
  PetscFunctionBegin;
  *type = MATSOLVERSTRUMPACK;
  PetscFunctionReturn(0);
}
449 
450 /*MC
  MATSOLVERSTRUMPACK = "strumpack" - A solver package providing a direct sparse solver (PCLU)
452   and a preconditioner (PCILU) using low-rank compression via the external package STRUMPACK.
453 
454   Consult the STRUMPACK-sparse manual for more info.
455 
456   Use
457      ./configure --download-strumpack
458   to have PETSc installed with STRUMPACK
459 
460   Use
461     -pc_type lu -pc_factor_mat_solver_type strumpack
462   to use this as an exact (direct) solver, use
463     -pc_type ilu -pc_factor_mat_solver_type strumpack
  to enable low-rank compression (i.e., use as a preconditioner).
465 
466   Works with AIJ matrices
467 
468   Options Database Keys:
469 + -mat_strumpack_verbose                    - verbose info
470 . -mat_strumpack_hss_rel_tol <1e-2>         - Relative compression tolerance (None)
471 . -mat_strumpack_hss_abs_tol <1e-8>         - Absolute compression tolerance (None)
472 . -mat_strumpack_colperm <TRUE>             - Permute matrix to make diagonal nonzeros (None)
473 . -mat_strumpack_hss_min_sep_size <256>     - Minimum size of separator for HSS compression (None)
474 . -mat_strumpack_max_rank                   - Maximum rank in HSS compression, when using pctype ilu (None)
475 . -mat_strumpack_leaf_size                  - Size of diagonal blocks in HSS approximation, when using pctype ilu (None)
476 . -mat_strumpack_reordering <METIS>         - Sparsity reducing matrix reordering (choose one of) NATURAL METIS PARMETIS SCOTCH PTSCOTCH RCM (None)
477 - -mat_strumpack_iterative_solver <DIRECT>  - Select iterative solver from STRUMPACK (choose one of) AUTO DIRECT REFINE PREC_GMRES GMRES PREC_BICGSTAB BICGSTAB (None)
478 
479  Level: beginner
480 
481 .seealso: `PCLU`, `PCILU`, `MATSOLVERSUPERLU_DIST`, `MATSOLVERMUMPS`, `PCFactorSetMatSolverType()`, `MatSolverType`, `MatGetFactor()`
482 M*/
/* Create the STRUMPACK factor matrix B for an AIJ matrix A (LU or ILU).
   B shares A's type and layout, carries an initialized STRUMPACK_SparseSolver in
   B->spptr, and exposes the MatSTRUMPACKSetXXX hooks via composed functions.
   Registered with MatSolverTypeRegister below. */
static PetscErrorCode MatGetFactor_aij_strumpack(Mat A, MatFactorType ftype, Mat *F) {
  Mat                       B;
  PetscInt                  M = A->rmap->N, N = A->cmap->N;
  PetscBool                 verb, flg;
  STRUMPACK_SparseSolver   *S;
  STRUMPACK_INTERFACE       iface;
  /* Precision table indexed by [64-bit indices?][complex scalars?][single-precision reals?] */
  const STRUMPACK_PRECISION table[2][2][2] = {
    {{STRUMPACK_FLOATCOMPLEX_64, STRUMPACK_DOUBLECOMPLEX_64}, {STRUMPACK_FLOAT_64, STRUMPACK_DOUBLE_64}},
    {{STRUMPACK_FLOATCOMPLEX, STRUMPACK_DOUBLECOMPLEX},       {STRUMPACK_FLOAT, STRUMPACK_DOUBLE}      }
  };
  const STRUMPACK_PRECISION prec = table[(sizeof(PetscInt) == 8) ? 0 : 1][(PETSC_SCALAR == PETSC_COMPLEX) ? 0 : 1][(PETSC_REAL == PETSC_FLOAT) ? 0 : 1];

  PetscFunctionBegin;
  /* Create the factorization matrix */
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
  PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, M, N));
  PetscCall(MatSetType(B, ((PetscObject)A)->type_name));
  /* Only one of these preallocations takes effect, matching B's actual type */
  PetscCall(MatSeqAIJSetPreallocation(B, 0, NULL));
  PetscCall(MatMPIAIJSetPreallocation(B, 0, NULL, 0, NULL));
  B->trivialsymbolic = PETSC_TRUE;
  if (ftype == MAT_FACTOR_LU || ftype == MAT_FACTOR_ILU) {
    B->ops->lufactorsymbolic  = MatLUFactorSymbolic_STRUMPACK;
    B->ops->ilufactorsymbolic = MatLUFactorSymbolic_STRUMPACK;
  } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Factor type not supported");
  B->ops->view        = MatView_STRUMPACK;
  B->ops->destroy     = MatDestroy_STRUMPACK;
  B->ops->getdiagonal = MatGetDiagonal_STRUMPACK;
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_aij_strumpack));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetReordering_C", MatSTRUMPACKSetReordering_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetColPerm_C", MatSTRUMPACKSetColPerm_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetHSSRelTol_C", MatSTRUMPACKSetHSSRelTol_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetHSSAbsTol_C", MatSTRUMPACKSetHSSAbsTol_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetHSSMaxRank_C", MatSTRUMPACKSetHSSMaxRank_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetHSSLeafSize_C", MatSTRUMPACKSetHSSLeafSize_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetHSSMinSepSize_C", MatSTRUMPACKSetHSSMinSepSize_STRUMPACK));
  B->factortype = ftype;
  /* B owns S; freed in MatDestroy_STRUMPACK */
  PetscCall(PetscNewLog(B, &S));
  B->spptr = S;

  /* Sequential matrices use STRUMPACK's multithreaded interface, parallel ones the MPI-distributed one */
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQAIJ, &flg));
  iface = flg ? STRUMPACK_MT : STRUMPACK_MPI_DIST;

  PetscOptionsBegin(PetscObjectComm((PetscObject)B), ((PetscObject)B)->prefix, "STRUMPACK Options", "Mat");
  /* Default verbosity follows -info (PetscLogPrintInfo) unless overridden */
  verb = PetscLogPrintInfo ? PETSC_TRUE : PETSC_FALSE;
  PetscCall(PetscOptionsBool("-mat_strumpack_verbose", "Print STRUMPACK information", "None", verb, &verb, NULL));

  PetscStackCallExternalVoid("STRUMPACK_init", STRUMPACK_init(S, PetscObjectComm((PetscObject)A), prec, iface, 0, NULL, verb));

  if (ftype == MAT_FACTOR_ILU) {
    /* When enabling HSS compression, the STRUMPACK solver becomes an incomplete              */
    /* (or approximate) LU factorization.                                                     */
    PetscStackCallExternalVoid("STRUMPACK_enable_HSS", STRUMPACK_enable_HSS(*S));
  }
  PetscOptionsEnd();

  /* set solvertype */
  PetscCall(PetscFree(B->solvertype));
  PetscCall(PetscStrallocpy(MATSOLVERSTRUMPACK, &B->solvertype));

  *F = B;
  PetscFunctionReturn(0);
}
545 
546 PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_STRUMPACK(void) {
547   PetscFunctionBegin;
548   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATMPIAIJ, MAT_FACTOR_LU, MatGetFactor_aij_strumpack));
549   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_aij_strumpack));
550   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATMPIAIJ, MAT_FACTOR_ILU, MatGetFactor_aij_strumpack));
551   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATSEQAIJ, MAT_FACTOR_ILU, MatGetFactor_aij_strumpack));
552   PetscFunctionReturn(0);
553 }
554