xref: /petsc/src/mat/impls/aij/mpi/strumpack/strumpack.c (revision 43cdf1ebbcbfa05bee08e48007ef1bae3f20f4e9)
1 #include <../src/mat/impls/aij/seq/aij.h> /*I "petscmat.h" I*/
2 #include <../src/mat/impls/aij/mpi/mpiaij.h>
3 #include <StrumpackSparseSolver.h>
4 
/* MatGetDiagonal() is not supported on a STRUMPACK factored matrix; always raises PETSC_ERR_SUP. */
static PetscErrorCode MatGetDiagonal_STRUMPACK(Mat A, Vec v)
{
  PetscFunctionBegin;
  /* SETERRQ() returns from the function; the epilogue below is unreachable but keeps the standard shape */
  SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type: STRUMPACK factor");
  PetscFunctionReturn(0);
}
11 
/*
  Destroy a STRUMPACK factor matrix: release the external STRUMPACK solver instance,
  dispatch to the base AIJ destructor matching the factor's type, then clear the
  composed setter functions.
*/
static PetscErrorCode MatDestroy_STRUMPACK(Mat A)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)A->spptr;
  PetscBool               flg;

  PetscFunctionBegin;
  /* Deallocate STRUMPACK storage */
  PetscStackCallExternalVoid("STRUMPACK_destroy", STRUMPACK_destroy(S));
  PetscCall(PetscFree(A->spptr));
  /* the factor matrix was created with the same type as the original matrix (see MatGetFactor_aij_strumpack()) */
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQAIJ, &flg));
  if (flg) {
    PetscCall(MatDestroy_SeqAIJ(A));
  } else {
    PetscCall(MatDestroy_MPIAIJ(A));
  }

  /* clear composed functions */
  /* NOTE(review): this runs after the base destructor; it relies on the PetscObject header of A
     still being valid at this point -- confirm against PETSc object lifetime rules */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatFactorGetSolverType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetReordering_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetColPerm_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetHSSRelTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetHSSAbsTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetHSSMaxRank_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetHSSLeafSize_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetHSSMinSepSize_C", NULL));

  PetscFunctionReturn(0);
}
40 
41 static PetscErrorCode MatSTRUMPACKSetReordering_STRUMPACK(Mat F, MatSTRUMPACKReordering reordering)
42 {
43   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;
44 
45   PetscFunctionBegin;
46   PetscStackCallExternalVoid("STRUMPACK_reordering_method", STRUMPACK_set_reordering_method(*S, (STRUMPACK_REORDERING_STRATEGY)reordering));
47   PetscFunctionReturn(0);
48 }
49 
50 /*@
  MatSTRUMPACKSetReordering - Set STRUMPACK fill-reducing reordering

   Logically Collective

   Input Parameters:
+  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
-  reordering - the code to be used to find the fill-reducing reordering
56       Possible values: NATURAL=0 METIS=1 PARMETIS=2 SCOTCH=3 PTSCOTCH=4 RCM=5
57 
58   Options Database Key:
59 .   -mat_strumpack_reordering <METIS>  - Sparsity reducing matrix reordering (choose one of) NATURAL METIS PARMETIS SCOTCH PTSCOTCH RCM (None)
60 
61    Level: beginner
62 
63    References:
64 .  * - STRUMPACK manual
65 
66 .seealso: `MatGetFactor()`
67 @*/
PetscErrorCode MatSTRUMPACKSetReordering(Mat F, MatSTRUMPACKReordering reordering)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(F, reordering, 2);
  /* dispatch to the composed implementation; silently a no-op when F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetReordering_C", (Mat, MatSTRUMPACKReordering), (F, reordering));
  PetscFunctionReturn(0);
}
76 
77 static PetscErrorCode MatSTRUMPACKSetColPerm_STRUMPACK(Mat F, PetscBool cperm)
78 {
79   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;
80 
81   PetscFunctionBegin;
82   PetscStackCallExternalVoid("STRUMPACK_set_mc64job", STRUMPACK_set_mc64job(*S, cperm ? 5 : 0));
83   PetscFunctionReturn(0);
84 }
85 
86 /*@
87   MatSTRUMPACKSetColPerm - Set whether STRUMPACK should try to permute the columns of the matrix in order to get a nonzero diagonal
88 
89    Logically Collective
90 
91    Input Parameters:
92 +  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
93 -  cperm - `PETSC_TRUE` to permute (internally) the columns of the matrix
94 
95   Options Database Key:
96 .   -mat_strumpack_colperm <cperm> - true to use the permutation
97 
98    Level: beginner
99 
100    References:
101 .  * - STRUMPACK manual
102 
103 .seealso: `MatGetFactor()`
104 @*/
PetscErrorCode MatSTRUMPACKSetColPerm(Mat F, PetscBool cperm)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveBool(F, cperm, 2);
  /* dispatch to the composed implementation; silently a no-op when F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetColPerm_C", (Mat, PetscBool), (F, cperm));
  PetscFunctionReturn(0);
}
113 
114 static PetscErrorCode MatSTRUMPACKSetHSSRelTol_STRUMPACK(Mat F, PetscReal rtol)
115 {
116   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;
117 
118   PetscFunctionBegin;
119   PetscStackCallExternalVoid("STRUMPACK_set_HSS_rel_tol", STRUMPACK_set_HSS_rel_tol(*S, rtol));
120   PetscFunctionReturn(0);
121 }
122 
123 /*@
124   MatSTRUMPACKSetHSSRelTol - Set STRUMPACK relative tolerance for HSS compression
125 
126   Logically Collective
127 
128    Input Parameters:
129 +  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
130 -  rtol - relative compression tolerance
131 
132   Options Database Key:
133 .   -mat_strumpack_hss_rel_tol <1e-2>         - Relative compression tolerance (None)
134 
135    Level: beginner
136 
137    References:
138 .  * - STRUMPACK manual
139 
140 .seealso: `MatGetFactor()`
141 @*/
PetscErrorCode MatSTRUMPACKSetHSSRelTol(Mat F, PetscReal rtol)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveReal(F, rtol, 2);
  /* dispatch to the composed implementation; silently a no-op when F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetHSSRelTol_C", (Mat, PetscReal), (F, rtol));
  PetscFunctionReturn(0);
}
150 
151 static PetscErrorCode MatSTRUMPACKSetHSSAbsTol_STRUMPACK(Mat F, PetscReal atol)
152 {
153   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;
154 
155   PetscFunctionBegin;
156   PetscStackCallExternalVoid("STRUMPACK_set_HSS_abs_tol", STRUMPACK_set_HSS_abs_tol(*S, atol));
157   PetscFunctionReturn(0);
158 }
159 
160 /*@
161   MatSTRUMPACKSetHSSAbsTol - Set STRUMPACK absolute tolerance for HSS compression
162 
163    Logically Collective
164 
165    Input Parameters:
166 +  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
167 -  atol - absolute compression tolerance
168 
169   Options Database Key:
170 .   -mat_strumpack_hss_abs_tol <1e-8>         - Absolute compression tolerance (None)
171 
172    Level: beginner
173 
174    References:
175 .  * - STRUMPACK manual
176 
177 .seealso: `MatGetFactor()`
178 @*/
PetscErrorCode MatSTRUMPACKSetHSSAbsTol(Mat F, PetscReal atol)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveReal(F, atol, 2);
  /* dispatch to the composed implementation; silently a no-op when F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetHSSAbsTol_C", (Mat, PetscReal), (F, atol));
  PetscFunctionReturn(0);
}
187 
188 static PetscErrorCode MatSTRUMPACKSetHSSMaxRank_STRUMPACK(Mat F, PetscInt hssmaxrank)
189 {
190   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;
191 
192   PetscFunctionBegin;
193   PetscStackCallExternalVoid("STRUMPACK_set_HSS_max_rank", STRUMPACK_set_HSS_max_rank(*S, hssmaxrank));
194   PetscFunctionReturn(0);
195 }
196 
197 /*@
198   MatSTRUMPACKSetHSSMaxRank - Set STRUMPACK maximum HSS rank
199 
200    Logically Collective
201 
202    Input Parameters:
203 +  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
204 -  hssmaxrank - maximum rank used in low-rank approximation
205 
206   Options Database Key:
207 .   -mat_strumpack_max_rank - Maximum rank in HSS compression, when using pctype ilu (None)
208 
209    Level: beginner
210 
211    References:
212 .  * - STRUMPACK manual
213 
214 .seealso: `MatGetFactor()`
215 @*/
PetscErrorCode MatSTRUMPACKSetHSSMaxRank(Mat F, PetscInt hssmaxrank)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveInt(F, hssmaxrank, 2);
  /* dispatch to the composed implementation; silently a no-op when F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetHSSMaxRank_C", (Mat, PetscInt), (F, hssmaxrank));
  PetscFunctionReturn(0);
}
224 
225 static PetscErrorCode MatSTRUMPACKSetHSSLeafSize_STRUMPACK(Mat F, PetscInt leaf_size)
226 {
227   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;
228 
229   PetscFunctionBegin;
230   PetscStackCallExternalVoid("STRUMPACK_set_HSS_leaf_size", STRUMPACK_set_HSS_leaf_size(*S, leaf_size));
231   PetscFunctionReturn(0);
232 }
233 
234 /*@
235   MatSTRUMPACKSetHSSLeafSize - Set STRUMPACK HSS leaf size
236 
237    Logically Collective
238 
239    Input Parameters:
240 +  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
241 -  leaf_size - Size of diagonal blocks in HSS approximation
242 
243   Options Database Key:
244 .   -mat_strumpack_leaf_size - Size of diagonal blocks in HSS approximation, when using pctype ilu (None)
245 
246    Level: beginner
247 
248    References:
249 .  * - STRUMPACK manual
250 
251 .seealso: `MatGetFactor()`
252 @*/
PetscErrorCode MatSTRUMPACKSetHSSLeafSize(Mat F, PetscInt leaf_size)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveInt(F, leaf_size, 2);
  /* dispatch to the composed implementation; silently a no-op when F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetHSSLeafSize_C", (Mat, PetscInt), (F, leaf_size));
  PetscFunctionReturn(0);
}
261 
262 static PetscErrorCode MatSTRUMPACKSetHSSMinSepSize_STRUMPACK(Mat F, PetscInt hssminsize)
263 {
264   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;
265 
266   PetscFunctionBegin;
267   PetscStackCallExternalVoid("STRUMPACK_set_HSS_min_sep_size", STRUMPACK_set_HSS_min_sep_size(*S, hssminsize));
268   PetscFunctionReturn(0);
269 }
270 
271 /*@
272   MatSTRUMPACKSetHSSMinSepSize - Set STRUMPACK minimum separator size for low-rank approximation
273 
274    Logically Collective
275 
276    Input Parameters:
277 +  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
278 -  hssminsize - minimum dense matrix size for low-rank approximation
279 
280   Options Database Key:
281 .   -mat_strumpack_hss_min_sep_size <hssminsize> - set the minimum separator size
282 
283    Level: beginner
284 
285    References:
286 .  * - STRUMPACK manual
287 
288 .seealso: `MatGetFactor()`
289 @*/
PetscErrorCode MatSTRUMPACKSetHSSMinSepSize(Mat F, PetscInt hssminsize)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveInt(F, hssminsize, 2);
  /* dispatch to the composed implementation; silently a no-op when F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetHSSMinSepSize_C", (Mat, PetscInt), (F, hssminsize));
  PetscFunctionReturn(0);
}
298 
299 static PetscErrorCode MatSolve_STRUMPACK(Mat A, Vec b_mpi, Vec x)
300 {
301   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)A->spptr;
302   STRUMPACK_RETURN_CODE   sp_err;
303   const PetscScalar      *bptr;
304   PetscScalar            *xptr;
305 
306   PetscFunctionBegin;
307   PetscCall(VecGetArray(x, &xptr));
308   PetscCall(VecGetArrayRead(b_mpi, &bptr));
309 
310   PetscStackCallExternalVoid("STRUMPACK_solve", sp_err = STRUMPACK_solve(*S, (PetscScalar *)bptr, xptr, 0));
311   switch (sp_err) {
312   case STRUMPACK_SUCCESS:
313     break;
314   case STRUMPACK_MATRIX_NOT_SET: {
315     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix was not set");
316     break;
317   }
318   case STRUMPACK_REORDERING_ERROR: {
319     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix reordering failed");
320     break;
321   }
322   default:
323     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: solve failed");
324   }
325   PetscCall(VecRestoreArray(x, &xptr));
326   PetscCall(VecRestoreArrayRead(b_mpi, &bptr));
327   PetscFunctionReturn(0);
328 }
329 
/* MatMatSolve for STRUMPACK: validates that B and X are dense matrices, then errors out (not implemented yet). */
static PetscErrorCode MatMatSolve_STRUMPACK(Mat A, Mat B_mpi, Mat X)
{
  PetscBool flg;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompareAny((PetscObject)B_mpi, &flg, MATSEQDENSE, MATMPIDENSE, NULL));
  PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix B must be MATDENSE matrix");
  PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &flg, MATSEQDENSE, MATMPIDENSE, NULL));
  PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix X must be MATDENSE matrix");
  /* SETERRQ() returns; the epilogue below is unreachable but kept for uniformity */
  SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "MatMatSolve_STRUMPACK() is not implemented yet");
  PetscFunctionReturn(0);
}
342 
343 static PetscErrorCode MatView_Info_STRUMPACK(Mat A, PetscViewer viewer)
344 {
345   PetscFunctionBegin;
346   /* check if matrix is strumpack type */
347   if (A->ops->solve != MatSolve_STRUMPACK) PetscFunctionReturn(0);
348   PetscCall(PetscViewerASCIIPrintf(viewer, "STRUMPACK sparse solver!\n"));
349   PetscFunctionReturn(0);
350 }
351 
352 static PetscErrorCode MatView_STRUMPACK(Mat A, PetscViewer viewer)
353 {
354   PetscBool         iascii;
355   PetscViewerFormat format;
356 
357   PetscFunctionBegin;
358   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
359   if (iascii) {
360     PetscCall(PetscViewerGetFormat(viewer, &format));
361     if (format == PETSC_VIEWER_ASCII_INFO) PetscCall(MatView_Info_STRUMPACK(A, viewer));
362   }
363   PetscFunctionReturn(0);
364 }
365 
366 static PetscErrorCode MatLUFactorNumeric_STRUMPACK(Mat F, Mat A, const MatFactorInfo *info)
367 {
368   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;
369   STRUMPACK_RETURN_CODE   sp_err;
370   Mat_SeqAIJ             *A_d, *A_o;
371   Mat_MPIAIJ             *mat;
372   PetscInt                M = A->rmap->N, m = A->rmap->n;
373   PetscBool               flg;
374 
375   PetscFunctionBegin;
376   PetscCall(PetscObjectTypeCompare((PetscObject)A, MATMPIAIJ, &flg));
377   if (flg) { /* A is MATMPIAIJ */
378     mat = (Mat_MPIAIJ *)A->data;
379     A_d = (Mat_SeqAIJ *)(mat->A)->data;
380     A_o = (Mat_SeqAIJ *)(mat->B)->data;
381     PetscStackCallExternalVoid("STRUMPACK_set_MPIAIJ_matrix", STRUMPACK_set_MPIAIJ_matrix(*S, &m, A_d->i, A_d->j, A_d->a, A_o->i, A_o->j, A_o->a, mat->garray));
382   } else { /* A is MATSEQAIJ */
383     A_d = (Mat_SeqAIJ *)A->data;
384     PetscStackCallExternalVoid("STRUMPACK_set_csr_matrix", STRUMPACK_set_csr_matrix(*S, &M, A_d->i, A_d->j, A_d->a, 0));
385   }
386 
387   /* Reorder and Factor the matrix. */
388   /* TODO figure out how to avoid reorder if the matrix values changed, but the pattern remains the same. */
389   PetscStackCallExternalVoid("STRUMPACK_reorder", sp_err = STRUMPACK_reorder(*S));
390   PetscStackCallExternalVoid("STRUMPACK_factor", sp_err = STRUMPACK_factor(*S));
391   switch (sp_err) {
392   case STRUMPACK_SUCCESS:
393     break;
394   case STRUMPACK_MATRIX_NOT_SET: {
395     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix was not set");
396     break;
397   }
398   case STRUMPACK_REORDERING_ERROR: {
399     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix reordering failed");
400     break;
401   }
402   default:
403     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: factorization failed");
404   }
405   F->assembled    = PETSC_TRUE;
406   F->preallocated = PETSC_TRUE;
407   PetscFunctionReturn(0);
408 }
409 
/*
  "Symbolic" factorization phase for STRUMPACK. STRUMPACK performs reordering and
  factorization together in the numeric phase, so this routine only processes the
  runtime options into the solver handle and installs the numeric/solve callbacks.
  The IS arguments r and c are ignored (STRUMPACK computes its own ordering).
*/
static PetscErrorCode MatLUFactorSymbolic_STRUMPACK(Mat F, Mat A, IS r, IS c, const MatFactorInfo *info)
{
  STRUMPACK_SparseSolver       *S = (STRUMPACK_SparseSolver *)F->spptr;
  PetscBool                     flg, set;
  PetscReal                     ctol;
  PetscInt                      hssminsize, max_rank, leaf_size;
  STRUMPACK_REORDERING_STRATEGY ndcurrent, ndvalue;
  STRUMPACK_KRYLOV_SOLVER       itcurrent, itsolver;
  const char *const             STRUMPACKNDTypes[] = {"NATURAL", "METIS", "PARMETIS", "SCOTCH", "PTSCOTCH", "RCM", "STRUMPACKNDTypes", "", 0};
  const char *const             SolverTypes[]      = {"AUTO", "NONE", "REFINE", "PREC_GMRES", "GMRES", "PREC_BICGSTAB", "BICGSTAB", "SolverTypes", "", 0};

  PetscFunctionBegin;
  /* Set options to F */
  /* each option is first read back from the solver so its current value serves as the default */
  PetscOptionsBegin(PetscObjectComm((PetscObject)F), ((PetscObject)F)->prefix, "STRUMPACK Options", "Mat");
  PetscStackCallExternalVoid("STRUMPACK_HSS_rel_tol", ctol = (PetscReal)STRUMPACK_HSS_rel_tol(*S));
  PetscCall(PetscOptionsReal("-mat_strumpack_hss_rel_tol", "Relative compression tolerance", "None", ctol, &ctol, &set));
  if (set) PetscStackCallExternalVoid("STRUMPACK_set_HSS_rel_tol", STRUMPACK_set_HSS_rel_tol(*S, (double)ctol));

  PetscStackCallExternalVoid("STRUMPACK_HSS_abs_tol", ctol = (PetscReal)STRUMPACK_HSS_abs_tol(*S));
  PetscCall(PetscOptionsReal("-mat_strumpack_hss_abs_tol", "Absolute compression tolerance", "None", ctol, &ctol, &set));
  if (set) PetscStackCallExternalVoid("STRUMPACK_set_HSS_abs_tol", STRUMPACK_set_HSS_abs_tol(*S, (double)ctol));

  /* mc64 job 0 means no column permutation, any nonzero job enables it (5 is used here) */
  PetscStackCallExternalVoid("STRUMPACK_mc64job", flg = (STRUMPACK_mc64job(*S) == 0) ? PETSC_FALSE : PETSC_TRUE);
  PetscCall(PetscOptionsBool("-mat_strumpack_colperm", "Find a col perm to get nonzero diagonal", "None", flg, &flg, &set));
  if (set) PetscStackCallExternalVoid("STRUMPACK_set_mc64job", STRUMPACK_set_mc64job(*S, flg ? 5 : 0));

  PetscStackCallExternalVoid("STRUMPACK_HSS_min_sep_size", hssminsize = (PetscInt)STRUMPACK_HSS_min_sep_size(*S));
  PetscCall(PetscOptionsInt("-mat_strumpack_hss_min_sep_size", "Minimum size of separator for HSS compression", "None", hssminsize, &hssminsize, &set));
  if (set) PetscStackCallExternalVoid("STRUMPACK_set_HSS_min_sep_size", STRUMPACK_set_HSS_min_sep_size(*S, (int)hssminsize));

  PetscStackCallExternalVoid("STRUMPACK_HSS_max_rank", max_rank = (PetscInt)STRUMPACK_HSS_max_rank(*S));
  PetscCall(PetscOptionsInt("-mat_strumpack_max_rank", "Maximum rank in HSS approximation", "None", max_rank, &max_rank, &set));
  if (set) PetscStackCallExternalVoid("STRUMPACK_set_HSS_max_rank", STRUMPACK_set_HSS_max_rank(*S, (int)max_rank));

  PetscStackCallExternalVoid("STRUMPACK_HSS_leaf_size", leaf_size = (PetscInt)STRUMPACK_HSS_leaf_size(*S));
  PetscCall(PetscOptionsInt("-mat_strumpack_leaf_size", "Size of diagonal blocks in HSS approximation", "None", leaf_size, &leaf_size, &set));
  if (set) PetscStackCallExternalVoid("STRUMPACK_set_HSS_leaf_size", STRUMPACK_set_HSS_leaf_size(*S, (int)leaf_size));

  PetscStackCallExternalVoid("STRUMPACK_reordering_method", ndcurrent = STRUMPACK_reordering_method(*S));
  PetscCall(PetscOptionsEnum("-mat_strumpack_reordering", "Sparsity reducing matrix reordering", "None", STRUMPACKNDTypes, (PetscEnum)ndcurrent, (PetscEnum *)&ndvalue, &set));
  if (set) PetscStackCallExternalVoid("STRUMPACK_set_reordering_method", STRUMPACK_set_reordering_method(*S, ndvalue));

  /* Disable the outer iterative solver from STRUMPACK.                                       */
  /* When STRUMPACK is used as a direct solver, it will by default do iterative refinement.   */
  /* When STRUMPACK is used as an approximate factorization preconditioner (by enabling       */
  /* low-rank compression), it will use its own preconditioned GMRES. Here we can disable     */
  /* the outer iterative solver, as PETSc uses STRUMPACK from within a KSP.                   */
  PetscStackCallExternalVoid("STRUMPACK_set_Krylov_solver", STRUMPACK_set_Krylov_solver(*S, STRUMPACK_DIRECT));

  /* the user can still re-enable an inner iterative solver via the option below */
  PetscStackCallExternalVoid("STRUMPACK_Krylov_solver", itcurrent = STRUMPACK_Krylov_solver(*S));
  PetscCall(PetscOptionsEnum("-mat_strumpack_iterative_solver", "Select iterative solver from STRUMPACK", "None", SolverTypes, (PetscEnum)itcurrent, (PetscEnum *)&itsolver, &set));
  if (set) PetscStackCallExternalVoid("STRUMPACK_set_Krylov_solver", STRUMPACK_set_Krylov_solver(*S, itsolver));
  PetscOptionsEnd();

  /* install the callbacks used by the numeric phase and by KSP */
  F->ops->lufactornumeric = MatLUFactorNumeric_STRUMPACK;
  F->ops->solve           = MatSolve_STRUMPACK;
  F->ops->matsolve        = MatMatSolve_STRUMPACK;
  PetscFunctionReturn(0);
}
469 
/* Report MATSOLVERSTRUMPACK as the solver type of this factor matrix. */
static PetscErrorCode MatFactorGetSolverType_aij_strumpack(Mat A, MatSolverType *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERSTRUMPACK;
  PetscFunctionReturn(0);
}
476 
477 /*MC
  MATSOLVERSTRUMPACK = "strumpack" - A solver package providing a direct sparse solver (PCLU)
479   and a preconditioner (PCILU) using low-rank compression via the external package STRUMPACK.
480 
481   Consult the STRUMPACK-sparse manual for more info.
482 
483   Use
484      ./configure --download-strumpack
485   to have PETSc installed with STRUMPACK
486 
487   Use
488     -pc_type lu -pc_factor_mat_solver_type strumpack
489   to use this as an exact (direct) solver, use
490     -pc_type ilu -pc_factor_mat_solver_type strumpack
491   to enable low-rank compression (i.e, use as a preconditioner).
492 
493   Works with AIJ matrices
494 
495   Options Database Keys:
496 + -mat_strumpack_verbose                    - verbose info
497 . -mat_strumpack_hss_rel_tol <1e-2>         - Relative compression tolerance (None)
498 . -mat_strumpack_hss_abs_tol <1e-8>         - Absolute compression tolerance (None)
499 . -mat_strumpack_colperm <TRUE>             - Permute matrix to make diagonal nonzeros (None)
500 . -mat_strumpack_hss_min_sep_size <256>     - Minimum size of separator for HSS compression (None)
501 . -mat_strumpack_max_rank                   - Maximum rank in HSS compression, when using pctype ilu (None)
502 . -mat_strumpack_leaf_size                  - Size of diagonal blocks in HSS approximation, when using pctype ilu (None)
503 . -mat_strumpack_reordering <METIS>         - Sparsity reducing matrix reordering (choose one of) NATURAL METIS PARMETIS SCOTCH PTSCOTCH RCM (None)
504 - -mat_strumpack_iterative_solver <DIRECT>  - Select iterative solver from STRUMPACK (choose one of) AUTO DIRECT REFINE PREC_GMRES GMRES PREC_BICGSTAB BICGSTAB (None)
505 
506  Level: beginner
507 
508 .seealso: `PCLU`, `PCILU`, `MATSOLVERSUPERLU_DIST`, `MATSOLVERMUMPS`, `PCFactorSetMatSolverType()`, `MatSolverType`, `MatGetFactor()`
509 M*/
/*
  Create a STRUMPACK factor matrix B for A (Seq or MPI AIJ, LU or ILU). The factor
  carries a STRUMPACK_SparseSolver handle in B->spptr and exposes the MatSTRUMPACKSet*
  setters as composed functions.
*/
static PetscErrorCode MatGetFactor_aij_strumpack(Mat A, MatFactorType ftype, Mat *F)
{
  Mat                       B;
  PetscInt                  M = A->rmap->N, N = A->cmap->N;
  PetscBool                 verb, flg;
  STRUMPACK_SparseSolver   *S;
  STRUMPACK_INTERFACE       iface;
  /* precision table indexed by [64-bit ints][complex scalars][single precision] */
  const STRUMPACK_PRECISION table[2][2][2] = {
    {{STRUMPACK_FLOATCOMPLEX_64, STRUMPACK_DOUBLECOMPLEX_64}, {STRUMPACK_FLOAT_64, STRUMPACK_DOUBLE_64}},
    {{STRUMPACK_FLOATCOMPLEX, STRUMPACK_DOUBLECOMPLEX},       {STRUMPACK_FLOAT, STRUMPACK_DOUBLE}      }
  };
  const STRUMPACK_PRECISION prec = table[(sizeof(PetscInt) == 8) ? 0 : 1][(PETSC_SCALAR == PETSC_COMPLEX) ? 0 : 1][(PETSC_REAL == PETSC_FLOAT) ? 0 : 1];

  PetscFunctionBegin;
  /* Create the factorization matrix */
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
  PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, M, N));
  PetscCall(MatSetType(B, ((PetscObject)A)->type_name));
  /* only the preallocation matching B's actual type takes effect; the other call is ignored */
  PetscCall(MatSeqAIJSetPreallocation(B, 0, NULL));
  PetscCall(MatMPIAIJSetPreallocation(B, 0, NULL, 0, NULL));
  B->trivialsymbolic = PETSC_TRUE;
  if (ftype == MAT_FACTOR_LU || ftype == MAT_FACTOR_ILU) {
    B->ops->lufactorsymbolic  = MatLUFactorSymbolic_STRUMPACK;
    B->ops->ilufactorsymbolic = MatLUFactorSymbolic_STRUMPACK;
  } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Factor type not supported");
  B->ops->view        = MatView_STRUMPACK;
  B->ops->destroy     = MatDestroy_STRUMPACK;
  B->ops->getdiagonal = MatGetDiagonal_STRUMPACK;
  /* expose the STRUMPACK-specific setters; MatDestroy_STRUMPACK() clears these again */
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_aij_strumpack));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetReordering_C", MatSTRUMPACKSetReordering_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetColPerm_C", MatSTRUMPACKSetColPerm_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetHSSRelTol_C", MatSTRUMPACKSetHSSRelTol_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetHSSAbsTol_C", MatSTRUMPACKSetHSSAbsTol_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetHSSMaxRank_C", MatSTRUMPACKSetHSSMaxRank_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetHSSLeafSize_C", MatSTRUMPACKSetHSSLeafSize_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetHSSMinSepSize_C", MatSTRUMPACKSetHSSMinSepSize_STRUMPACK));
  B->factortype = ftype;
  PetscCall(PetscNew(&S));
  B->spptr = S;

  /* choose the STRUMPACK interface flavor: threaded for sequential, MPI-distributed otherwise */
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQAIJ, &flg));
  iface = flg ? STRUMPACK_MT : STRUMPACK_MPI_DIST;

  PetscOptionsBegin(PetscObjectComm((PetscObject)B), ((PetscObject)B)->prefix, "STRUMPACK Options", "Mat");
  /* default verbosity follows PETSc's -info setting */
  verb = PetscLogPrintInfo ? PETSC_TRUE : PETSC_FALSE;
  PetscCall(PetscOptionsBool("-mat_strumpack_verbose", "Print STRUMPACK information", "None", verb, &verb, NULL));

  PetscStackCallExternalVoid("STRUMPACK_init", STRUMPACK_init(S, PetscObjectComm((PetscObject)A), prec, iface, 0, NULL, verb));

  if (ftype == MAT_FACTOR_ILU) {
    /* When enabling HSS compression, the STRUMPACK solver becomes an incomplete              */
    /* (or approximate) LU factorization.                                                     */
    PetscStackCallExternalVoid("STRUMPACK_enable_HSS", STRUMPACK_enable_HSS(*S));
  }
  PetscOptionsEnd();

  /* set solvertype */
  PetscCall(PetscFree(B->solvertype));
  PetscCall(PetscStrallocpy(MATSOLVERSTRUMPACK, &B->solvertype));

  *F = B;
  PetscFunctionReturn(0);
}
573 
574 PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_STRUMPACK(void)
575 {
576   PetscFunctionBegin;
577   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATMPIAIJ, MAT_FACTOR_LU, MatGetFactor_aij_strumpack));
578   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_aij_strumpack));
579   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATMPIAIJ, MAT_FACTOR_ILU, MatGetFactor_aij_strumpack));
580   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATSEQAIJ, MAT_FACTOR_ILU, MatGetFactor_aij_strumpack));
581   PetscFunctionReturn(0);
582 }
583