xref: /petsc/src/mat/impls/aij/mpi/strumpack/strumpack.c (revision d9acb416d05abeed0a33bde3a81aeb2ea0364f6a)
1 #include <../src/mat/impls/aij/seq/aij.h> /*I "petscmat.h" I*/
2 #include <../src/mat/impls/aij/mpi/mpiaij.h>
3 #include <StrumpackSparseSolver.h>
4 
/* Diagonal extraction is not supported for a STRUMPACK factored matrix; always raises PETSC_ERR_SUP. */
static PetscErrorCode MatGetDiagonal_STRUMPACK(Mat A, Vec v)
{
  PetscFunctionBegin;
  SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type: STRUMPACK factor");
  PetscFunctionReturn(PETSC_SUCCESS); /* unreachable: SETERRQ returns the error above */
}
11 
12 static PetscErrorCode MatDestroy_STRUMPACK(Mat A)
13 {
14   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)A->data;
15 
16   PetscFunctionBegin;
17   /* Deallocate STRUMPACK storage */
18   PetscStackCallExternalVoid("STRUMPACK_destroy", STRUMPACK_destroy(S));
19   PetscCall(PetscFree(A->data));
20 
21   /* clear composed functions */
22   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatFactorGetSolverType_C", NULL));
23   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetReordering_C", NULL));
24   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetReordering_C", NULL));
25   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetColPerm_C", NULL));
26   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetColPerm_C", NULL));
27   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetGeometricNxyz_C", NULL));
28   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetGeometricComponents_C", NULL));
29   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetGeometricWidth_C", NULL));
30   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetGPU_C", NULL));
31   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetGPU_C", NULL));
32   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompression_C", NULL));
33   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompression_C", NULL));
34   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompRelTol_C", NULL));
35   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompRelTol_C", NULL));
36   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompAbsTol_C", NULL));
37   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompAbsTol_C", NULL));
38   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompMaxRank_C", NULL));
39   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompMaxRank_C", NULL));
40   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompLeafSize_C", NULL));
41   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompLeafSize_C", NULL));
42   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompMinSepSize_C", NULL));
43   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompMinSepSize_C", NULL));
44   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompLossyPrecision_C", NULL));
45   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompLossyPrecision_C", NULL));
46   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompButterflyLevels_C", NULL));
47   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompButterflyLevels_C", NULL));
48 
49   PetscFunctionReturn(PETSC_SUCCESS);
50 }
51 
/* Forward the reordering choice to the STRUMPACK solver stored in F->data.
   Assumes MatSTRUMPACKReordering enum values mirror STRUMPACK_REORDERING_STRATEGY — TODO confirm against StrumpackSparseSolver.h. */
static PetscErrorCode MatSTRUMPACKSetReordering_STRUMPACK(Mat F, MatSTRUMPACKReordering reordering)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_reordering_method", STRUMPACK_set_reordering_method(*S, (STRUMPACK_REORDERING_STRATEGY)reordering));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Query STRUMPACK for the currently selected fill-reducing reordering strategy. */
static PetscErrorCode MatSTRUMPACKGetReordering_STRUMPACK(Mat F, MatSTRUMPACKReordering *reordering)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_reordering_method", *reordering = (MatSTRUMPACKReordering)STRUMPACK_reordering_method(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
68 
69 /*@
70   MatSTRUMPACKSetReordering - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> fill-reducing reordering
71 
72   Logically Collective
73 
74   Input Parameters:
75 + F          - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
76 - reordering - the code to be used to find the fill-reducing reordering
77 
78   Options Database Key:
79 . -mat_strumpack_reordering <METIS> - Sparsity reducing matrix reordering, see `MatSTRUMPACKReordering`
80 
81   Level: intermediate
82 
83 .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatSTRUMPACKReordering`, `MatGetFactor()`, `MatSTRUMPACKSetColPerm()`, `MatSTRUMPACKGetReordering()`
84 @*/
PetscErrorCode MatSTRUMPACKSetReordering(Mat F, MatSTRUMPACKReordering reordering)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(F, reordering, 2); /* all ranks must pass the same value */
  /* PetscTryMethod() silently does nothing if F is not a STRUMPACK factor matrix */
  PetscTryMethod(F, "MatSTRUMPACKSetReordering_C", (Mat, MatSTRUMPACKReordering), (F, reordering));
  PetscFunctionReturn(PETSC_SUCCESS);
}
93 /*@
94   MatSTRUMPACKGetReordering - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> fill-reducing reordering
95 
96   Logically Collective
97 
98   Input Parameters:
99 . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
100 
101   Output Parameter:
102 . reordering - the code to be used to find the fill-reducing reordering
103 
104   Level: intermediate
105 
106 .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatSTRUMPACKReordering`, `MatGetFactor()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
107 @*/
108 PetscErrorCode MatSTRUMPACKGetReordering(Mat F, MatSTRUMPACKReordering *reordering)
109 {
110   PetscFunctionBegin;
111   PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
112   PetscTryMethod(F, "MatSTRUMPACKGetReordering_C", (Mat, MatSTRUMPACKReordering *), (F, reordering));
113   PetscValidLogicalCollectiveEnum(F, *reordering, 2);
114   PetscFunctionReturn(PETSC_SUCCESS);
115 }
116 
/* Enable/disable STRUMPACK column matching: cperm maps to max-diagonal-product scaling, else no matching. */
static PetscErrorCode MatSTRUMPACKSetColPerm_STRUMPACK(Mat F, PetscBool cperm)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_matching", STRUMPACK_set_matching(*S, cperm ? STRUMPACK_MATCHING_MAX_DIAGONAL_PRODUCT_SCALING : STRUMPACK_MATCHING_NONE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Report whether any column matching is active (any strategy other than STRUMPACK_MATCHING_NONE counts as true). */
static PetscErrorCode MatSTRUMPACKGetColPerm_STRUMPACK(Mat F, PetscBool *cperm)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_matching", *cperm = (PetscBool)(STRUMPACK_matching(*S) != STRUMPACK_MATCHING_NONE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
133 
134 /*@
135   MatSTRUMPACKSetColPerm - Set whether STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>
136   should try to permute the columns of the matrix in order to get a nonzero diagonal
137 
138   Logically Collective
139 
140   Input Parameters:
141 + F     - the factored matrix obtained by calling `MatGetFactor()`
142 - cperm - `PETSC_TRUE` to permute (internally) the columns of the matrix
143 
144   Options Database Key:
145 . -mat_strumpack_colperm <cperm> - true to use the permutation
146 
147   Level: intermediate
148 
149 .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `MatSTRUMPACKSetReordering()`, `Mat`, `MatGetFactor()`, `MatSTRUMPACKGetColPerm()`
150 @*/
PetscErrorCode MatSTRUMPACKSetColPerm(Mat F, PetscBool cperm)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveBool(F, cperm, 2); /* all ranks must pass the same value */
  /* PetscTryMethod() silently does nothing if F is not a STRUMPACK factor matrix */
  PetscTryMethod(F, "MatSTRUMPACKSetColPerm_C", (Mat, PetscBool), (F, cperm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
159 /*@
160   MatSTRUMPACKGetColPerm - Get whether STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>
161   will try to permute the columns of the matrix in order to get a nonzero diagonal
162 
163   Logically Collective
164 
165   Input Parameters:
166 . F - the factored matrix obtained by calling `MatGetFactor()`
167 
168   Output Parameter:
169 . cperm - Indicates whether STRUMPACK will permute columns
170 
171   Level: intermediate
172 
173 .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `MatSTRUMPACKSetReordering()`, `Mat`, `MatGetFactor()`, `MatSTRUMPACKSetColPerm()`
174 @*/
175 PetscErrorCode MatSTRUMPACKGetColPerm(Mat F, PetscBool *cperm)
176 {
177   PetscFunctionBegin;
178   PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
179   PetscTryMethod(F, "MatSTRUMPACKGetColPerm_C", (Mat, PetscBool *), (F, cperm));
180   PetscValidLogicalCollectiveBool(F, *cperm, 2);
181   PetscFunctionReturn(PETSC_SUCCESS);
182 }
183 
/* Enable or disable GPU offload in STRUMPACK. When the library was built without any GPU backend a
   warning is printed; note PETSC_COMM_SELF means every rank prints it. */
static PetscErrorCode MatSTRUMPACKSetGPU_STRUMPACK(Mat F, PetscBool gpu)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  if (gpu) {
#if !(defined(STRUMPACK_USE_CUDA) || defined(STRUMPACK_USE_HIP) || defined(STRUMPACK_USE_SYCL))
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "  Warning: strumpack was not configured with GPU support\n"));
#endif
    PetscStackCallExternalVoid("STRUMPACK_enable_gpu", STRUMPACK_enable_gpu(*S));
  } else PetscStackCallExternalVoid("STRUMPACK_disable_gpu", STRUMPACK_disable_gpu(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Report whether STRUMPACK will attempt GPU acceleration. */
static PetscErrorCode MatSTRUMPACKGetGPU_STRUMPACK(Mat F, PetscBool *gpu)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_use_gpu", *gpu = (PetscBool)STRUMPACK_use_gpu(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
205 
206 /*@
207   MatSTRUMPACKSetGPU - Set whether STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>
208   should enable GPU acceleration (not supported for all compression types)
209 
210   Logically Collective
211 
212   Input Parameters:
213 + F   - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
214 - gpu - whether or not to use GPU acceleration
215 
216   Options Database Key:
217 . -mat_strumpack_gpu <gpu> - true to use gpu offload
218 
219   Level: intermediate
220 
221 .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKGetGPU()`
222 @*/
PetscErrorCode MatSTRUMPACKSetGPU(Mat F, PetscBool gpu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveBool(F, gpu, 2); /* all ranks must pass the same value */
  /* PetscTryMethod() silently does nothing if F is not a STRUMPACK factor matrix */
  PetscTryMethod(F, "MatSTRUMPACKSetGPU_C", (Mat, PetscBool), (F, gpu));
  PetscFunctionReturn(PETSC_SUCCESS);
}
231 /*@
232   MatSTRUMPACKGetGPU - Get whether STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>
233   will try to use GPU acceleration (not supported for all compression types)
234 
235   Logically Collective
236 
237   Input Parameters:
238 . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
239 
240   Output Parameter:
241 . gpu - whether or not STRUMPACK will try to use GPU acceleration
242 
243   Level: intermediate
244 
245 .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKSetGPU()`
246 @*/
247 PetscErrorCode MatSTRUMPACKGetGPU(Mat F, PetscBool *gpu)
248 {
249   PetscFunctionBegin;
250   PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
251   PetscTryMethod(F, "MatSTRUMPACKGetGPU_C", (Mat, PetscBool *), (F, gpu));
252   PetscValidLogicalCollectiveBool(F, *gpu, 2);
253   PetscFunctionReturn(PETSC_SUCCESS);
254 }
255 
/* Select the rank-structured compression scheme. HODLR-family schemes need ButterflyPACK and the
   ZFP-family schemes need ZFP; reject unsupported choices up front with a configure hint. */
static PetscErrorCode MatSTRUMPACKSetCompression_STRUMPACK(Mat F, MatSTRUMPACKCompressionType comp)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
#if !defined(STRUMPACK_USE_BPACK)
  PetscCheck(comp != MAT_STRUMPACK_COMPRESSION_TYPE_HODLR && comp != MAT_STRUMPACK_COMPRESSION_TYPE_BLR_HODLR && comp != MAT_STRUMPACK_COMPRESSION_TYPE_ZFP_BLR_HODLR, PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Compression scheme requires ButterflyPACK, please reconfigure with --download-butterflypack");
#endif
#if !defined(STRUMPACK_USE_ZFP)
  PetscCheck(comp != MAT_STRUMPACK_COMPRESSION_TYPE_ZFP_BLR_HODLR && comp != MAT_STRUMPACK_COMPRESSION_TYPE_LOSSLESS && comp != MAT_STRUMPACK_COMPRESSION_TYPE_LOSSY, PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Compression scheme requires ZFP, please reconfigure with --download-zfp");
#endif
  PetscStackCallExternalVoid("STRUMPACK_set_compression", STRUMPACK_set_compression(*S, (STRUMPACK_COMPRESSION_TYPE)comp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Query the active compression scheme. Assumes MatSTRUMPACKCompressionType values mirror
   STRUMPACK_COMPRESSION_TYPE — TODO confirm against StrumpackSparseSolver.h. */
static PetscErrorCode MatSTRUMPACKGetCompression_STRUMPACK(Mat F, MatSTRUMPACKCompressionType *comp)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_compression", *comp = (MatSTRUMPACKCompressionType)STRUMPACK_compression(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
278 
279 /*@
280   MatSTRUMPACKSetCompression - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> compression type
281 
282   Input Parameters:
283 + F    - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
284 - comp - Type of compression to be used in the approximate sparse factorization
285 
286   Options Database Key:
287 . -mat_strumpack_compression <NONE> - Type of rank-structured compression in sparse LU factors (choose one of) NONE HSS BLR HODLR BLR_HODLR ZFP_BLR_HODLR LOSSLESS LOSSY
288 
289   Level: intermediate
290 
291   Note:
292   Default for `comp` is `MAT_STRUMPACK_COMPRESSION_TYPE_NONE` for `-pc_type lu` and `MAT_STRUMPACK_COMPRESSION_TYPE_BLR`
293   for `-pc_type ilu`
294 
295 .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKCompressionType`, `MatSTRUMPACKGetCompression()`
296 @*/
PetscErrorCode MatSTRUMPACKSetCompression(Mat F, MatSTRUMPACKCompressionType comp)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(F, comp, 2); /* all ranks must pass the same value */
  /* PetscTryMethod() silently does nothing if F is not a STRUMPACK factor matrix */
  PetscTryMethod(F, "MatSTRUMPACKSetCompression_C", (Mat, MatSTRUMPACKCompressionType), (F, comp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
305 /*@
306   MatSTRUMPACKGetCompression - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> compression type
307 
308   Input Parameters:
309 . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
310 
311   Output Parameter:
312 . comp - Type of compression to be used in the approximate sparse factorization
313 
314   Level: intermediate
315 
316   Note:
317   Default is `MAT_STRUMPACK_COMPRESSION_TYPE_NONE` for `-pc_type lu` and `MAT_STRUMPACK_COMPRESSION_TYPE_BLR` for `-pc_type ilu`
318 
319 .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKCompressionType`, `MatSTRUMPACKSetCompression()`
320 @*/
321 PetscErrorCode MatSTRUMPACKGetCompression(Mat F, MatSTRUMPACKCompressionType *comp)
322 {
323   PetscFunctionBegin;
324   PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
325   PetscTryMethod(F, "MatSTRUMPACKGetCompression_C", (Mat, MatSTRUMPACKCompressionType *), (F, comp));
326   PetscValidLogicalCollectiveEnum(F, *comp, 2);
327   PetscFunctionReturn(PETSC_SUCCESS);
328 }
329 
/* Forward the relative compression tolerance to STRUMPACK. */
static PetscErrorCode MatSTRUMPACKSetCompRelTol_STRUMPACK(Mat F, PetscReal rtol)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_compression_rel_tol", STRUMPACK_set_compression_rel_tol(*S, rtol));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Query the relative compression tolerance from STRUMPACK. */
static PetscErrorCode MatSTRUMPACKGetCompRelTol_STRUMPACK(Mat F, PetscReal *rtol)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_compression_rel_tol", *rtol = (PetscReal)STRUMPACK_compression_rel_tol(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
346 
347 /*@
348   MatSTRUMPACKSetCompRelTol - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> relative tolerance for compression
349 
350   Logically Collective
351 
352   Input Parameters:
353 + F    - the factored matrix obtained by calling `MatGetFactor()`
354 - rtol - relative compression tolerance
355 
356   Options Database Key:
. -mat_strumpack_compression_rel_tol <1e-4> - Relative compression tolerance, when using `-pc_type ilu`
358 
359   Level: intermediate
360 
361 .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKGetCompRelTol()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
362 @*/
PetscErrorCode MatSTRUMPACKSetCompRelTol(Mat F, PetscReal rtol)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveReal(F, rtol, 2); /* all ranks must pass the same value */
  /* PetscTryMethod() silently does nothing if F is not a STRUMPACK factor matrix */
  PetscTryMethod(F, "MatSTRUMPACKSetCompRelTol_C", (Mat, PetscReal), (F, rtol));
  PetscFunctionReturn(PETSC_SUCCESS);
}
371 /*@
372   MatSTRUMPACKGetCompRelTol - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> relative tolerance for compression
373 
374   Logically Collective
375 
376   Input Parameters:
377 . F - the factored matrix obtained by calling `MatGetFactor()`
378 
379   Output Parameter:
380 . rtol - relative compression tolerance
381 
382   Level: intermediate
383 
384 .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKSetCompRelTol()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
385 @*/
386 PetscErrorCode MatSTRUMPACKGetCompRelTol(Mat F, PetscReal *rtol)
387 {
388   PetscFunctionBegin;
389   PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
390   PetscTryMethod(F, "MatSTRUMPACKGetCompRelTol_C", (Mat, PetscReal *), (F, rtol));
391   PetscValidLogicalCollectiveReal(F, *rtol, 2);
392   PetscFunctionReturn(PETSC_SUCCESS);
393 }
394 
/* Forward the absolute compression tolerance to STRUMPACK. */
static PetscErrorCode MatSTRUMPACKSetCompAbsTol_STRUMPACK(Mat F, PetscReal atol)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_compression_abs_tol", STRUMPACK_set_compression_abs_tol(*S, atol));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Query the absolute compression tolerance from STRUMPACK. */
static PetscErrorCode MatSTRUMPACKGetCompAbsTol_STRUMPACK(Mat F, PetscReal *atol)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_compression_abs_tol", *atol = (PetscReal)STRUMPACK_compression_abs_tol(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
411 
412 /*@
413   MatSTRUMPACKSetCompAbsTol - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> absolute tolerance for compression
414 
415   Logically Collective
416 
417   Input Parameters:
418 + F    - the factored matrix obtained by calling `MatGetFactor()`
419 - atol - absolute compression tolerance
420 
421   Options Database Key:
. -mat_strumpack_compression_abs_tol <1e-10> - Absolute compression tolerance, when using `-pc_type ilu`
423 
424   Level: intermediate
425 
426 .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKGetCompAbsTol()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
427 @*/
PetscErrorCode MatSTRUMPACKSetCompAbsTol(Mat F, PetscReal atol)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveReal(F, atol, 2); /* all ranks must pass the same value */
  /* PetscTryMethod() silently does nothing if F is not a STRUMPACK factor matrix */
  PetscTryMethod(F, "MatSTRUMPACKSetCompAbsTol_C", (Mat, PetscReal), (F, atol));
  PetscFunctionReturn(PETSC_SUCCESS);
}
436 /*@
437   MatSTRUMPACKGetCompAbsTol - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> absolute tolerance for compression
438 
439   Logically Collective
440 
441   Input Parameters:
442 . F - the factored matrix obtained by calling `MatGetFactor()`
443 
444   Output Parameter:
445 . atol - absolute compression tolerance
446 
447   Level: intermediate
448 
449 .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKSetCompAbsTol()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
450 @*/
451 PetscErrorCode MatSTRUMPACKGetCompAbsTol(Mat F, PetscReal *atol)
452 {
453   PetscFunctionBegin;
454   PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
455   PetscTryMethod(F, "MatSTRUMPACKGetCompAbsTol_C", (Mat, PetscReal *), (F, atol));
456   PetscValidLogicalCollectiveReal(F, *atol, 2);
457   PetscFunctionReturn(PETSC_SUCCESS);
458 }
459 
/* Forward the rank-structured leaf (diagonal block) size to STRUMPACK. */
static PetscErrorCode MatSTRUMPACKSetCompLeafSize_STRUMPACK(Mat F, PetscInt leaf_size)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_compression_leaf_size", STRUMPACK_set_compression_leaf_size(*S, leaf_size));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Query the rank-structured leaf (diagonal block) size from STRUMPACK. */
static PetscErrorCode MatSTRUMPACKGetCompLeafSize_STRUMPACK(Mat F, PetscInt *leaf_size)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_compression_leaf_size", *leaf_size = (PetscInt)STRUMPACK_compression_leaf_size(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
476 
477 /*@
478   MatSTRUMPACKSetCompLeafSize - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> leaf size for HSS, BLR, HODLR...
479 
480   Logically Collective
481 
482   Input Parameters:
483 + F         - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
484 - leaf_size - Size of diagonal blocks in rank-structured approximation
485 
486   Options Database Key:
. -mat_strumpack_compression_leaf_size - Size of diagonal blocks in rank-structured approximation, when using `-pc_type ilu`
488 
489   Level: intermediate
490 
491 .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKGetCompLeafSize()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
492 @*/
PetscErrorCode MatSTRUMPACKSetCompLeafSize(Mat F, PetscInt leaf_size)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveInt(F, leaf_size, 2); /* all ranks must pass the same value */
  /* PetscTryMethod() silently does nothing if F is not a STRUMPACK factor matrix */
  PetscTryMethod(F, "MatSTRUMPACKSetCompLeafSize_C", (Mat, PetscInt), (F, leaf_size));
  PetscFunctionReturn(PETSC_SUCCESS);
}
501 /*@
502   MatSTRUMPACKGetCompLeafSize - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> leaf size for HSS, BLR, HODLR...
503 
504   Logically Collective
505 
506   Input Parameters:
507 . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
508 
509   Output Parameter:
510 . leaf_size - Size of diagonal blocks in rank-structured approximation
511 
512   Level: intermediate
513 
514 .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKSetCompLeafSize()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
515 @*/
516 PetscErrorCode MatSTRUMPACKGetCompLeafSize(Mat F, PetscInt *leaf_size)
517 {
518   PetscFunctionBegin;
519   PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
520   PetscTryMethod(F, "MatSTRUMPACKGetCompLeafSize_C", (Mat, PetscInt *), (F, leaf_size));
521   PetscValidLogicalCollectiveInt(F, *leaf_size, 2);
522   PetscFunctionReturn(PETSC_SUCCESS);
523 }
524 
/* Pass the structured-mesh dimensions to STRUMPACK for the GEOMETRIC nested-dissection ordering.
   For each dimension: PETSC_DECIDE/PETSC_DEFAULT is clamped to 1 (lower-dimensional meshes);
   any other value < 1 is an error. */
static PetscErrorCode MatSTRUMPACKSetGeometricNxyz_STRUMPACK(Mat F, PetscInt nx, PetscInt ny, PetscInt nz)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  if (nx < 1) {
    PetscCheck(nx == PETSC_DECIDE || nx == PETSC_DEFAULT, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_OUTOFRANGE, "nx < 1");
    nx = 1;
  }
  PetscStackCallExternalVoid("STRUMPACK_set_nx", STRUMPACK_set_nx(*S, nx));
  if (ny < 1) {
    PetscCheck(ny == PETSC_DECIDE || ny == PETSC_DEFAULT, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_OUTOFRANGE, "ny < 1");
    ny = 1;
  }
  PetscStackCallExternalVoid("STRUMPACK_set_ny", STRUMPACK_set_ny(*S, ny));
  if (nz < 1) {
    PetscCheck(nz == PETSC_DECIDE || nz == PETSC_DEFAULT, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_OUTOFRANGE, "nz < 1");
    nz = 1;
  }
  PetscStackCallExternalVoid("STRUMPACK_set_nz", STRUMPACK_set_nz(*S, nz));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Forward the number of components (dofs per grid point) to STRUMPACK, for the GEOMETRIC ordering. */
static PetscErrorCode MatSTRUMPACKSetGeometricComponents_STRUMPACK(Mat F, PetscInt nc)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_components", STRUMPACK_set_components(*S, nc));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Forward the separator width to STRUMPACK, for the GEOMETRIC ordering. */
static PetscErrorCode MatSTRUMPACKSetGeometricWidth_STRUMPACK(Mat F, PetscInt w)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_separator_width", STRUMPACK_set_separator_width(*S, w));
  PetscFunctionReturn(PETSC_SUCCESS);
}
563 
564 /*@
565   MatSTRUMPACKSetGeometricNxyz - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> mesh x, y and z dimensions, for use with GEOMETRIC ordering.
566 
567   Logically Collective
568 
569   Input Parameters:
570 + F  - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
571 . nx - x dimension of the mesh
572 . ny - y dimension of the mesh
573 - nz - z dimension of the mesh
574 
575   Level: intermediate
576 
577   Note:
578   If the mesh is two (or one) dimensional one can use 1, `PETSC_DECIDE` or `PETSC_DEFAULT`
579   for the missing z (and y) dimensions.
580 
581 .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`
582 @*/
PetscErrorCode MatSTRUMPACKSetGeometricNxyz(Mat F, PetscInt nx, PetscInt ny, PetscInt nz)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveInt(F, nx, 2); /* all ranks must pass the same dimensions */
  PetscValidLogicalCollectiveInt(F, ny, 3);
  PetscValidLogicalCollectiveInt(F, nz, 4);
  /* PetscTryMethod() silently does nothing if F is not a STRUMPACK factor matrix */
  PetscTryMethod(F, "MatSTRUMPACKSetGeometricNxyz_C", (Mat, PetscInt, PetscInt, PetscInt), (F, nx, ny, nz));
  PetscFunctionReturn(PETSC_SUCCESS);
}
593 /*@
594   MatSTRUMPACKSetGeometricComponents - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>
595   number of degrees of freedom per mesh point, for use with GEOMETRIC ordering.
596 
597   Logically Collective
598 
599   Input Parameters:
600 + F  - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
601 - nc - Number of components/dof's per grid point
602 
603   Options Database Key:
604 . -mat_strumpack_geometric_components <1> - Number of components per mesh point, for geometric nested dissection ordering
605 
606   Level: intermediate
607 
608 .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`
609 @*/
PetscErrorCode MatSTRUMPACKSetGeometricComponents(Mat F, PetscInt nc)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveInt(F, nc, 2); /* all ranks must pass the same value */
  /* PetscTryMethod() silently does nothing if F is not a STRUMPACK factor matrix */
  PetscTryMethod(F, "MatSTRUMPACKSetGeometricComponents_C", (Mat, PetscInt), (F, nc));
  PetscFunctionReturn(PETSC_SUCCESS);
}
618 /*@
619   MatSTRUMPACKSetGeometricWidth - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> width of the separator, for use with GEOMETRIC ordering.
620 
621   Logically Collective
622 
623   Input Parameters:
624 + F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
625 - w - width of the separator
626 
627   Options Database Key:
628 . -mat_strumpack_geometric_width <1> - Width of the separator of the mesh, for geometric nested dissection ordering
629 
630   Level: intermediate
631 
632 .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`
633 @*/
PetscErrorCode MatSTRUMPACKSetGeometricWidth(Mat F, PetscInt w)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveInt(F, w, 2); /* all ranks must pass the same value */
  /* PetscTryMethod() silently does nothing if F is not a STRUMPACK factor matrix */
  PetscTryMethod(F, "MatSTRUMPACKSetGeometricWidth_C", (Mat, PetscInt), (F, w));
  PetscFunctionReturn(PETSC_SUCCESS);
}
642 
/* Forward the minimum separator size for low-rank compression to STRUMPACK. */
static PetscErrorCode MatSTRUMPACKSetCompMinSepSize_STRUMPACK(Mat F, PetscInt min_sep_size)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_compression_min_sep_size", STRUMPACK_set_compression_min_sep_size(*S, min_sep_size));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Query the minimum separator size for low-rank compression from STRUMPACK. */
static PetscErrorCode MatSTRUMPACKGetCompMinSepSize_STRUMPACK(Mat F, PetscInt *min_sep_size)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_compression_min_sep_size", *min_sep_size = (PetscInt)STRUMPACK_compression_min_sep_size(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
659 
660 /*@
661   MatSTRUMPACKSetCompMinSepSize - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> minimum separator size for low-rank approximation
662 
663   Logically Collective
664 
665   Input Parameters:
666 + F            - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
667 - min_sep_size - minimum dense matrix size for low-rank approximation
668 
669   Options Database Key:
670 . -mat_strumpack_compression_min_sep_size <min_sep_size> - Minimum size of dense sub-block for low-rank compression
671 
672   Level: intermediate
673 
674 .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKGetCompMinSepSize()`
675 @*/
PetscErrorCode MatSTRUMPACKSetCompMinSepSize(Mat F, PetscInt min_sep_size)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveInt(F, min_sep_size, 2); /* all ranks must pass the same value */
  /* PetscTryMethod() silently does nothing if F is not a STRUMPACK factor matrix */
  PetscTryMethod(F, "MatSTRUMPACKSetCompMinSepSize_C", (Mat, PetscInt), (F, min_sep_size));
  PetscFunctionReturn(PETSC_SUCCESS);
}
684 /*@
685   MatSTRUMPACKGetCompMinSepSize - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> minimum separator size for low-rank approximation
686 
687   Logically Collective
688 
689   Input Parameters:
690 . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
691 
692   Output Parameter:
693 . min_sep_size - minimum dense matrix size for low-rank approximation
694 
695   Level: intermediate
696 
697 .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKSetCompMinSepSize()`
698 @*/
699 PetscErrorCode MatSTRUMPACKGetCompMinSepSize(Mat F, PetscInt *min_sep_size)
700 {
701   PetscFunctionBegin;
702   PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
703   PetscTryMethod(F, "MatSTRUMPACKGetCompMinSepSize_C", (Mat, PetscInt *), (F, min_sep_size));
704   PetscValidLogicalCollectiveInt(F, *min_sep_size, 2);
705   PetscFunctionReturn(PETSC_SUCCESS);
706 }
707 
/* Forward the lossy (ZFP) compression precision, in bitplanes, to STRUMPACK. */
static PetscErrorCode MatSTRUMPACKSetCompLossyPrecision_STRUMPACK(Mat F, PetscInt lossy_prec)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_compression_lossy_precision", STRUMPACK_set_compression_lossy_precision(*S, lossy_prec));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Query the lossy (ZFP) compression precision, in bitplanes, from STRUMPACK. */
static PetscErrorCode MatSTRUMPACKGetCompLossyPrecision_STRUMPACK(Mat F, PetscInt *lossy_prec)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_compression_lossy_precision", *lossy_prec = (PetscInt)STRUMPACK_compression_lossy_precision(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
724 
725 /*@
726   MatSTRUMPACKSetCompLossyPrecision - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> precision for lossy compression (requires ZFP support)
727 
728   Logically Collective
729 
730   Input Parameters:
731 + F          - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
732 - lossy_prec - Number of bitplanes to use in lossy compression
733 
734   Options Database Key:
735 . -mat_strumpack_compression_lossy_precision <lossy_prec> - Precision when using lossy compression [1-64], when using `-pctype ilu -mat_strumpack_compression MAT_STRUMPACK_COMPRESSION_TYPE_LOSSY`
736 
737   Level: intermediate
738 
739 .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKGetCompLossyPrecision()`
740 @*/
741 PetscErrorCode MatSTRUMPACKSetCompLossyPrecision(Mat F, PetscInt lossy_prec)
742 {
743   PetscFunctionBegin;
744   PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
745   PetscValidLogicalCollectiveInt(F, lossy_prec, 2);
746   PetscTryMethod(F, "MatSTRUMPACKSetCompLossyPrecision_C", (Mat, PetscInt), (F, lossy_prec));
747   PetscFunctionReturn(PETSC_SUCCESS);
748 }
749 /*@
750   MatSTRUMPACKGetCompLossyPrecision - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> precision for lossy compression (requires ZFP support)
751 
752   Logically Collective
753 
754   Input Parameters:
755 . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
756 
757   Output Parameter:
758 . lossy_prec - Number of bitplanes to use in lossy compression
759 
760   Level: intermediate
761 
762 .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKSetCompLossyPrecision()`
763 @*/
764 PetscErrorCode MatSTRUMPACKGetCompLossyPrecision(Mat F, PetscInt *lossy_prec)
765 {
766   PetscFunctionBegin;
767   PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
768   PetscTryMethod(F, "MatSTRUMPACKGetCompLossyPrecision_C", (Mat, PetscInt *), (F, lossy_prec));
769   PetscValidLogicalCollectiveInt(F, *lossy_prec, 2);
770   PetscFunctionReturn(PETSC_SUCCESS);
771 }
772 
773 static PetscErrorCode MatSTRUMPACKSetCompButterflyLevels_STRUMPACK(Mat F, PetscInt bfly_lvls)
774 {
775   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;
776 
777   PetscFunctionBegin;
778   PetscStackCallExternalVoid("STRUMPACK_set_compression_butterfly_levels", STRUMPACK_set_compression_butterfly_levels(*S, bfly_lvls));
779   PetscFunctionReturn(PETSC_SUCCESS);
780 }
781 static PetscErrorCode MatSTRUMPACKGetCompButterflyLevels_STRUMPACK(Mat F, PetscInt *bfly_lvls)
782 {
783   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;
784 
785   PetscFunctionBegin;
786   PetscStackCallExternalVoid("STRUMPACK_compression_butterfly_levels", *bfly_lvls = (PetscInt)STRUMPACK_compression_butterfly_levels(*S));
787   PetscFunctionReturn(PETSC_SUCCESS);
788 }
789 
790 /*@
791   MatSTRUMPACKSetCompButterflyLevels - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>
792   number of butterfly levels in HODLR compression (requires ButterflyPACK support)
793 
794   Logically Collective
795 
796   Input Parameters:
797 + F         - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
798 - bfly_lvls - Number of levels of butterfly compression in HODLR compression
799 
800   Options Database Key:
801 . -mat_strumpack_compression_butterfly_levels <bfly_lvls> - Number of levels in the hierarchically off-diagonal matrix for which to use butterfly,
802                                                             when using `-pctype ilu`, (BLR_)HODLR compression
803 
804   Level: intermediate
805 
806 .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKGetCompButterflyLevels()`
807 @*/
808 PetscErrorCode MatSTRUMPACKSetCompButterflyLevels(Mat F, PetscInt bfly_lvls)
809 {
810   PetscFunctionBegin;
811   PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
812   PetscValidLogicalCollectiveInt(F, bfly_lvls, 2);
813   PetscTryMethod(F, "MatSTRUMPACKSetButterflyLevels_C", (Mat, PetscInt), (F, bfly_lvls));
814   PetscFunctionReturn(PETSC_SUCCESS);
815 }
816 /*@
817   MatSTRUMPACKGetCompButterflyLevels - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>
818   number of butterfly levels in HODLR compression (requires ButterflyPACK support)
819 
820   Logically Collective
821 
822   Input Parameters:
823 . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
824 
825   Output Parameter:
826 . bfly_lvls - Number of levels of butterfly compression in HODLR compression
827 
828   Level: intermediate
829 
830 .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKSetCompButterflyLevels()`
831 @*/
832 PetscErrorCode MatSTRUMPACKGetCompButterflyLevels(Mat F, PetscInt *bfly_lvls)
833 {
834   PetscFunctionBegin;
835   PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
836   PetscTryMethod(F, "MatSTRUMPACKGetButterflyLevels_C", (Mat, PetscInt *), (F, bfly_lvls));
837   PetscValidLogicalCollectiveInt(F, *bfly_lvls, 2);
838   PetscFunctionReturn(PETSC_SUCCESS);
839 }
840 
841 static PetscErrorCode MatSolve_STRUMPACK(Mat A, Vec b_mpi, Vec x)
842 {
843   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)A->data;
844   STRUMPACK_RETURN_CODE   sp_err;
845   const PetscScalar      *bptr;
846   PetscScalar            *xptr;
847 
848   PetscFunctionBegin;
849   PetscCall(VecGetArray(x, &xptr));
850   PetscCall(VecGetArrayRead(b_mpi, &bptr));
851 
852   PetscStackCallExternalVoid("STRUMPACK_solve", sp_err = STRUMPACK_solve(*S, (PetscScalar *)bptr, xptr, 0));
853   switch (sp_err) {
854   case STRUMPACK_SUCCESS:
855     break;
856   case STRUMPACK_MATRIX_NOT_SET: {
857     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix was not set");
858     break;
859   }
860   case STRUMPACK_REORDERING_ERROR: {
861     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix reordering failed");
862     break;
863   }
864   default:
865     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: solve failed");
866   }
867   PetscCall(VecRestoreArray(x, &xptr));
868   PetscCall(VecRestoreArrayRead(b_mpi, &bptr));
869   PetscFunctionReturn(PETSC_SUCCESS);
870 }
871 
872 static PetscErrorCode MatMatSolve_STRUMPACK(Mat A, Mat B_mpi, Mat X)
873 {
874   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)A->data;
875   STRUMPACK_RETURN_CODE   sp_err;
876   PetscBool               flg;
877   PetscInt                m = A->rmap->n, nrhs;
878   const PetscScalar      *bptr;
879   PetscScalar            *xptr;
880 
881   PetscFunctionBegin;
882   PetscCall(PetscObjectTypeCompareAny((PetscObject)B_mpi, &flg, MATSEQDENSE, MATMPIDENSE, NULL));
883   PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix B must be MATDENSE matrix");
884   PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &flg, MATSEQDENSE, MATMPIDENSE, NULL));
885   PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix X must be MATDENSE matrix");
886 
887   PetscCall(MatGetSize(B_mpi, NULL, &nrhs));
888   PetscCall(MatDenseGetArray(X, &xptr));
889   PetscCall(MatDenseGetArrayRead(B_mpi, &bptr));
890 
891   PetscStackCallExternalVoid("STRUMPACK_matsolve", sp_err = STRUMPACK_matsolve(*S, nrhs, bptr, m, xptr, m, 0));
892   switch (sp_err) {
893   case STRUMPACK_SUCCESS:
894     break;
895   case STRUMPACK_MATRIX_NOT_SET: {
896     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix was not set");
897     break;
898   }
899   case STRUMPACK_REORDERING_ERROR: {
900     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix reordering failed");
901     break;
902   }
903   default:
904     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: solve failed");
905   }
906   PetscCall(MatDenseRestoreArrayRead(B_mpi, &bptr));
907   PetscCall(MatDenseRestoreArray(X, &xptr));
908 
909   PetscFunctionReturn(PETSC_SUCCESS);
910 }
911 
912 static PetscErrorCode MatView_Info_STRUMPACK(Mat A, PetscViewer viewer)
913 {
914   PetscFunctionBegin;
915   /* check if matrix is strumpack type */
916   if (A->ops->solve != MatSolve_STRUMPACK) PetscFunctionReturn(PETSC_SUCCESS);
917   PetscCall(PetscViewerASCIIPrintf(viewer, "STRUMPACK sparse solver!\n"));
918   PetscFunctionReturn(PETSC_SUCCESS);
919 }
920 
921 static PetscErrorCode MatView_STRUMPACK(Mat A, PetscViewer viewer)
922 {
923   PetscBool         iascii;
924   PetscViewerFormat format;
925 
926   PetscFunctionBegin;
927   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
928   if (iascii) {
929     PetscCall(PetscViewerGetFormat(viewer, &format));
930     if (format == PETSC_VIEWER_ASCII_INFO) PetscCall(MatView_Info_STRUMPACK(A, viewer));
931   }
932   PetscFunctionReturn(PETSC_SUCCESS);
933 }
934 
935 static PetscErrorCode MatLUFactorNumeric_STRUMPACK(Mat F, Mat A, const MatFactorInfo *info)
936 {
937   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;
938   STRUMPACK_RETURN_CODE   sp_err;
939   Mat                     Aloc;
940   const PetscScalar      *av;
941   const PetscInt         *ai = NULL, *aj = NULL;
942   PetscInt                M = A->rmap->N, m = A->rmap->n, dummy;
943   PetscBool               ismpiaij, isseqaij, flg;
944 
945   PetscFunctionBegin;
946   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &isseqaij));
947   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
948   if (ismpiaij) {
949     PetscCall(MatMPIAIJGetLocalMat(A, MAT_INITIAL_MATRIX, &Aloc));
950   } else if (isseqaij) {
951     PetscCall(PetscObjectReference((PetscObject)A));
952     Aloc = A;
953   } else SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for type %s", ((PetscObject)A)->type_name);
954 
955   PetscCall(MatGetRowIJ(Aloc, 0, PETSC_FALSE, PETSC_FALSE, &dummy, &ai, &aj, &flg));
956   PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "GetRowIJ failed");
957   PetscCall(MatSeqAIJGetArrayRead(Aloc, &av));
958 
959   if (ismpiaij) {
960     const PetscInt *dist = NULL;
961     PetscCall(MatGetOwnershipRanges(A, &dist));
962     PetscStackCallExternalVoid("STRUMPACK_set_distributed_csr_matrix", STRUMPACK_set_distributed_csr_matrix(*S, &m, ai, aj, av, dist, 0));
963   } else if (isseqaij) {
964     PetscStackCallExternalVoid("STRUMPACK_set_csr_matrix", STRUMPACK_set_csr_matrix(*S, &M, ai, aj, av, 0));
965   }
966 
967   PetscCall(MatRestoreRowIJ(Aloc, 0, PETSC_FALSE, PETSC_FALSE, &dummy, &ai, &aj, &flg));
968   PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "RestoreRowIJ failed");
969   PetscCall(MatSeqAIJRestoreArrayRead(Aloc, &av));
970   PetscCall(MatDestroy(&Aloc));
971 
972   /* Reorder and Factor the matrix. */
973   /* TODO figure out how to avoid reorder if the matrix values changed, but the pattern remains the same. */
974   PetscStackCallExternalVoid("STRUMPACK_reorder", sp_err = STRUMPACK_reorder(*S));
975   PetscStackCallExternalVoid("STRUMPACK_factor", sp_err = STRUMPACK_factor(*S));
976   switch (sp_err) {
977   case STRUMPACK_SUCCESS:
978     break;
979   case STRUMPACK_MATRIX_NOT_SET: {
980     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix was not set");
981     break;
982   }
983   case STRUMPACK_REORDERING_ERROR: {
984     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix reordering failed");
985     break;
986   }
987   default:
988     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: factorization failed");
989   }
990   F->assembled    = PETSC_TRUE;
991   F->preallocated = PETSC_TRUE;
992   PetscFunctionReturn(PETSC_SUCCESS);
993 }
994 
static PetscErrorCode MatLUFactorSymbolic_STRUMPACK(Mat F, Mat A, IS r, IS c, const MatFactorInfo *info)
{
  PetscFunctionBegin;
  /* STRUMPACK performs its own analysis inside STRUMPACK_reorder()/STRUMPACK_factor(), so the
     symbolic phase only installs the numeric-factorization and solve callbacks; the permutations
     r and c and the factor info are intentionally unused here */
  F->ops->lufactornumeric = MatLUFactorNumeric_STRUMPACK;
  F->ops->solve           = MatSolve_STRUMPACK;
  F->ops->matsolve        = MatMatSolve_STRUMPACK;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1003 
static PetscErrorCode MatFactorGetSolverType_aij_strumpack(Mat A, MatSolverType *type)
{
  PetscFunctionBegin;
  /* Report "strumpack" as the solver package backing this factored matrix */
  *type = MATSOLVERSTRUMPACK;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1010 
1011 /*MC
1012   MATSOLVERSTRUMPACK = "strumpack" - A solver package providing a direct sparse solver (PCLU)
1013   and a preconditioner (PCILU) using low-rank compression via the external package STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>.
1014 
1015   Use `./configure --download-strumpack --download-metis` to have PETSc installed with STRUMPACK.
1016 
1017   For full functionality, add `--download-slate --download-magma --download-parmetis --download-ptscotch --download-zfp --download-butterflypack`.
1018   SLATE provides GPU support in the multi-GPU setting, providing ScaLAPACK functionality but with GPU acceleration.
  MAGMA can optionally be used for on node GPU support instead of cuBLAS/cuSOLVER, and performs slightly better.
1020   ParMETIS and PTScotch can be used for parallel fill-reducing ordering.
1021   ZFP is used for floating point compression of the sparse factors (LOSSY or LOSSLESS compression).
1022   ButterflyPACK is used for HODLR (Hierarchically Off-Diagonal Low Rank) and HODBF (Hierarchically Off-Diagonal Butterfly) compression of the sparse factors.
1023 
1024   Options Database Keys:
1025 + -mat_strumpack_verbose                      - Enable verbose output
1026 . -mat_strumpack_compression                  - Type of rank-structured compression in sparse LU factors (choose one of) NONE HSS BLR HODLR BLR_HODLR ZFP_BLR_HODLR LOSSLESS LOSSY
1027 . -mat_strumpack_compression_rel_tol          - Relative compression tolerance, when using `-pctype ilu`
1028 . -mat_strumpack_compression_abs_tol          - Absolute compression tolerance, when using `-pctype ilu`
1029 . -mat_strumpack_compression_min_sep_size     - Minimum size of separator for rank-structured compression, when using `-pctype ilu`
1030 . -mat_strumpack_compression_leaf_size        - Size of diagonal blocks in rank-structured approximation, when using `-pctype ilu`
1031 . -mat_strumpack_compression_lossy_precision  - Precision when using lossy compression [1-64], when using `-pctype ilu`, compression LOSSY (requires ZFP support)
1032 . -mat_strumpack_compression_butterfly_levels - Number of levels in the hierarchically off-diagonal matrix for which to use butterfly, when using `-pctype ilu`, (BLR_)HODLR compression (requires ButterflyPACK support)
1033 . -mat_strumpack_gpu                          - Enable GPU acceleration in numerical factorization (not supported for all compression types)
1034 . -mat_strumpack_colperm <TRUE>               - Permute matrix to make diagonal nonzeros
1035 . -mat_strumpack_reordering <METIS>           - Sparsity reducing matrix reordering (choose one of) NATURAL METIS PARMETIS SCOTCH PTSCOTCH RCM GEOMETRIC AMD MMD AND MLF SPECTRAL
1036 . -mat_strumpack_geometric_xyz <1,1,1>        - Mesh x,y,z dimensions, for use with GEOMETRIC ordering
1037 . -mat_strumpack_geometric_components <1>     - Number of components per mesh point, for geometric nested dissection ordering
1038 . -mat_strumpack_geometric_width <1>          - Width of the separator of the mesh, for geometric nested dissection ordering
1039 - -mat_strumpack_metis_nodeNDP                - Use METIS_NodeNDP instead of METIS_NodeND, for a more balanced tree
1040 
1041  Level: beginner
1042 
1043  Notes:
1044  Recommended use is 1 MPI process per GPU.
1045 
1046  Use `-pc_type lu` `-pc_factor_mat_solver_type strumpack` to use this as an exact (direct) solver.
1047 
 Use `-pc_type ilu` `-pc_factor_mat_solver_type strumpack` to enable low-rank compression (i.e., use as a preconditioner), by default using block low rank (BLR).
1049 
1050  Works with `MATAIJ` matrices
1051 
 HODLR, BLR_HODLR and ZFP_BLR_HODLR compression require STRUMPACK to be configured with ButterflyPACK support (`--download-butterflypack`).
1053 
1054  LOSSY, LOSSLESS and ZFP_BLR_HODLR compression require STRUMPACK to be configured with ZFP support (`--download-zfp`).
1055 
1056 .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `PCLU`, `PCILU`, `MATSOLVERSUPERLU_DIST`, `MATSOLVERMUMPS`, `PCFactorSetMatSolverType()`, `MatSolverType`,
1057           `MatGetFactor()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKReordering`, `MatSTRUMPACKCompressionType`, `MatSTRUMPACKSetColPerm()`.
1058 M*/
1059 static PetscErrorCode MatGetFactor_aij_strumpack(Mat A, MatFactorType ftype, Mat *F)
1060 {
1061   Mat       B;
1062   PetscInt  M = A->rmap->N, N = A->cmap->N;
1063   PetscBool verb, flg, set;
1064   PetscReal ctol;
1065   PetscInt  min_sep_size, leaf_size, nxyz[3], nrdims, nc, w;
1066 #if defined(STRUMPACK_USE_ZFP)
1067   PetscInt lossy_prec;
1068 #endif
1069 #if defined(STRUMPACK_USE_BPACK)
1070   PetscInt bfly_lvls;
1071 #endif
1072 #if defined(STRUMPACK_USE_SLATE_SCALAPACK)
1073   PetscMPIInt mpithreads;
1074 #endif
1075   STRUMPACK_SparseSolver       *S;
1076   STRUMPACK_INTERFACE           iface;
1077   STRUMPACK_REORDERING_STRATEGY ndcurrent, ndvalue;
1078   STRUMPACK_COMPRESSION_TYPE    compcurrent, compvalue;
1079   const STRUMPACK_PRECISION     table[2][2][2] = {
1080     {{STRUMPACK_FLOATCOMPLEX_64, STRUMPACK_DOUBLECOMPLEX_64}, {STRUMPACK_FLOAT_64, STRUMPACK_DOUBLE_64}},
1081     {{STRUMPACK_FLOATCOMPLEX, STRUMPACK_DOUBLECOMPLEX},       {STRUMPACK_FLOAT, STRUMPACK_DOUBLE}      }
1082   };
1083   const STRUMPACK_PRECISION prec               = table[(sizeof(PetscInt) == 8) ? 0 : 1][(PETSC_SCALAR == PETSC_COMPLEX) ? 0 : 1][(PETSC_REAL == PETSC_FLOAT) ? 0 : 1];
1084   const char *const         STRUMPACKNDTypes[] = {"NATURAL", "METIS", "PARMETIS", "SCOTCH", "PTSCOTCH", "RCM", "GEOMETRIC", "AMD", "MMD", "AND", "MLF", "SPECTRAL", "STRUMPACKNDTypes", "", 0};
1085   const char *const         CompTypes[]        = {"NONE", "HSS", "BLR", "HODLR", "BLR_HODLR", "ZFP_BLR_HODLR", "LOSSLESS", "LOSSY", "CompTypes", "", 0};
1086 
1087   PetscFunctionBegin;
1088 #if defined(STRUMPACK_USE_SLATE_SCALAPACK)
1089   PetscCallMPI(MPI_Query_thread(&mpithreads));
1090   PetscCheck(mpithreads == MPI_THREAD_MULTIPLE, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP_SYS, "SLATE requires MPI_THREAD_MULTIPLE");
1091 #endif
1092   /* Create the factorization matrix */
1093   PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
1094   PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, M, N));
1095   PetscCall(PetscStrallocpy("strumpack", &((PetscObject)B)->type_name));
1096   PetscCall(MatSetUp(B));
1097   PetscCall(MatSeqAIJSetPreallocation(B, 0, NULL));
1098   PetscCall(MatMPIAIJSetPreallocation(B, 0, NULL, 0, NULL));
1099   B->trivialsymbolic = PETSC_TRUE;
1100   if (ftype == MAT_FACTOR_LU || ftype == MAT_FACTOR_ILU) {
1101     B->ops->lufactorsymbolic  = MatLUFactorSymbolic_STRUMPACK;
1102     B->ops->ilufactorsymbolic = MatLUFactorSymbolic_STRUMPACK;
1103   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Factor type not supported");
1104   B->ops->getinfo     = MatGetInfo_External;
1105   B->ops->view        = MatView_STRUMPACK;
1106   B->ops->destroy     = MatDestroy_STRUMPACK;
1107   B->ops->getdiagonal = MatGetDiagonal_STRUMPACK;
1108   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_aij_strumpack));
1109   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetReordering_C", MatSTRUMPACKSetReordering_STRUMPACK));
1110   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetReordering_C", MatSTRUMPACKGetReordering_STRUMPACK));
1111   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetColPerm_C", MatSTRUMPACKSetColPerm_STRUMPACK));
1112   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetColPerm_C", MatSTRUMPACKGetColPerm_STRUMPACK));
1113   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetGeometricNxyz_C", MatSTRUMPACKSetGeometricNxyz_STRUMPACK));
1114   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetGeometricComponents_C", MatSTRUMPACKSetGeometricComponents_STRUMPACK));
1115   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetGeometricWidth_C", MatSTRUMPACKSetGeometricWidth_STRUMPACK));
1116   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetGPU_C", MatSTRUMPACKSetGPU_STRUMPACK));
1117   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetGPU_C", MatSTRUMPACKGetGPU_STRUMPACK));
1118   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompression_C", MatSTRUMPACKSetCompression_STRUMPACK));
1119   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompression_C", MatSTRUMPACKGetCompression_STRUMPACK));
1120   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompRelTol_C", MatSTRUMPACKSetCompRelTol_STRUMPACK));
1121   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompRelTol_C", MatSTRUMPACKGetCompRelTol_STRUMPACK));
1122   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompAbsTol_C", MatSTRUMPACKSetCompAbsTol_STRUMPACK));
1123   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompAbsTol_C", MatSTRUMPACKGetCompAbsTol_STRUMPACK));
1124   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompLeafSize_C", MatSTRUMPACKSetCompLeafSize_STRUMPACK));
1125   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompLeafSize_C", MatSTRUMPACKGetCompLeafSize_STRUMPACK));
1126   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompMinSepSize_C", MatSTRUMPACKSetCompMinSepSize_STRUMPACK));
1127   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompMinSepSize_C", MatSTRUMPACKGetCompMinSepSize_STRUMPACK));
1128   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompLossyPrecision_C", MatSTRUMPACKSetCompLossyPrecision_STRUMPACK));
1129   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompLossyPrecision_C", MatSTRUMPACKGetCompLossyPrecision_STRUMPACK));
1130   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompButterflyLevels_C", MatSTRUMPACKSetCompButterflyLevels_STRUMPACK));
1131   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompButterflyLevels_C", MatSTRUMPACKGetCompButterflyLevels_STRUMPACK));
1132   B->factortype = ftype;
1133 
1134   /* set solvertype */
1135   PetscCall(PetscFree(B->solvertype));
1136   PetscCall(PetscStrallocpy(MATSOLVERSTRUMPACK, &B->solvertype));
1137 
1138   PetscCall(PetscNew(&S));
1139   B->data = S;
1140 
1141   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &flg)); /* A might be MATSEQAIJCUSPARSE */
1142   iface = flg ? STRUMPACK_MT : STRUMPACK_MPI_DIST;
1143 
1144   PetscOptionsBegin(PetscObjectComm((PetscObject)B), ((PetscObject)B)->prefix, "STRUMPACK Options", "Mat");
1145   verb = PetscLogPrintInfo ? PETSC_TRUE : PETSC_FALSE;
1146   PetscCall(PetscOptionsBool("-mat_strumpack_verbose", "Print STRUMPACK information", "None", verb, &verb, NULL));
1147 
1148   PetscStackCallExternalVoid("STRUMPACK_init", STRUMPACK_init(S, PetscObjectComm((PetscObject)A), prec, iface, 0, NULL, verb));
1149 
1150   /* By default, no compression is done. Compression is enabled when the user enables it with        */
1151   /*  -mat_strumpack_compression with anything else than NONE, or when selecting ilu                 */
1152   /* preconditioning, in which case we default to STRUMPACK_BLR compression.                         */
1153   /* When compression is enabled, the STRUMPACK solver becomes an incomplete                         */
1154   /* (or approximate) LU factorization.                                                              */
1155   PetscStackCallExternalVoid("STRUMPACK_compression", compcurrent = STRUMPACK_compression(*S));
1156   PetscCall(PetscOptionsEnum("-mat_strumpack_compression", "Rank-structured compression type", "None", CompTypes, (PetscEnum)compcurrent, (PetscEnum *)&compvalue, &set));
1157   if (set) {
1158     PetscCall(MatSTRUMPACKSetCompression(B, (MatSTRUMPACKCompressionType)compvalue));
1159   } else {
1160     if (ftype == MAT_FACTOR_ILU) { PetscStackCallExternalVoid("STRUMPACK_set_compression", STRUMPACK_set_compression(*S, STRUMPACK_BLR)); }
1161   }
1162 
1163   PetscStackCallExternalVoid("STRUMPACK_compression_rel_tol", ctol = (PetscReal)STRUMPACK_compression_rel_tol(*S));
1164   PetscCall(PetscOptionsReal("-mat_strumpack_compression_rel_tol", "Relative compression tolerance", "None", ctol, &ctol, &set));
1165   if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_rel_tol", STRUMPACK_set_compression_rel_tol(*S, (double)ctol));
1166 
1167   PetscStackCallExternalVoid("STRUMPACK_compression_abs_tol", ctol = (PetscReal)STRUMPACK_compression_abs_tol(*S));
1168   PetscCall(PetscOptionsReal("-mat_strumpack_compression_abs_tol", "Absolute compression tolerance", "None", ctol, &ctol, &set));
1169   if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_abs_tol", STRUMPACK_set_compression_abs_tol(*S, (double)ctol));
1170 
1171   PetscStackCallExternalVoid("STRUMPACK_compression_min_sep_size", min_sep_size = (PetscInt)STRUMPACK_compression_min_sep_size(*S));
1172   PetscCall(PetscOptionsInt("-mat_strumpack_compression_min_sep_size", "Minimum size of separator for compression", "None", min_sep_size, &min_sep_size, &set));
1173   if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_min_sep_size", STRUMPACK_set_compression_min_sep_size(*S, (int)min_sep_size));
1174 
1175   PetscStackCallExternalVoid("STRUMPACK_compression_leaf_size", leaf_size = (PetscInt)STRUMPACK_compression_leaf_size(*S));
1176   PetscCall(PetscOptionsInt("-mat_strumpack_compression_leaf_size", "Size of diagonal blocks in rank-structured approximation", "None", leaf_size, &leaf_size, &set));
1177   if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_leaf_size", STRUMPACK_set_compression_leaf_size(*S, (int)leaf_size));
1178 
1179 #if defined(STRUMPACK_USE_ZFP)
1180   PetscStackCallExternalVoid("STRUMPACK_compression_lossy_precision", lossy_prec = (PetscInt)STRUMPACK_compression_lossy_precision(*S));
1181   PetscCall(PetscOptionsInt("-mat_strumpack_compression_lossy_precision", "Number of bitplanes to use in lossy compression", "None", lossy_prec, &lossy_prec, &set));
1182   if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_lossy_precision", STRUMPACK_set_compression_lossy_precision(*S, (int)lossy_prec));
1183 #endif
1184 
1185 #if defined(STRUMPACK_USE_BPACK)
1186   PetscStackCallExternalVoid("STRUMPACK_compression_butterfly_levels", bfly_lvls = (PetscInt)STRUMPACK_compression_butterfly_levels(*S));
1187   PetscCall(PetscOptionsInt("-mat_strumpack_compression_butterfly_levels", "Number of levels in the HODLR matrix for which to use butterfly compression", "None", bfly_lvls, &bfly_lvls, &set));
1188   if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_butterfly_levels", STRUMPACK_set_compression_butterfly_levels(*S, (int)bfly_lvls));
1189 #endif
1190 
1191 #if defined(STRUMPACK_USE_CUDA) || defined(STRUMPACK_USE_HIP) || defined(STRUMPACK_USE_SYCL)
1192   PetscStackCallExternalVoid("STRUMPACK_use_gpu", flg = (STRUMPACK_use_gpu(*S) == 0) ? PETSC_FALSE : PETSC_TRUE);
1193   PetscCall(PetscOptionsBool("-mat_strumpack_gpu", "Enable GPU acceleration (not supported for all compression types)", "None", flg, &flg, &set));
1194   if (set) MatSTRUMPACKSetGPU(B, flg);
1195 #endif
1196 
1197   PetscStackCallExternalVoid("STRUMPACK_matching", flg = (STRUMPACK_matching(*S) == STRUMPACK_MATCHING_NONE) ? PETSC_FALSE : PETSC_TRUE);
1198   PetscCall(PetscOptionsBool("-mat_strumpack_colperm", "Find a col perm to get nonzero diagonal", "None", flg, &flg, &set));
1199   if (set) PetscStackCallExternalVoid("STRUMPACK_set_matching", STRUMPACK_set_matching(*S, flg ? STRUMPACK_MATCHING_MAX_DIAGONAL_PRODUCT_SCALING : STRUMPACK_MATCHING_NONE));
1200 
1201   PetscStackCallExternalVoid("STRUMPACK_reordering_method", ndcurrent = STRUMPACK_reordering_method(*S));
1202   PetscCall(PetscOptionsEnum("-mat_strumpack_reordering", "Sparsity reducing matrix reordering", "None", STRUMPACKNDTypes, (PetscEnum)ndcurrent, (PetscEnum *)&ndvalue, &set));
1203   if (set) PetscStackCallExternalVoid("STRUMPACK_set_reordering_method", STRUMPACK_set_reordering_method(*S, ndvalue));
1204 
1205   /* geometric ordering, for a regular 1D/2D/3D mesh in the natural ordering, */
1206   /* with nc DOF's per gridpoint, and possibly a wider stencil                */
1207   nrdims  = 3;
1208   nxyz[0] = nxyz[1] = nxyz[2] = 1;
1209   PetscCall(PetscOptionsIntArray("-mat_strumpack_geometric_xyz", "Mesh sizes nx,ny,nz (Use 1 for default)", "", nxyz, &nrdims, &set));
1210   if (set) {
1211     PetscCheck(nrdims == 1 || nrdims == 2 || nrdims == 3, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_OUTOFRANGE, "'-mat_strumpack_geometric_xyz' requires 1, 2, or 3 values.");
1212     PetscCall(MatSTRUMPACKSetGeometricNxyz(B, (int)nxyz[0], (int)nxyz[1], (int)nxyz[2]));
1213   }
1214   PetscCall(PetscOptionsInt("-mat_strumpack_geometric_components", "Number of components per mesh point, for geometric nested dissection ordering", "None", 1, &nc, &set));
1215   if (set) PetscStackCallExternalVoid("STRUMPACK_set_components", STRUMPACK_set_components(*S, (int)nc));
1216   PetscCall(PetscOptionsInt("-mat_strumpack_geometric_width", "Width of the separator (for instance a 1D 3-point wide stencil needs a 1 point wide separator, a 1D 5-point stencil needs a 2 point wide separator), for geometric nested dissection ordering", "None", 1, &w, &set));
1217   if (set) PetscStackCallExternalVoid("STRUMPACK_set_separator_width", STRUMPACK_set_separator_width(*S, (int)w));
1218 
1219   PetscStackCallExternalVoid("STRUMPACK_use_METIS_NodeNDP", flg = (STRUMPACK_use_METIS_NodeNDP(*S) == 0) ? PETSC_FALSE : PETSC_TRUE);
1220   PetscCall(PetscOptionsBool("-mat_strumpack_metis_nodeNDP", "Use METIS_NodeNDP instead of METIS_NodeND, for a more balanced tree", "None", flg, &flg, &set));
1221   if (set) {
1222     if (flg) {
1223       PetscStackCallExternalVoid("STRUMPACK_enable_METIS_NodeNDP", STRUMPACK_enable_METIS_NodeNDP(*S));
1224     } else {
1225       PetscStackCallExternalVoid("STRUMPACK_disable_METIS_NodeNDP", STRUMPACK_disable_METIS_NodeNDP(*S));
1226     }
1227   }
1228 
1229   /* Disable the outer iterative solver from STRUMPACK.                                       */
1230   /* When STRUMPACK is used as a direct solver, it will by default do iterative refinement.   */
1231   /* When STRUMPACK is used as an approximate factorization preconditioner (by enabling       */
1232   /* low-rank compression), it will use it's own preconditioned GMRES. Here we can disable    */
1233   /* the outer iterative solver, as PETSc uses STRUMPACK from within a KSP.                   */
1234   PetscStackCallExternalVoid("STRUMPACK_set_Krylov_solver", STRUMPACK_set_Krylov_solver(*S, STRUMPACK_DIRECT));
1235 
1236   PetscOptionsEnd();
1237 
1238   *F = B;
1239   PetscFunctionReturn(PETSC_SUCCESS);
1240 }
1241 
1242 PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_STRUMPACK(void)
1243 {
1244   PetscFunctionBegin;
1245   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATMPIAIJ, MAT_FACTOR_LU, MatGetFactor_aij_strumpack));
1246   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_aij_strumpack));
1247   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATMPIAIJ, MAT_FACTOR_ILU, MatGetFactor_aij_strumpack));
1248   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATSEQAIJ, MAT_FACTOR_ILU, MatGetFactor_aij_strumpack));
1249   PetscFunctionReturn(PETSC_SUCCESS);
1250 }
1251