#include <../src/mat/impls/aij/seq/aij.h> /*I "petscmat.h" I*/
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <StrumpackSparseSolver.h>
4
/* MatGetDiagonal() is not supported on a STRUMPACK factored matrix; always raises PETSC_ERR_SUP. */
static PetscErrorCode MatGetDiagonal_STRUMPACK(Mat A, Vec v)
{
  PetscFunctionBegin;
  SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type: STRUMPACK factor");
  PetscFunctionReturn(PETSC_SUCCESS); /* unreachable; SETERRQ returns the error above */
}
11
MatDestroy_STRUMPACK(Mat A)12 static PetscErrorCode MatDestroy_STRUMPACK(Mat A)
13 {
14 STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)A->data;
15
16 PetscFunctionBegin;
17 /* Deallocate STRUMPACK storage */
18 PetscStackCallExternalVoid("STRUMPACK_destroy", STRUMPACK_destroy(S));
19 PetscCall(PetscFree(A->data));
20
21 /* clear composed functions */
22 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatFactorGetSolverType_C", NULL));
23 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetReordering_C", NULL));
24 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetReordering_C", NULL));
25 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetColPerm_C", NULL));
26 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetColPerm_C", NULL));
27 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetGeometricNxyz_C", NULL));
28 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetGeometricComponents_C", NULL));
29 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetGeometricWidth_C", NULL));
30 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetGPU_C", NULL));
31 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetGPU_C", NULL));
32 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompression_C", NULL));
33 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompression_C", NULL));
34 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompRelTol_C", NULL));
35 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompRelTol_C", NULL));
36 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompAbsTol_C", NULL));
37 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompAbsTol_C", NULL));
38 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompMaxRank_C", NULL));
39 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompMaxRank_C", NULL));
40 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompLeafSize_C", NULL));
41 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompLeafSize_C", NULL));
42 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompMinSepSize_C", NULL));
43 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompMinSepSize_C", NULL));
44 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompLossyPrecision_C", NULL));
45 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompLossyPrecision_C", NULL));
46 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompButterflyLevels_C", NULL));
47 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompButterflyLevels_C", NULL));
48 PetscFunctionReturn(PETSC_SUCCESS);
49 }
50
/* Dispatch target for MatSTRUMPACKSetReordering(): forwards the choice to the STRUMPACK C API.
   The cast assumes MatSTRUMPACKReordering values mirror STRUMPACK_REORDERING_STRATEGY. */
static PetscErrorCode MatSTRUMPACKSetReordering_STRUMPACK(Mat F, MatSTRUMPACKReordering reordering)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_reordering_method", STRUMPACK_set_reordering_method(*S, (STRUMPACK_REORDERING_STRATEGY)reordering));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Dispatch target for MatSTRUMPACKGetReordering(): queries the STRUMPACK C API.
   The cast assumes MatSTRUMPACKReordering values mirror STRUMPACK_REORDERING_STRATEGY. */
static PetscErrorCode MatSTRUMPACKGetReordering_STRUMPACK(Mat F, MatSTRUMPACKReordering *reordering)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_reordering_method", *reordering = (MatSTRUMPACKReordering)STRUMPACK_reordering_method(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
67
/*@
  MatSTRUMPACKSetReordering - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> fill-reducing reordering

  Logically Collective

  Input Parameters:
+ F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
- reordering - the code to be used to find the fill-reducing reordering

  Options Database Key:
. -mat_strumpack_reordering <METIS> - Sparsity reducing matrix reordering, see `MatSTRUMPACKReordering`

  Level: intermediate

.seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatSTRUMPACKReordering`, `MatGetFactor()`, `MatSTRUMPACKSetColPerm()`, `MatSTRUMPACKGetReordering()`
@*/
PetscErrorCode MatSTRUMPACKSetReordering(Mat F, MatSTRUMPACKReordering reordering)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(F, reordering, 2);
  /* no-op when F is not a STRUMPACK factor (PetscTryMethod silently skips a missing method) */
  PetscTryMethod(F, "MatSTRUMPACKSetReordering_C", (Mat, MatSTRUMPACKReordering), (F, reordering));
  PetscFunctionReturn(PETSC_SUCCESS);
}
92 /*@
93 MatSTRUMPACKGetReordering - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> fill-reducing reordering
94
95 Logically Collective
96
97 Input Parameters:
98 . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
99
100 Output Parameter:
101 . reordering - the code to be used to find the fill-reducing reordering
102
103 Level: intermediate
104
105 .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatSTRUMPACKReordering`, `MatGetFactor()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
106 @*/
MatSTRUMPACKGetReordering(Mat F,MatSTRUMPACKReordering * reordering)107 PetscErrorCode MatSTRUMPACKGetReordering(Mat F, MatSTRUMPACKReordering *reordering)
108 {
109 PetscFunctionBegin;
110 PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
111 PetscTryMethod(F, "MatSTRUMPACKGetReordering_C", (Mat, MatSTRUMPACKReordering *), (F, reordering));
112 PetscValidLogicalCollectiveEnum(F, *reordering, 2);
113 PetscFunctionReturn(PETSC_SUCCESS);
114 }
115
/* Dispatch target for MatSTRUMPACKSetColPerm(): maps the boolean onto STRUMPACK's matching
   setting (max-diagonal-product scaling when true, no matching when false). */
static PetscErrorCode MatSTRUMPACKSetColPerm_STRUMPACK(Mat F, PetscBool cperm)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_matching", STRUMPACK_set_matching(*S, cperm ? STRUMPACK_MATCHING_MAX_DIAGONAL_PRODUCT_SCALING : STRUMPACK_MATCHING_NONE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Dispatch target for MatSTRUMPACKGetColPerm(): reports PETSC_TRUE iff any matching is enabled. */
static PetscErrorCode MatSTRUMPACKGetColPerm_STRUMPACK(Mat F, PetscBool *cperm)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_matching", *cperm = (PetscBool)(STRUMPACK_matching(*S) != STRUMPACK_MATCHING_NONE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
132
/*@
  MatSTRUMPACKSetColPerm - Set whether STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>
  should try to permute the columns of the matrix in order to get a nonzero diagonal

  Logically Collective

  Input Parameters:
+ F - the factored matrix obtained by calling `MatGetFactor()`
- cperm - `PETSC_TRUE` to permute (internally) the columns of the matrix

  Options Database Key:
. -mat_strumpack_colperm <cperm> - true to use the permutation

  Level: intermediate

.seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `MatSTRUMPACKSetReordering()`, `Mat`, `MatGetFactor()`, `MatSTRUMPACKGetColPerm()`
@*/
PetscErrorCode MatSTRUMPACKSetColPerm(Mat F, PetscBool cperm)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveBool(F, cperm, 2);
  /* no-op when F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetColPerm_C", (Mat, PetscBool), (F, cperm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
158 /*@
159 MatSTRUMPACKGetColPerm - Get whether STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>
160 will try to permute the columns of the matrix in order to get a nonzero diagonal
161
162 Logically Collective
163
164 Input Parameters:
165 . F - the factored matrix obtained by calling `MatGetFactor()`
166
167 Output Parameter:
168 . cperm - Indicates whether STRUMPACK will permute columns
169
170 Level: intermediate
171
172 .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `MatSTRUMPACKSetReordering()`, `Mat`, `MatGetFactor()`, `MatSTRUMPACKSetColPerm()`
173 @*/
MatSTRUMPACKGetColPerm(Mat F,PetscBool * cperm)174 PetscErrorCode MatSTRUMPACKGetColPerm(Mat F, PetscBool *cperm)
175 {
176 PetscFunctionBegin;
177 PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
178 PetscTryMethod(F, "MatSTRUMPACKGetColPerm_C", (Mat, PetscBool *), (F, cperm));
179 PetscValidLogicalCollectiveBool(F, *cperm, 2);
180 PetscFunctionReturn(PETSC_SUCCESS);
181 }
182
/* Dispatch target for MatSTRUMPACKSetGPU(): enables/disables GPU offload in the solver.
   Warns (but still forwards the request) when STRUMPACK was built without CUDA/HIP/SYCL. */
static PetscErrorCode MatSTRUMPACKSetGPU_STRUMPACK(Mat F, PetscBool gpu)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  if (gpu) {
#if !(defined(STRUMPACK_USE_CUDA) || defined(STRUMPACK_USE_HIP) || defined(STRUMPACK_USE_SYCL))
    PetscCall(PetscPrintf(PETSC_COMM_SELF, " Warning: strumpack was not configured with GPU support\n"));
#endif
    PetscStackCallExternalVoid("STRUMPACK_enable_gpu", STRUMPACK_enable_gpu(*S));
  } else PetscStackCallExternalVoid("STRUMPACK_disable_gpu", STRUMPACK_disable_gpu(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Dispatch target for MatSTRUMPACKGetGPU(): queries whether GPU offload is enabled. */
static PetscErrorCode MatSTRUMPACKGetGPU_STRUMPACK(Mat F, PetscBool *gpu)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_use_gpu", *gpu = (PetscBool)STRUMPACK_use_gpu(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
204
/*@
  MatSTRUMPACKSetGPU - Set whether STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>
  should enable GPU acceleration (not supported for all compression types)

  Logically Collective

  Input Parameters:
+ F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
- gpu - whether or not to use GPU acceleration

  Options Database Key:
. -mat_strumpack_gpu <gpu> - true to use gpu offload

  Level: intermediate

.seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKGetGPU()`
@*/
PetscErrorCode MatSTRUMPACKSetGPU(Mat F, PetscBool gpu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveBool(F, gpu, 2);
  /* no-op when F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetGPU_C", (Mat, PetscBool), (F, gpu));
  PetscFunctionReturn(PETSC_SUCCESS);
}
230 /*@
231 MatSTRUMPACKGetGPU - Get whether STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>
232 will try to use GPU acceleration (not supported for all compression types)
233
234 Logically Collective
235
236 Input Parameters:
237 . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
238
239 Output Parameter:
240 . gpu - whether or not STRUMPACK will try to use GPU acceleration
241
242 Level: intermediate
243
244 .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKSetGPU()`
245 @*/
MatSTRUMPACKGetGPU(Mat F,PetscBool * gpu)246 PetscErrorCode MatSTRUMPACKGetGPU(Mat F, PetscBool *gpu)
247 {
248 PetscFunctionBegin;
249 PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
250 PetscTryMethod(F, "MatSTRUMPACKGetGPU_C", (Mat, PetscBool *), (F, gpu));
251 PetscValidLogicalCollectiveBool(F, *gpu, 2);
252 PetscFunctionReturn(PETSC_SUCCESS);
253 }
254
/* Dispatch target for MatSTRUMPACKSetCompression(): rejects compression schemes whose
   prerequisite library (ButterflyPACK or ZFP) was not compiled in, then forwards the
   choice to STRUMPACK. The cast assumes MatSTRUMPACKCompressionType values mirror
   STRUMPACK_COMPRESSION_TYPE. */
static PetscErrorCode MatSTRUMPACKSetCompression_STRUMPACK(Mat F, MatSTRUMPACKCompressionType comp)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
#if !defined(STRUMPACK_USE_BPACK)
  /* HODLR-based schemes require ButterflyPACK */
  PetscCheck(comp != MAT_STRUMPACK_COMPRESSION_TYPE_HODLR && comp != MAT_STRUMPACK_COMPRESSION_TYPE_BLR_HODLR && comp != MAT_STRUMPACK_COMPRESSION_TYPE_ZFP_BLR_HODLR, PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Compression scheme requires ButterflyPACK, please reconfigure with --download-butterflypack");
#endif
#if !defined(STRUMPACK_USE_ZFP)
  /* lossy/lossless schemes require ZFP */
  PetscCheck(comp != MAT_STRUMPACK_COMPRESSION_TYPE_ZFP_BLR_HODLR && comp != MAT_STRUMPACK_COMPRESSION_TYPE_LOSSLESS && comp != MAT_STRUMPACK_COMPRESSION_TYPE_LOSSY, PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Compression scheme requires ZFP, please reconfigure with --download-zfp");
#endif
  PetscStackCallExternalVoid("STRUMPACK_set_compression", STRUMPACK_set_compression(*S, (STRUMPACK_COMPRESSION_TYPE)comp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Dispatch target for MatSTRUMPACKGetCompression(): queries the active compression type. */
static PetscErrorCode MatSTRUMPACKGetCompression_STRUMPACK(Mat F, MatSTRUMPACKCompressionType *comp)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_compression", *comp = (MatSTRUMPACKCompressionType)STRUMPACK_compression(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
277
/*@
  MatSTRUMPACKSetCompression - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> compression type

  Logically Collective

  Input Parameters:
+ F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
- comp - Type of compression to be used in the approximate sparse factorization

  Options Database Key:
. -mat_strumpack_compression <NONE> - Type of rank-structured compression in sparse LU factors (choose one of) NONE HSS BLR HODLR BLR_HODLR ZFP_BLR_HODLR LOSSLESS LOSSY

  Level: intermediate

  Note:
  Default for `comp` is `MAT_STRUMPACK_COMPRESSION_TYPE_NONE` for `-pc_type lu` and `MAT_STRUMPACK_COMPRESSION_TYPE_BLR`
  for `-pc_type ilu`

.seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKCompressionType`, `MatSTRUMPACKGetCompression()`
@*/
PetscErrorCode MatSTRUMPACKSetCompression(Mat F, MatSTRUMPACKCompressionType comp)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(F, comp, 2);
  /* no-op when F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetCompression_C", (Mat, MatSTRUMPACKCompressionType), (F, comp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
304 /*@
305 MatSTRUMPACKGetCompression - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> compression type
306
307 Input Parameters:
308 . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
309
310 Output Parameter:
311 . comp - Type of compression to be used in the approximate sparse factorization
312
313 Level: intermediate
314
315 Note:
316 Default is `MAT_STRUMPACK_COMPRESSION_TYPE_NONE` for `-pc_type lu` and `MAT_STRUMPACK_COMPRESSION_TYPE_BLR` for `-pc_type ilu`
317
318 .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKCompressionType`, `MatSTRUMPACKSetCompression()`
319 @*/
MatSTRUMPACKGetCompression(Mat F,MatSTRUMPACKCompressionType * comp)320 PetscErrorCode MatSTRUMPACKGetCompression(Mat F, MatSTRUMPACKCompressionType *comp)
321 {
322 PetscFunctionBegin;
323 PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
324 PetscTryMethod(F, "MatSTRUMPACKGetCompression_C", (Mat, MatSTRUMPACKCompressionType *), (F, comp));
325 PetscValidLogicalCollectiveEnum(F, *comp, 2);
326 PetscFunctionReturn(PETSC_SUCCESS);
327 }
328
/* Dispatch target for MatSTRUMPACKSetCompRelTol(): sets the relative compression tolerance. */
static PetscErrorCode MatSTRUMPACKSetCompRelTol_STRUMPACK(Mat F, PetscReal rtol)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_compression_rel_tol", STRUMPACK_set_compression_rel_tol(*S, rtol));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Dispatch target for MatSTRUMPACKGetCompRelTol(): queries the relative compression tolerance. */
static PetscErrorCode MatSTRUMPACKGetCompRelTol_STRUMPACK(Mat F, PetscReal *rtol)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_compression_rel_tol", *rtol = (PetscReal)STRUMPACK_compression_rel_tol(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
345
/*@
  MatSTRUMPACKSetCompRelTol - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> relative tolerance for compression

  Logically Collective

  Input Parameters:
+ F - the factored matrix obtained by calling `MatGetFactor()`
- rtol - relative compression tolerance

  Options Database Key:
. -mat_strumpack_compression_rel_tol <1e-4> - Relative compression tolerance, when using `-pctype ilu`

  Level: intermediate

.seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKGetCompRelTol()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
@*/
PetscErrorCode MatSTRUMPACKSetCompRelTol(Mat F, PetscReal rtol)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveReal(F, rtol, 2);
  /* no-op when F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetCompRelTol_C", (Mat, PetscReal), (F, rtol));
  PetscFunctionReturn(PETSC_SUCCESS);
}
370 /*@
371 MatSTRUMPACKGetCompRelTol - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> relative tolerance for compression
372
373 Logically Collective
374
375 Input Parameters:
376 . F - the factored matrix obtained by calling `MatGetFactor()`
377
378 Output Parameter:
379 . rtol - relative compression tolerance
380
381 Level: intermediate
382
383 .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKSetCompRelTol()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
384 @*/
MatSTRUMPACKGetCompRelTol(Mat F,PetscReal * rtol)385 PetscErrorCode MatSTRUMPACKGetCompRelTol(Mat F, PetscReal *rtol)
386 {
387 PetscFunctionBegin;
388 PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
389 PetscTryMethod(F, "MatSTRUMPACKGetCompRelTol_C", (Mat, PetscReal *), (F, rtol));
390 PetscValidLogicalCollectiveReal(F, *rtol, 2);
391 PetscFunctionReturn(PETSC_SUCCESS);
392 }
393
/* Dispatch target for MatSTRUMPACKSetCompAbsTol(): sets the absolute compression tolerance. */
static PetscErrorCode MatSTRUMPACKSetCompAbsTol_STRUMPACK(Mat F, PetscReal atol)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_compression_abs_tol", STRUMPACK_set_compression_abs_tol(*S, atol));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Dispatch target for MatSTRUMPACKGetCompAbsTol(): queries the absolute compression tolerance. */
static PetscErrorCode MatSTRUMPACKGetCompAbsTol_STRUMPACK(Mat F, PetscReal *atol)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_compression_abs_tol", *atol = (PetscReal)STRUMPACK_compression_abs_tol(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
410
/*@
  MatSTRUMPACKSetCompAbsTol - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> absolute tolerance for compression

  Logically Collective

  Input Parameters:
+ F - the factored matrix obtained by calling `MatGetFactor()`
- atol - absolute compression tolerance

  Options Database Key:
. -mat_strumpack_compression_abs_tol <1e-10> - Absolute compression tolerance, when using `-pctype ilu`

  Level: intermediate

.seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKGetCompAbsTol()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
@*/
PetscErrorCode MatSTRUMPACKSetCompAbsTol(Mat F, PetscReal atol)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveReal(F, atol, 2);
  /* no-op when F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetCompAbsTol_C", (Mat, PetscReal), (F, atol));
  PetscFunctionReturn(PETSC_SUCCESS);
}
435 /*@
436 MatSTRUMPACKGetCompAbsTol - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> absolute tolerance for compression
437
438 Logically Collective
439
440 Input Parameters:
441 . F - the factored matrix obtained by calling `MatGetFactor()`
442
443 Output Parameter:
444 . atol - absolute compression tolerance
445
446 Level: intermediate
447
448 .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKSetCompAbsTol()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
449 @*/
MatSTRUMPACKGetCompAbsTol(Mat F,PetscReal * atol)450 PetscErrorCode MatSTRUMPACKGetCompAbsTol(Mat F, PetscReal *atol)
451 {
452 PetscFunctionBegin;
453 PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
454 PetscTryMethod(F, "MatSTRUMPACKGetCompAbsTol_C", (Mat, PetscReal *), (F, atol));
455 PetscValidLogicalCollectiveReal(F, *atol, 2);
456 PetscFunctionReturn(PETSC_SUCCESS);
457 }
458
/* Dispatch target for MatSTRUMPACKSetCompLeafSize(): sets the rank-structured leaf block size. */
static PetscErrorCode MatSTRUMPACKSetCompLeafSize_STRUMPACK(Mat F, PetscInt leaf_size)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_compression_leaf_size", STRUMPACK_set_compression_leaf_size(*S, leaf_size));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Dispatch target for MatSTRUMPACKGetCompLeafSize(): queries the rank-structured leaf block size. */
static PetscErrorCode MatSTRUMPACKGetCompLeafSize_STRUMPACK(Mat F, PetscInt *leaf_size)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_compression_leaf_size", *leaf_size = (PetscInt)STRUMPACK_compression_leaf_size(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
475
/*@
  MatSTRUMPACKSetCompLeafSize - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> leaf size for HSS, BLR, HODLR...

  Logically Collective

  Input Parameters:
+ F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
- leaf_size - Size of diagonal blocks in rank-structured approximation

  Options Database Key:
. -mat_strumpack_compression_leaf_size - Size of diagonal blocks in rank-structured approximation, when using `-pctype ilu`

  Level: intermediate

.seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKGetCompLeafSize()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
@*/
PetscErrorCode MatSTRUMPACKSetCompLeafSize(Mat F, PetscInt leaf_size)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveInt(F, leaf_size, 2);
  /* no-op when F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetCompLeafSize_C", (Mat, PetscInt), (F, leaf_size));
  PetscFunctionReturn(PETSC_SUCCESS);
}
500 /*@
501 MatSTRUMPACKGetCompLeafSize - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> leaf size for HSS, BLR, HODLR...
502
503 Logically Collective
504
505 Input Parameters:
506 . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
507
508 Output Parameter:
509 . leaf_size - Size of diagonal blocks in rank-structured approximation
510
511 Level: intermediate
512
513 .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKSetCompLeafSize()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
514 @*/
MatSTRUMPACKGetCompLeafSize(Mat F,PetscInt * leaf_size)515 PetscErrorCode MatSTRUMPACKGetCompLeafSize(Mat F, PetscInt *leaf_size)
516 {
517 PetscFunctionBegin;
518 PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
519 PetscTryMethod(F, "MatSTRUMPACKGetCompLeafSize_C", (Mat, PetscInt *), (F, leaf_size));
520 PetscValidLogicalCollectiveInt(F, *leaf_size, 2);
521 PetscFunctionReturn(PETSC_SUCCESS);
522 }
523
/* Dispatch target for MatSTRUMPACKSetGeometricNxyz(): forwards the mesh dimensions to
   STRUMPACK for geometric nested-dissection ordering. PETSC_DECIDE/PETSC_DEFAULT in a
   dimension is treated as 1 (flat dimension); any other value < 1 is an error. */
static PetscErrorCode MatSTRUMPACKSetGeometricNxyz_STRUMPACK(Mat F, PetscInt nx, PetscInt ny, PetscInt nz)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  if (nx < 1) {
    PetscCheck(nx == PETSC_DECIDE || nx == PETSC_DEFAULT, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_OUTOFRANGE, "nx < 1");
    nx = 1;
  }
  PetscStackCallExternalVoid("STRUMPACK_set_nx", STRUMPACK_set_nx(*S, nx));
  if (ny < 1) {
    PetscCheck(ny == PETSC_DECIDE || ny == PETSC_DEFAULT, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_OUTOFRANGE, "ny < 1");
    ny = 1;
  }
  PetscStackCallExternalVoid("STRUMPACK_set_ny", STRUMPACK_set_ny(*S, ny));
  if (nz < 1) {
    PetscCheck(nz == PETSC_DECIDE || nz == PETSC_DEFAULT, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_OUTOFRANGE, "nz < 1");
    nz = 1;
  }
  PetscStackCallExternalVoid("STRUMPACK_set_nz", STRUMPACK_set_nz(*S, nz));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Dispatch target for MatSTRUMPACKSetGeometricComponents(): sets components/dofs per grid point. */
static PetscErrorCode MatSTRUMPACKSetGeometricComponents_STRUMPACK(Mat F, PetscInt nc)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_components", STRUMPACK_set_components(*S, nc));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Dispatch target for MatSTRUMPACKSetGeometricWidth(): sets the separator width. */
static PetscErrorCode MatSTRUMPACKSetGeometricWidth_STRUMPACK(Mat F, PetscInt w)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_separator_width", STRUMPACK_set_separator_width(*S, w));
  PetscFunctionReturn(PETSC_SUCCESS);
}
562
/*@
  MatSTRUMPACKSetGeometricNxyz - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> mesh x, y and z dimensions, for use with GEOMETRIC ordering.

  Logically Collective

  Input Parameters:
+ F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
. nx - x dimension of the mesh
. ny - y dimension of the mesh
- nz - z dimension of the mesh

  Level: intermediate

  Note:
  If the mesh is two (or one) dimensional one can use 1, `PETSC_DECIDE` or `PETSC_DEFAULT`
  for the missing z (and y) dimensions.

.seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`
@*/
PetscErrorCode MatSTRUMPACKSetGeometricNxyz(Mat F, PetscInt nx, PetscInt ny, PetscInt nz)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveInt(F, nx, 2);
  PetscValidLogicalCollectiveInt(F, ny, 3);
  PetscValidLogicalCollectiveInt(F, nz, 4);
  /* no-op when F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetGeometricNxyz_C", (Mat, PetscInt, PetscInt, PetscInt), (F, nx, ny, nz));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatSTRUMPACKSetGeometricComponents - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>
  number of degrees of freedom per mesh point, for use with GEOMETRIC ordering.

  Logically Collective

  Input Parameters:
+ F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
- nc - Number of components/dof's per grid point

  Options Database Key:
. -mat_strumpack_geometric_components <1> - Number of components per mesh point, for geometric nested dissection ordering

  Level: intermediate

.seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`
@*/
PetscErrorCode MatSTRUMPACKSetGeometricComponents(Mat F, PetscInt nc)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveInt(F, nc, 2);
  /* no-op when F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetGeometricComponents_C", (Mat, PetscInt), (F, nc));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatSTRUMPACKSetGeometricWidth - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> width of the separator, for use with GEOMETRIC ordering.

  Logically Collective

  Input Parameters:
+ F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
- w - width of the separator

  Options Database Key:
. -mat_strumpack_geometric_width <1> - Width of the separator of the mesh, for geometric nested dissection ordering

  Level: intermediate

.seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`
@*/
PetscErrorCode MatSTRUMPACKSetGeometricWidth(Mat F, PetscInt w)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveInt(F, w, 2);
  /* no-op when F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetGeometricWidth_C", (Mat, PetscInt), (F, w));
  PetscFunctionReturn(PETSC_SUCCESS);
}
641
/* Dispatch target for MatSTRUMPACKSetCompMinSepSize(): sets the minimum separator size
   below which no low-rank compression is applied. */
static PetscErrorCode MatSTRUMPACKSetCompMinSepSize_STRUMPACK(Mat F, PetscInt min_sep_size)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_compression_min_sep_size", STRUMPACK_set_compression_min_sep_size(*S, min_sep_size));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Dispatch target for MatSTRUMPACKGetCompMinSepSize(): queries the minimum separator size. */
static PetscErrorCode MatSTRUMPACKGetCompMinSepSize_STRUMPACK(Mat F, PetscInt *min_sep_size)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_compression_min_sep_size", *min_sep_size = (PetscInt)STRUMPACK_compression_min_sep_size(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
658
/*@
  MatSTRUMPACKSetCompMinSepSize - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> minimum separator size for low-rank approximation

  Logically Collective

  Input Parameters:
+ F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
- min_sep_size - minimum dense matrix size for low-rank approximation

  Options Database Key:
. -mat_strumpack_compression_min_sep_size <min_sep_size> - Minimum size of dense sub-block for low-rank compression

  Level: intermediate

.seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKGetCompMinSepSize()`
@*/
PetscErrorCode MatSTRUMPACKSetCompMinSepSize(Mat F, PetscInt min_sep_size)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveInt(F, min_sep_size, 2);
  /* no-op when F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetCompMinSepSize_C", (Mat, PetscInt), (F, min_sep_size));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatSTRUMPACKGetCompMinSepSize - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> minimum separator size for low-rank approximation

  Logically Collective

  Input Parameter:
. F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface

  Output Parameter:
. min_sep_size - minimum dense matrix size for low-rank approximation

  Level: intermediate

.seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKSetCompMinSepSize()`
@*/
PetscErrorCode MatSTRUMPACKGetCompMinSepSize(Mat F, PetscInt *min_sep_size)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscTryMethod(F, "MatSTRUMPACKGetCompMinSepSize_C", (Mat, PetscInt *), (F, min_sep_size));
  /* NOTE(review): if F is not a STRUMPACK factor the method above is absent and leaves
     *min_sep_size untouched, so this collective check reads a possibly uninitialized
     value — confirm callers initialize it */
  PetscValidLogicalCollectiveInt(F, *min_sep_size, 2);
  PetscFunctionReturn(PETSC_SUCCESS);
}
706
MatSTRUMPACKSetCompLossyPrecision_STRUMPACK(Mat F,PetscInt lossy_prec)707 static PetscErrorCode MatSTRUMPACKSetCompLossyPrecision_STRUMPACK(Mat F, PetscInt lossy_prec)
708 {
709 STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;
710
711 PetscFunctionBegin;
712 PetscStackCallExternalVoid("STRUMPACK_set_compression_lossy_precision", STRUMPACK_set_compression_lossy_precision(*S, lossy_prec));
713 PetscFunctionReturn(PETSC_SUCCESS);
714 }
MatSTRUMPACKGetCompLossyPrecision_STRUMPACK(Mat F,PetscInt * lossy_prec)715 static PetscErrorCode MatSTRUMPACKGetCompLossyPrecision_STRUMPACK(Mat F, PetscInt *lossy_prec)
716 {
717 STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;
718
719 PetscFunctionBegin;
720 PetscStackCallExternalVoid("STRUMPACK_compression_lossy_precision", *lossy_prec = (PetscInt)STRUMPACK_compression_lossy_precision(*S));
721 PetscFunctionReturn(PETSC_SUCCESS);
722 }
723
/*@
  MatSTRUMPACKSetCompLossyPrecision - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> precision for lossy compression (requires ZFP support)

  Logically Collective

  Input Parameters:
+ F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
- lossy_prec - Number of bitplanes to use in lossy compression

  Options Database Key:
. -mat_strumpack_compression_lossy_precision <lossy_prec> - Precision when using lossy compression [1-64], when using `-pctype ilu -mat_strumpack_compression MAT_STRUMPACK_COMPRESSION_TYPE_LOSSY`

  Level: intermediate

.seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKGetCompLossyPrecision()`
@*/
PetscErrorCode MatSTRUMPACKSetCompLossyPrecision(Mat F, PetscInt lossy_prec)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveInt(F, lossy_prec, 2);
  /* Dispatch to the implementation composed on STRUMPACK factors; silently a no-op for other factor types */
  PetscTryMethod(F, "MatSTRUMPACKSetCompLossyPrecision_C", (Mat, PetscInt), (F, lossy_prec));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatSTRUMPACKGetCompLossyPrecision - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> precision for lossy compression (requires ZFP support)

  Logically Collective

  Input Parameter:
. F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface

  Output Parameter:
. lossy_prec - Number of bitplanes to use in lossy compression

  Level: intermediate

.seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKSetCompLossyPrecision()`
@*/
PetscErrorCode MatSTRUMPACKGetCompLossyPrecision(Mat F, PetscInt *lossy_prec)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscTryMethod(F, "MatSTRUMPACKGetCompLossyPrecision_C", (Mat, PetscInt *), (F, lossy_prec));
  /* NOTE(review): if F is not a STRUMPACK factor the method above is absent and leaves
     *lossy_prec untouched, so this collective check reads a possibly uninitialized
     value — confirm callers initialize it */
  PetscValidLogicalCollectiveInt(F, *lossy_prec, 2);
  PetscFunctionReturn(PETSC_SUCCESS);
}
771
MatSTRUMPACKSetCompButterflyLevels_STRUMPACK(Mat F,PetscInt bfly_lvls)772 static PetscErrorCode MatSTRUMPACKSetCompButterflyLevels_STRUMPACK(Mat F, PetscInt bfly_lvls)
773 {
774 STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;
775
776 PetscFunctionBegin;
777 PetscStackCallExternalVoid("STRUMPACK_set_compression_butterfly_levels", STRUMPACK_set_compression_butterfly_levels(*S, bfly_lvls));
778 PetscFunctionReturn(PETSC_SUCCESS);
779 }
MatSTRUMPACKGetCompButterflyLevels_STRUMPACK(Mat F,PetscInt * bfly_lvls)780 static PetscErrorCode MatSTRUMPACKGetCompButterflyLevels_STRUMPACK(Mat F, PetscInt *bfly_lvls)
781 {
782 STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;
783
784 PetscFunctionBegin;
785 PetscStackCallExternalVoid("STRUMPACK_compression_butterfly_levels", *bfly_lvls = (PetscInt)STRUMPACK_compression_butterfly_levels(*S));
786 PetscFunctionReturn(PETSC_SUCCESS);
787 }
788
789 /*@
790 MatSTRUMPACKSetCompButterflyLevels - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>
791 number of butterfly levels in HODLR compression (requires ButterflyPACK support)
792
793 Logically Collective
794
795 Input Parameters:
796 + F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
797 - bfly_lvls - Number of levels of butterfly compression in HODLR compression
798
799 Options Database Key:
800 . -mat_strumpack_compression_butterfly_levels <bfly_lvls> - Number of levels in the hierarchically off-diagonal matrix for which to use butterfly,
801 when using `-pctype ilu`, (BLR_)HODLR compression
802
803 Level: intermediate
804
805 .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKGetCompButterflyLevels()`
806 @*/
MatSTRUMPACKSetCompButterflyLevels(Mat F,PetscInt bfly_lvls)807 PetscErrorCode MatSTRUMPACKSetCompButterflyLevels(Mat F, PetscInt bfly_lvls)
808 {
809 PetscFunctionBegin;
810 PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
811 PetscValidLogicalCollectiveInt(F, bfly_lvls, 2);
812 PetscTryMethod(F, "MatSTRUMPACKSetButterflyLevels_C", (Mat, PetscInt), (F, bfly_lvls));
813 PetscFunctionReturn(PETSC_SUCCESS);
814 }
815 /*@
816 MatSTRUMPACKGetCompButterflyLevels - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>
817 number of butterfly levels in HODLR compression (requires ButterflyPACK support)
818
819 Logically Collective
820
821 Input Parameters:
822 . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
823
824 Output Parameter:
825 . bfly_lvls - Number of levels of butterfly compression in HODLR compression
826
827 Level: intermediate
828
829 .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKSetCompButterflyLevels()`
830 @*/
MatSTRUMPACKGetCompButterflyLevels(Mat F,PetscInt * bfly_lvls)831 PetscErrorCode MatSTRUMPACKGetCompButterflyLevels(Mat F, PetscInt *bfly_lvls)
832 {
833 PetscFunctionBegin;
834 PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
835 PetscTryMethod(F, "MatSTRUMPACKGetButterflyLevels_C", (Mat, PetscInt *), (F, bfly_lvls));
836 PetscValidLogicalCollectiveInt(F, *bfly_lvls, 2);
837 PetscFunctionReturn(PETSC_SUCCESS);
838 }
839
MatSolve_STRUMPACK(Mat A,Vec b_mpi,Vec x)840 static PetscErrorCode MatSolve_STRUMPACK(Mat A, Vec b_mpi, Vec x)
841 {
842 STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)A->data;
843 STRUMPACK_RETURN_CODE sp_err;
844 const PetscScalar *bptr;
845 PetscScalar *xptr;
846
847 PetscFunctionBegin;
848 PetscCall(VecGetArray(x, &xptr));
849 PetscCall(VecGetArrayRead(b_mpi, &bptr));
850
851 PetscStackCallExternalVoid("STRUMPACK_solve", sp_err = STRUMPACK_solve(*S, (PetscScalar *)bptr, xptr, 0));
852 switch (sp_err) {
853 case STRUMPACK_SUCCESS:
854 break;
855 case STRUMPACK_MATRIX_NOT_SET: {
856 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix was not set");
857 break;
858 }
859 case STRUMPACK_REORDERING_ERROR: {
860 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix reordering failed");
861 break;
862 }
863 default:
864 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: solve failed");
865 }
866 PetscCall(VecRestoreArray(x, &xptr));
867 PetscCall(VecRestoreArrayRead(b_mpi, &bptr));
868 PetscFunctionReturn(PETSC_SUCCESS);
869 }
870
MatMatSolve_STRUMPACK(Mat A,Mat B_mpi,Mat X)871 static PetscErrorCode MatMatSolve_STRUMPACK(Mat A, Mat B_mpi, Mat X)
872 {
873 STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)A->data;
874 STRUMPACK_RETURN_CODE sp_err;
875 PetscBool flg;
876 PetscInt m = A->rmap->n, nrhs;
877 const PetscScalar *bptr;
878 PetscScalar *xptr;
879
880 PetscFunctionBegin;
881 PetscCall(PetscObjectTypeCompareAny((PetscObject)B_mpi, &flg, MATSEQDENSE, MATMPIDENSE, NULL));
882 PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix B must be MATDENSE matrix");
883 PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &flg, MATSEQDENSE, MATMPIDENSE, NULL));
884 PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix X must be MATDENSE matrix");
885
886 PetscCall(MatGetSize(B_mpi, NULL, &nrhs));
887 PetscCall(MatDenseGetArray(X, &xptr));
888 PetscCall(MatDenseGetArrayRead(B_mpi, &bptr));
889
890 PetscStackCallExternalVoid("STRUMPACK_matsolve", sp_err = STRUMPACK_matsolve(*S, nrhs, bptr, m, xptr, m, 0));
891 switch (sp_err) {
892 case STRUMPACK_SUCCESS:
893 break;
894 case STRUMPACK_MATRIX_NOT_SET: {
895 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix was not set");
896 break;
897 }
898 case STRUMPACK_REORDERING_ERROR: {
899 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix reordering failed");
900 break;
901 }
902 default:
903 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: solve failed");
904 }
905 PetscCall(MatDenseRestoreArrayRead(B_mpi, &bptr));
906 PetscCall(MatDenseRestoreArray(X, &xptr));
907 PetscFunctionReturn(PETSC_SUCCESS);
908 }
909
MatView_Info_STRUMPACK(Mat A,PetscViewer viewer)910 static PetscErrorCode MatView_Info_STRUMPACK(Mat A, PetscViewer viewer)
911 {
912 PetscFunctionBegin;
913 /* check if matrix is strumpack type */
914 if (A->ops->solve != MatSolve_STRUMPACK) PetscFunctionReturn(PETSC_SUCCESS);
915 PetscCall(PetscViewerASCIIPrintf(viewer, "STRUMPACK sparse solver!\n"));
916 PetscFunctionReturn(PETSC_SUCCESS);
917 }
918
MatView_STRUMPACK(Mat A,PetscViewer viewer)919 static PetscErrorCode MatView_STRUMPACK(Mat A, PetscViewer viewer)
920 {
921 PetscBool isascii;
922 PetscViewerFormat format;
923
924 PetscFunctionBegin;
925 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
926 if (isascii) {
927 PetscCall(PetscViewerGetFormat(viewer, &format));
928 if (format == PETSC_VIEWER_ASCII_INFO) PetscCall(MatView_Info_STRUMPACK(A, viewer));
929 }
930 PetscFunctionReturn(PETSC_SUCCESS);
931 }
932
MatLUFactorNumeric_STRUMPACK(Mat F,Mat A,const MatFactorInfo * info)933 static PetscErrorCode MatLUFactorNumeric_STRUMPACK(Mat F, Mat A, const MatFactorInfo *info)
934 {
935 STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;
936 STRUMPACK_RETURN_CODE sp_err;
937 Mat Aloc;
938 const PetscScalar *av;
939 const PetscInt *ai = NULL, *aj = NULL;
940 PetscInt M = A->rmap->N, m = A->rmap->n, dummy;
941 PetscBool ismpiaij, isseqaij, flg;
942
943 PetscFunctionBegin;
944 PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &isseqaij));
945 PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
946 if (ismpiaij) {
947 PetscCall(MatMPIAIJGetLocalMat(A, MAT_INITIAL_MATRIX, &Aloc));
948 } else if (isseqaij) {
949 PetscCall(PetscObjectReference((PetscObject)A));
950 Aloc = A;
951 } else SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for type %s", ((PetscObject)A)->type_name);
952
953 PetscCall(MatGetRowIJ(Aloc, 0, PETSC_FALSE, PETSC_FALSE, &dummy, &ai, &aj, &flg));
954 PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "GetRowIJ failed");
955 PetscCall(MatSeqAIJGetArrayRead(Aloc, &av));
956
957 if (ismpiaij) {
958 const PetscInt *dist = NULL;
959 PetscCall(MatGetOwnershipRanges(A, &dist));
960 PetscStackCallExternalVoid("STRUMPACK_set_distributed_csr_matrix", STRUMPACK_set_distributed_csr_matrix(*S, &m, ai, aj, av, dist, 0));
961 } else if (isseqaij) {
962 PetscStackCallExternalVoid("STRUMPACK_set_csr_matrix", STRUMPACK_set_csr_matrix(*S, &M, ai, aj, av, 0));
963 }
964
965 PetscCall(MatRestoreRowIJ(Aloc, 0, PETSC_FALSE, PETSC_FALSE, &dummy, &ai, &aj, &flg));
966 PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "RestoreRowIJ failed");
967 PetscCall(MatSeqAIJRestoreArrayRead(Aloc, &av));
968 PetscCall(MatDestroy(&Aloc));
969
970 /* Reorder and Factor the matrix. */
971 /* TODO figure out how to avoid reorder if the matrix values changed, but the pattern remains the same. */
972 PetscStackCallExternalVoid("STRUMPACK_reorder", sp_err = STRUMPACK_reorder(*S));
973 PetscStackCallExternalVoid("STRUMPACK_factor", sp_err = STRUMPACK_factor(*S));
974 switch (sp_err) {
975 case STRUMPACK_SUCCESS:
976 break;
977 case STRUMPACK_MATRIX_NOT_SET: {
978 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix was not set");
979 break;
980 }
981 case STRUMPACK_REORDERING_ERROR: {
982 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix reordering failed");
983 break;
984 }
985 default:
986 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: factorization failed");
987 }
988 F->assembled = PETSC_TRUE;
989 F->preallocated = PETSC_TRUE;
990 PetscFunctionReturn(PETSC_SUCCESS);
991 }
992
/* Symbolic phase for LU/ILU with STRUMPACK: nothing to compute here, since
   STRUMPACK performs its own fill-reducing ordering and symbolic analysis inside
   MatLUFactorNumeric_STRUMPACK(); the row/column permutations r, c and the
   factorization info are therefore ignored. Only the solve/factor callbacks are installed. */
static PetscErrorCode MatLUFactorSymbolic_STRUMPACK(Mat F, Mat A, IS r, IS c, const MatFactorInfo *info)
{
  PetscFunctionBegin;
  F->ops->lufactornumeric = MatLUFactorNumeric_STRUMPACK;
  F->ops->solve = MatSolve_STRUMPACK;
  F->ops->matsolve = MatMatSolve_STRUMPACK;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1001
/* Report the solver package name ("strumpack") for a STRUMPACK factored matrix;
   composed on the factor as "MatFactorGetSolverType_C" */
static PetscErrorCode MatFactorGetSolverType_aij_strumpack(Mat A, MatSolverType *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERSTRUMPACK;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1008
1009 /*MC
1010 MATSOLVERSTRUMPACK = "strumpack" - A solver package providing a direct sparse solver (PCLU)
1011 and a preconditioner (PCILU) using low-rank compression via the external package STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>.
1012
1013 Use `./configure --download-strumpack --download-metis` to have PETSc installed with STRUMPACK.
1014
1015 For full functionality, add `--download-slate --download-magma --download-parmetis --download-ptscotch --download-zfp --download-butterflypack`.
1016 SLATE provides GPU support in the multi-GPU setting, providing ScaLAPACK functionality but with GPU acceleration.
  MAGMA can optionally be used for on-node GPU support instead of cuBLAS/cuSOLVER, and performs slightly better.
1018 ParMETIS and PTScotch can be used for parallel fill-reducing ordering.
1019 ZFP is used for floating point compression of the sparse factors (LOSSY or LOSSLESS compression).
1020 ButterflyPACK is used for HODLR (Hierarchically Off-Diagonal Low Rank) and HODBF (Hierarchically Off-Diagonal Butterfly) compression of the sparse factors.
1021
1022 Options Database Keys:
1023 + -mat_strumpack_verbose - Enable verbose output
1024 . -mat_strumpack_compression - Type of rank-structured compression in sparse LU factors (choose one of) NONE HSS BLR HODLR BLR_HODLR ZFP_BLR_HODLR LOSSLESS LOSSY
1025 . -mat_strumpack_compression_rel_tol - Relative compression tolerance, when using `-pctype ilu`
1026 . -mat_strumpack_compression_abs_tol - Absolute compression tolerance, when using `-pctype ilu`
1027 . -mat_strumpack_compression_min_sep_size - Minimum size of separator for rank-structured compression, when using `-pctype ilu`
1028 . -mat_strumpack_compression_leaf_size - Size of diagonal blocks in rank-structured approximation, when using `-pctype ilu`
1029 . -mat_strumpack_compression_lossy_precision - Precision when using lossy compression [1-64], when using `-pctype ilu`, compression LOSSY (requires ZFP support)
1030 . -mat_strumpack_compression_butterfly_levels - Number of levels in the hierarchically off-diagonal matrix for which to use butterfly, when using `-pctype ilu`, (BLR_)HODLR compression (requires ButterflyPACK support)
1031 . -mat_strumpack_gpu - Enable GPU acceleration in numerical factorization (not supported for all compression types)
1032 . -mat_strumpack_colperm <TRUE> - Permute matrix to make diagonal nonzeros
1033 . -mat_strumpack_reordering <METIS> - Sparsity reducing matrix reordering (choose one of) NATURAL METIS PARMETIS SCOTCH PTSCOTCH RCM GEOMETRIC AMD MMD AND MLF SPECTRAL
1034 . -mat_strumpack_geometric_xyz <1,1,1> - Mesh x,y,z dimensions, for use with GEOMETRIC ordering
1035 . -mat_strumpack_geometric_components <1> - Number of components per mesh point, for geometric nested dissection ordering
1036 . -mat_strumpack_geometric_width <1> - Width of the separator of the mesh, for geometric nested dissection ordering
1037 - -mat_strumpack_metis_nodeNDP - Use METIS_NodeNDP instead of METIS_NodeND, for a more balanced tree
1038
1039 Level: beginner
1040
1041 Notes:
1042 Recommended use is 1 MPI process per GPU.
1043
1044 Use `-pc_type lu` `-pc_factor_mat_solver_type strumpack` to use this as an exact (direct) solver.
1045
1046 Use `-pc_type ilu` `-pc_factor_mat_solver_type strumpack` to enable low-rank compression (i.e, use as a preconditioner), by default using block low rank (BLR).
1047
1048 Works with `MATAIJ` matrices
1049
  HODLR, BLR_HODLR and ZFP_BLR_HODLR compression require STRUMPACK to be configured with ButterflyPACK support (`--download-butterflypack`).
1051
1052 LOSSY, LOSSLESS and ZFP_BLR_HODLR compression require STRUMPACK to be configured with ZFP support (`--download-zfp`).
1053
1054 .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `PCLU`, `PCILU`, `MATSOLVERSUPERLU_DIST`, `MATSOLVERMUMPS`, `PCFactorSetMatSolverType()`, `MatSolverType`,
1055 `MatGetFactor()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKReordering`, `MatSTRUMPACKCompressionType`, `MatSTRUMPACKSetColPerm()`.
1056 M*/
MatGetFactor_aij_strumpack(Mat A,MatFactorType ftype,Mat * F)1057 static PetscErrorCode MatGetFactor_aij_strumpack(Mat A, MatFactorType ftype, Mat *F)
1058 {
1059 Mat B;
1060 PetscInt M = A->rmap->N, N = A->cmap->N;
1061 PetscBool verb, flg, set;
1062 PetscReal ctol;
1063 PetscInt min_sep_size, leaf_size, nxyz[3], nrdims, nc, w;
1064 #if defined(STRUMPACK_USE_ZFP)
1065 PetscInt lossy_prec;
1066 #endif
1067 #if defined(STRUMPACK_USE_BPACK)
1068 PetscInt bfly_lvls;
1069 #endif
1070 #if defined(STRUMPACK_USE_SLATE_SCALAPACK)
1071 PetscMPIInt mpithreads;
1072 #endif
1073 STRUMPACK_SparseSolver *S;
1074 STRUMPACK_INTERFACE iface;
1075 STRUMPACK_REORDERING_STRATEGY ndcurrent, ndvalue;
1076 STRUMPACK_COMPRESSION_TYPE compcurrent, compvalue;
1077 const STRUMPACK_PRECISION table[2][2][2] = {
1078 {{STRUMPACK_FLOATCOMPLEX_64, STRUMPACK_DOUBLECOMPLEX_64}, {STRUMPACK_FLOAT_64, STRUMPACK_DOUBLE_64}},
1079 {{STRUMPACK_FLOATCOMPLEX, STRUMPACK_DOUBLECOMPLEX}, {STRUMPACK_FLOAT, STRUMPACK_DOUBLE} }
1080 };
1081 const STRUMPACK_PRECISION prec = table[(sizeof(PetscInt) == 8) ? 0 : 1][(PETSC_SCALAR == PETSC_COMPLEX) ? 0 : 1][(PETSC_REAL == PETSC_FLOAT) ? 0 : 1];
1082 const char *const STRUMPACKNDTypes[] = {"NATURAL", "METIS", "PARMETIS", "SCOTCH", "PTSCOTCH", "RCM", "GEOMETRIC", "AMD", "MMD", "AND", "MLF", "SPECTRAL", "STRUMPACKNDTypes", "", 0};
1083 const char *const CompTypes[] = {"NONE", "HSS", "BLR", "HODLR", "BLR_HODLR", "ZFP_BLR_HODLR", "LOSSLESS", "LOSSY", "CompTypes", "", 0};
1084
1085 PetscFunctionBegin;
1086 #if defined(STRUMPACK_USE_SLATE_SCALAPACK)
1087 PetscCallMPI(MPI_Query_thread(&mpithreads));
1088 PetscCheck(mpithreads == MPI_THREAD_MULTIPLE, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP_SYS, "SLATE requires MPI_THREAD_MULTIPLE");
1089 #endif
1090 /* Create the factorization matrix */
1091 PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
1092 PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, M, N));
1093 PetscCall(PetscStrallocpy("strumpack", &((PetscObject)B)->type_name));
1094 PetscCall(MatSetUp(B));
1095 PetscCall(MatSeqAIJSetPreallocation(B, 0, NULL));
1096 PetscCall(MatMPIAIJSetPreallocation(B, 0, NULL, 0, NULL));
1097 B->trivialsymbolic = PETSC_TRUE;
1098 PetscCheck(ftype == MAT_FACTOR_LU || ftype == MAT_FACTOR_ILU, PETSC_COMM_SELF, PETSC_ERR_SUP, "Factor type not supported");
1099 B->ops->lufactorsymbolic = MatLUFactorSymbolic_STRUMPACK;
1100 B->ops->ilufactorsymbolic = MatLUFactorSymbolic_STRUMPACK;
1101 B->ops->getinfo = MatGetInfo_External;
1102 B->ops->view = MatView_STRUMPACK;
1103 B->ops->destroy = MatDestroy_STRUMPACK;
1104 B->ops->getdiagonal = MatGetDiagonal_STRUMPACK;
1105 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_aij_strumpack));
1106 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetReordering_C", MatSTRUMPACKSetReordering_STRUMPACK));
1107 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetReordering_C", MatSTRUMPACKGetReordering_STRUMPACK));
1108 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetColPerm_C", MatSTRUMPACKSetColPerm_STRUMPACK));
1109 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetColPerm_C", MatSTRUMPACKGetColPerm_STRUMPACK));
1110 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetGeometricNxyz_C", MatSTRUMPACKSetGeometricNxyz_STRUMPACK));
1111 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetGeometricComponents_C", MatSTRUMPACKSetGeometricComponents_STRUMPACK));
1112 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetGeometricWidth_C", MatSTRUMPACKSetGeometricWidth_STRUMPACK));
1113 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetGPU_C", MatSTRUMPACKSetGPU_STRUMPACK));
1114 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetGPU_C", MatSTRUMPACKGetGPU_STRUMPACK));
1115 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompression_C", MatSTRUMPACKSetCompression_STRUMPACK));
1116 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompression_C", MatSTRUMPACKGetCompression_STRUMPACK));
1117 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompRelTol_C", MatSTRUMPACKSetCompRelTol_STRUMPACK));
1118 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompRelTol_C", MatSTRUMPACKGetCompRelTol_STRUMPACK));
1119 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompAbsTol_C", MatSTRUMPACKSetCompAbsTol_STRUMPACK));
1120 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompAbsTol_C", MatSTRUMPACKGetCompAbsTol_STRUMPACK));
1121 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompLeafSize_C", MatSTRUMPACKSetCompLeafSize_STRUMPACK));
1122 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompLeafSize_C", MatSTRUMPACKGetCompLeafSize_STRUMPACK));
1123 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompMinSepSize_C", MatSTRUMPACKSetCompMinSepSize_STRUMPACK));
1124 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompMinSepSize_C", MatSTRUMPACKGetCompMinSepSize_STRUMPACK));
1125 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompLossyPrecision_C", MatSTRUMPACKSetCompLossyPrecision_STRUMPACK));
1126 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompLossyPrecision_C", MatSTRUMPACKGetCompLossyPrecision_STRUMPACK));
1127 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompButterflyLevels_C", MatSTRUMPACKSetCompButterflyLevels_STRUMPACK));
1128 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompButterflyLevels_C", MatSTRUMPACKGetCompButterflyLevels_STRUMPACK));
1129 B->factortype = ftype;
1130
1131 /* set solvertype */
1132 PetscCall(PetscFree(B->solvertype));
1133 PetscCall(PetscStrallocpy(MATSOLVERSTRUMPACK, &B->solvertype));
1134
1135 PetscCall(PetscNew(&S));
1136 B->data = S;
1137
1138 PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &flg)); /* A might be MATSEQAIJCUSPARSE */
1139 iface = flg ? STRUMPACK_MT : STRUMPACK_MPI_DIST;
1140
1141 PetscOptionsBegin(PetscObjectComm((PetscObject)B), ((PetscObject)B)->prefix, "STRUMPACK Options", "Mat");
1142 verb = PetscLogPrintInfo ? PETSC_TRUE : PETSC_FALSE;
1143 PetscCall(PetscOptionsBool("-mat_strumpack_verbose", "Print STRUMPACK information", "None", verb, &verb, NULL));
1144
1145 PetscStackCallExternalVoid("STRUMPACK_init", STRUMPACK_init(S, PetscObjectComm((PetscObject)A), prec, iface, 0, NULL, verb));
1146
1147 /* By default, no compression is done. Compression is enabled when the user enables it with */
1148 /* -mat_strumpack_compression with anything else than NONE, or when selecting ilu */
1149 /* preconditioning, in which case we default to STRUMPACK_BLR compression. */
1150 /* When compression is enabled, the STRUMPACK solver becomes an incomplete */
1151 /* (or approximate) LU factorization. */
1152 PetscStackCallExternalVoid("STRUMPACK_compression", compcurrent = STRUMPACK_compression(*S));
1153 PetscCall(PetscOptionsEnum("-mat_strumpack_compression", "Rank-structured compression type", "None", CompTypes, (PetscEnum)compcurrent, (PetscEnum *)&compvalue, &set));
1154 if (set) {
1155 PetscCall(MatSTRUMPACKSetCompression(B, (MatSTRUMPACKCompressionType)compvalue));
1156 } else {
1157 if (ftype == MAT_FACTOR_ILU) PetscStackCallExternalVoid("STRUMPACK_set_compression", STRUMPACK_set_compression(*S, STRUMPACK_BLR));
1158 }
1159
1160 PetscStackCallExternalVoid("STRUMPACK_compression_rel_tol", ctol = (PetscReal)STRUMPACK_compression_rel_tol(*S));
1161 PetscCall(PetscOptionsReal("-mat_strumpack_compression_rel_tol", "Relative compression tolerance", "None", ctol, &ctol, &set));
1162 if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_rel_tol", STRUMPACK_set_compression_rel_tol(*S, (double)ctol));
1163
1164 PetscStackCallExternalVoid("STRUMPACK_compression_abs_tol", ctol = (PetscReal)STRUMPACK_compression_abs_tol(*S));
1165 PetscCall(PetscOptionsReal("-mat_strumpack_compression_abs_tol", "Absolute compression tolerance", "None", ctol, &ctol, &set));
1166 if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_abs_tol", STRUMPACK_set_compression_abs_tol(*S, (double)ctol));
1167
1168 PetscStackCallExternalVoid("STRUMPACK_compression_min_sep_size", min_sep_size = (PetscInt)STRUMPACK_compression_min_sep_size(*S));
1169 PetscCall(PetscOptionsInt("-mat_strumpack_compression_min_sep_size", "Minimum size of separator for compression", "None", min_sep_size, &min_sep_size, &set));
1170 if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_min_sep_size", STRUMPACK_set_compression_min_sep_size(*S, (int)min_sep_size));
1171
1172 PetscStackCallExternalVoid("STRUMPACK_compression_leaf_size", leaf_size = (PetscInt)STRUMPACK_compression_leaf_size(*S));
1173 PetscCall(PetscOptionsInt("-mat_strumpack_compression_leaf_size", "Size of diagonal blocks in rank-structured approximation", "None", leaf_size, &leaf_size, &set));
1174 if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_leaf_size", STRUMPACK_set_compression_leaf_size(*S, (int)leaf_size));
1175
1176 #if defined(STRUMPACK_USE_ZFP)
1177 PetscStackCallExternalVoid("STRUMPACK_compression_lossy_precision", lossy_prec = (PetscInt)STRUMPACK_compression_lossy_precision(*S));
1178 PetscCall(PetscOptionsInt("-mat_strumpack_compression_lossy_precision", "Number of bitplanes to use in lossy compression", "None", lossy_prec, &lossy_prec, &set));
1179 if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_lossy_precision", STRUMPACK_set_compression_lossy_precision(*S, (int)lossy_prec));
1180 #endif
1181
1182 #if defined(STRUMPACK_USE_BPACK)
1183 PetscStackCallExternalVoid("STRUMPACK_compression_butterfly_levels", bfly_lvls = (PetscInt)STRUMPACK_compression_butterfly_levels(*S));
1184 PetscCall(PetscOptionsInt("-mat_strumpack_compression_butterfly_levels", "Number of levels in the HODLR matrix for which to use butterfly compression", "None", bfly_lvls, &bfly_lvls, &set));
1185 if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_butterfly_levels", STRUMPACK_set_compression_butterfly_levels(*S, (int)bfly_lvls));
1186 #endif
1187
1188 #if defined(STRUMPACK_USE_CUDA) || defined(STRUMPACK_USE_HIP) || defined(STRUMPACK_USE_SYCL)
1189 PetscStackCallExternalVoid("STRUMPACK_use_gpu", flg = (STRUMPACK_use_gpu(*S) == 0) ? PETSC_FALSE : PETSC_TRUE);
1190 PetscCall(PetscOptionsBool("-mat_strumpack_gpu", "Enable GPU acceleration (not supported for all compression types)", "None", flg, &flg, &set));
1191 if (set) MatSTRUMPACKSetGPU(B, flg);
1192 #endif
1193
1194 PetscStackCallExternalVoid("STRUMPACK_matching", flg = (STRUMPACK_matching(*S) == STRUMPACK_MATCHING_NONE) ? PETSC_FALSE : PETSC_TRUE);
1195 PetscCall(PetscOptionsBool("-mat_strumpack_colperm", "Find a col perm to get nonzero diagonal", "None", flg, &flg, &set));
1196 if (set) PetscStackCallExternalVoid("STRUMPACK_set_matching", STRUMPACK_set_matching(*S, flg ? STRUMPACK_MATCHING_MAX_DIAGONAL_PRODUCT_SCALING : STRUMPACK_MATCHING_NONE));
1197
1198 PetscStackCallExternalVoid("STRUMPACK_reordering_method", ndcurrent = STRUMPACK_reordering_method(*S));
1199 PetscCall(PetscOptionsEnum("-mat_strumpack_reordering", "Sparsity reducing matrix reordering", "None", STRUMPACKNDTypes, (PetscEnum)ndcurrent, (PetscEnum *)&ndvalue, &set));
1200 if (set) PetscStackCallExternalVoid("STRUMPACK_set_reordering_method", STRUMPACK_set_reordering_method(*S, ndvalue));
1201
1202 /* geometric ordering, for a regular 1D/2D/3D mesh in the natural ordering, */
1203 /* with nc DOF's per gridpoint, and possibly a wider stencil */
1204 nrdims = 3;
1205 nxyz[0] = nxyz[1] = nxyz[2] = 1;
1206 PetscCall(PetscOptionsIntArray("-mat_strumpack_geometric_xyz", "Mesh sizes nx,ny,nz (Use 1 for default)", "", nxyz, &nrdims, &set));
1207 if (set) {
1208 PetscCheck(nrdims == 1 || nrdims == 2 || nrdims == 3, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_OUTOFRANGE, "'-mat_strumpack_geometric_xyz' requires 1, 2, or 3 values.");
1209 PetscCall(MatSTRUMPACKSetGeometricNxyz(B, (int)nxyz[0], (int)nxyz[1], (int)nxyz[2]));
1210 }
1211 PetscCall(PetscOptionsInt("-mat_strumpack_geometric_components", "Number of components per mesh point, for geometric nested dissection ordering", "None", 1, &nc, &set));
1212 if (set) PetscStackCallExternalVoid("STRUMPACK_set_components", STRUMPACK_set_components(*S, (int)nc));
1213 PetscCall(PetscOptionsInt("-mat_strumpack_geometric_width", "Width of the separator (for instance a 1D 3-point wide stencil needs a 1 point wide separator, a 1D 5-point stencil needs a 2 point wide separator), for geometric nested dissection ordering", "None", 1, &w, &set));
1214 if (set) PetscStackCallExternalVoid("STRUMPACK_set_separator_width", STRUMPACK_set_separator_width(*S, (int)w));
1215
1216 PetscStackCallExternalVoid("STRUMPACK_use_METIS_NodeNDP", flg = (STRUMPACK_use_METIS_NodeNDP(*S) == 0) ? PETSC_FALSE : PETSC_TRUE);
1217 PetscCall(PetscOptionsBool("-mat_strumpack_metis_nodeNDP", "Use METIS_NodeNDP instead of METIS_NodeND, for a more balanced tree", "None", flg, &flg, &set));
1218 if (set) {
1219 if (flg) {
1220 PetscStackCallExternalVoid("STRUMPACK_enable_METIS_NodeNDP", STRUMPACK_enable_METIS_NodeNDP(*S));
1221 } else {
1222 PetscStackCallExternalVoid("STRUMPACK_disable_METIS_NodeNDP", STRUMPACK_disable_METIS_NodeNDP(*S));
1223 }
1224 }
1225
1226 /* Disable the outer iterative solver from STRUMPACK. */
1227 /* When STRUMPACK is used as a direct solver, it will by default do iterative refinement. */
1228 /* When STRUMPACK is used as an approximate factorization preconditioner (by enabling */
1229 /* low-rank compression), it will use it's own preconditioned GMRES. Here we can disable */
1230 /* the outer iterative solver, as PETSc uses STRUMPACK from within a KSP. */
1231 PetscStackCallExternalVoid("STRUMPACK_set_Krylov_solver", STRUMPACK_set_Krylov_solver(*S, STRUMPACK_DIRECT));
1232
1233 PetscOptionsEnd();
1234
1235 *F = B;
1236 PetscFunctionReturn(PETSC_SUCCESS);
1237 }
1238
MatSolverTypeRegister_STRUMPACK(void)1239 PETSC_INTERN PetscErrorCode MatSolverTypeRegister_STRUMPACK(void)
1240 {
1241 PetscFunctionBegin;
1242 PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATMPIAIJ, MAT_FACTOR_LU, MatGetFactor_aij_strumpack));
1243 PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_aij_strumpack));
1244 PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATMPIAIJ, MAT_FACTOR_ILU, MatGetFactor_aij_strumpack));
1245 PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATSEQAIJ, MAT_FACTOR_ILU, MatGetFactor_aij_strumpack));
1246 PetscFunctionReturn(PETSC_SUCCESS);
1247 }
1248