xref: /petsc/src/ksp/pc/impls/hmg/hmg.c (revision bcee047adeeb73090d7e36cc71e39fc287cdbb97)
1 #include <petscdm.h>
2 #include <petsc/private/hashmapi.h>
3 #include <petsc/private/matimpl.h>
4 #include <petsc/private/pcmgimpl.h>
5 #include <petsc/private/pcimpl.h> /*I "petscpc.h" I*/
6 
7 typedef struct {
8   PC        innerpc;       /* A MG inner PC (Hypre or PCGAMG) to setup interpolations and coarse operators  */
9   char     *innerpctype;   /* PCGAMG or PCHYPRE */
10   PetscBool reuseinterp;   /* A flag indicates if or not to reuse the interpolations */
11   PetscBool subcoarsening; /* If or not to use a subspace-based coarsening algorithm */
12   PetscBool usematmaij;    /* If or not to use MatMAIJ for saving memory */
13   PetscInt  component;     /* Which subspace is used for the subspace-based coarsening algorithm? */
14 } PC_HMG;
15 
16 PetscErrorCode PCSetFromOptions_HMG(PC, PetscOptionItems *);
17 PetscErrorCode PCReset_MG(PC);
18 
19 static PetscErrorCode PCHMGExtractSubMatrix_Private(Mat pmat, Mat *submat, MatReuse reuse, PetscInt component, PetscInt blocksize)
20 {
21   IS       isrow;
22   PetscInt rstart, rend;
23   MPI_Comm comm;
24 
25   PetscFunctionBegin;
26   PetscCall(PetscObjectGetComm((PetscObject)pmat, &comm));
27   PetscCheck(component < blocksize, comm, PETSC_ERR_ARG_INCOMP, "Component %" PetscInt_FMT " should be less than block size %" PetscInt_FMT " ", component, blocksize);
28   PetscCall(MatGetOwnershipRange(pmat, &rstart, &rend));
29   PetscCheck((rend - rstart) % blocksize == 0, comm, PETSC_ERR_ARG_INCOMP, "Block size %" PetscInt_FMT " is inconsistent for [%" PetscInt_FMT ", %" PetscInt_FMT ") ", blocksize, rstart, rend);
30   PetscCall(ISCreateStride(comm, (rend - rstart) / blocksize, rstart + component, blocksize, &isrow));
31   PetscCall(MatCreateSubMatrix(pmat, isrow, isrow, reuse, submat));
32   PetscCall(ISDestroy(&isrow));
33   PetscFunctionReturn(PETSC_SUCCESS);
34 }
35 
/*
  PCHMGExpandInterpolation_Private - Expand a single-component interpolation matrix into a
  full multi-component interpolation by replicating its sparsity pattern and values once per
  component, interleaved with stride blocksize (expanded row = subrow*blocksize + component).

  Two passes over subinterp: first to count per-row diagonal/off-diagonal nonzeros for exact
  preallocation, then to insert the replicated values.

  Input Parameters:
+ subinterp - the single-component interpolation
- blocksize - number of interlaced components

  Output Parameter:
. interp - the expanded interpolation (caller owns it)
*/
static PetscErrorCode PCHMGExpandInterpolation_Private(Mat subinterp, Mat *interp, PetscInt blocksize)
{
  PetscInt           subrstart, subrend, subrowsize, subcolsize, subcstart, subcend, rowsize, colsize;
  PetscInt           subrow, row, nz, *d_nnz, *o_nnz, i, j, dnz, onz, max_nz, *indices;
  const PetscInt    *idx;
  const PetscScalar *values;
  MPI_Comm           comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)subinterp, &comm));
  /* Local sizes of the expanded matrix are blocksize times those of subinterp */
  PetscCall(MatGetOwnershipRange(subinterp, &subrstart, &subrend));
  subrowsize = subrend - subrstart;
  rowsize    = subrowsize * blocksize;
  PetscCall(PetscCalloc2(rowsize, &d_nnz, rowsize, &o_nnz));
  PetscCall(MatGetOwnershipRangeColumn(subinterp, &subcstart, &subcend));
  subcolsize = subcend - subcstart;
  colsize    = subcolsize * blocksize;
  /* Pass 1: count diagonal/off-diagonal nonzeros per sub-row; max_nz bounds the scratch
     index array used in pass 2 */
  max_nz     = 0;
  for (subrow = subrstart; subrow < subrend; subrow++) {
    PetscCall(MatGetRow(subinterp, subrow, &nz, &idx, NULL));
    if (max_nz < nz) max_nz = nz;
    dnz = 0;
    onz = 0;
    for (i = 0; i < nz; i++) {
      /* Diagonal-block entry iff the sub-column is locally owned; the expanded ownership
         ranges scale by blocksize, so the classification carries over */
      if (idx[i] >= subcstart && idx[i] < subcend) dnz++;
      else onz++;
    }
    /* Each sub-row expands to blocksize rows with identical nonzero counts */
    for (i = 0; i < blocksize; i++) {
      d_nnz[(subrow - subrstart) * blocksize + i] = dnz;
      o_nnz[(subrow - subrstart) * blocksize + i] = onz;
    }
    PetscCall(MatRestoreRow(subinterp, subrow, &nz, &idx, NULL));
  }
  PetscCall(MatCreateAIJ(comm, rowsize, colsize, PETSC_DETERMINE, PETSC_DETERMINE, 0, d_nnz, 0, o_nnz, interp));
  PetscCall(MatSetOption(*interp, MAT_IGNORE_OFF_PROC_ENTRIES, PETSC_TRUE));
  PetscCall(MatSetOption(*interp, MAT_IGNORE_ZERO_ENTRIES, PETSC_TRUE));
  /* Preallocation above is exact, so any additional allocation is a bug — make it an error */
  PetscCall(MatSetOption(*interp, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
  PetscCall(MatSetFromOptions(*interp));

  PetscCall(MatSetUp(*interp));
  PetscCall(PetscFree2(d_nnz, o_nnz));
  PetscCall(PetscMalloc1(max_nz, &indices));
  /* Pass 2: copy each sub-row into its blocksize interleaved expanded rows */
  for (subrow = subrstart; subrow < subrend; subrow++) {
    PetscCall(MatGetRow(subinterp, subrow, &nz, &idx, &values));
    for (i = 0; i < blocksize; i++) {
      row = subrow * blocksize + i;
      /* Expanded (global) column index of component i for sub-column idx[j] */
      for (j = 0; j < nz; j++) indices[j] = idx[j] * blocksize + i;
      PetscCall(MatSetValues(*interp, 1, &row, nz, indices, values, INSERT_VALUES));
    }
    PetscCall(MatRestoreRow(subinterp, subrow, &nz, &idx, &values));
  }
  PetscCall(PetscFree(indices));
  PetscCall(MatAssemblyBegin(*interp, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*interp, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
92 
/*
  PCSetUp_HMG - Build the HMG hierarchy: run an inner AMG preconditioner (GAMG or HYPRE)
  on either the full matrix or a single-component submatrix, take ownership of its
  interpolations (and, for full-space coarsening, its coarse operators), and install
  them into this PCMG. The inner PC is destroyed once the hierarchy has been extracted.
*/
PetscErrorCode PCSetUp_HMG(PC pc)
{
  Mat              PA, submat;
  PC_MG           *mg  = (PC_MG *)pc->data;
  PC_HMG          *hmg = (PC_HMG *)mg->innerctx;
  MPI_Comm         comm;
  PetscInt         level;
  PetscInt         num_levels;
  Mat             *operators, *interpolations;
  PetscInt         blocksize;
  const char      *prefix;
  PCMGGalerkinType galerkin;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  if (pc->setupcalled) {
    if (hmg->reuseinterp) {
      /* If we did not use Galerkin in the last call or we have a different sparsity pattern now,
       * we have to build from scratch */
      PetscCall(PCMGGetGalerkin(pc, &galerkin));
      if (galerkin == PC_MG_GALERKIN_NONE || pc->flag != SAME_NONZERO_PATTERN) pc->setupcalled = PETSC_FALSE;
      /* Keep the old interpolations; PCMG recomputes the coarse operators by Galerkin projection */
      PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_PMAT));
      PetscCall(PCSetUp_MG(pc));
      PetscFunctionReturn(PETSC_SUCCESS);
    } else {
      /* Not reusing: discard the old hierarchy and rebuild everything below */
      PetscCall(PCReset_MG(pc));
      pc->setupcalled = PETSC_FALSE;
    }
  }

  /* Create an inner PC (GAMG or HYPRE) */
  if (!hmg->innerpc) {
    PetscCall(PCCreate(comm, &hmg->innerpc));
    /* If users do not set an inner pc type, we need to set a default value */
    if (!hmg->innerpctype) {
      /* If hypre is available, use hypre, otherwise, use gamg */
#if PetscDefined(HAVE_HYPRE)
      PetscCall(PetscStrallocpy(PCHYPRE, &(hmg->innerpctype)));
#else
      PetscCall(PetscStrallocpy(PCGAMG, &(hmg->innerpctype)));
#endif
    }
    PetscCall(PCSetType(hmg->innerpc, hmg->innerpctype));
  }
  PetscCall(PCGetOperators(pc, NULL, &PA));
  /* Users need to correctly set a block size of matrix in order to use subspace coarsening */
  PetscCall(MatGetBlockSize(PA, &blocksize));
  if (blocksize <= 1) hmg->subcoarsening = PETSC_FALSE;
  /* Extract a submatrix for constructing subinterpolations */
  if (hmg->subcoarsening) {
    PetscCall(PCHMGExtractSubMatrix_Private(PA, &submat, MAT_INITIAL_MATRIX, hmg->component, blocksize));
    PA = submat;
  }
  PetscCall(PCSetOperators(hmg->innerpc, PA, PA));
  /* The inner PC now holds its own reference to the submatrix; drop ours */
  if (hmg->subcoarsening) PetscCall(MatDestroy(&PA));
  /* Setup inner PC correctly. During this step, matrix will be coarsened */
  PetscCall(PCSetUseAmat(hmg->innerpc, PETSC_FALSE));
  /* Inner PC options live under <prefix>hmg_inner_ so they do not clash with ours */
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc, &prefix));
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)hmg->innerpc, prefix));
  PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)hmg->innerpc, "hmg_inner_"));
  PetscCall(PCSetFromOptions(hmg->innerpc));
  PetscCall(PCSetUp(hmg->innerpc));

  /* Obtain interpolations IN PLACE. For BoomerAMG, (I,J,data) is reused to avoid memory overhead */
  PetscCall(PCGetInterpolations(hmg->innerpc, &num_levels, &interpolations));
  /* We can reuse the coarse operators when we do the full space coarsening */
  if (!hmg->subcoarsening) PetscCall(PCGetCoarseOperators(hmg->innerpc, &num_levels, &operators));

  /* The hierarchy has been extracted; the inner PC is no longer needed */
  PetscCall(PCDestroy(&hmg->innerpc));
  hmg->innerpc = NULL;
  PetscCall(PCMGSetLevels_MG(pc, num_levels, NULL));
  /* Set coarse matrices and interpolations to PCMG */
  for (level = num_levels - 1; level > 0; level--) {
    Mat P = NULL, pmat = NULL;
    Vec b, x, r;
    if (hmg->subcoarsening) {
      if (hmg->usematmaij) {
        /* Wrap the scalar interpolation as a MAIJ matrix instead of materializing the expansion */
        PetscCall(MatCreateMAIJ(interpolations[level - 1], blocksize, &P));
        PetscCall(MatDestroy(&interpolations[level - 1]));
      } else {
        /* Grow interpolation. In the future, we should use MAIJ */
        PetscCall(PCHMGExpandInterpolation_Private(interpolations[level - 1], &P, blocksize));
        PetscCall(MatDestroy(&interpolations[level - 1]));
      }
    } else {
      /* Full-space coarsening: take over the interpolation reference directly */
      P = interpolations[level - 1];
    }
    PetscCall(MatCreateVecs(P, &b, &r));
    PetscCall(PCMGSetInterpolation(pc, level, P));
    PetscCall(PCMGSetRestriction(pc, level, P));
    PetscCall(MatDestroy(&P));
    /* We reuse the matrices when we do not do subspace coarsening */
    if ((level - 1) >= 0 && !hmg->subcoarsening) {
      pmat = operators[level - 1];
      PetscCall(PCMGSetOperators(pc, level - 1, pmat, pmat));
      PetscCall(MatDestroy(&pmat));
    }
    /* Work vectors for the coarse level (b, x) and this level's residual (r) */
    PetscCall(PCMGSetRhs(pc, level - 1, b));

    PetscCall(PCMGSetR(pc, level, r));
    PetscCall(VecDestroy(&r));

    PetscCall(VecDuplicate(b, &x));
    PetscCall(PCMGSetX(pc, level - 1, x));
    PetscCall(VecDestroy(&x));
    PetscCall(VecDestroy(&b));
  }
  PetscCall(PetscFree(interpolations));
  if (!hmg->subcoarsening) PetscCall(PetscFree(operators));
  /* Turn Galerkin off when we already have coarse operators */
  PetscCall(PCMGSetGalerkin(pc, hmg->subcoarsening ? PC_MG_GALERKIN_PMAT : PC_MG_GALERKIN_NONE));
  PetscCall(PCSetDM(pc, NULL));
  PetscCall(PCSetUseAmat(pc, PETSC_FALSE));
  PetscObjectOptionsBegin((PetscObject)pc);
  PetscCall(PCSetFromOptions_MG(pc, PetscOptionsObject)); /* should be called in PCSetFromOptions_HMG(), but cannot be called prior to PCMGSetLevels() */
  PetscOptionsEnd();
  PetscCall(PCSetUp_MG(pc));
  PetscFunctionReturn(PETSC_SUCCESS);
}
213 
/*
  PCDestroy_HMG - Free the HMG context and its owned resources, then run the MG destroy
  and remove the composed setter methods installed in PCCreate_HMG().
*/
PetscErrorCode PCDestroy_HMG(PC pc)
{
  PC_MG  *mg  = (PC_MG *)pc->data;
  PC_HMG *hmg = (PC_HMG *)mg->innerctx;

  PetscFunctionBegin;
  /* Release HMG-owned resources before PCDestroy_MG() frees pc->data (and with it mg->innerctx) */
  PetscCall(PCDestroy(&hmg->innerpc));
  PetscCall(PetscFree(hmg->innerpctype));
  PetscCall(PetscFree(hmg));
  PetscCall(PCDestroy_MG(pc));

  /* Detach the composed methods so a subsequent PCSetType() starts clean */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetReuseInterpolation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetUseSubspaceCoarsening_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetInnerPCType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetCoarseningComponent_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGUseMatMAIJ_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}
232 
233 PetscErrorCode PCView_HMG(PC pc, PetscViewer viewer)
234 {
235   PC_MG    *mg  = (PC_MG *)pc->data;
236   PC_HMG   *hmg = (PC_HMG *)mg->innerctx;
237   PetscBool iascii;
238 
239   PetscFunctionBegin;
240   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
241   if (iascii) {
242     PetscCall(PetscViewerASCIIPrintf(viewer, " Reuse interpolation: %s\n", hmg->reuseinterp ? "true" : "false"));
243     PetscCall(PetscViewerASCIIPrintf(viewer, " Use subspace coarsening: %s\n", hmg->subcoarsening ? "true" : "false"));
244     PetscCall(PetscViewerASCIIPrintf(viewer, " Coarsening component: %" PetscInt_FMT " \n", hmg->component));
245     PetscCall(PetscViewerASCIIPrintf(viewer, " Use MatMAIJ: %s \n", hmg->usematmaij ? "true" : "false"));
246     PetscCall(PetscViewerASCIIPrintf(viewer, " Inner PC type: %s \n", hmg->innerpctype));
247   }
248   PetscCall(PCView_MG(pc, viewer));
249   PetscFunctionReturn(PETSC_SUCCESS);
250 }
251 
252 PetscErrorCode PCSetFromOptions_HMG(PC pc, PetscOptionItems *PetscOptionsObject)
253 {
254   PC_MG  *mg  = (PC_MG *)pc->data;
255   PC_HMG *hmg = (PC_HMG *)mg->innerctx;
256 
257   PetscFunctionBegin;
258   PetscOptionsHeadBegin(PetscOptionsObject, "HMG");
259   PetscCall(PetscOptionsBool("-pc_hmg_reuse_interpolation", "Reuse the interpolation operators when possible (cheaper, weaker when matrix entries change a lot)", "PCHMGSetReuseInterpolation", hmg->reuseinterp, &hmg->reuseinterp, NULL));
260   PetscCall(PetscOptionsBool("-pc_hmg_use_subspace_coarsening", "Use the subspace coarsening to compute the interpolations", "PCHMGSetUseSubspaceCoarsening", hmg->subcoarsening, &hmg->subcoarsening, NULL));
261   PetscCall(PetscOptionsBool("-pc_hmg_use_matmaij", "Use MatMAIJ store interpolation for saving memory", "PCHMGSetInnerPCType", hmg->usematmaij, &hmg->usematmaij, NULL));
262   PetscCall(PetscOptionsInt("-pc_hmg_coarsening_component", "Which component is chosen for the subspace-based coarsening algorithm", "PCHMGSetCoarseningComponent", hmg->component, &hmg->component, NULL));
263   PetscOptionsHeadEnd();
264   PetscFunctionReturn(PETSC_SUCCESS);
265 }
266 
267 static PetscErrorCode PCHMGSetReuseInterpolation_HMG(PC pc, PetscBool reuse)
268 {
269   PC_MG  *mg  = (PC_MG *)pc->data;
270   PC_HMG *hmg = (PC_HMG *)mg->innerctx;
271 
272   PetscFunctionBegin;
273   hmg->reuseinterp = reuse;
274   PetscFunctionReturn(PETSC_SUCCESS);
275 }
276 
/*@
   PCHMGSetReuseInterpolation - Reuse the interpolation matrices in `PCHMG` after changing the matrices numerical values

   Logically Collective

   Input Parameters:
+  pc - the `PCHMG` context
-  reuse - `PETSC_TRUE` indicates that `PCHMG` will reuse the interpolations

   Options Database Key:
.  -pc_hmg_reuse_interpolation <true | false> - Whether or not to reuse the interpolations. If true, it potentially saves compute time.

   Level: beginner

.seealso: `PCHMG`, `PCGAMG`, `PCHMGSetUseSubspaceCoarsening()`, `PCHMGSetCoarseningComponent()`, `PCHMGSetInnerPCType()`
@*/
PetscErrorCode PCHMGSetReuseInterpolation(PC pc, PetscBool reuse)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCHMGSetReuseInterpolation_C", (PC, PetscBool), (pc, reuse));
  PetscFunctionReturn(PETSC_SUCCESS);
}
300 
301 static PetscErrorCode PCHMGSetUseSubspaceCoarsening_HMG(PC pc, PetscBool subspace)
302 {
303   PC_MG  *mg  = (PC_MG *)pc->data;
304   PC_HMG *hmg = (PC_HMG *)mg->innerctx;
305 
306   PetscFunctionBegin;
307   hmg->subcoarsening = subspace;
308   PetscFunctionReturn(PETSC_SUCCESS);
309 }
310 
/*@
   PCHMGSetUseSubspaceCoarsening - Use subspace coarsening in `PCHMG`

   Logically Collective

   Input Parameters:
+  pc - the `PCHMG` context
-  subspace - `PETSC_TRUE` indicates that `PCHMG` will use the subspace coarsening

   Options Database Key:
.  -pc_hmg_use_subspace_coarsening  <true | false> - Whether or not to use subspace coarsening (that is, coarsen a submatrix).

   Level: beginner

.seealso: `PCHMG`, `PCHMGSetReuseInterpolation()`, `PCHMGSetCoarseningComponent()`, `PCHMGSetInnerPCType()`
@*/
PetscErrorCode PCHMGSetUseSubspaceCoarsening(PC pc, PetscBool subspace)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCHMGSetUseSubspaceCoarsening_C", (PC, PetscBool), (pc, subspace));
  PetscFunctionReturn(PETSC_SUCCESS);
}
334 
335 static PetscErrorCode PCHMGSetInnerPCType_HMG(PC pc, PCType type)
336 {
337   PC_MG  *mg  = (PC_MG *)pc->data;
338   PC_HMG *hmg = (PC_HMG *)mg->innerctx;
339 
340   PetscFunctionBegin;
341   PetscCall(PetscStrallocpy(type, &(hmg->innerpctype)));
342   PetscFunctionReturn(PETSC_SUCCESS);
343 }
344 
/*@C
   PCHMGSetInnerPCType - Set an inner `PC` type

   Logically Collective

   Input Parameters:
+  pc - the `PCHMG` context
-  type - `PCHYPRE` or `PCGAMG` coarsening algorithm

   Options Database Key:
.  -hmg_inner_pc_type <hypre, gamg> - What method is used to coarsen matrix

   Level: beginner

   Note:
   If no type is set, `PCHMG` defaults to `PCHYPRE` when PETSc was configured with hypre, otherwise `PCGAMG`.

.seealso: `PCHMG`, `PCType`, `PCHMGSetReuseInterpolation()`, `PCHMGSetUseSubspaceCoarsening()`, `PCHMGSetCoarseningComponent()`
@*/
PetscErrorCode PCHMGSetInnerPCType(PC pc, PCType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCHMGSetInnerPCType_C", (PC, PCType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
368 
369 static PetscErrorCode PCHMGSetCoarseningComponent_HMG(PC pc, PetscInt component)
370 {
371   PC_MG  *mg  = (PC_MG *)pc->data;
372   PC_HMG *hmg = (PC_HMG *)mg->innerctx;
373 
374   PetscFunctionBegin;
375   hmg->component = component;
376   PetscFunctionReturn(PETSC_SUCCESS);
377 }
378 
/*@
   PCHMGSetCoarseningComponent - Set which component of the PDE is used for the subspace-based coarsening algorithm

   Logically Collective

   Input Parameters:
+  pc - the `PCHMG` context
-  component - which component `PC` will coarsen

   Options Database Key:
.  -pc_hmg_coarsening_component <i> - Which component is chosen for the subspace-based coarsening algorithm

   Level: beginner

   Note:
   The component must be smaller than the block size of the matrix; this is checked during `PCSetUp()`.

.seealso: `PCHMG`, `PCType`, `PCGAMG`, `PCHMGSetReuseInterpolation()`, `PCHMGSetUseSubspaceCoarsening()`, `PCHMGSetInnerPCType()`
@*/
PetscErrorCode PCHMGSetCoarseningComponent(PC pc, PetscInt component)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCHMGSetCoarseningComponent_C", (PC, PetscInt), (pc, component));
  PetscFunctionReturn(PETSC_SUCCESS);
}
402 
403 static PetscErrorCode PCHMGUseMatMAIJ_HMG(PC pc, PetscBool usematmaij)
404 {
405   PC_MG  *mg  = (PC_MG *)pc->data;
406   PC_HMG *hmg = (PC_HMG *)mg->innerctx;
407 
408   PetscFunctionBegin;
409   hmg->usematmaij = usematmaij;
410   PetscFunctionReturn(PETSC_SUCCESS);
411 }
412 
/*@
   PCHMGUseMatMAIJ - Set a flag that indicates whether or not to use `MATMAIJ` for the interpolation matrices for saving memory

   Logically Collective

   Input Parameters:
+  pc - the `PCHMG` context
-  usematmaij - `PETSC_TRUE` (default) to use `MATMAIJ` for interpolations.

   Options Database Key:
.  -pc_hmg_use_matmaij - <true | false >

   Level: beginner

.seealso: `PCHMG`, `PCType`, `PCGAMG`
@*/
PetscErrorCode PCHMGUseMatMAIJ(PC pc, PetscBool usematmaij)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCHMGUseMatMAIJ_C", (PC, PetscBool), (pc, usematmaij));
  PetscFunctionReturn(PETSC_SUCCESS);
}
436 
437 /*MC
438    PCHMG - For multiple component PDE problems constructs a hierarchy of restriction operators to coarse grid problems using the submatrix of
439    a single component with either `PCHYPRE` or `PCGAMG`. The same restriction operators are used for each of the components of the PDE with `PCMG`
   resulting in a preconditioner that is much more efficient to build and apply than `PCGAMG` used on the entire system.
441 
442    Options Database Keys:
443 +  -pc_hmg_reuse_interpolation <true | false> - Whether or not to reuse the interpolations for new matrix values. It can potentially save compute time.
444 .  -pc_hmg_use_subspace_coarsening  <true | false> - Whether or not to use subspace coarsening (that is, coarsen a submatrix).
445 .  -hmg_inner_pc_type <hypre, gamg, ...> - What method is used to coarsen matrix
446 -  -pc_hmg_use_matmaij <true | false> - Whether or not to use `MATMAIJ` for multicomponent problems for saving memory
447 
448    Level: intermediate
449 
450    Note:
451    `MatSetBlockSize()` must be called on the linear system matrix to set the number of components of the PDE.
452 
453     References:
454 .   * - Fande Kong, Yaqi Wang, Derek R Gaston, Cody J Permann, Andrew E Slaughter, Alexander D Lindsay, Richard C Martineau, A highly parallel multilevel
455     Newton-Krylov-Schwarz method with subspace-based coarsening and partition-based balancing for the multigroup neutron transport equations on
456     3D unstructured meshes, arXiv preprint arXiv:1903.03659, 2019
457 
458 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCMG`, `PCHYPRE`, `PCHMG`, `PCGetCoarseOperators()`, `PCGetInterpolations()`,
459           `PCHMGSetReuseInterpolation()`, `PCHMGSetUseSubspaceCoarsening()`, `PCHMGSetInnerPCType()`
460 M*/
/*
  PCCreate_HMG - Create the HMG preconditioner: a PCMG whose hierarchy is generated by
  an inner AMG preconditioner. The HMG context is stored in the MG context's innerctx.
*/
PETSC_EXTERN PetscErrorCode PCCreate_HMG(PC pc)
{
  PC_HMG *hmg;
  PC_MG  *mg;

  PetscFunctionBegin;
  /* if type was previously mg; must manually destroy it because call to PCSetType(pc,PCMG) will not destroy it */
  PetscTryTypeMethod(pc, destroy);
  pc->data = NULL;
  PetscCall(PetscFree(((PetscObject)pc)->type_name));

  /* Build on top of PCMG but present ourselves as PCHMG */
  PetscCall(PCSetType(pc, PCMG));
  PetscCall(PetscObjectChangeTypeName((PetscObject)pc, PCHMG));
  PetscCall(PetscNew(&hmg));

  /* Defaults; innerpctype is left NULL and chosen lazily in PCSetUp_HMG() */
  mg                 = (PC_MG *)pc->data;
  mg->innerctx       = hmg;
  hmg->reuseinterp   = PETSC_FALSE;
  hmg->subcoarsening = PETSC_FALSE;
  hmg->usematmaij    = PETSC_TRUE;
  hmg->component     = 0;
  hmg->innerpc       = NULL;

  /* Override the PCMG methods with the HMG-specific ones */
  pc->ops->setfromoptions = PCSetFromOptions_HMG;
  pc->ops->view           = PCView_HMG;
  pc->ops->destroy        = PCDestroy_HMG;
  pc->ops->setup          = PCSetUp_HMG;

  /* Compose the setter methods so the public PCHMGSet* APIs can reach this implementation */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetReuseInterpolation_C", PCHMGSetReuseInterpolation_HMG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetUseSubspaceCoarsening_C", PCHMGSetUseSubspaceCoarsening_HMG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetInnerPCType_C", PCHMGSetInnerPCType_HMG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetCoarseningComponent_C", PCHMGSetCoarseningComponent_HMG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGUseMatMAIJ_C", PCHMGUseMatMAIJ_HMG));
  PetscFunctionReturn(PETSC_SUCCESS);
}
496