xref: /petsc/src/ksp/pc/impls/hypre/hypre.c (revision c36a59bee2e66f2e80e3e9e7603aedea4197893c)
1 /*
2    Provides an interface to the LLNL package hypre
3 */
4 
5 #include <petscpkg_version.h>
6 #include <petsc/private/pcimpl.h> /*I "petscpc.h" I*/
7 /* this include is needed ONLY to allow access to the private data inside the Mat object specific to hypre */
8 #include <petsc/private/matimpl.h>
9 #include <petsc/private/vecimpl.h>
10 #include <../src/vec/vec/impls/hypre/vhyp.h>
11 #include <../src/mat/impls/hypre/mhypre.h>
12 #include <../src/dm/impls/da/hypre/mhyp.h>
13 #include <_hypre_parcsr_ls.h>
14 #include <petscmathypre.h>
15 
16 #if defined(PETSC_HAVE_HYPRE_DEVICE)
17   #include <petsc/private/deviceimpl.h>
18 #endif
19 
/* BibTeX citation for hypre; registered once per run via PetscCitationsRegister() in the apply routines below */
20 static PetscBool  cite            = PETSC_FALSE; /* set to PETSC_TRUE by PetscCitationsRegister() after first registration */
21 static const char hypreCitation[] = "@manual{hypre-web-page,\n  title  = {{\\sl hypre}: High Performance Preconditioners},\n  organization = {Lawrence Livermore National Laboratory},\n  note  = "
22                                     "{\\url{https://www.llnl.gov/casc/hypre}}\n}\n";
23 
24 /*
25    Private context (data structure) for the hypre preconditioners (PCHYPRE).
26 */
27 typedef struct {
28   HYPRE_Solver hsolver; /* the hypre solver object; exposed to callers via PCHYPREGetSolver() */
29   Mat          hpmat;   /* MatHYPRE */
30 
31   /* type-specific hypre entry points selected when the hypre type is set */
32   HYPRE_Int (*destroy)(HYPRE_Solver);
33   HYPRE_Int (*solve)(HYPRE_Solver, HYPRE_ParCSRMatrix, HYPRE_ParVector, HYPRE_ParVector);
34   HYPRE_Int (*setup)(HYPRE_Solver, HYPRE_ParCSRMatrix, HYPRE_ParVector, HYPRE_ParVector);
35 
36   MPI_Comm comm_hypre; /* communicator used with hypre; given back with PetscCommRestoreComm() in PCDestroy_HYPRE() */
37   char    *hypre_type; /* name of the hypre solver type, e.g. "boomeramg" (the default, see PCSetUp_HYPRE()) */
38 
39   /* options for Pilut and BoomerAMG */
40   PetscInt  maxiter;
41   PetscReal tol;
42 
43   /* options for Pilut */
44   PetscInt factorrowsize;
45 
46   /* options for ParaSails */
47   PetscInt  nlevels;
48   PetscReal threshold;
49   PetscReal filter;
50   PetscReal loadbal;
51   PetscInt  logging;
52   PetscInt  ruse;
53   PetscInt  symt;
54 
55   /* diagnostic output for BoomerAMG */
56   PetscBool printstatistics;
57 
58   /* options for BoomerAMG */
59   PetscInt  cycletype;
60   PetscInt  maxlevels;
61   PetscReal strongthreshold;
62   PetscReal maxrowsum;
63   PetscInt  gridsweeps[3];
64   PetscInt  coarsentype;
65   PetscInt  measuretype;
66   PetscInt  smoothtype;
67   PetscInt  smoothnumlevels;
68   PetscInt  eu_level;         /* Number of levels for ILU(k) in Euclid */
69   PetscReal eu_droptolerance; /* Drop tolerance for ILU(k) in Euclid */
70   PetscInt  eu_bj;            /* Defines use of Block Jacobi ILU in Euclid */
71   PetscInt  relaxtype[3];
72   PetscReal relaxweight;
73   PetscReal outerrelaxweight;
74   PetscInt  relaxorder;
75   PetscReal truncfactor;
76   PetscBool applyrichardson; /* when true, PCApply_HYPRE() treats x as the current iterate (read-write) instead of zeroing it */
77   PetscInt  pmax;
78   PetscInt  interptype;
79   PetscInt  maxc;
80   PetscInt  minc;
81 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
82   char *spgemm_type; // this is a global hypre parameter but is closely associated with BoomerAMG
83 #endif
84   /* GPU */
85   PetscBool keeptranspose;
86   PetscInt  rap2;
87   PetscInt  mod_rap2;
88 
89   /* AIR */
90   PetscInt  Rtype;
91   PetscReal Rstrongthreshold;
92   PetscReal Rfilterthreshold;
93   PetscInt  Adroptype;
94   PetscReal Adroptol;
95 
96   PetscInt  agg_nl;
97   PetscInt  agg_interptype;
98   PetscInt  agg_num_paths;
99   PetscBool nodal_relax;
100   PetscInt  nodal_relax_levels;
101 
102   PetscInt  nodal_coarsening;
103   PetscInt  nodal_coarsening_diag;
104   PetscInt  vec_interp_variant;
105   PetscInt  vec_interp_qmax;
106   PetscBool vec_interp_smooth;
107   PetscInt  interp_refine;
108 
109   /* NearNullSpace support: hypre-side copies of the near null space vectors, built in PCSetUp_HYPRE() */
110   VecHYPRE_IJVector *hmnull;
111   HYPRE_ParVector   *phmnull; /* raw hypre objects extracted from hmnull[], passed to HYPRE_BoomerAMGSetInterpVectors() */
112   PetscInt           n_hmnull;
113   Vec                hmnull_constant; /* normalized constant vector, appended when the null space contains the constant */
114 
115   /* options for AS (Auxiliary Space preconditioners) */
116   PetscInt  as_print;
117   PetscInt  as_max_iter;
118   PetscReal as_tol;
119   PetscInt  as_relax_type;
120   PetscInt  as_relax_times;
121   PetscReal as_relax_weight;
122   PetscReal as_omega;
123   PetscInt  as_amg_alpha_opts[5]; /* AMG coarsen type, agg_levels, relax_type, interp_type, Pmax for vector Poisson (AMS) or Curl problem (ADS) */
124   PetscReal as_amg_alpha_theta;   /* AMG strength for vector Poisson (AMS) or Curl problem (ADS) */
125   PetscInt  as_amg_beta_opts[5];  /* AMG coarsen type, agg_levels, relax_type, interp_type, Pmax for scalar Poisson (AMS) or vector Poisson (ADS) */
126   PetscReal as_amg_beta_theta;    /* AMG strength for scalar Poisson (AMS) or vector Poisson (ADS)  */
127   PetscInt  ams_cycle_type;
128   PetscInt  ads_cycle_type;
129 
130   /* additional data */
131   Mat G;             /* MatHYPRE */
132   Mat C;             /* MatHYPRE */
133   Mat alpha_Poisson; /* MatHYPRE */
134   Mat beta_Poisson;  /* MatHYPRE */
135 
136   /* extra information for AMS */
137   PetscInt          dim; /* geometrical dimension */
138   VecHYPRE_IJVector coords[3];
139   VecHYPRE_IJVector constants[3];
140   VecHYPRE_IJVector interior;
141   Mat               RT_PiFull, RT_Pi[3];
142   Mat               ND_PiFull, ND_Pi[3];
143   PetscBool         ams_beta_is_zero;
144   PetscBool         ams_beta_is_zero_part; /* when true, PCApply_HYPRE() projects out gradients after each AMS solve */
145   PetscInt          ams_proj_freq;
146 } PC_HYPRE;
146 
147 PetscErrorCode PCHYPREGetSolver(PC pc, HYPRE_Solver *hsolver)
148 {
149   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
150 
151   PetscFunctionBegin;
152   *hsolver = jac->hsolver;
153   PetscFunctionReturn(PETSC_SUCCESS);
154 }
155 
156 /*
157   Matrices with AIJ format are created IN PLACE using the (I,J,data) arrays taken from BoomerAMG. Since the data format in hypre_ParCSRMatrix
158   is different from that used in PETSc, the original hypre_ParCSRMatrix cannot be used any more after calling this routine.
159   It is used in PCHMG. Other users should avoid using this function.

  Output: *nlevels is the number of AMG levels; *operators receives nlevels-1 coarse operators,
  stored from coarsest (index 0) to second-finest; the caller owns the array and the matrices.
160 */
161 static PetscErrorCode PCGetCoarseOperators_BoomerAMG(PC pc, PetscInt *nlevels, Mat *operators[])
162 {
163   PC_HYPRE            *jac  = (PC_HYPRE *)pc->data;
164   PetscBool            same = PETSC_FALSE;
165   PetscInt             num_levels, l;
166   Mat                 *mattmp;
167   hypre_ParCSRMatrix **A_array;
168 
169   PetscFunctionBegin;
170   PetscCall(PetscStrcmp(jac->hypre_type, "boomeramg", &same)); /* only valid for the BoomerAMG hierarchy */
171   PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_NOTSAMETYPE, "Hypre type is not BoomerAMG ");
172   num_levels = hypre_ParAMGDataNumLevels((hypre_ParAMGData *)(jac->hsolver));
173   PetscCall(PetscMalloc1(num_levels, &mattmp)); /* the loop below fills slots 0 .. num_levels-2; the last slot stays unset */
174   A_array = hypre_ParAMGDataAArray((hypre_ParAMGData *)(jac->hsolver));
175   for (l = 1; l < num_levels; l++) {
176     PetscCall(MatCreateFromParCSR(A_array[l], MATAIJ, PETSC_OWN_POINTER, &(mattmp[num_levels - 1 - l]))); /* reversed so mattmp[0] is the coarsest operator */
177     /* We want to own the data, so hypre must not touch this matrix any more */
178     A_array[l] = NULL;
179   }
180   *nlevels   = num_levels;
181   *operators = mattmp;
182   PetscFunctionReturn(PETSC_SUCCESS);
183 }
184 
185 /*
186   Matrices with AIJ format are created IN PLACE using the (I,J,data) arrays taken from BoomerAMG. Since the data format in hypre_ParCSRMatrix
187   is different from that used in PETSc, the original hypre_ParCSRMatrix cannot be used any more after calling this routine.
188   It is used in PCHMG. Other users should avoid using this function.

  Output: *nlevels is the number of AMG levels; *interpolations receives nlevels-1 prolongation
  operators, stored from coarsest (index 0) upward; the caller owns the array and the matrices.
189 */
190 static PetscErrorCode PCGetInterpolations_BoomerAMG(PC pc, PetscInt *nlevels, Mat *interpolations[])
191 {
192   PC_HYPRE            *jac  = (PC_HYPRE *)pc->data;
193   PetscBool            same = PETSC_FALSE;
194   PetscInt             num_levels, l;
195   Mat                 *mattmp;
196   hypre_ParCSRMatrix **P_array;
197 
198   PetscFunctionBegin;
199   PetscCall(PetscStrcmp(jac->hypre_type, "boomeramg", &same)); /* only valid for the BoomerAMG hierarchy */
200   PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_NOTSAMETYPE, "Hypre type is not BoomerAMG ");
201   num_levels = hypre_ParAMGDataNumLevels((hypre_ParAMGData *)(jac->hsolver));
202   PetscCall(PetscMalloc1(num_levels, &mattmp)); /* the loop below fills slots 0 .. num_levels-2; the last slot stays unset */
203   P_array = hypre_ParAMGDataPArray((hypre_ParAMGData *)(jac->hsolver));
204   for (l = 1; l < num_levels; l++) {
205     PetscCall(MatCreateFromParCSR(P_array[num_levels - 1 - l], MATAIJ, PETSC_OWN_POINTER, &(mattmp[l - 1]))); /* P_array is walked coarse-to-fine */
206     /* We want to own the data, so hypre must not touch this matrix any more */
207     P_array[num_levels - 1 - l] = NULL;
208   }
209   *nlevels        = num_levels;
210   *interpolations = mattmp;
211   PetscFunctionReturn(PETSC_SUCCESS);
212 }
213 
214 /* Resets (frees) Hypre's representation of the near null space */
215 static PetscErrorCode PCHYPREResetNearNullSpace_Private(PC pc)
216 {
217   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
218   PetscInt  i;
219 
220   PetscFunctionBegin;
221   for (i = 0; i < jac->n_hmnull; i++) PetscCall(VecHYPRE_IJVectorDestroy(&jac->hmnull[i]));
222   PetscCall(PetscFree(jac->hmnull));
223   PetscCall(PetscFree(jac->phmnull));
224   PetscCall(VecDestroy(&jac->hmnull_constant));
225   jac->n_hmnull = 0;
226   PetscFunctionReturn(PETSC_SUCCESS);
227 }
228 
/*
  PCSetUp_HYPRE - builds the hypre solver for the current pc->pmat.

  Converts (or references) pc->pmat into MATHYPRE form, feeds solver-specific side data to
  hypre -- near null space vectors for BoomerAMG; coordinates, edge constants, discrete
  gradient/curl and interpolation operators for AMS/ADS -- and finally calls the selected
  type's setup function with floating point trapping disabled.
*/
229 static PetscErrorCode PCSetUp_HYPRE(PC pc)
230 {
231   PC_HYPRE          *jac = (PC_HYPRE *)pc->data;
232   Mat_HYPRE         *hjac;
233   HYPRE_ParCSRMatrix hmat;
234   HYPRE_ParVector    bv, xv;
235   PetscBool          ishypre;
236 
237   PetscFunctionBegin;
238   if (!jac->hypre_type) PetscCall(PCHYPRESetType(pc, "boomeramg")); /* BoomerAMG is the default hypre solver type */
239 
240   PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRE, &ishypre));
241   if (!ishypre) {
242     PetscCall(MatDestroy(&jac->hpmat));
243     PetscCall(MatConvert(pc->pmat, MATHYPRE, MAT_INITIAL_MATRIX, &jac->hpmat)); /* copy-convert into hypre's format */
244   } else {
245     PetscCall(PetscObjectReference((PetscObject)pc->pmat)); /* already MATHYPRE: take a reference, no copy */
246     PetscCall(MatDestroy(&jac->hpmat));
247     jac->hpmat = pc->pmat;
248   }
249   /* allow debug */
250   PetscCall(MatViewFromOptions(jac->hpmat, NULL, "-pc_hypre_mat_view"));
251   hjac = (Mat_HYPRE *)(jac->hpmat->data);
252 
253   /* special case for BoomerAMG */
254   if (jac->setup == HYPRE_BoomerAMGSetup) {
255     MatNullSpace mnull;
256     PetscBool    has_const;
257     PetscInt     bs, nvec, i;
258     const Vec   *vecs;
259 
260     PetscCall(MatGetBlockSize(pc->pmat, &bs));
261     if (bs > 1) PetscCallExternal(HYPRE_BoomerAMGSetNumFunctions, jac->hsolver, bs); /* block size maps to hypre's "number of functions" */
262     PetscCall(MatGetNearNullSpace(pc->mat, &mnull)); /* note: near null space is taken from pc->mat, not pc->pmat */
263     if (mnull) {
264       PetscCall(PCHYPREResetNearNullSpace_Private(pc)); /* drop copies from a previous setup */
265       PetscCall(MatNullSpaceGetVecs(mnull, &has_const, &nvec, &vecs));
266       PetscCall(PetscMalloc1(nvec + 1, &jac->hmnull)); /* +1 leaves room for the optional constant vector */
267       PetscCall(PetscMalloc1(nvec + 1, &jac->phmnull));
268       for (i = 0; i < nvec; i++) {
269         PetscCall(VecHYPRE_IJVectorCreate(vecs[i]->map, &jac->hmnull[i]));
270         PetscCall(VecHYPRE_IJVectorCopy(vecs[i], jac->hmnull[i]));
271         PetscCallExternal(HYPRE_IJVectorGetObject, jac->hmnull[i]->ij, (void **)&jac->phmnull[i]);
272       }
273       if (has_const) {
274         /* materialize the (normalized) constant vector so it can be copied to hypre like the others */
275         PetscCall(MatCreateVecs(pc->pmat, &jac->hmnull_constant, NULL));
276         PetscCall(VecSet(jac->hmnull_constant, 1));
276         PetscCall(VecNormalize(jac->hmnull_constant, NULL));
277         PetscCall(VecHYPRE_IJVectorCreate(jac->hmnull_constant->map, &jac->hmnull[nvec]));
278         PetscCall(VecHYPRE_IJVectorCopy(jac->hmnull_constant, jac->hmnull[nvec]));
279         PetscCallExternal(HYPRE_IJVectorGetObject, jac->hmnull[nvec]->ij, (void **)&jac->phmnull[nvec]);
280         nvec++;
281       }
282       PetscCallExternal(HYPRE_BoomerAMGSetInterpVectors, jac->hsolver, nvec, jac->phmnull);
283       jac->n_hmnull = nvec;
284     }
285   }
286 
287   /* special case for AMS */
288   if (jac->setup == HYPRE_AMSSetup) {
289     Mat_HYPRE         *hm;
290     HYPRE_ParCSRMatrix parcsr;
291     if (!jac->coords[0] && !jac->constants[0] && !(jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1]))) {
292       SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE AMS preconditioner needs either the coordinate vectors via PCSetCoordinates() or the edge constant vectors via PCHYPRESetEdgeConstantVectors() or the interpolation matrix via PCHYPRESetInterpolations()");
293     }
294     if (jac->dim) PetscCallExternal(HYPRE_AMSSetDimension, jac->hsolver, jac->dim);
295     if (jac->constants[0]) {
296       HYPRE_ParVector ozz, zoz, zzo = NULL; /* zzo stays NULL when constants[2] was not provided */
297       PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[0]->ij, (void **)(&ozz));
298       PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[1]->ij, (void **)(&zoz));
299       if (jac->constants[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[2]->ij, (void **)(&zzo));
300       PetscCallExternal(HYPRE_AMSSetEdgeConstantVectors, jac->hsolver, ozz, zoz, zzo);
301     }
302     if (jac->coords[0]) {
303       HYPRE_ParVector coords[3]; /* missing components are passed to hypre as NULL */
304       coords[0] = NULL;
305       coords[1] = NULL;
306       coords[2] = NULL;
307       if (jac->coords[0]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[0]->ij, (void **)(&coords[0]));
308       if (jac->coords[1]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[1]->ij, (void **)(&coords[1]));
309       if (jac->coords[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[2]->ij, (void **)(&coords[2]));
310       PetscCallExternal(HYPRE_AMSSetCoordinateVectors, jac->hsolver, coords[0], coords[1], coords[2]);
311     }
312     PetscCheck(jac->G, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE AMS preconditioner needs the discrete gradient operator via PCHYPRESetDiscreteGradient");
313     hm = (Mat_HYPRE *)(jac->G->data);
314     PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
315     PetscCallExternal(HYPRE_AMSSetDiscreteGradient, jac->hsolver, parcsr);
316     if (jac->alpha_Poisson) {
317       hm = (Mat_HYPRE *)(jac->alpha_Poisson->data);
318       PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
319       PetscCallExternal(HYPRE_AMSSetAlphaPoissonMatrix, jac->hsolver, parcsr);
320     }
321     if (jac->ams_beta_is_zero) {
322       PetscCallExternal(HYPRE_AMSSetBetaPoissonMatrix, jac->hsolver, NULL); /* NULL tells hypre beta is identically zero */
323     } else if (jac->beta_Poisson) {
324       hm = (Mat_HYPRE *)(jac->beta_Poisson->data);
325       PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
326       PetscCallExternal(HYPRE_AMSSetBetaPoissonMatrix, jac->hsolver, parcsr);
327     } else if (jac->ams_beta_is_zero_part) {
328       if (jac->interior) {
329         HYPRE_ParVector interior = NULL;
330         PetscCallExternal(HYPRE_IJVectorGetObject, jac->interior->ij, (void **)(&interior));
331         PetscCallExternal(HYPRE_AMSSetInteriorNodes, jac->hsolver, interior);
332       } else {
333         jac->ams_beta_is_zero_part = PETSC_FALSE; /* no interior nodes given: disable the gradient projection in PCApply_HYPRE() */
334       }
335     }
336     if (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1])) {
337       PetscInt           i;
338       HYPRE_ParCSRMatrix nd_parcsrfull, nd_parcsr[3];
339       if (jac->ND_PiFull) {
340         hm = (Mat_HYPRE *)(jac->ND_PiFull->data);
341         PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsrfull));
342       } else {
343         nd_parcsrfull = NULL;
344       }
345       for (i = 0; i < 3; ++i) {
346         if (jac->ND_Pi[i]) {
347           hm = (Mat_HYPRE *)(jac->ND_Pi[i]->data);
348           PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsr[i]));
349         } else {
350           nd_parcsr[i] = NULL;
351         }
352       }
353       PetscCallExternal(HYPRE_AMSSetInterpolations, jac->hsolver, nd_parcsrfull, nd_parcsr[0], nd_parcsr[1], nd_parcsr[2]);
354     }
355   }
356   /* special case for ADS */
357   if (jac->setup == HYPRE_ADSSetup) {
358     Mat_HYPRE         *hm;
359     HYPRE_ParCSRMatrix parcsr;
360     if (!jac->coords[0] && !((jac->RT_PiFull || (jac->RT_Pi[0] && jac->RT_Pi[1])) && (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1])))) {
361       SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs either the coordinate vectors via PCSetCoordinates() or the interpolation matrices via PCHYPRESetInterpolations");
362     /* NOTE(review): this else branch also runs when only interpolations (no coordinates) were supplied,
       in which case coords[1]/coords[2] are NULL and the check fires -- confirm intended behavior */
363     } else PetscCheck(jac->coords[1] && jac->coords[2], PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner has been designed for three dimensional problems! For two dimensional problems, use HYPRE AMS instead");
364     PetscCheck(jac->G, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs the discrete gradient operator via PCHYPRESetDiscreteGradient");
365     PetscCheck(jac->C, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs the discrete curl operator via PCHYPRESetDiscreteGradient");
366     if (jac->coords[0]) {
367       HYPRE_ParVector coords[3]; /* missing components are passed to hypre as NULL */
368       coords[0] = NULL;
369       coords[1] = NULL;
370       coords[2] = NULL;
371       if (jac->coords[0]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[0]->ij, (void **)(&coords[0]));
372       if (jac->coords[1]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[1]->ij, (void **)(&coords[1]));
373       if (jac->coords[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[2]->ij, (void **)(&coords[2]));
374       PetscCallExternal(HYPRE_ADSSetCoordinateVectors, jac->hsolver, coords[0], coords[1], coords[2]);
375     }
376     hm = (Mat_HYPRE *)(jac->G->data);
377     PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
378     PetscCallExternal(HYPRE_ADSSetDiscreteGradient, jac->hsolver, parcsr);
379     hm = (Mat_HYPRE *)(jac->C->data);
380     PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
381     PetscCallExternal(HYPRE_ADSSetDiscreteCurl, jac->hsolver, parcsr);
382     if ((jac->RT_PiFull || (jac->RT_Pi[0] && jac->RT_Pi[1])) && (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1]))) {
383       PetscInt           i;
384       HYPRE_ParCSRMatrix rt_parcsrfull, rt_parcsr[3];
385       HYPRE_ParCSRMatrix nd_parcsrfull, nd_parcsr[3];
386       if (jac->RT_PiFull) {
387         hm = (Mat_HYPRE *)(jac->RT_PiFull->data);
388         PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&rt_parcsrfull));
389       } else {
390         rt_parcsrfull = NULL;
391       }
392       for (i = 0; i < 3; ++i) {
393         if (jac->RT_Pi[i]) {
394           hm = (Mat_HYPRE *)(jac->RT_Pi[i]->data);
395           PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&rt_parcsr[i]));
396         } else {
397           rt_parcsr[i] = NULL;
398         }
399       }
400       if (jac->ND_PiFull) {
401         hm = (Mat_HYPRE *)(jac->ND_PiFull->data);
402         PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsrfull));
403       } else {
404         nd_parcsrfull = NULL;
405       }
406       for (i = 0; i < 3; ++i) {
407         if (jac->ND_Pi[i]) {
408           hm = (Mat_HYPRE *)(jac->ND_Pi[i]->data);
409           PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsr[i]));
410         } else {
411           nd_parcsr[i] = NULL;
412         }
413       }
414       PetscCallExternal(HYPRE_ADSSetInterpolations, jac->hsolver, rt_parcsrfull, rt_parcsr[0], rt_parcsr[1], rt_parcsr[2], nd_parcsrfull, nd_parcsr[0], nd_parcsr[1], nd_parcsr[2]);
415     }
416   }
417   PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat);
418   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&bv);
419   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&xv);
420   PetscCall(PetscFPTrapPush(PETSC_FP_TRAP_OFF)); /* run hypre's setup with FP trapping disabled */
421   PetscCallExternal(jac->setup, jac->hsolver, hmat, bv, xv);
422   PetscCall(PetscFPTrapPop());
423   PetscFunctionReturn(PETSC_SUCCESS);
424 }
424 
/*
  PCApply_HYPRE - applies the selected hypre preconditioner: x = M^{-1} b.

  The PETSc vectors b and x are temporarily "pushed" into the Mat_HYPRE work vectors so
  hypre operates directly on their storage; they are popped again before returning.
*/
425 static PetscErrorCode PCApply_HYPRE(PC pc, Vec b, Vec x)
426 {
427   PC_HYPRE          *jac  = (PC_HYPRE *)pc->data;
428   Mat_HYPRE         *hjac = (Mat_HYPRE *)(jac->hpmat->data);
429   HYPRE_ParCSRMatrix hmat;
430   HYPRE_ParVector    jbv, jxv;
431 
432   PetscFunctionBegin;
433   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
434   if (!jac->applyrichardson) PetscCall(VecSet(x, 0.0)); /* zero initial guess unless invoked from a Richardson sweep */
435   PetscCall(VecHYPRE_IJVectorPushVecRead(hjac->b, b));
436   if (jac->applyrichardson) PetscCall(VecHYPRE_IJVectorPushVec(hjac->x, x)); /* read-write: x carries the current iterate */
437   else PetscCall(VecHYPRE_IJVectorPushVecWrite(hjac->x, x)); /* write-only: previous contents of x are irrelevant */
438   PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat);
439   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&jbv);
440   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&jxv);
441   PetscStackCallExternalVoid(
442     "Hypre solve", do {
443       HYPRE_Int hierr = (*jac->solve)(jac->hsolver, hmat, jbv, jxv);
444       if (hierr) {
        /* HYPRE_ERROR_CONV only means the inner iteration did not converge; anything else is fatal */
445         PetscCheck(hierr == HYPRE_ERROR_CONV, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr);
446         HYPRE_ClearAllErrors();
447       }
448     } while (0));
449 
450   if (jac->setup == HYPRE_AMSSetup && jac->ams_beta_is_zero_part) PetscCallExternal(HYPRE_AMSProjectOutGradients, jac->hsolver, jxv); /* AMS with partially vanishing beta: remove gradient components */
451   PetscCall(VecHYPRE_IJVectorPopVec(hjac->x));
452   PetscCall(VecHYPRE_IJVectorPopVec(hjac->b));
453   PetscFunctionReturn(PETSC_SUCCESS);
454 }
455 
456 static PetscErrorCode PCMatApply_HYPRE_BoomerAMG(PC pc, Mat B, Mat X)
457 {
458   PC_HYPRE           *jac  = (PC_HYPRE *)pc->data;
459   Mat_HYPRE          *hjac = (Mat_HYPRE *)(jac->hpmat->data);
460   hypre_ParCSRMatrix *par_matrix;
461   HYPRE_ParVector     hb, hx;
462   const PetscScalar  *b;
463   PetscScalar        *x;
464   PetscInt            m, N, lda;
465   hypre_Vector       *x_local;
466   PetscMemType        type;
467 
468   PetscFunctionBegin;
469   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
470   PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&par_matrix);
471   PetscCall(MatGetLocalSize(B, &m, NULL));
472   PetscCall(MatGetSize(B, NULL, &N));
473   PetscCallExternal(HYPRE_ParMultiVectorCreate, hypre_ParCSRMatrixComm(par_matrix), hypre_ParCSRMatrixGlobalNumRows(par_matrix), hypre_ParCSRMatrixRowStarts(par_matrix), N, &hb);
474   PetscCallExternal(HYPRE_ParMultiVectorCreate, hypre_ParCSRMatrixComm(par_matrix), hypre_ParCSRMatrixGlobalNumRows(par_matrix), hypre_ParCSRMatrixRowStarts(par_matrix), N, &hx);
475   PetscCall(MatZeroEntries(X));
476   PetscCall(MatDenseGetArrayReadAndMemType(B, &b, &type));
477   PetscCall(MatDenseGetLDA(B, &lda));
478   PetscCheck(lda == m, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use a LDA different than the number of local rows: % " PetscInt_FMT " != % " PetscInt_FMT, lda, m);
479   PetscCall(MatDenseGetLDA(X, &lda));
480   PetscCheck(lda == m, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use a LDA different than the number of local rows: % " PetscInt_FMT " != % " PetscInt_FMT, lda, m);
481   x_local = hypre_ParVectorLocalVector(hb);
482   PetscCallExternal(hypre_SeqVectorSetDataOwner, x_local, 0);
483   hypre_VectorData(x_local) = (HYPRE_Complex *)b;
484   PetscCall(MatDenseGetArrayWriteAndMemType(X, &x, NULL));
485   x_local = hypre_ParVectorLocalVector(hx);
486   PetscCallExternal(hypre_SeqVectorSetDataOwner, x_local, 0);
487   hypre_VectorData(x_local) = (HYPRE_Complex *)x;
488   PetscCallExternal(hypre_ParVectorInitialize_v2, hb, type == PETSC_MEMTYPE_HOST ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE);
489   PetscCallExternal(hypre_ParVectorInitialize_v2, hx, type == PETSC_MEMTYPE_HOST ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE);
490   PetscStackCallExternalVoid(
491     "Hypre solve", do {
492       HYPRE_Int hierr = (*jac->solve)(jac->hsolver, par_matrix, hb, hx);
493       if (hierr) {
494         PetscCheck(hierr == HYPRE_ERROR_CONV, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr);
495         HYPRE_ClearAllErrors();
496       }
497     } while (0));
498   PetscCallExternal(HYPRE_ParVectorDestroy, hb);
499   PetscCallExternal(HYPRE_ParVectorDestroy, hx);
500   PetscCall(MatDenseRestoreArrayReadAndMemType(B, &b));
501   PetscCall(MatDenseRestoreArrayWriteAndMemType(X, &x));
502   PetscFunctionReturn(PETSC_SUCCESS);
503 }
504 
505 static PetscErrorCode PCReset_HYPRE(PC pc)
506 {
507   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
508 
509   PetscFunctionBegin;
510   PetscCall(MatDestroy(&jac->hpmat));
511   PetscCall(MatDestroy(&jac->G));
512   PetscCall(MatDestroy(&jac->C));
513   PetscCall(MatDestroy(&jac->alpha_Poisson));
514   PetscCall(MatDestroy(&jac->beta_Poisson));
515   PetscCall(MatDestroy(&jac->RT_PiFull));
516   PetscCall(MatDestroy(&jac->RT_Pi[0]));
517   PetscCall(MatDestroy(&jac->RT_Pi[1]));
518   PetscCall(MatDestroy(&jac->RT_Pi[2]));
519   PetscCall(MatDestroy(&jac->ND_PiFull));
520   PetscCall(MatDestroy(&jac->ND_Pi[0]));
521   PetscCall(MatDestroy(&jac->ND_Pi[1]));
522   PetscCall(MatDestroy(&jac->ND_Pi[2]));
523   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[0]));
524   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[1]));
525   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[2]));
526   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[0]));
527   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[1]));
528   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[2]));
529   PetscCall(VecHYPRE_IJVectorDestroy(&jac->interior));
530   PetscCall(PCHYPREResetNearNullSpace_Private(pc));
531   jac->ams_beta_is_zero      = PETSC_FALSE;
532   jac->ams_beta_is_zero_part = PETSC_FALSE;
533   jac->dim                   = 0;
534   PetscFunctionReturn(PETSC_SUCCESS);
535 }
536 
/*
  PCDestroy_HYPRE - frees everything: the reset data, the hypre solver object itself,
  the duplicated communicator, the private context, and all composed methods.
*/
537 static PetscErrorCode PCDestroy_HYPRE(PC pc)
538 {
539   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
540 
541   PetscFunctionBegin;
542   PetscCall(PCReset_HYPRE(pc));
543   if (jac->destroy) PetscCallExternal(jac->destroy, jac->hsolver); /* destroy the hypre solver with the type-specific destructor */
544   PetscCall(PetscFree(jac->hypre_type));
545 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
546   PetscCall(PetscFree(jac->spgemm_type));
547 #endif
548   if (jac->comm_hypre != MPI_COMM_NULL) PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre)); /* return the communicator obtained for hypre */
549   PetscCall(PetscFree(pc->data));
550 
  /* NOTE(review): 0 is used here as the null pointer constant; NULL would be clearer */
551   PetscCall(PetscObjectChangeTypeName((PetscObject)pc, 0));
  /* remove all composed methods so a retype of the PC starts clean */
552   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetType_C", NULL));
553   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetType_C", NULL));
554   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteGradient_C", NULL));
555   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteCurl_C", NULL));
556   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetInterpolations_C", NULL));
557   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetConstantEdgeVectors_C", NULL));
558   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetPoissonMatrix_C", NULL));
559   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetEdgeConstantVectors_C", NULL));
560   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREAMSSetInteriorNodes_C", NULL));
561   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL));
562   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL));
563   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinSetMatProductAlgorithm_C", NULL));
564   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinGetMatProductAlgorithm_C", NULL));
565   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
566   PetscFunctionReturn(PETSC_SUCCESS);
567 }
568 
/*
  PCSetFromOptions_HYPRE_Pilut - processes the Pilut command line options; each value
  is forwarded to hypre only when the user actually supplied it.
*/
569 static PetscErrorCode PCSetFromOptions_HYPRE_Pilut(PC pc, PetscOptionItems *PetscOptionsObject)
570 {
571   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
572   PetscBool flag; /* set when the corresponding option was given */
573 
574   PetscFunctionBegin;
575   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE Pilut Options");
576   PetscCall(PetscOptionsInt("-pc_hypre_pilut_maxiter", "Number of iterations", "None", jac->maxiter, &jac->maxiter, &flag));
577   if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetMaxIter, jac->hsolver, jac->maxiter);
578   PetscCall(PetscOptionsReal("-pc_hypre_pilut_tol", "Drop tolerance", "None", jac->tol, &jac->tol, &flag));
579   if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetDropTolerance, jac->hsolver, jac->tol);
580   PetscCall(PetscOptionsInt("-pc_hypre_pilut_factorrowsize", "FactorRowSize", "None", jac->factorrowsize, &jac->factorrowsize, &flag));
581   if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetFactorRowSize, jac->hsolver, jac->factorrowsize);
582   PetscOptionsHeadEnd();
583   PetscFunctionReturn(PETSC_SUCCESS);
584 }
585 
586 static PetscErrorCode PCView_HYPRE_Pilut(PC pc, PetscViewer viewer)
587 {
588   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
589   PetscBool iascii;
590 
591   PetscFunctionBegin;
592   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
593   if (iascii) {
594     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE Pilut preconditioning\n"));
595     if (jac->maxiter != PETSC_DEFAULT) {
596       PetscCall(PetscViewerASCIIPrintf(viewer, "    maximum number of iterations %" PetscInt_FMT "\n", jac->maxiter));
597     } else {
598       PetscCall(PetscViewerASCIIPrintf(viewer, "    default maximum number of iterations \n"));
599     }
600     if (jac->tol != PETSC_DEFAULT) {
601       PetscCall(PetscViewerASCIIPrintf(viewer, "    drop tolerance %g\n", (double)jac->tol));
602     } else {
603       PetscCall(PetscViewerASCIIPrintf(viewer, "    default drop tolerance \n"));
604     }
605     if (jac->factorrowsize != PETSC_DEFAULT) {
606       PetscCall(PetscViewerASCIIPrintf(viewer, "    factor row size %" PetscInt_FMT "\n", jac->factorrowsize));
607     } else {
608       PetscCall(PetscViewerASCIIPrintf(viewer, "    default factor row size \n"));
609     }
610   }
611   PetscFunctionReturn(PETSC_SUCCESS);
612 }
613 
/*
  PCSetFromOptions_HYPRE_Euclid - processes the Euclid (parallel ILU) command line options;
  each value is forwarded to hypre only when the user actually supplied it.
*/
614 static PetscErrorCode PCSetFromOptions_HYPRE_Euclid(PC pc, PetscOptionItems *PetscOptionsObject)
615 {
616   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
617   PetscBool flag, eu_bj = jac->eu_bj ? PETSC_TRUE : PETSC_FALSE; /* eu_bj is stored as an int but exposed as a bool option */
618 
619   PetscFunctionBegin;
620   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE Euclid Options");
621   PetscCall(PetscOptionsInt("-pc_hypre_euclid_level", "Factorization levels", "None", jac->eu_level, &jac->eu_level, &flag));
622   if (flag) PetscCallExternal(HYPRE_EuclidSetLevel, jac->hsolver, jac->eu_level);
623 
624   PetscCall(PetscOptionsReal("-pc_hypre_euclid_droptolerance", "Drop tolerance for ILU(k) in Euclid", "None", jac->eu_droptolerance, &jac->eu_droptolerance, &flag));
625   if (flag) {
626     PetscMPIInt size;
627 
  /* the drop tolerance (ILUT) variant only works sequentially */
628     PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
629     PetscCheck(size == 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "hypre's Euclid does not support a parallel drop tolerance");
630     PetscCallExternal(HYPRE_EuclidSetILUT, jac->hsolver, jac->eu_droptolerance);
631   }
632 
633   PetscCall(PetscOptionsBool("-pc_hypre_euclid_bj", "Use Block Jacobi for ILU in Euclid", "None", eu_bj, &eu_bj, &flag));
634   if (flag) {
635     jac->eu_bj = eu_bj ? 1 : 0; /* convert back to the int representation hypre expects */
636     PetscCallExternal(HYPRE_EuclidSetBJ, jac->hsolver, jac->eu_bj);
637   }
638   PetscOptionsHeadEnd();
639   PetscFunctionReturn(PETSC_SUCCESS);
640 }
641 
642 static PetscErrorCode PCView_HYPRE_Euclid(PC pc, PetscViewer viewer)
643 {
644   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
645   PetscBool iascii;
646 
647   PetscFunctionBegin;
648   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
649   if (iascii) {
650     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE Euclid preconditioning\n"));
651     if (jac->eu_level != PETSC_DEFAULT) {
652       PetscCall(PetscViewerASCIIPrintf(viewer, "    factorization levels %" PetscInt_FMT "\n", jac->eu_level));
653     } else {
654       PetscCall(PetscViewerASCIIPrintf(viewer, "    default factorization levels \n"));
655     }
656     PetscCall(PetscViewerASCIIPrintf(viewer, "    drop tolerance %g\n", (double)jac->eu_droptolerance));
657     PetscCall(PetscViewerASCIIPrintf(viewer, "    use Block-Jacobi? %" PetscInt_FMT "\n", jac->eu_bj));
658   }
659   PetscFunctionReturn(PETSC_SUCCESS);
660 }
661 
/* Applies the transpose of the BoomerAMG preconditioner: x = (M^T)^{-1} b */
static PetscErrorCode PCApplyTranspose_HYPRE_BoomerAMG(PC pc, Vec b, Vec x)
{
  PC_HYPRE          *jac  = (PC_HYPRE *)pc->data;
  Mat_HYPRE         *hjac = (Mat_HYPRE *)(jac->hpmat->data);
  HYPRE_ParCSRMatrix hmat;
  HYPRE_ParVector    jbv, jxv;

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(hypreCitation, &cite));
  /* zero initial guess; hypre iterates starting from the contents of x */
  PetscCall(VecSet(x, 0.0));
  /* NOTE(review): b is wrapped in hjac->x and x in hjac->b -- the roles appear
     intentionally swapped relative to the forward apply for the transpose
     solve; confirm against PCApply_HYPRE before changing */
  PetscCall(VecHYPRE_IJVectorPushVecRead(hjac->x, b));
  PetscCall(VecHYPRE_IJVectorPushVecWrite(hjac->b, x));

  /* extract the raw hypre objects from the IJ wrappers */
  PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat);
  PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&jbv);
  PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&jxv);

  PetscStackCallExternalVoid(
    "Hypre Transpose solve", do {
      HYPRE_Int hierr = HYPRE_BoomerAMGSolveT(jac->hsolver, hmat, jbv, jxv);
      if (hierr) {
        /* error code of 1 in BoomerAMG merely means convergence not achieved */
        PetscCheck(hierr == 1, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr);
        HYPRE_ClearAllErrors();
      }
    } while (0));

  /* pop in reverse order of the pushes above */
  PetscCall(VecHYPRE_IJVectorPopVec(hjac->x));
  PetscCall(VecHYPRE_IJVectorPopVec(hjac->b));
  PetscFunctionReturn(PETSC_SUCCESS);
}
693 
694 static PetscErrorCode PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(PC pc, const char name[])
695 {
696   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
697   PetscBool flag;
698 
699 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
700   PetscFunctionBegin;
701   if (jac->spgemm_type) {
702     PetscCall(PetscStrcmp(jac->spgemm_type, name, &flag));
703     PetscCheck(flag, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Cannot reset the HYPRE SpGEMM (really we can)");
704     PetscFunctionReturn(PETSC_SUCCESS);
705   } else {
706     PetscCall(PetscStrallocpy(name, &jac->spgemm_type));
707   }
708   PetscCall(PetscStrcmp("cusparse", jac->spgemm_type, &flag));
709   if (flag) {
710     PetscCallExternal(HYPRE_SetSpGemmUseCusparse, 1);
711     PetscFunctionReturn(PETSC_SUCCESS);
712   }
713   PetscCall(PetscStrcmp("hypre", jac->spgemm_type, &flag));
714   if (flag) {
715     PetscCallExternal(HYPRE_SetSpGemmUseCusparse, 0);
716     PetscFunctionReturn(PETSC_SUCCESS);
717   }
718   jac->spgemm_type = NULL;
719   SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown HYPRE SpGEM type %s; Choices are cusparse, hypre", name);
720 #endif
721 }
722 
723 static PetscErrorCode PCMGGalerkinGetMatProductAlgorithm_HYPRE_BoomerAMG(PC pc, const char *spgemm[])
724 {
725   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
726 
727   PetscFunctionBegin;
728   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
729 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
730   *spgemm = jac->spgemm_type;
731 #endif
732   PetscFunctionReturn(PETSC_SUCCESS);
733 }
734 
/* Human-readable names for viewing/option parsing; each table is indexed by the
   corresponding hypre integer code, with "" marking codes PETSc does not expose. */
static const char *HYPREBoomerAMGCycleType[]   = {"", "V", "W"}; /* index 0 unused: hypre cycle types are 1 (V) and 2 (W) */
static const char *HYPREBoomerAMGCoarsenType[] = {"CLJP", "Ruge-Stueben", "", "modifiedRuge-Stueben", "", "", "Falgout", "", "PMIS", "", "HMIS"};
static const char *HYPREBoomerAMGMeasureType[] = {"local", "global"};
/* Smoother names are offset by 6 when passed to HYPRE_BoomerAMGSetSmoothType (see PCSetFromOptions_HYPRE_BoomerAMG) */
static const char *HYPREBoomerAMGSmoothType[] = {"Schwarz-smoothers", "Pilut", "ParaSails", "Euclid"};
/* The following corresponds to HYPRE_BoomerAMGSetRelaxType which has many missing numbers in the enum */
static const char *HYPREBoomerAMGRelaxType[] = {"Jacobi", "sequential-Gauss-Seidel", "seqboundary-Gauss-Seidel", "SOR/Jacobi", "backward-SOR/Jacobi", "" /* [5] hybrid chaotic Gauss-Seidel (works only with OpenMP) */, "symmetric-SOR/Jacobi", "" /* 7 */, "l1scaled-SOR/Jacobi", "Gaussian-elimination", "" /* 10 */, "" /* 11 */, "" /* 12 */, "l1-Gauss-Seidel" /* nonsymmetric */, "backward-l1-Gauss-Seidel" /* nonsymmetric */, "CG" /* non-stationary */, "Chebyshev", "FCF-Jacobi", "l1scaled-Jacobi"};
static const char    *HYPREBoomerAMGInterpType[] = {"classical", "", "", "direct", "multipass", "multipass-wts", "ext+i", "ext+i-cc", "standard", "standard-wts", "block", "block-wtd", "FF", "FF1", "ext", "ad-wts", "ext-mm", "ext+i-mm", "ext+e-mm"};
742 static PetscErrorCode PCSetFromOptions_HYPRE_BoomerAMG(PC pc, PetscOptionItems *PetscOptionsObject)
743 {
744   PC_HYPRE   *jac = (PC_HYPRE *)pc->data;
745   PetscInt    bs, n, indx, level;
746   PetscBool   flg, tmp_truth;
747   double      tmpdbl, twodbl[2];
748   const char *symtlist[]           = {"nonsymmetric", "SPD", "nonsymmetric,SPD"};
749   const char *PCHYPRESpgemmTypes[] = {"cusparse", "hypre"};
750 
751   PetscFunctionBegin;
752   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE BoomerAMG Options");
753   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_cycle_type", "Cycle type", "None", HYPREBoomerAMGCycleType + 1, 2, HYPREBoomerAMGCycleType[jac->cycletype], &indx, &flg));
754   if (flg) {
755     jac->cycletype = indx + 1;
756     PetscCallExternal(HYPRE_BoomerAMGSetCycleType, jac->hsolver, jac->cycletype);
757   }
758   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_max_levels", "Number of levels (of grids) allowed", "None", jac->maxlevels, &jac->maxlevels, &flg));
759   if (flg) {
760     PetscCheck(jac->maxlevels >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of levels %" PetscInt_FMT " must be at least two", jac->maxlevels);
761     PetscCallExternal(HYPRE_BoomerAMGSetMaxLevels, jac->hsolver, jac->maxlevels);
762   }
763   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_max_iter", "Maximum iterations used PER hypre call", "None", jac->maxiter, &jac->maxiter, &flg));
764   if (flg) {
765     PetscCheck(jac->maxiter >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of iterations %" PetscInt_FMT " must be at least one", jac->maxiter);
766     PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter);
767   }
768   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_tol", "Convergence tolerance PER hypre call (0.0 = use a fixed number of iterations)", "None", jac->tol, &jac->tol, &flg));
769   if (flg) {
770     PetscCheck(jac->tol >= 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Tolerance %g must be greater than or equal to zero", (double)jac->tol);
771     PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol);
772   }
773   bs = 1;
774   if (pc->pmat) PetscCall(MatGetBlockSize(pc->pmat, &bs));
775   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_numfunctions", "Number of functions", "HYPRE_BoomerAMGSetNumFunctions", bs, &bs, &flg));
776   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNumFunctions, jac->hsolver, bs);
777 
778   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_truncfactor", "Truncation factor for interpolation (0=no truncation)", "None", jac->truncfactor, &jac->truncfactor, &flg));
779   if (flg) {
780     PetscCheck(jac->truncfactor >= 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Truncation factor %g must be great than or equal zero", (double)jac->truncfactor);
781     PetscCallExternal(HYPRE_BoomerAMGSetTruncFactor, jac->hsolver, jac->truncfactor);
782   }
783 
784   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_P_max", "Max elements per row for interpolation operator (0=unlimited)", "None", jac->pmax, &jac->pmax, &flg));
785   if (flg) {
786     PetscCheck(jac->pmax >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "P_max %" PetscInt_FMT " must be greater than or equal to zero", jac->pmax);
787     PetscCallExternal(HYPRE_BoomerAMGSetPMaxElmts, jac->hsolver, jac->pmax);
788   }
789 
790   PetscCall(PetscOptionsRangeInt("-pc_hypre_boomeramg_agg_nl", "Number of levels of aggressive coarsening", "None", jac->agg_nl, &jac->agg_nl, &flg, 0, jac->maxlevels));
791   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetAggNumLevels, jac->hsolver, jac->agg_nl);
792 
793   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_agg_num_paths", "Number of paths for aggressive coarsening", "None", jac->agg_num_paths, &jac->agg_num_paths, &flg));
794   if (flg) {
795     PetscCheck(jac->agg_num_paths >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of paths %" PetscInt_FMT " must be greater than or equal to 1", jac->agg_num_paths);
796     PetscCallExternal(HYPRE_BoomerAMGSetNumPaths, jac->hsolver, jac->agg_num_paths);
797   }
798 
799   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_strong_threshold", "Threshold for being strongly connected", "None", jac->strongthreshold, &jac->strongthreshold, &flg));
800   if (flg) {
801     PetscCheck(jac->strongthreshold >= 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Strong threshold %g must be great than or equal zero", (double)jac->strongthreshold);
802     PetscCallExternal(HYPRE_BoomerAMGSetStrongThreshold, jac->hsolver, jac->strongthreshold);
803   }
804   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_max_row_sum", "Maximum row sum", "None", jac->maxrowsum, &jac->maxrowsum, &flg));
805   if (flg) {
806     PetscCheck(jac->maxrowsum >= 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Maximum row sum %g must be greater than zero", (double)jac->maxrowsum);
807     PetscCheck(jac->maxrowsum <= 1.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Maximum row sum %g must be less than or equal one", (double)jac->maxrowsum);
808     PetscCallExternal(HYPRE_BoomerAMGSetMaxRowSum, jac->hsolver, jac->maxrowsum);
809   }
810 
811   /* Grid sweeps */
812   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_all", "Number of sweeps for the up and down grid levels", "None", jac->gridsweeps[0], &indx, &flg));
813   if (flg) {
814     PetscCallExternal(HYPRE_BoomerAMGSetNumSweeps, jac->hsolver, indx);
815     /* modify the jac structure so we can view the updated options with PC_View */
816     jac->gridsweeps[0] = indx;
817     jac->gridsweeps[1] = indx;
818     /*defaults coarse to 1 */
819     jac->gridsweeps[2] = 1;
820   }
821   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_coarsen", "Use a nodal based coarsening 1-6", "HYPRE_BoomerAMGSetNodal", jac->nodal_coarsening, &jac->nodal_coarsening, &flg));
822   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNodal, jac->hsolver, jac->nodal_coarsening);
823   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_coarsen_diag", "Diagonal in strength matrix for nodal based coarsening 0-2", "HYPRE_BoomerAMGSetNodalDiag", jac->nodal_coarsening_diag, &jac->nodal_coarsening_diag, &flg));
824   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNodalDiag, jac->hsolver, jac->nodal_coarsening_diag);
825   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_vec_interp_variant", "Variant of algorithm 1-3", "HYPRE_BoomerAMGSetInterpVecVariant", jac->vec_interp_variant, &jac->vec_interp_variant, &flg));
826   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpVecVariant, jac->hsolver, jac->vec_interp_variant);
827   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_vec_interp_qmax", "Max elements per row for each Q", "HYPRE_BoomerAMGSetInterpVecQMax", jac->vec_interp_qmax, &jac->vec_interp_qmax, &flg));
828   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpVecQMax, jac->hsolver, jac->vec_interp_qmax);
829   PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_vec_interp_smooth", "Whether to smooth the interpolation vectors", "HYPRE_BoomerAMGSetSmoothInterpVectors", jac->vec_interp_smooth, &jac->vec_interp_smooth, &flg));
830   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetSmoothInterpVectors, jac->hsolver, jac->vec_interp_smooth);
831   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_interp_refine", "Preprocess the interpolation matrix through iterative weight refinement", "HYPRE_BoomerAMGSetInterpRefine", jac->interp_refine, &jac->interp_refine, &flg));
832   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpRefine, jac->hsolver, jac->interp_refine);
833   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_down", "Number of sweeps for the down cycles", "None", jac->gridsweeps[0], &indx, &flg));
834   if (flg) {
835     PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 1);
836     jac->gridsweeps[0] = indx;
837   }
838   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_up", "Number of sweeps for the up cycles", "None", jac->gridsweeps[1], &indx, &flg));
839   if (flg) {
840     PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 2);
841     jac->gridsweeps[1] = indx;
842   }
843   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_coarse", "Number of sweeps for the coarse level", "None", jac->gridsweeps[2], &indx, &flg));
844   if (flg) {
845     PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 3);
846     jac->gridsweeps[2] = indx;
847   }
848 
849   /* Smooth type */
850   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_smooth_type", "Enable more complex smoothers", "None", HYPREBoomerAMGSmoothType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGSmoothType), HYPREBoomerAMGSmoothType[0], &indx, &flg));
851   if (flg) {
852     jac->smoothtype = indx;
853     PetscCallExternal(HYPRE_BoomerAMGSetSmoothType, jac->hsolver, indx + 6);
854     jac->smoothnumlevels = 25;
855     PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, 25);
856   }
857 
858   /* Number of smoothing levels */
859   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_smooth_num_levels", "Number of levels on which more complex smoothers are used", "None", 25, &indx, &flg));
860   if (flg && (jac->smoothtype != -1)) {
861     jac->smoothnumlevels = indx;
862     PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, indx);
863   }
864 
865   /* Number of levels for ILU(k) for Euclid */
866   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_eu_level", "Number of levels for ILU(k) in Euclid smoother", "None", 0, &indx, &flg));
867   if (flg && (jac->smoothtype == 3)) {
868     jac->eu_level = indx;
869     PetscCallExternal(HYPRE_BoomerAMGSetEuLevel, jac->hsolver, indx);
870   }
871 
872   /* Filter for ILU(k) for Euclid */
873   double droptolerance;
874   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_eu_droptolerance", "Drop tolerance for ILU(k) in Euclid smoother", "None", 0, &droptolerance, &flg));
875   if (flg && (jac->smoothtype == 3)) {
876     jac->eu_droptolerance = droptolerance;
877     PetscCallExternal(HYPRE_BoomerAMGSetEuLevel, jac->hsolver, droptolerance);
878   }
879 
880   /* Use Block Jacobi ILUT for Euclid */
881   PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_eu_bj", "Use Block Jacobi for ILU in Euclid smoother?", "None", PETSC_FALSE, &tmp_truth, &flg));
882   if (flg && (jac->smoothtype == 3)) {
883     jac->eu_bj = tmp_truth;
884     PetscCallExternal(HYPRE_BoomerAMGSetEuBJ, jac->hsolver, jac->eu_bj);
885   }
886 
887   /* Relax type */
888   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_all", "Relax type for the up and down cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg));
889   if (flg) {
890     jac->relaxtype[0] = jac->relaxtype[1] = indx;
891     PetscCallExternal(HYPRE_BoomerAMGSetRelaxType, jac->hsolver, indx);
892     /* by default, coarse type set to 9 */
893     jac->relaxtype[2] = 9;
894     PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, 9, 3);
895   }
896   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_down", "Relax type for the down cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg));
897   if (flg) {
898     jac->relaxtype[0] = indx;
899     PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 1);
900   }
901   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_up", "Relax type for the up cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg));
902   if (flg) {
903     jac->relaxtype[1] = indx;
904     PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 2);
905   }
906   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_coarse", "Relax type on coarse grid", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[9], &indx, &flg));
907   if (flg) {
908     jac->relaxtype[2] = indx;
909     PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 3);
910   }
911 
912   /* Relaxation Weight */
913   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_relax_weight_all", "Relaxation weight for all levels (0 = hypre estimates, -k = determined with k CG steps)", "None", jac->relaxweight, &tmpdbl, &flg));
914   if (flg) {
915     PetscCallExternal(HYPRE_BoomerAMGSetRelaxWt, jac->hsolver, tmpdbl);
916     jac->relaxweight = tmpdbl;
917   }
918 
919   n         = 2;
920   twodbl[0] = twodbl[1] = 1.0;
921   PetscCall(PetscOptionsRealArray("-pc_hypre_boomeramg_relax_weight_level", "Set the relaxation weight for a particular level (weight,level)", "None", twodbl, &n, &flg));
922   if (flg) {
923     PetscCheck(n == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Relax weight level: you must provide 2 values separated by a comma (and no space), you provided %" PetscInt_FMT, n);
924     indx = (int)PetscAbsReal(twodbl[1]);
925     PetscCallExternal(HYPRE_BoomerAMGSetLevelRelaxWt, jac->hsolver, twodbl[0], indx);
926   }
927 
928   /* Outer relaxation Weight */
929   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_outer_relax_weight_all", "Outer relaxation weight for all levels (-k = determined with k CG steps)", "None", jac->outerrelaxweight, &tmpdbl, &flg));
930   if (flg) {
931     PetscCallExternal(HYPRE_BoomerAMGSetOuterWt, jac->hsolver, tmpdbl);
932     jac->outerrelaxweight = tmpdbl;
933   }
934 
935   n         = 2;
936   twodbl[0] = twodbl[1] = 1.0;
937   PetscCall(PetscOptionsRealArray("-pc_hypre_boomeramg_outer_relax_weight_level", "Set the outer relaxation weight for a particular level (weight,level)", "None", twodbl, &n, &flg));
938   if (flg) {
939     PetscCheck(n == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Relax weight outer level: You must provide 2 values separated by a comma (and no space), you provided %" PetscInt_FMT, n);
940     indx = (int)PetscAbsReal(twodbl[1]);
941     PetscCallExternal(HYPRE_BoomerAMGSetLevelOuterWt, jac->hsolver, twodbl[0], indx);
942   }
943 
944   /* the Relax Order */
945   PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_no_CF", "Do not use CF-relaxation", "None", PETSC_FALSE, &tmp_truth, &flg));
946 
947   if (flg && tmp_truth) {
948     jac->relaxorder = 0;
949     PetscCallExternal(HYPRE_BoomerAMGSetRelaxOrder, jac->hsolver, jac->relaxorder);
950   }
951   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_measure_type", "Measure type", "None", HYPREBoomerAMGMeasureType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGMeasureType), HYPREBoomerAMGMeasureType[0], &indx, &flg));
952   if (flg) {
953     jac->measuretype = indx;
954     PetscCallExternal(HYPRE_BoomerAMGSetMeasureType, jac->hsolver, jac->measuretype);
955   }
956   /* update list length 3/07 */
957   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_coarsen_type", "Coarsen type", "None", HYPREBoomerAMGCoarsenType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGCoarsenType), HYPREBoomerAMGCoarsenType[6], &indx, &flg));
958   if (flg) {
959     jac->coarsentype = indx;
960     PetscCallExternal(HYPRE_BoomerAMGSetCoarsenType, jac->hsolver, jac->coarsentype);
961   }
962 
963   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_max_coarse_size", "Maximum size of coarsest grid", "None", jac->maxc, &jac->maxc, &flg));
964   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMaxCoarseSize, jac->hsolver, jac->maxc);
965   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_min_coarse_size", "Minimum size of coarsest grid", "None", jac->minc, &jac->minc, &flg));
966   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMinCoarseSize, jac->hsolver, jac->minc);
967 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
968   // global parameter but is closely associated with BoomerAMG
969   PetscCall(PetscOptionsEList("-pc_mg_galerkin_mat_product_algorithm", "Type of SpGEMM to use in hypre (only for now)", "PCMGGalerkinSetMatProductAlgorithm", PCHYPRESpgemmTypes, PETSC_STATIC_ARRAY_LENGTH(PCHYPRESpgemmTypes), PCHYPRESpgemmTypes[0], &indx, &flg));
970   if (!flg) indx = 0;
971   PetscCall(PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(pc, PCHYPRESpgemmTypes[indx]));
972 #endif
973   /* AIR */
974 #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0)
975   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_restriction_type", "Type of AIR method (distance 1 or 2, 0 means no AIR)", "None", jac->Rtype, &jac->Rtype, NULL));
976   PetscCallExternal(HYPRE_BoomerAMGSetRestriction, jac->hsolver, jac->Rtype);
977   if (jac->Rtype) {
978     jac->interptype = 100; /* no way we can pass this with strings... Set it as default as in MFEM, then users can still customize it back to a different one */
979 
980     PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_strongthresholdR", "Threshold for R", "None", jac->Rstrongthreshold, &jac->Rstrongthreshold, NULL));
981     PetscCallExternal(HYPRE_BoomerAMGSetStrongThresholdR, jac->hsolver, jac->Rstrongthreshold);
982 
983     PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_filterthresholdR", "Filter threshold for R", "None", jac->Rfilterthreshold, &jac->Rfilterthreshold, NULL));
984     PetscCallExternal(HYPRE_BoomerAMGSetFilterThresholdR, jac->hsolver, jac->Rfilterthreshold);
985 
986     PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_Adroptol", "Defines the drop tolerance for the A-matrices from the 2nd level of AMG", "None", jac->Adroptol, &jac->Adroptol, NULL));
987     PetscCallExternal(HYPRE_BoomerAMGSetADropTol, jac->hsolver, jac->Adroptol);
988 
989     PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_Adroptype", "Drops the entries that are not on the diagonal and smaller than its row norm: type 1: 1-norm, 2: 2-norm, -1: infinity norm", "None", jac->Adroptype, &jac->Adroptype, NULL));
990     PetscCallExternal(HYPRE_BoomerAMGSetADropType, jac->hsolver, jac->Adroptype);
991   }
992 #endif
993 
994 #if PETSC_PKG_HYPRE_VERSION_LE(9, 9, 9)
995   PetscCheck(!jac->Rtype || !jac->agg_nl, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "-pc_hypre_boomeramg_restriction_type (%" PetscInt_FMT ") and -pc_hypre_boomeramg_agg_nl (%" PetscInt_FMT ")", jac->Rtype, jac->agg_nl);
996 #endif
997 
998   /* new 3/07 */
999   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_interp_type", "Interpolation type", "None", HYPREBoomerAMGInterpType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGInterpType), HYPREBoomerAMGInterpType[0], &indx, &flg));
1000   if (flg || jac->Rtype) {
1001     if (flg) jac->interptype = indx;
1002     PetscCallExternal(HYPRE_BoomerAMGSetInterpType, jac->hsolver, jac->interptype);
1003   }
1004 
1005   PetscCall(PetscOptionsName("-pc_hypre_boomeramg_print_statistics", "Print statistics", "None", &flg));
1006   if (flg) {
1007     level = 3;
1008     PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_print_statistics", "Print statistics", "None", level, &level, NULL));
1009 
1010     jac->printstatistics = PETSC_TRUE;
1011     PetscCallExternal(HYPRE_BoomerAMGSetPrintLevel, jac->hsolver, level);
1012   }
1013 
1014   PetscCall(PetscOptionsName("-pc_hypre_boomeramg_print_debug", "Print debug information", "None", &flg));
1015   if (flg) {
1016     level = 3;
1017     PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_print_debug", "Print debug information", "None", level, &level, NULL));
1018 
1019     jac->printstatistics = PETSC_TRUE;
1020     PetscCallExternal(HYPRE_BoomerAMGSetDebugFlag, jac->hsolver, level);
1021   }
1022 
1023   PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_nodal_relaxation", "Nodal relaxation via Schwarz", "None", PETSC_FALSE, &tmp_truth, &flg));
1024   if (flg && tmp_truth) {
1025     PetscInt tmp_int;
1026     PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_relaxation", "Nodal relaxation via Schwarz", "None", jac->nodal_relax_levels, &tmp_int, &flg));
1027     if (flg) jac->nodal_relax_levels = tmp_int;
1028     PetscCallExternal(HYPRE_BoomerAMGSetSmoothType, jac->hsolver, 6);
1029     PetscCallExternal(HYPRE_BoomerAMGSetDomainType, jac->hsolver, 1);
1030     PetscCallExternal(HYPRE_BoomerAMGSetOverlap, jac->hsolver, 0);
1031     PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, jac->nodal_relax_levels);
1032   }
1033 
1034   PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_keeptranspose", "Avoid transpose matvecs in preconditioner application", "None", jac->keeptranspose, &jac->keeptranspose, NULL));
1035   PetscCallExternal(HYPRE_BoomerAMGSetKeepTranspose, jac->hsolver, jac->keeptranspose ? 1 : 0);
1036 
1037   /* options for ParaSails solvers */
1038   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_parasails_sym", "Symmetry of matrix and preconditioner", "None", symtlist, PETSC_STATIC_ARRAY_LENGTH(symtlist), symtlist[0], &indx, &flg));
1039   if (flg) {
1040     jac->symt = indx;
1041     PetscCallExternal(HYPRE_BoomerAMGSetSym, jac->hsolver, jac->symt);
1042   }
1043 
1044   PetscOptionsHeadEnd();
1045   PetscFunctionReturn(PETSC_SUCCESS);
1046 }
1047 
1048 static PetscErrorCode PCApplyRichardson_HYPRE_BoomerAMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
1049 {
1050   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1051   HYPRE_Int oits;
1052 
1053   PetscFunctionBegin;
1054   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
1055   PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, its * jac->maxiter);
1056   PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, rtol);
1057   jac->applyrichardson = PETSC_TRUE;
1058   PetscCall(PCApply_HYPRE(pc, b, y));
1059   jac->applyrichardson = PETSC_FALSE;
1060   PetscCallExternal(HYPRE_BoomerAMGGetNumIterations, jac->hsolver, &oits);
1061   *outits = oits;
1062   if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
1063   else *reason = PCRICHARDSON_CONVERGED_RTOL;
1064   PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol);
1065   PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter);
1066   PetscFunctionReturn(PETSC_SUCCESS);
1067 }
1068 
/* Prints the BoomerAMG configuration to an ASCII viewer; the output text and
   ordering are part of the user-visible interface, so keep them stable */
static PetscErrorCode PCView_HYPRE_BoomerAMG(PC pc, PetscViewer viewer)
{
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;
  PetscBool iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE BoomerAMG preconditioning\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Cycle type %s\n", HYPREBoomerAMGCycleType[jac->cycletype]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Maximum number of levels %" PetscInt_FMT "\n", jac->maxlevels));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Maximum number of iterations PER hypre call %" PetscInt_FMT "\n", jac->maxiter));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Convergence tolerance PER hypre call %g\n", (double)jac->tol));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Threshold for strong coupling %g\n", (double)jac->strongthreshold));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Interpolation truncation factor %g\n", (double)jac->truncfactor));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Interpolation: max elements per row %" PetscInt_FMT "\n", jac->pmax));
    if (jac->interp_refine) PetscCall(PetscViewerASCIIPrintf(viewer, "    Interpolation: number of steps of weighted refinement %" PetscInt_FMT "\n", jac->interp_refine));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Number of levels of aggressive coarsening %" PetscInt_FMT "\n", jac->agg_nl));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Number of paths for aggressive coarsening %" PetscInt_FMT "\n", jac->agg_num_paths));

    PetscCall(PetscViewerASCIIPrintf(viewer, "    Maximum row sums %g\n", (double)jac->maxrowsum));

    /* sweep counts per cycle direction (down/up/coarse) */
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Sweeps down         %" PetscInt_FMT "\n", jac->gridsweeps[0]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Sweeps up           %" PetscInt_FMT "\n", jac->gridsweeps[1]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Sweeps on coarse    %" PetscInt_FMT "\n", jac->gridsweeps[2]));

    PetscCall(PetscViewerASCIIPrintf(viewer, "    Relax down          %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[0]]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Relax up            %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[1]]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Relax on coarse     %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[2]]));

    PetscCall(PetscViewerASCIIPrintf(viewer, "    Relax weight  (all)      %g\n", (double)jac->relaxweight));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Outer relax weight (all) %g\n", (double)jac->outerrelaxweight));

    if (jac->relaxorder) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "    Using CF-relaxation\n"));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "    Not using CF-relaxation\n"));
    }
    /* smoothtype of -1 means no complex smoother was selected */
    if (jac->smoothtype != -1) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "    Smooth type          %s\n", HYPREBoomerAMGSmoothType[jac->smoothtype]));
      PetscCall(PetscViewerASCIIPrintf(viewer, "    Smooth num levels    %" PetscInt_FMT "\n", jac->smoothnumlevels));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "    Not using more complex smoothers.\n"));
    }
    /* smoothtype 3 is the Euclid smoother (see HYPREBoomerAMGSmoothType) */
    if (jac->smoothtype == 3) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "    Euclid ILU(k) levels %" PetscInt_FMT "\n", jac->eu_level));
      PetscCall(PetscViewerASCIIPrintf(viewer, "    Euclid ILU(k) drop tolerance %g\n", (double)jac->eu_droptolerance));
      PetscCall(PetscViewerASCIIPrintf(viewer, "    Euclid ILU use Block-Jacobi? %" PetscInt_FMT "\n", jac->eu_bj));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Measure type        %s\n", HYPREBoomerAMGMeasureType[jac->measuretype]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Coarsen type        %s\n", HYPREBoomerAMGCoarsenType[jac->coarsentype]));
    /* interptype 100 is the AIR "1pt" interpolation, which has no table entry */
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Interpolation type  %s\n", jac->interptype != 100 ? HYPREBoomerAMGInterpType[jac->interptype] : "1pt"));
    if (jac->nodal_coarsening) PetscCall(PetscViewerASCIIPrintf(viewer, "    Using nodal coarsening with HYPRE_BOOMERAMGSetNodal() %" PetscInt_FMT "\n", jac->nodal_coarsening));
    if (jac->vec_interp_variant) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "    HYPRE_BoomerAMGSetInterpVecVariant() %" PetscInt_FMT "\n", jac->vec_interp_variant));
      PetscCall(PetscViewerASCIIPrintf(viewer, "    HYPRE_BoomerAMGSetInterpVecQMax() %" PetscInt_FMT "\n", jac->vec_interp_qmax));
      PetscCall(PetscViewerASCIIPrintf(viewer, "    HYPRE_BoomerAMGSetSmoothInterpVectors() %d\n", jac->vec_interp_smooth));
    }
    if (jac->nodal_relax) PetscCall(PetscViewerASCIIPrintf(viewer, "    Using nodal relaxation via Schwarz smoothing on levels %" PetscInt_FMT "\n", jac->nodal_relax_levels));
#if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
    PetscCall(PetscViewerASCIIPrintf(viewer, "    SpGEMM type         %s\n", jac->spgemm_type));
#endif
    /* AIR */
    if (jac->Rtype) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "    Using approximate ideal restriction type %" PetscInt_FMT "\n", jac->Rtype));
      PetscCall(PetscViewerASCIIPrintf(viewer, "      Threshold for R %g\n", (double)jac->Rstrongthreshold));
      PetscCall(PetscViewerASCIIPrintf(viewer, "      Filter for R %g\n", (double)jac->Rfilterthreshold));
      PetscCall(PetscViewerASCIIPrintf(viewer, "      A drop tolerance %g\n", (double)jac->Adroptol));
      PetscCall(PetscViewerASCIIPrintf(viewer, "      A drop type %" PetscInt_FMT "\n", jac->Adroptype));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1142 
1143 static PetscErrorCode PCSetFromOptions_HYPRE_ParaSails(PC pc, PetscOptionItems *PetscOptionsObject)
1144 {
1145   PC_HYPRE   *jac = (PC_HYPRE *)pc->data;
1146   PetscInt    indx;
1147   PetscBool   flag;
1148   const char *symtlist[] = {"nonsymmetric", "SPD", "nonsymmetric,SPD"};
1149 
1150   PetscFunctionBegin;
1151   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ParaSails Options");
1152   PetscCall(PetscOptionsInt("-pc_hypre_parasails_nlevels", "Number of number of levels", "None", jac->nlevels, &jac->nlevels, 0));
1153   PetscCall(PetscOptionsReal("-pc_hypre_parasails_thresh", "Threshold", "None", jac->threshold, &jac->threshold, &flag));
1154   if (flag) PetscCallExternal(HYPRE_ParaSailsSetParams, jac->hsolver, jac->threshold, jac->nlevels);
1155 
1156   PetscCall(PetscOptionsReal("-pc_hypre_parasails_filter", "filter", "None", jac->filter, &jac->filter, &flag));
1157   if (flag) PetscCallExternal(HYPRE_ParaSailsSetFilter, jac->hsolver, jac->filter);
1158 
1159   PetscCall(PetscOptionsReal("-pc_hypre_parasails_loadbal", "Load balance", "None", jac->loadbal, &jac->loadbal, &flag));
1160   if (flag) PetscCallExternal(HYPRE_ParaSailsSetLoadbal, jac->hsolver, jac->loadbal);
1161 
1162   PetscCall(PetscOptionsBool("-pc_hypre_parasails_logging", "Print info to screen", "None", (PetscBool)jac->logging, (PetscBool *)&jac->logging, &flag));
1163   if (flag) PetscCallExternal(HYPRE_ParaSailsSetLogging, jac->hsolver, jac->logging);
1164 
1165   PetscCall(PetscOptionsBool("-pc_hypre_parasails_reuse", "Reuse nonzero pattern in preconditioner", "None", (PetscBool)jac->ruse, (PetscBool *)&jac->ruse, &flag));
1166   if (flag) PetscCallExternal(HYPRE_ParaSailsSetReuse, jac->hsolver, jac->ruse);
1167 
1168   PetscCall(PetscOptionsEList("-pc_hypre_parasails_sym", "Symmetry of matrix and preconditioner", "None", symtlist, PETSC_STATIC_ARRAY_LENGTH(symtlist), symtlist[0], &indx, &flag));
1169   if (flag) {
1170     jac->symt = indx;
1171     PetscCallExternal(HYPRE_ParaSailsSetSym, jac->hsolver, jac->symt);
1172   }
1173 
1174   PetscOptionsHeadEnd();
1175   PetscFunctionReturn(PETSC_SUCCESS);
1176 }
1177 
1178 static PetscErrorCode PCView_HYPRE_ParaSails(PC pc, PetscViewer viewer)
1179 {
1180   PC_HYPRE   *jac = (PC_HYPRE *)pc->data;
1181   PetscBool   iascii;
1182   const char *symt = 0;
1183 
1184   PetscFunctionBegin;
1185   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
1186   if (iascii) {
1187     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE ParaSails preconditioning\n"));
1188     PetscCall(PetscViewerASCIIPrintf(viewer, "    nlevels %" PetscInt_FMT "\n", jac->nlevels));
1189     PetscCall(PetscViewerASCIIPrintf(viewer, "    threshold %g\n", (double)jac->threshold));
1190     PetscCall(PetscViewerASCIIPrintf(viewer, "    filter %g\n", (double)jac->filter));
1191     PetscCall(PetscViewerASCIIPrintf(viewer, "    load balance %g\n", (double)jac->loadbal));
1192     PetscCall(PetscViewerASCIIPrintf(viewer, "    reuse nonzero structure %s\n", PetscBools[jac->ruse]));
1193     PetscCall(PetscViewerASCIIPrintf(viewer, "    print info to screen %s\n", PetscBools[jac->logging]));
1194     if (!jac->symt) symt = "nonsymmetric matrix and preconditioner";
1195     else if (jac->symt == 1) symt = "SPD matrix and preconditioner";
1196     else if (jac->symt == 2) symt = "nonsymmetric matrix but SPD preconditioner";
1197     else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Unknown HYPRE ParaSails symmetric option %" PetscInt_FMT, jac->symt);
1198     PetscCall(PetscViewerASCIIPrintf(viewer, "    %s\n", symt));
1199   }
1200   PetscFunctionReturn(PETSC_SUCCESS);
1201 }
1202 
1203 static PetscErrorCode PCSetFromOptions_HYPRE_AMS(PC pc, PetscOptionItems *PetscOptionsObject)
1204 {
1205   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1206   PetscInt  n;
1207   PetscBool flag, flag2, flag3, flag4;
1208 
1209   PetscFunctionBegin;
1210   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE AMS Options");
1211   PetscCall(PetscOptionsInt("-pc_hypre_ams_print_level", "Debugging output level for AMS", "None", jac->as_print, &jac->as_print, &flag));
1212   if (flag) PetscCallExternal(HYPRE_AMSSetPrintLevel, jac->hsolver, jac->as_print);
1213   PetscCall(PetscOptionsInt("-pc_hypre_ams_max_iter", "Maximum number of AMS multigrid iterations within PCApply", "None", jac->as_max_iter, &jac->as_max_iter, &flag));
1214   if (flag) PetscCallExternal(HYPRE_AMSSetMaxIter, jac->hsolver, jac->as_max_iter);
1215   PetscCall(PetscOptionsInt("-pc_hypre_ams_cycle_type", "Cycle type for AMS multigrid", "None", jac->ams_cycle_type, &jac->ams_cycle_type, &flag));
1216   if (flag) PetscCallExternal(HYPRE_AMSSetCycleType, jac->hsolver, jac->ams_cycle_type);
1217   PetscCall(PetscOptionsReal("-pc_hypre_ams_tol", "Error tolerance for AMS multigrid", "None", jac->as_tol, &jac->as_tol, &flag));
1218   if (flag) PetscCallExternal(HYPRE_AMSSetTol, jac->hsolver, jac->as_tol);
1219   PetscCall(PetscOptionsInt("-pc_hypre_ams_relax_type", "Relaxation type for AMS smoother", "None", jac->as_relax_type, &jac->as_relax_type, &flag));
1220   PetscCall(PetscOptionsInt("-pc_hypre_ams_relax_times", "Number of relaxation steps for AMS smoother", "None", jac->as_relax_times, &jac->as_relax_times, &flag2));
1221   PetscCall(PetscOptionsReal("-pc_hypre_ams_relax_weight", "Relaxation weight for AMS smoother", "None", jac->as_relax_weight, &jac->as_relax_weight, &flag3));
1222   PetscCall(PetscOptionsReal("-pc_hypre_ams_omega", "SSOR coefficient for AMS smoother", "None", jac->as_omega, &jac->as_omega, &flag4));
1223   if (flag || flag2 || flag3 || flag4) PetscCallExternal(HYPRE_AMSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
1224   PetscCall(PetscOptionsReal("-pc_hypre_ams_amg_alpha_theta", "Threshold for strong coupling of vector Poisson AMG solver", "None", jac->as_amg_alpha_theta, &jac->as_amg_alpha_theta, &flag));
1225   n = 5;
1226   PetscCall(PetscOptionsIntArray("-pc_hypre_ams_amg_alpha_options", "AMG options for vector Poisson", "None", jac->as_amg_alpha_opts, &n, &flag2));
1227   if (flag || flag2) {
1228     PetscCallExternal(HYPRE_AMSSetAlphaAMGOptions, jac->hsolver, jac->as_amg_alpha_opts[0], /* AMG coarsen type */
1229                       jac->as_amg_alpha_opts[1],                                            /* AMG agg_levels */
1230                       jac->as_amg_alpha_opts[2],                                            /* AMG relax_type */
1231                       jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3],                   /* AMG interp_type */
1232                       jac->as_amg_alpha_opts[4]);                                           /* AMG Pmax */
1233   }
1234   PetscCall(PetscOptionsReal("-pc_hypre_ams_amg_beta_theta", "Threshold for strong coupling of scalar Poisson AMG solver", "None", jac->as_amg_beta_theta, &jac->as_amg_beta_theta, &flag));
1235   n = 5;
1236   PetscCall(PetscOptionsIntArray("-pc_hypre_ams_amg_beta_options", "AMG options for scalar Poisson solver", "None", jac->as_amg_beta_opts, &n, &flag2));
1237   if (flag || flag2) {
1238     PetscCallExternal(HYPRE_AMSSetBetaAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */
1239                       jac->as_amg_beta_opts[1],                                           /* AMG agg_levels */
1240                       jac->as_amg_beta_opts[2],                                           /* AMG relax_type */
1241                       jac->as_amg_beta_theta, jac->as_amg_beta_opts[3],                   /* AMG interp_type */
1242                       jac->as_amg_beta_opts[4]);                                          /* AMG Pmax */
1243   }
1244   PetscCall(PetscOptionsInt("-pc_hypre_ams_projection_frequency", "Frequency at which a projection onto the compatible subspace for problems with zero conductivity regions is performed", "None", jac->ams_proj_freq, &jac->ams_proj_freq, &flag));
1245   if (flag) { /* override HYPRE's default only if the options is used */
1246     PetscCallExternal(HYPRE_AMSSetProjectionFrequency, jac->hsolver, jac->ams_proj_freq);
1247   }
1248   PetscOptionsHeadEnd();
1249   PetscFunctionReturn(PETSC_SUCCESS);
1250 }
1251 
1252 static PetscErrorCode PCView_HYPRE_AMS(PC pc, PetscViewer viewer)
1253 {
1254   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1255   PetscBool iascii;
1256 
1257   PetscFunctionBegin;
1258   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
1259   if (iascii) {
1260     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE AMS preconditioning\n"));
1261     PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace iterations per application %" PetscInt_FMT "\n", jac->as_max_iter));
1262     PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace cycle type %" PetscInt_FMT "\n", jac->ams_cycle_type));
1263     PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace iteration tolerance %g\n", (double)jac->as_tol));
1264     PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother type %" PetscInt_FMT "\n", jac->as_relax_type));
1265     PetscCall(PetscViewerASCIIPrintf(viewer, "    number of smoothing steps %" PetscInt_FMT "\n", jac->as_relax_times));
1266     PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother weight %g\n", (double)jac->as_relax_weight));
1267     PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother omega %g\n", (double)jac->as_omega));
1268     if (jac->alpha_Poisson) {
1269       PetscCall(PetscViewerASCIIPrintf(viewer, "    vector Poisson solver (passed in by user)\n"));
1270     } else {
1271       PetscCall(PetscViewerASCIIPrintf(viewer, "    vector Poisson solver (computed) \n"));
1272     }
1273     PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG coarsening type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[0]));
1274     PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[1]));
1275     PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG relaxation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[2]));
1276     PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG interpolation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[3]));
1277     PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[4]));
1278     PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG strength threshold %g\n", (double)jac->as_amg_alpha_theta));
1279     if (!jac->ams_beta_is_zero) {
1280       if (jac->beta_Poisson) {
1281         PetscCall(PetscViewerASCIIPrintf(viewer, "    scalar Poisson solver (passed in by user)\n"));
1282       } else {
1283         PetscCall(PetscViewerASCIIPrintf(viewer, "    scalar Poisson solver (computed) \n"));
1284       }
1285       PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG coarsening type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[0]));
1286       PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_beta_opts[1]));
1287       PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG relaxation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[2]));
1288       PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG interpolation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[3]));
1289       PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_beta_opts[4]));
1290       PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG strength threshold %g\n", (double)jac->as_amg_beta_theta));
1291       if (jac->ams_beta_is_zero_part) PetscCall(PetscViewerASCIIPrintf(viewer, "        compatible subspace projection frequency %" PetscInt_FMT " (-1 HYPRE uses default)\n", jac->ams_proj_freq));
1292     } else {
1293       PetscCall(PetscViewerASCIIPrintf(viewer, "    scalar Poisson solver not used (zero-conductivity everywhere) \n"));
1294     }
1295   }
1296   PetscFunctionReturn(PETSC_SUCCESS);
1297 }
1298 
1299 static PetscErrorCode PCSetFromOptions_HYPRE_ADS(PC pc, PetscOptionItems *PetscOptionsObject)
1300 {
1301   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1302   PetscInt  n;
1303   PetscBool flag, flag2, flag3, flag4;
1304 
1305   PetscFunctionBegin;
1306   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ADS Options");
1307   PetscCall(PetscOptionsInt("-pc_hypre_ads_print_level", "Debugging output level for ADS", "None", jac->as_print, &jac->as_print, &flag));
1308   if (flag) PetscCallExternal(HYPRE_ADSSetPrintLevel, jac->hsolver, jac->as_print);
1309   PetscCall(PetscOptionsInt("-pc_hypre_ads_max_iter", "Maximum number of ADS multigrid iterations within PCApply", "None", jac->as_max_iter, &jac->as_max_iter, &flag));
1310   if (flag) PetscCallExternal(HYPRE_ADSSetMaxIter, jac->hsolver, jac->as_max_iter);
1311   PetscCall(PetscOptionsInt("-pc_hypre_ads_cycle_type", "Cycle type for ADS multigrid", "None", jac->ads_cycle_type, &jac->ads_cycle_type, &flag));
1312   if (flag) PetscCallExternal(HYPRE_ADSSetCycleType, jac->hsolver, jac->ads_cycle_type);
1313   PetscCall(PetscOptionsReal("-pc_hypre_ads_tol", "Error tolerance for ADS multigrid", "None", jac->as_tol, &jac->as_tol, &flag));
1314   if (flag) PetscCallExternal(HYPRE_ADSSetTol, jac->hsolver, jac->as_tol);
1315   PetscCall(PetscOptionsInt("-pc_hypre_ads_relax_type", "Relaxation type for ADS smoother", "None", jac->as_relax_type, &jac->as_relax_type, &flag));
1316   PetscCall(PetscOptionsInt("-pc_hypre_ads_relax_times", "Number of relaxation steps for ADS smoother", "None", jac->as_relax_times, &jac->as_relax_times, &flag2));
1317   PetscCall(PetscOptionsReal("-pc_hypre_ads_relax_weight", "Relaxation weight for ADS smoother", "None", jac->as_relax_weight, &jac->as_relax_weight, &flag3));
1318   PetscCall(PetscOptionsReal("-pc_hypre_ads_omega", "SSOR coefficient for ADS smoother", "None", jac->as_omega, &jac->as_omega, &flag4));
1319   if (flag || flag2 || flag3 || flag4) PetscCallExternal(HYPRE_ADSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
1320   PetscCall(PetscOptionsReal("-pc_hypre_ads_ams_theta", "Threshold for strong coupling of AMS solver inside ADS", "None", jac->as_amg_alpha_theta, &jac->as_amg_alpha_theta, &flag));
1321   n = 5;
1322   PetscCall(PetscOptionsIntArray("-pc_hypre_ads_ams_options", "AMG options for AMS solver inside ADS", "None", jac->as_amg_alpha_opts, &n, &flag2));
1323   PetscCall(PetscOptionsInt("-pc_hypre_ads_ams_cycle_type", "Cycle type for AMS solver inside ADS", "None", jac->ams_cycle_type, &jac->ams_cycle_type, &flag3));
1324   if (flag || flag2 || flag3) {
1325     PetscCallExternal(HYPRE_ADSSetAMSOptions, jac->hsolver, jac->ams_cycle_type, /* AMS cycle type */
1326                       jac->as_amg_alpha_opts[0],                                 /* AMG coarsen type */
1327                       jac->as_amg_alpha_opts[1],                                 /* AMG agg_levels */
1328                       jac->as_amg_alpha_opts[2],                                 /* AMG relax_type */
1329                       jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3],        /* AMG interp_type */
1330                       jac->as_amg_alpha_opts[4]);                                /* AMG Pmax */
1331   }
1332   PetscCall(PetscOptionsReal("-pc_hypre_ads_amg_theta", "Threshold for strong coupling of vector AMG solver inside ADS", "None", jac->as_amg_beta_theta, &jac->as_amg_beta_theta, &flag));
1333   n = 5;
1334   PetscCall(PetscOptionsIntArray("-pc_hypre_ads_amg_options", "AMG options for vector AMG solver inside ADS", "None", jac->as_amg_beta_opts, &n, &flag2));
1335   if (flag || flag2) {
1336     PetscCallExternal(HYPRE_ADSSetAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */
1337                       jac->as_amg_beta_opts[1],                                       /* AMG agg_levels */
1338                       jac->as_amg_beta_opts[2],                                       /* AMG relax_type */
1339                       jac->as_amg_beta_theta, jac->as_amg_beta_opts[3],               /* AMG interp_type */
1340                       jac->as_amg_beta_opts[4]);                                      /* AMG Pmax */
1341   }
1342   PetscOptionsHeadEnd();
1343   PetscFunctionReturn(PETSC_SUCCESS);
1344 }
1345 
1346 static PetscErrorCode PCView_HYPRE_ADS(PC pc, PetscViewer viewer)
1347 {
1348   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1349   PetscBool iascii;
1350 
1351   PetscFunctionBegin;
1352   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
1353   if (iascii) {
1354     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE ADS preconditioning\n"));
1355     PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace iterations per application %" PetscInt_FMT "\n", jac->as_max_iter));
1356     PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace cycle type %" PetscInt_FMT "\n", jac->ads_cycle_type));
1357     PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace iteration tolerance %g\n", (double)jac->as_tol));
1358     PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother type %" PetscInt_FMT "\n", jac->as_relax_type));
1359     PetscCall(PetscViewerASCIIPrintf(viewer, "    number of smoothing steps %" PetscInt_FMT "\n", jac->as_relax_times));
1360     PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother weight %g\n", (double)jac->as_relax_weight));
1361     PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother omega %g\n", (double)jac->as_omega));
1362     PetscCall(PetscViewerASCIIPrintf(viewer, "    AMS solver using boomerAMG\n"));
1363     PetscCall(PetscViewerASCIIPrintf(viewer, "        subspace cycle type %" PetscInt_FMT "\n", jac->ams_cycle_type));
1364     PetscCall(PetscViewerASCIIPrintf(viewer, "        coarsening type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[0]));
1365     PetscCall(PetscViewerASCIIPrintf(viewer, "        levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[1]));
1366     PetscCall(PetscViewerASCIIPrintf(viewer, "        relaxation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[2]));
1367     PetscCall(PetscViewerASCIIPrintf(viewer, "        interpolation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[3]));
1368     PetscCall(PetscViewerASCIIPrintf(viewer, "        max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[4]));
1369     PetscCall(PetscViewerASCIIPrintf(viewer, "        strength threshold %g\n", (double)jac->as_amg_alpha_theta));
1370     PetscCall(PetscViewerASCIIPrintf(viewer, "    vector Poisson solver using boomerAMG\n"));
1371     PetscCall(PetscViewerASCIIPrintf(viewer, "        coarsening type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[0]));
1372     PetscCall(PetscViewerASCIIPrintf(viewer, "        levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_beta_opts[1]));
1373     PetscCall(PetscViewerASCIIPrintf(viewer, "        relaxation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[2]));
1374     PetscCall(PetscViewerASCIIPrintf(viewer, "        interpolation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[3]));
1375     PetscCall(PetscViewerASCIIPrintf(viewer, "        max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_beta_opts[4]));
1376     PetscCall(PetscViewerASCIIPrintf(viewer, "        strength threshold %g\n", (double)jac->as_amg_beta_theta));
1377   }
1378   PetscFunctionReturn(PETSC_SUCCESS);
1379 }
1380 
1381 static PetscErrorCode PCHYPRESetDiscreteGradient_HYPRE(PC pc, Mat G)
1382 {
1383   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1384   PetscBool ishypre;
1385 
1386   PetscFunctionBegin;
1387   PetscCall(PetscObjectTypeCompare((PetscObject)G, MATHYPRE, &ishypre));
1388   if (ishypre) {
1389     PetscCall(PetscObjectReference((PetscObject)G));
1390     PetscCall(MatDestroy(&jac->G));
1391     jac->G = G;
1392   } else {
1393     PetscCall(MatDestroy(&jac->G));
1394     PetscCall(MatConvert(G, MATHYPRE, MAT_INITIAL_MATRIX, &jac->G));
1395   }
1396   PetscFunctionReturn(PETSC_SUCCESS);
1397 }
1398 
/*@
   PCHYPRESetDiscreteGradient - Set discrete gradient matrix for `PCHYPRE` type of ams or ads

   Collective

   Input Parameters:
+  pc - the preconditioning context
-  G - the discrete gradient

   Level: intermediate

   Notes:
    G should have as many rows as the number of edges and as many columns as the number of vertices in the mesh

    Each row of G has 2 nonzeros, with column indexes being the global indexes of the edge's endpoints: matrix entries are +1 and -1 depending on edge orientation

   Developer Note:
   This automatically converts the matrix to `MATHYPRE` if it is not already of that type

.seealso: `PCHYPRE`, `PCHYPRESetDiscreteCurl()`
@*/
PetscErrorCode PCHYPRESetDiscreteGradient(PC pc, Mat G)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(G, MAT_CLASSID, 2);
  PetscCheckSameComm(pc, 1, G, 2);
  /* dispatched to PCHYPRESetDiscreteGradient_HYPRE(); silently does nothing for other PC types */
  PetscTryMethod(pc, "PCHYPRESetDiscreteGradient_C", (PC, Mat), (pc, G));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1429 
1430 static PetscErrorCode PCHYPRESetDiscreteCurl_HYPRE(PC pc, Mat C)
1431 {
1432   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1433   PetscBool ishypre;
1434 
1435   PetscFunctionBegin;
1436   PetscCall(PetscObjectTypeCompare((PetscObject)C, MATHYPRE, &ishypre));
1437   if (ishypre) {
1438     PetscCall(PetscObjectReference((PetscObject)C));
1439     PetscCall(MatDestroy(&jac->C));
1440     jac->C = C;
1441   } else {
1442     PetscCall(MatDestroy(&jac->C));
1443     PetscCall(MatConvert(C, MATHYPRE, MAT_INITIAL_MATRIX, &jac->C));
1444   }
1445   PetscFunctionReturn(PETSC_SUCCESS);
1446 }
1447 
/*@
   PCHYPRESetDiscreteCurl - Set discrete curl matrix for `PCHYPRE` type of ads

   Collective

   Input Parameters:
+  pc - the preconditioning context
-  C - the discrete curl

   Level: intermediate

   Notes:
    C should have as many rows as the number of faces and as many columns as the number of edges in the mesh

    Each row of C has as many nonzeros as the number of edges of a face, with column indexes being the global indexes of the corresponding edge: matrix entries are +1 and -1 depending on edge orientation with respect to the face orientation

   Developer Note:
   This automatically converts the matrix to `MATHYPRE` if it is not already of that type

   If this is only for `PCHYPRE` type of ads it should be called `PCHYPREADSSetDiscreteCurl()`

.seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`
@*/
PetscErrorCode PCHYPRESetDiscreteCurl(PC pc, Mat C)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(C, MAT_CLASSID, 2);
  PetscCheckSameComm(pc, 1, C, 2);
  /* dispatched to PCHYPRESetDiscreteCurl_HYPRE(); silently does nothing for other PC types */
  PetscTryMethod(pc, "PCHYPRESetDiscreteCurl_C", (PC, Mat), (pc, C));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1480 
1481 static PetscErrorCode PCHYPRESetInterpolations_HYPRE(PC pc, PetscInt dim, Mat RT_PiFull, Mat RT_Pi[], Mat ND_PiFull, Mat ND_Pi[])
1482 {
1483   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1484   PetscBool ishypre;
1485   PetscInt  i;
1486   PetscFunctionBegin;
1487 
1488   PetscCall(MatDestroy(&jac->RT_PiFull));
1489   PetscCall(MatDestroy(&jac->ND_PiFull));
1490   for (i = 0; i < 3; ++i) {
1491     PetscCall(MatDestroy(&jac->RT_Pi[i]));
1492     PetscCall(MatDestroy(&jac->ND_Pi[i]));
1493   }
1494 
1495   jac->dim = dim;
1496   if (RT_PiFull) {
1497     PetscCall(PetscObjectTypeCompare((PetscObject)RT_PiFull, MATHYPRE, &ishypre));
1498     if (ishypre) {
1499       PetscCall(PetscObjectReference((PetscObject)RT_PiFull));
1500       jac->RT_PiFull = RT_PiFull;
1501     } else {
1502       PetscCall(MatConvert(RT_PiFull, MATHYPRE, MAT_INITIAL_MATRIX, &jac->RT_PiFull));
1503     }
1504   }
1505   if (RT_Pi) {
1506     for (i = 0; i < dim; ++i) {
1507       if (RT_Pi[i]) {
1508         PetscCall(PetscObjectTypeCompare((PetscObject)RT_Pi[i], MATHYPRE, &ishypre));
1509         if (ishypre) {
1510           PetscCall(PetscObjectReference((PetscObject)RT_Pi[i]));
1511           jac->RT_Pi[i] = RT_Pi[i];
1512         } else {
1513           PetscCall(MatConvert(RT_Pi[i], MATHYPRE, MAT_INITIAL_MATRIX, &jac->RT_Pi[i]));
1514         }
1515       }
1516     }
1517   }
1518   if (ND_PiFull) {
1519     PetscCall(PetscObjectTypeCompare((PetscObject)ND_PiFull, MATHYPRE, &ishypre));
1520     if (ishypre) {
1521       PetscCall(PetscObjectReference((PetscObject)ND_PiFull));
1522       jac->ND_PiFull = ND_PiFull;
1523     } else {
1524       PetscCall(MatConvert(ND_PiFull, MATHYPRE, MAT_INITIAL_MATRIX, &jac->ND_PiFull));
1525     }
1526   }
1527   if (ND_Pi) {
1528     for (i = 0; i < dim; ++i) {
1529       if (ND_Pi[i]) {
1530         PetscCall(PetscObjectTypeCompare((PetscObject)ND_Pi[i], MATHYPRE, &ishypre));
1531         if (ishypre) {
1532           PetscCall(PetscObjectReference((PetscObject)ND_Pi[i]));
1533           jac->ND_Pi[i] = ND_Pi[i];
1534         } else {
1535           PetscCall(MatConvert(ND_Pi[i], MATHYPRE, MAT_INITIAL_MATRIX, &jac->ND_Pi[i]));
1536         }
1537       }
1538     }
1539   }
1540 
1541   PetscFunctionReturn(PETSC_SUCCESS);
1542 }
1543 
/*@
   PCHYPRESetInterpolations - Set interpolation matrices for `PCHYPRE` type of ams or ads

   Collective

   Input Parameters:
+  pc - the preconditioning context
.  dim - the dimension of the problem, only used in AMS
.  RT_PiFull - Raviart-Thomas interpolation matrix
.  RT_Pi - x/y/z component of Raviart-Thomas interpolation matrix
.  ND_PiFull - Nedelec interpolation matrix
-  ND_Pi - x/y/z component of Nedelec interpolation matrix

   Level: intermediate

   Notes:
    For AMS, only Nedelec interpolation matrices are needed, the Raviart-Thomas interpolation matrices can be set to NULL.

    For ADS, both type of interpolation matrices are needed.

    dim is expected to be at most 3 since the per-component interpolations are stored in arrays of 3 matrices — TODO confirm against callers

   Developer Note:
   This automatically converts the matrix to `MATHYPRE` if it is not already of that type

.seealso: `PCHYPRE`
@*/
PetscErrorCode PCHYPRESetInterpolations(PC pc, PetscInt dim, Mat RT_PiFull, Mat RT_Pi[], Mat ND_PiFull, Mat ND_Pi[])
{
  PetscInt i;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* all matrices are optional; validate only the ones that were provided */
  if (RT_PiFull) {
    PetscValidHeaderSpecific(RT_PiFull, MAT_CLASSID, 3);
    PetscCheckSameComm(pc, 1, RT_PiFull, 3);
  }
  if (RT_Pi) {
    PetscValidPointer(RT_Pi, 4);
    for (i = 0; i < dim; ++i) {
      if (RT_Pi[i]) {
        PetscValidHeaderSpecific(RT_Pi[i], MAT_CLASSID, 4);
        PetscCheckSameComm(pc, 1, RT_Pi[i], 4);
      }
    }
  }
  if (ND_PiFull) {
    PetscValidHeaderSpecific(ND_PiFull, MAT_CLASSID, 5);
    PetscCheckSameComm(pc, 1, ND_PiFull, 5);
  }
  if (ND_Pi) {
    PetscValidPointer(ND_Pi, 6);
    for (i = 0; i < dim; ++i) {
      if (ND_Pi[i]) {
        PetscValidHeaderSpecific(ND_Pi[i], MAT_CLASSID, 6);
        PetscCheckSameComm(pc, 1, ND_Pi[i], 6);
      }
    }
  }
  /* dispatched to PCHYPRESetInterpolations_HYPRE(); silently does nothing for other PC types */
  PetscTryMethod(pc, "PCHYPRESetInterpolations_C", (PC, PetscInt, Mat, Mat[], Mat, Mat[]), (pc, dim, RT_PiFull, RT_Pi, ND_PiFull, ND_Pi));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1604 
1605 static PetscErrorCode PCHYPRESetPoissonMatrix_HYPRE(PC pc, Mat A, PetscBool isalpha)
1606 {
1607   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1608   PetscBool ishypre;
1609 
1610   PetscFunctionBegin;
1611   PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
1612   if (ishypre) {
1613     if (isalpha) {
1614       PetscCall(PetscObjectReference((PetscObject)A));
1615       PetscCall(MatDestroy(&jac->alpha_Poisson));
1616       jac->alpha_Poisson = A;
1617     } else {
1618       if (A) {
1619         PetscCall(PetscObjectReference((PetscObject)A));
1620       } else {
1621         jac->ams_beta_is_zero = PETSC_TRUE;
1622       }
1623       PetscCall(MatDestroy(&jac->beta_Poisson));
1624       jac->beta_Poisson = A;
1625     }
1626   } else {
1627     if (isalpha) {
1628       PetscCall(MatDestroy(&jac->alpha_Poisson));
1629       PetscCall(MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &jac->alpha_Poisson));
1630     } else {
1631       if (A) {
1632         PetscCall(MatDestroy(&jac->beta_Poisson));
1633         PetscCall(MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &jac->beta_Poisson));
1634       } else {
1635         PetscCall(MatDestroy(&jac->beta_Poisson));
1636         jac->ams_beta_is_zero = PETSC_TRUE;
1637       }
1638     }
1639   }
1640   PetscFunctionReturn(PETSC_SUCCESS);
1641 }
1642 
/*@
   PCHYPRESetAlphaPoissonMatrix - Set vector Poisson matrix for `PCHYPRE` of type ams

   Collective

   Input Parameters:
+  pc - the preconditioning context
-  A - the matrix

   Level: intermediate

   Note:
    A should be obtained by discretizing the vector valued Poisson problem with linear finite elements

   Developer Note:
   This automatically converts the matrix to `MATHYPRE` if it is not already of that type

   If this is only for `PCHYPRE` type of ams it should be called `PCHYPREAMSSetAlphaPoissonMatrix()`

.seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetBetaPoissonMatrix()`
@*/
PetscErrorCode PCHYPRESetAlphaPoissonMatrix(PC pc, Mat A)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(A, MAT_CLASSID, 2);
  PetscCheckSameComm(pc, 1, A, 2);
  /* PETSC_TRUE selects the alpha (vector Poisson) slot in PCHYPRESetPoissonMatrix_HYPRE() */
  PetscTryMethod(pc, "PCHYPRESetPoissonMatrix_C", (PC, Mat, PetscBool), (pc, A, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1673 
/*@
   PCHYPRESetBetaPoissonMatrix - Set Poisson matrix for `PCHYPRE` of type ams

   Collective

   Input Parameters:
+  pc - the preconditioning context
-  A - the matrix, or NULL to turn it off

   Level: intermediate

   Note:
   A should be obtained by discretizing the Poisson problem with linear finite elements.

   Developer Note:
   This automatically converts the matrix to `MATHYPRE` if it is not already of that type

   If this is only for `PCHYPRE` type of ams it should be called `PCHYPREAMSSetBetaPoissonMatrix()`

.seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()`
@*/
PetscErrorCode PCHYPRESetBetaPoissonMatrix(PC pc, Mat A)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* A may be NULL (disables the scalar Poisson solver), so validate only when given */
  if (A) {
    PetscValidHeaderSpecific(A, MAT_CLASSID, 2);
    PetscCheckSameComm(pc, 1, A, 2);
  }
  /* PETSC_FALSE selects the beta (scalar Poisson) slot in PCHYPRESetPoissonMatrix_HYPRE() */
  PetscTryMethod(pc, "PCHYPRESetPoissonMatrix_C", (PC, Mat, PetscBool), (pc, A, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1706 
1707 static PetscErrorCode PCHYPRESetEdgeConstantVectors_HYPRE(PC pc, Vec ozz, Vec zoz, Vec zzo)
1708 {
1709   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1710 
1711   PetscFunctionBegin;
1712   /* throw away any vector if already set */
1713   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[0]));
1714   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[1]));
1715   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[2]));
1716   PetscCall(VecHYPRE_IJVectorCreate(ozz->map, &jac->constants[0]));
1717   PetscCall(VecHYPRE_IJVectorCopy(ozz, jac->constants[0]));
1718   PetscCall(VecHYPRE_IJVectorCreate(zoz->map, &jac->constants[1]));
1719   PetscCall(VecHYPRE_IJVectorCopy(zoz, jac->constants[1]));
1720   jac->dim = 2;
1721   if (zzo) {
1722     PetscCall(VecHYPRE_IJVectorCreate(zzo->map, &jac->constants[2]));
1723     PetscCall(VecHYPRE_IJVectorCopy(zzo, jac->constants[2]));
1724     jac->dim++;
1725   }
1726   PetscFunctionReturn(PETSC_SUCCESS);
1727 }
1728 
1729 /*@
1730    PCHYPRESetEdgeConstantVectors - Set the representation of the constant vector fields in the edge element basis for `PCHYPRE` of type ams
1731 
1732    Collective
1733 
1734    Input Parameters:
1735 +  pc - the preconditioning context
1736 .  ozz - vector representing (1,0,0) (or (1,0) in 2D)
1737 .  zoz - vector representing (0,1,0) (or (0,1) in 2D)
1738 -  zzo - vector representing (0,0,1) (use NULL in 2D)
1739 
1740    Level: intermediate
1741 
1742    Developer Note:
1743    If this is only for  `PCHYPRE` type of ams it should be called `PCHYPREAMSSetEdgeConstantVectors()`
1744 
1745 .seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()`
1746 @*/
PetscErrorCode PCHYPRESetEdgeConstantVectors(PC pc, Vec ozz, Vec zoz, Vec zzo)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(ozz, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(zoz, VEC_CLASSID, 3);
  /* zzo is optional: it is NULL for 2D problems */
  if (zzo) PetscValidHeaderSpecific(zzo, VEC_CLASSID, 4);
  PetscCheckSameComm(pc, 1, ozz, 2);
  PetscCheckSameComm(pc, 1, zoz, 3);
  if (zzo) PetscCheckSameComm(pc, 1, zzo, 4);
  /* no-op when the PC is not of type PCHYPRE (PetscTryMethod dispatch) */
  PetscTryMethod(pc, "PCHYPRESetEdgeConstantVectors_C", (PC, Vec, Vec, Vec), (pc, ozz, zoz, zzo));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1760 
static PetscErrorCode PCHYPREAMSSetInteriorNodes_HYPRE(PC pc, Vec interior)
{
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;

  PetscFunctionBegin;
  /* discard any previously stored interior-node vector, then copy the new one */
  PetscCall(VecHYPRE_IJVectorDestroy(&jac->interior));
  PetscCall(VecHYPRE_IJVectorCreate(interior->map, &jac->interior));
  PetscCall(VecHYPRE_IJVectorCopy(interior, jac->interior));
  /* record that beta vanishes on part of the domain (zero-conductivity region) */
  jac->ams_beta_is_zero_part = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1772 
1773 /*@
1774   PCHYPREAMSSetInteriorNodes - Set the list of interior nodes to a zero-conductivity region for `PCHYPRE` of type ams
1775 
1776    Collective
1777 
1778    Input Parameters:
1779 +  pc - the preconditioning context
1780 -  interior - vector. node is interior if its entry in the array is 1.0.
1781 
1782    Level: intermediate
1783 
1784    Note:
1785    This calls `HYPRE_AMSSetInteriorNodes()`
1786 
1787    Developer Note:
   Unlike the other AMS helper routines above, this one already uses the `PCHYPREAMS` prefix appropriate for a routine specific to `PCHYPRE` of type ams
1789 
1790 .seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()`
1791 @*/
PetscErrorCode PCHYPREAMSSetInteriorNodes(PC pc, Vec interior)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(interior, VEC_CLASSID, 2);
  PetscCheckSameComm(pc, 1, interior, 2);
  /* no-op when the PC is not of type PCHYPRE (PetscTryMethod dispatch) */
  PetscTryMethod(pc, "PCHYPREAMSSetInteriorNodes_C", (PC, Vec), (pc, interior));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1801 
1802 static PetscErrorCode PCSetCoordinates_HYPRE(PC pc, PetscInt dim, PetscInt nloc, PetscReal *coords)
1803 {
1804   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1805   Vec       tv;
1806   PetscInt  i;
1807 
1808   PetscFunctionBegin;
1809   /* throw away any coordinate vector if already set */
1810   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[0]));
1811   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[1]));
1812   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[2]));
1813   jac->dim = dim;
1814 
1815   /* compute IJ vector for coordinates */
1816   PetscCall(VecCreate(PetscObjectComm((PetscObject)pc), &tv));
1817   PetscCall(VecSetType(tv, VECSTANDARD));
1818   PetscCall(VecSetSizes(tv, nloc, PETSC_DECIDE));
1819   for (i = 0; i < dim; i++) {
1820     PetscScalar *array;
1821     PetscInt     j;
1822 
1823     PetscCall(VecHYPRE_IJVectorCreate(tv->map, &jac->coords[i]));
1824     PetscCall(VecGetArrayWrite(tv, &array));
1825     for (j = 0; j < nloc; j++) array[j] = coords[j * dim + i];
1826     PetscCall(VecRestoreArrayWrite(tv, &array));
1827     PetscCall(VecHYPRE_IJVectorCopy(tv, jac->coords[i]));
1828   }
1829   PetscCall(VecDestroy(&tv));
1830   PetscFunctionReturn(PETSC_SUCCESS);
1831 }
1832 
static PetscErrorCode PCHYPREGetType_HYPRE(PC pc, const char *name[])
{
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;

  PetscFunctionBegin;
  /* returns a borrowed pointer to the stored type string; NULL if no type set yet */
  *name = jac->hypre_type;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1841 
1842 static PetscErrorCode PCHYPRESetType_HYPRE(PC pc, const char name[])
1843 {
1844   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1845   PetscBool flag;
1846 
1847   PetscFunctionBegin;
1848   if (jac->hypre_type) {
1849     PetscCall(PetscStrcmp(jac->hypre_type, name, &flag));
1850     PetscCheck(flag, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Cannot reset the HYPRE preconditioner type once it has been set");
1851     PetscFunctionReturn(PETSC_SUCCESS);
1852   } else {
1853     PetscCall(PetscStrallocpy(name, &jac->hypre_type));
1854   }
1855 
1856   jac->maxiter         = PETSC_DEFAULT;
1857   jac->tol             = PETSC_DEFAULT;
1858   jac->printstatistics = PetscLogPrintInfo;
1859 
1860   PetscCall(PetscStrcmp("pilut", jac->hypre_type, &flag));
1861   if (flag) {
1862     PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre));
1863     PetscCallExternal(HYPRE_ParCSRPilutCreate, jac->comm_hypre, &jac->hsolver);
1864     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_Pilut;
1865     pc->ops->view           = PCView_HYPRE_Pilut;
1866     jac->destroy            = HYPRE_ParCSRPilutDestroy;
1867     jac->setup              = HYPRE_ParCSRPilutSetup;
1868     jac->solve              = HYPRE_ParCSRPilutSolve;
1869     jac->factorrowsize      = PETSC_DEFAULT;
1870     PetscFunctionReturn(PETSC_SUCCESS);
1871   }
1872   PetscCall(PetscStrcmp("euclid", jac->hypre_type, &flag));
1873   if (flag) {
1874 #if defined(PETSC_USE_64BIT_INDICES)
1875     SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Hypre Euclid does not support 64 bit indices");
1876 #endif
1877     PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre));
1878     PetscCallExternal(HYPRE_EuclidCreate, jac->comm_hypre, &jac->hsolver);
1879     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_Euclid;
1880     pc->ops->view           = PCView_HYPRE_Euclid;
1881     jac->destroy            = HYPRE_EuclidDestroy;
1882     jac->setup              = HYPRE_EuclidSetup;
1883     jac->solve              = HYPRE_EuclidSolve;
1884     jac->factorrowsize      = PETSC_DEFAULT;
1885     jac->eu_level           = PETSC_DEFAULT; /* default */
1886     PetscFunctionReturn(PETSC_SUCCESS);
1887   }
1888   PetscCall(PetscStrcmp("parasails", jac->hypre_type, &flag));
1889   if (flag) {
1890     PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre));
1891     PetscCallExternal(HYPRE_ParaSailsCreate, jac->comm_hypre, &jac->hsolver);
1892     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ParaSails;
1893     pc->ops->view           = PCView_HYPRE_ParaSails;
1894     jac->destroy            = HYPRE_ParaSailsDestroy;
1895     jac->setup              = HYPRE_ParaSailsSetup;
1896     jac->solve              = HYPRE_ParaSailsSolve;
1897     /* initialize */
1898     jac->nlevels   = 1;
1899     jac->threshold = .1;
1900     jac->filter    = .1;
1901     jac->loadbal   = 0;
1902     if (PetscLogPrintInfo) jac->logging = (int)PETSC_TRUE;
1903     else jac->logging = (int)PETSC_FALSE;
1904 
1905     jac->ruse = (int)PETSC_FALSE;
1906     jac->symt = 0;
1907     PetscCallExternal(HYPRE_ParaSailsSetParams, jac->hsolver, jac->threshold, jac->nlevels);
1908     PetscCallExternal(HYPRE_ParaSailsSetFilter, jac->hsolver, jac->filter);
1909     PetscCallExternal(HYPRE_ParaSailsSetLoadbal, jac->hsolver, jac->loadbal);
1910     PetscCallExternal(HYPRE_ParaSailsSetLogging, jac->hsolver, jac->logging);
1911     PetscCallExternal(HYPRE_ParaSailsSetReuse, jac->hsolver, jac->ruse);
1912     PetscCallExternal(HYPRE_ParaSailsSetSym, jac->hsolver, jac->symt);
1913     PetscFunctionReturn(PETSC_SUCCESS);
1914   }
1915   PetscCall(PetscStrcmp("boomeramg", jac->hypre_type, &flag));
1916   if (flag) {
1917     PetscCallExternal(HYPRE_BoomerAMGCreate, &jac->hsolver);
1918     pc->ops->setfromoptions  = PCSetFromOptions_HYPRE_BoomerAMG;
1919     pc->ops->view            = PCView_HYPRE_BoomerAMG;
1920     pc->ops->applytranspose  = PCApplyTranspose_HYPRE_BoomerAMG;
1921     pc->ops->applyrichardson = PCApplyRichardson_HYPRE_BoomerAMG;
1922     pc->ops->matapply        = PCMatApply_HYPRE_BoomerAMG;
1923     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", PCGetInterpolations_BoomerAMG));
1924     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", PCGetCoarseOperators_BoomerAMG));
1925     jac->destroy         = HYPRE_BoomerAMGDestroy;
1926     jac->setup           = HYPRE_BoomerAMGSetup;
1927     jac->solve           = HYPRE_BoomerAMGSolve;
1928     jac->applyrichardson = PETSC_FALSE;
1929     /* these defaults match the hypre defaults */
1930     jac->cycletype       = 1;
1931     jac->maxlevels       = 25;
1932     jac->maxiter         = 1;
1933     jac->tol             = 0.0; /* tolerance of zero indicates use as preconditioner (suppresses convergence errors) */
1934     jac->truncfactor     = 0.0;
1935     jac->strongthreshold = .25;
1936     jac->maxrowsum       = .9;
1937     jac->coarsentype     = 6;
1938     jac->measuretype     = 0;
1939     jac->gridsweeps[0] = jac->gridsweeps[1] = jac->gridsweeps[2] = 1;
1940     jac->smoothtype                                              = -1; /* Not set by default */
1941     jac->smoothnumlevels                                         = 25;
1942     jac->eu_level                                                = 0;
1943     jac->eu_droptolerance                                        = 0;
1944     jac->eu_bj                                                   = 0;
1945     jac->relaxtype[0] = jac->relaxtype[1] = 6; /* Defaults to SYMMETRIC since in PETSc we are using a PC - most likely with CG */
1946     jac->relaxtype[2]                     = 9; /*G.E. */
1947     jac->relaxweight                      = 1.0;
1948     jac->outerrelaxweight                 = 1.0;
1949     jac->relaxorder                       = 1;
1950     jac->interptype                       = 0;
1951     jac->Rtype                            = 0;
1952     jac->Rstrongthreshold                 = 0.25;
1953     jac->Rfilterthreshold                 = 0.0;
1954     jac->Adroptype                        = -1;
1955     jac->Adroptol                         = 0.0;
1956     jac->agg_nl                           = 0;
1957     jac->agg_interptype                   = 4;
1958     jac->pmax                             = 0;
1959     jac->truncfactor                      = 0.0;
1960     jac->agg_num_paths                    = 1;
1961     jac->maxc                             = 9;
1962     jac->minc                             = 1;
1963     jac->nodal_coarsening                 = 0;
1964     jac->nodal_coarsening_diag            = 0;
1965     jac->vec_interp_variant               = 0;
1966     jac->vec_interp_qmax                  = 0;
1967     jac->vec_interp_smooth                = PETSC_FALSE;
1968     jac->interp_refine                    = 0;
1969     jac->nodal_relax                      = PETSC_FALSE;
1970     jac->nodal_relax_levels               = 1;
1971     jac->rap2                             = 0;
1972 
1973     /* GPU defaults
1974          from https://hypre.readthedocs.io/en/latest/solvers-boomeramg.html#gpu-supported-options
1975          and /src/parcsr_ls/par_amg.c */
1976 #if defined(PETSC_HAVE_HYPRE_DEVICE)
1977     jac->keeptranspose  = PETSC_TRUE;
1978     jac->mod_rap2       = 1;
1979     jac->coarsentype    = 8;
1980     jac->relaxorder     = 0;
1981     jac->interptype     = 6;
1982     jac->relaxtype[0]   = 18;
1983     jac->relaxtype[1]   = 18;
1984     jac->agg_interptype = 7;
1985 #else
1986     jac->keeptranspose = PETSC_FALSE;
1987     jac->mod_rap2      = 0;
1988 #endif
1989     PetscCallExternal(HYPRE_BoomerAMGSetCycleType, jac->hsolver, jac->cycletype);
1990     PetscCallExternal(HYPRE_BoomerAMGSetMaxLevels, jac->hsolver, jac->maxlevels);
1991     PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter);
1992     PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol);
1993     PetscCallExternal(HYPRE_BoomerAMGSetTruncFactor, jac->hsolver, jac->truncfactor);
1994     PetscCallExternal(HYPRE_BoomerAMGSetStrongThreshold, jac->hsolver, jac->strongthreshold);
1995     PetscCallExternal(HYPRE_BoomerAMGSetMaxRowSum, jac->hsolver, jac->maxrowsum);
1996     PetscCallExternal(HYPRE_BoomerAMGSetCoarsenType, jac->hsolver, jac->coarsentype);
1997     PetscCallExternal(HYPRE_BoomerAMGSetMeasureType, jac->hsolver, jac->measuretype);
1998     PetscCallExternal(HYPRE_BoomerAMGSetRelaxOrder, jac->hsolver, jac->relaxorder);
1999     PetscCallExternal(HYPRE_BoomerAMGSetInterpType, jac->hsolver, jac->interptype);
2000     PetscCallExternal(HYPRE_BoomerAMGSetAggNumLevels, jac->hsolver, jac->agg_nl);
2001     PetscCallExternal(HYPRE_BoomerAMGSetAggInterpType, jac->hsolver, jac->agg_interptype);
2002     PetscCallExternal(HYPRE_BoomerAMGSetPMaxElmts, jac->hsolver, jac->pmax);
2003     PetscCallExternal(HYPRE_BoomerAMGSetNumPaths, jac->hsolver, jac->agg_num_paths);
2004     PetscCallExternal(HYPRE_BoomerAMGSetRelaxType, jac->hsolver, jac->relaxtype[0]);  /* defaults coarse to 9 */
2005     PetscCallExternal(HYPRE_BoomerAMGSetNumSweeps, jac->hsolver, jac->gridsweeps[0]); /* defaults coarse to 1 */
2006     PetscCallExternal(HYPRE_BoomerAMGSetMaxCoarseSize, jac->hsolver, jac->maxc);
2007     PetscCallExternal(HYPRE_BoomerAMGSetMinCoarseSize, jac->hsolver, jac->minc);
2008     /* GPU */
2009 #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0)
2010     PetscCallExternal(HYPRE_BoomerAMGSetKeepTranspose, jac->hsolver, jac->keeptranspose ? 1 : 0);
2011     PetscCallExternal(HYPRE_BoomerAMGSetRAP2, jac->hsolver, jac->rap2);
2012     PetscCallExternal(HYPRE_BoomerAMGSetModuleRAP2, jac->hsolver, jac->mod_rap2);
2013 #endif
2014 
2015     /* AIR */
2016 #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0)
2017     PetscCallExternal(HYPRE_BoomerAMGSetRestriction, jac->hsolver, jac->Rtype);
2018     PetscCallExternal(HYPRE_BoomerAMGSetStrongThresholdR, jac->hsolver, jac->Rstrongthreshold);
2019     PetscCallExternal(HYPRE_BoomerAMGSetFilterThresholdR, jac->hsolver, jac->Rfilterthreshold);
2020     PetscCallExternal(HYPRE_BoomerAMGSetADropTol, jac->hsolver, jac->Adroptol);
2021     PetscCallExternal(HYPRE_BoomerAMGSetADropType, jac->hsolver, jac->Adroptype);
2022 #endif
2023     PetscFunctionReturn(PETSC_SUCCESS);
2024   }
2025   PetscCall(PetscStrcmp("ams", jac->hypre_type, &flag));
2026   if (flag) {
2027     PetscCallExternal(HYPRE_AMSCreate, &jac->hsolver);
2028     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_AMS;
2029     pc->ops->view           = PCView_HYPRE_AMS;
2030     jac->destroy            = HYPRE_AMSDestroy;
2031     jac->setup              = HYPRE_AMSSetup;
2032     jac->solve              = HYPRE_AMSSolve;
2033     jac->coords[0]          = NULL;
2034     jac->coords[1]          = NULL;
2035     jac->coords[2]          = NULL;
2036     jac->interior           = NULL;
2037     /* solver parameters: these are borrowed from mfem package, and they are not the default values from HYPRE AMS */
2038     jac->as_print       = 0;
2039     jac->as_max_iter    = 1;  /* used as a preconditioner */
2040     jac->as_tol         = 0.; /* used as a preconditioner */
2041     jac->ams_cycle_type = 13;
2042     /* Smoothing options */
2043     jac->as_relax_type   = 2;
2044     jac->as_relax_times  = 1;
2045     jac->as_relax_weight = 1.0;
2046     jac->as_omega        = 1.0;
2047     /* Vector valued Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */
2048     jac->as_amg_alpha_opts[0] = 10;
2049     jac->as_amg_alpha_opts[1] = 1;
2050     jac->as_amg_alpha_opts[2] = 6;
2051     jac->as_amg_alpha_opts[3] = 6;
2052     jac->as_amg_alpha_opts[4] = 4;
2053     jac->as_amg_alpha_theta   = 0.25;
2054     /* Scalar Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */
2055     jac->as_amg_beta_opts[0] = 10;
2056     jac->as_amg_beta_opts[1] = 1;
2057     jac->as_amg_beta_opts[2] = 6;
2058     jac->as_amg_beta_opts[3] = 6;
2059     jac->as_amg_beta_opts[4] = 4;
2060     jac->as_amg_beta_theta   = 0.25;
2061     PetscCallExternal(HYPRE_AMSSetPrintLevel, jac->hsolver, jac->as_print);
2062     PetscCallExternal(HYPRE_AMSSetMaxIter, jac->hsolver, jac->as_max_iter);
2063     PetscCallExternal(HYPRE_AMSSetCycleType, jac->hsolver, jac->ams_cycle_type);
2064     PetscCallExternal(HYPRE_AMSSetTol, jac->hsolver, jac->as_tol);
2065     PetscCallExternal(HYPRE_AMSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
2066     PetscCallExternal(HYPRE_AMSSetAlphaAMGOptions, jac->hsolver, jac->as_amg_alpha_opts[0], /* AMG coarsen type */
2067                       jac->as_amg_alpha_opts[1],                                            /* AMG agg_levels */
2068                       jac->as_amg_alpha_opts[2],                                            /* AMG relax_type */
2069                       jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3],                   /* AMG interp_type */
2070                       jac->as_amg_alpha_opts[4]);                                           /* AMG Pmax */
2071     PetscCallExternal(HYPRE_AMSSetBetaAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0],   /* AMG coarsen type */
2072                       jac->as_amg_beta_opts[1],                                             /* AMG agg_levels */
2073                       jac->as_amg_beta_opts[2],                                             /* AMG relax_type */
2074                       jac->as_amg_beta_theta, jac->as_amg_beta_opts[3],                     /* AMG interp_type */
2075                       jac->as_amg_beta_opts[4]);                                            /* AMG Pmax */
2076     /* Zero conductivity */
2077     jac->ams_beta_is_zero      = PETSC_FALSE;
2078     jac->ams_beta_is_zero_part = PETSC_FALSE;
2079     PetscFunctionReturn(PETSC_SUCCESS);
2080   }
2081   PetscCall(PetscStrcmp("ads", jac->hypre_type, &flag));
2082   if (flag) {
2083     PetscCallExternal(HYPRE_ADSCreate, &jac->hsolver);
2084     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ADS;
2085     pc->ops->view           = PCView_HYPRE_ADS;
2086     jac->destroy            = HYPRE_ADSDestroy;
2087     jac->setup              = HYPRE_ADSSetup;
2088     jac->solve              = HYPRE_ADSSolve;
2089     jac->coords[0]          = NULL;
2090     jac->coords[1]          = NULL;
2091     jac->coords[2]          = NULL;
2092     /* solver parameters: these are borrowed from mfem package, and they are not the default values from HYPRE ADS */
2093     jac->as_print       = 0;
2094     jac->as_max_iter    = 1;  /* used as a preconditioner */
2095     jac->as_tol         = 0.; /* used as a preconditioner */
2096     jac->ads_cycle_type = 13;
2097     /* Smoothing options */
2098     jac->as_relax_type   = 2;
2099     jac->as_relax_times  = 1;
2100     jac->as_relax_weight = 1.0;
2101     jac->as_omega        = 1.0;
2102     /* AMS solver parameters: cycle_type, coarsen type, agg_levels, relax_type, interp_type, Pmax */
2103     jac->ams_cycle_type       = 14;
2104     jac->as_amg_alpha_opts[0] = 10;
2105     jac->as_amg_alpha_opts[1] = 1;
2106     jac->as_amg_alpha_opts[2] = 6;
2107     jac->as_amg_alpha_opts[3] = 6;
2108     jac->as_amg_alpha_opts[4] = 4;
2109     jac->as_amg_alpha_theta   = 0.25;
2110     /* Vector Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */
2111     jac->as_amg_beta_opts[0] = 10;
2112     jac->as_amg_beta_opts[1] = 1;
2113     jac->as_amg_beta_opts[2] = 6;
2114     jac->as_amg_beta_opts[3] = 6;
2115     jac->as_amg_beta_opts[4] = 4;
2116     jac->as_amg_beta_theta   = 0.25;
2117     PetscCallExternal(HYPRE_ADSSetPrintLevel, jac->hsolver, jac->as_print);
2118     PetscCallExternal(HYPRE_ADSSetMaxIter, jac->hsolver, jac->as_max_iter);
2119     PetscCallExternal(HYPRE_ADSSetCycleType, jac->hsolver, jac->ams_cycle_type);
2120     PetscCallExternal(HYPRE_ADSSetTol, jac->hsolver, jac->as_tol);
2121     PetscCallExternal(HYPRE_ADSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
2122     PetscCallExternal(HYPRE_ADSSetAMSOptions, jac->hsolver, jac->ams_cycle_type,      /* AMG coarsen type */
2123                       jac->as_amg_alpha_opts[0],                                      /* AMG coarsen type */
2124                       jac->as_amg_alpha_opts[1],                                      /* AMG agg_levels */
2125                       jac->as_amg_alpha_opts[2],                                      /* AMG relax_type */
2126                       jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3],             /* AMG interp_type */
2127                       jac->as_amg_alpha_opts[4]);                                     /* AMG Pmax */
2128     PetscCallExternal(HYPRE_ADSSetAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */
2129                       jac->as_amg_beta_opts[1],                                       /* AMG agg_levels */
2130                       jac->as_amg_beta_opts[2],                                       /* AMG relax_type */
2131                       jac->as_amg_beta_theta, jac->as_amg_beta_opts[3],               /* AMG interp_type */
2132                       jac->as_amg_beta_opts[4]);                                      /* AMG Pmax */
2133     PetscFunctionReturn(PETSC_SUCCESS);
2134   }
2135   PetscCall(PetscFree(jac->hypre_type));
2136 
2137   jac->hypre_type = NULL;
2138   SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown HYPRE preconditioner %s; Choices are euclid, pilut, parasails, boomeramg, ams", name);
2139 }
2140 
2141 /*
2142     It only gets here if the HYPRE type has not been set before the call to
2143    ...SetFromOptions() which actually is most of the time
2144 */
2145 PetscErrorCode PCSetFromOptions_HYPRE(PC pc, PetscOptionItems *PetscOptionsObject)
2146 {
2147   PetscInt    indx;
2148   const char *type[] = {"euclid", "pilut", "parasails", "boomeramg", "ams", "ads"};
2149   PetscBool   flg;
2150 
2151   PetscFunctionBegin;
2152   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE preconditioner options");
2153   PetscCall(PetscOptionsEList("-pc_hypre_type", "HYPRE preconditioner type", "PCHYPRESetType", type, PETSC_STATIC_ARRAY_LENGTH(type), "boomeramg", &indx, &flg));
2154   if (flg) {
2155     PetscCall(PCHYPRESetType_HYPRE(pc, type[indx]));
2156   } else {
2157     PetscCall(PCHYPRESetType_HYPRE(pc, "boomeramg"));
2158   }
2159   PetscTryTypeMethod(pc, setfromoptions, PetscOptionsObject);
2160   PetscOptionsHeadEnd();
2161   PetscFunctionReturn(PETSC_SUCCESS);
2162 }
2163 
2164 /*@C
2165      PCHYPRESetType - Sets which hypre preconditioner you wish to use
2166 
2167    Input Parameters:
2168 +     pc - the preconditioner context
2169 -     name - either  euclid, pilut, parasails, boomeramg, ams, ads
2170 
2171    Options Database Key:
.  -pc_hypre_type - One of euclid, pilut, parasails, boomeramg, ams, ads
2173 
2174    Level: intermediate
2175 
2176 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHYPRE`
2177 @*/
PetscErrorCode PCHYPRESetType(PC pc, const char name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidCharPointer(name, 2);
  /* no-op when the PC is not of type PCHYPRE (PetscTryMethod dispatch) */
  PetscTryMethod(pc, "PCHYPRESetType_C", (PC, const char[]), (pc, name));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2186 
2187 /*@C
2188      PCHYPREGetType - Gets which hypre preconditioner you are using
2189 
2190    Input Parameter:
2191 .     pc - the preconditioner context
2192 
2193    Output Parameter:
2194 .     name - either  euclid, pilut, parasails, boomeramg, ams, ads
2195 
2196    Level: intermediate
2197 
2198 .seealso: `PCCreate()`, `PCHYPRESetType()`, `PCType`, `PC`, `PCHYPRE`
2199 @*/
PetscErrorCode PCHYPREGetType(PC pc, const char *name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(name, 2);
  /* no-op when the PC is not of type PCHYPRE; *name is then left unchanged */
  PetscTryMethod(pc, "PCHYPREGetType_C", (PC, const char *[]), (pc, name));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2208 
2209 /*@C
2210    PCMGGalerkinSetMatProductAlgorithm - Set type of SpGEMM for hypre to use on GPUs
2211 
2212    Logically Collective
2213 
2214    Input Parameters:
2215 +  pc - the hypre context
-  name - one of 'cusparse', 'hypre'
2217 
2218    Options Database Key:
2219 .  -pc_mg_galerkin_mat_product_algorithm <cusparse,hypre> - Type of SpGEMM to use in hypre
2220 
2221    Level: intermediate
2222 
2223    Developer Note:
   The name starts with `PCMG`; should it not be `PCHYPREBoomerAMG`?
2225 
2226 .seealso: `PCHYPRE`, `PCMGGalerkinGetMatProductAlgorithm()`
2227 @*/
PetscErrorCode PCMGGalerkinSetMatProductAlgorithm(PC pc, const char name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* no-op when the PC does not implement this method (PetscTryMethod dispatch) */
  PetscTryMethod(pc, "PCMGGalerkinSetMatProductAlgorithm_C", (PC, const char[]), (pc, name));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2235 
2236 /*@C
2237    PCMGGalerkinGetMatProductAlgorithm - Get type of SpGEMM for hypre to use on GPUs
2238 
2239    Not Collective
2240 
2241    Input Parameter:
2242 .  pc - the multigrid context
2243 
2244    Output Parameter:
2245 .  name - one of 'cusparse', 'hypre'
2246 
2247    Level: intermediate
2248 
.seealso: `PCHYPRE`, `PCMGGalerkinSetMatProductAlgorithm()`
2250 @*/
PetscErrorCode PCMGGalerkinGetMatProductAlgorithm(PC pc, const char *name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* no-op when the PC does not implement this method; *name is then left unchanged */
  PetscTryMethod(pc, "PCMGGalerkinGetMatProductAlgorithm_C", (PC, const char *[]), (pc, name));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2258 
2259 /*MC
2260      PCHYPRE - Allows you to use the matrix element based preconditioners in the LLNL package hypre as PETSc `PC`
2261 
2262    Options Database Keys:
2263 +   -pc_hypre_type - One of euclid, pilut, parasails, boomeramg, ams, ads
.   -pc_hypre_boomeramg_nodal_coarsen <n> - where n is from 1 to 6 (see `HYPRE_BoomerAMGSetNodal()`)
2265 .   -pc_hypre_boomeramg_vec_interp_variant <v> - where v is from 1 to 3 (see `HYPRE_BoomerAMGSetInterpVecVariant()`)
2266 -   Many others, run with -pc_type hypre -pc_hypre_type XXX -help to see options for the XXX preconditioner
2267 
2268    Level: intermediate
2269 
2270    Notes:
2271     Apart from pc_hypre_type (for which there is `PCHYPRESetType()`),
2272           the many hypre options can ONLY be set via the options database (e.g. the command line
2273           or with `PetscOptionsSetValue()`, there are no functions to set them)
2274 
2275           The options -pc_hypre_boomeramg_max_iter and -pc_hypre_boomeramg_tol refer to the number of iterations
2276           (V-cycles) and tolerance that boomeramg does EACH time it is called. So for example, if
2277           -pc_hypre_boomeramg_max_iter is set to 2 then 2-V-cycles are being used to define the preconditioner
2278           (-pc_hypre_boomeramg_tol should be set to 0.0 - the default - to strictly use a fixed number of
2279           iterations per hypre call). -ksp_max_it and -ksp_rtol STILL determine the total number of iterations
2280           and tolerance for the Krylov solver. For example, if -pc_hypre_boomeramg_max_iter is 2 and -ksp_max_it is 10
2281           then AT MOST twenty V-cycles of boomeramg will be called.
2282 
2283            Note that the option -pc_hypre_boomeramg_relax_type_all defaults to symmetric relaxation
2284            (symmetric-SOR/Jacobi), which is required for Krylov solvers like CG that expect symmetry.
2285            Otherwise, you may want to use -pc_hypre_boomeramg_relax_type_all SOR/Jacobi.
2286           If you wish to use BoomerAMG WITHOUT a Krylov method use -ksp_type richardson NOT -ksp_type preonly
2287           and use -ksp_max_it to control the number of V-cycles.
2288           (see the PETSc FAQ.html at the PETSc website under the Documentation tab).
2289 
2290           `MatSetNearNullSpace()` - if you provide a near null space to your matrix it is ignored by hypre UNLESS you also use
2291           the following two options: ``-pc_hypre_boomeramg_nodal_coarsen <n> -pc_hypre_boomeramg_vec_interp_variant <v>``
2292 
2293           See `PCPFMG`, `PCSMG`, and `PCSYSPFMG` for access to hypre's other (nonalgebraic) multigrid solvers
2294 
2295           For `PCHYPRE` type of ams or ads auxiliary data must be provided to the preconditioner with `PCHYPRESetDiscreteGradient()`,
2296           `PCHYPRESetDiscreteCurl()`, `PCHYPRESetInterpolations()`, `PCHYPRESetAlphaPoissonMatrix()`, `PCHYPRESetBetaPoissonMatrix()`, `PCHYPRESetEdgeConstantVectors()`,
2297           `PCHYPREAMSSetInteriorNodes()`
2298 
2299    PETSc provides its own geometric and algebraic multigrid solvers `PCMG` and `PCGAMG`, also see `PCHMG` which is useful for certain multicomponent problems
2300 
2301    GPU Notes:
2302      To configure hypre BoomerAMG so that it can utilize NVIDIA GPUs run ./configure --download-hypre --with-cuda
2303      Then pass `VECCUDA` vectors and `MATAIJCUSPARSE` matrices to the solvers and PETSc will automatically utilize hypre's GPU solvers.
2304 
2305      To configure hypre BoomerAMG so that it can utilize AMD GPUs run ./configure --download-hypre --with-hip
2306      Then pass `VECHIP` vectors to the solvers and PETSc will automatically utilize hypre's GPU solvers.
2307 
2308 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHYPRESetType()`, `PCPFMG`, `PCGAMG`, `PCSYSPFMG`, `PCSMG`, `PCHYPRESetDiscreteGradient()`,
2309           `PCHYPRESetDiscreteCurl()`, `PCHYPRESetInterpolations()`, `PCHYPRESetAlphaPoissonMatrix()`, `PCHYPRESetBetaPoissonMatrix()`, `PCHYPRESetEdgeConstantVectors()`,
          `PCHYPREAMSSetInteriorNodes()`
2311 M*/
2312 
/* Constructor for PCHYPRE: allocates the PC_HYPRE context, installs the generic
   function table, and composes the PCHYPRE-specific entry points so that the
   public PCHYPRExxx() routines can dispatch to this implementation. */
PETSC_EXTERN PetscErrorCode PCCreate_HYPRE(PC pc)
{
  PC_HYPRE *jac;

  PetscFunctionBegin;
  PetscCall(PetscNew(&jac));

  pc->data                = jac;
  pc->ops->reset          = PCReset_HYPRE;
  pc->ops->destroy        = PCDestroy_HYPRE;
  pc->ops->setfromoptions = PCSetFromOptions_HYPRE;
  pc->ops->setup          = PCSetUp_HYPRE;
  pc->ops->apply          = PCApply_HYPRE;
  jac->comm_hypre         = MPI_COMM_NULL; /* duplicated lazily when a hypre solver that needs it is created */
  /* composed functions: the string keys must match the ones used by PetscTryMethod() in the public wrappers */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetType_C", PCHYPRESetType_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetType_C", PCHYPREGetType_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteGradient_C", PCHYPRESetDiscreteGradient_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteCurl_C", PCHYPRESetDiscreteCurl_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetInterpolations_C", PCHYPRESetInterpolations_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetEdgeConstantVectors_C", PCHYPRESetEdgeConstantVectors_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREAMSSetInteriorNodes_C", PCHYPREAMSSetInteriorNodes_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetPoissonMatrix_C", PCHYPRESetPoissonMatrix_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinSetMatProductAlgorithm_C", PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinGetMatProductAlgorithm_C", PCMGGalerkinGetMatProductAlgorithm_HYPRE_BoomerAMG));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  /* hypre built with GPU support requires the matching PETSc device to be initialized */
  #if defined(HYPRE_USING_HIP)
  PetscCall(PetscDeviceInitialize(PETSC_DEVICE_HIP));
  #endif
  #if defined(HYPRE_USING_CUDA)
  PetscCall(PetscDeviceInitialize(PETSC_DEVICE_CUDA));
  #endif
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}
2348 
/* Private context for the hypre struct-grid PFMG preconditioner (PCPFMG) */
typedef struct {
  MPI_Comm           hcomm; /* does not share comm with HYPRE_StructMatrix because need to create solver before getting matrix */
  HYPRE_StructSolver hsolver; /* the hypre PFMG solver object; destroyed and recreated in PCSetUp_PFMG() */

  /* keep copy of PFMG options used so may view them */
  PetscInt  its;        /* maximum number of iterations */
  double    tol;        /* convergence tolerance */
  PetscInt  relax_type; /* index into PFMGRelaxType[] */
  PetscInt  rap_type;   /* index into PFMGRAPType[] */
  PetscInt  num_pre_relax, num_post_relax; /* smoothing steps before/after the coarse grid */
  PetscInt  max_levels; /* maximum number of multigrid levels */
  PetscInt  skip_relax; /* skip relaxation on certain grids for isotropic problems */
  PetscBool print_statistics; /* triggers HYPRE_StructPFMGSetPrintLevel(...,3) in PCSetUp_PFMG() */
} PC_PFMG;
2363 
2364 PetscErrorCode PCDestroy_PFMG(PC pc)
2365 {
2366   PC_PFMG *ex = (PC_PFMG *)pc->data;
2367 
2368   PetscFunctionBegin;
2369   if (ex->hsolver) PetscCallExternal(HYPRE_StructPFMGDestroy, ex->hsolver);
2370   PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
2371   PetscCall(PetscFree(pc->data));
2372   PetscFunctionReturn(PETSC_SUCCESS);
2373 }
2374 
/* human-readable names for the PFMG options; indexed by PC_PFMG.relax_type and PC_PFMG.rap_type */
static const char *PFMGRelaxType[] = {"Jacobi", "Weighted-Jacobi", "symmetric-Red/Black-Gauss-Seidel", "Red/Black-Gauss-Seidel"};
static const char *PFMGRAPType[]   = {"Galerkin", "non-Galerkin"};
2377 
2378 PetscErrorCode PCView_PFMG(PC pc, PetscViewer viewer)
2379 {
2380   PetscBool iascii;
2381   PC_PFMG  *ex = (PC_PFMG *)pc->data;
2382 
2383   PetscFunctionBegin;
2384   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
2385   if (iascii) {
2386     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE PFMG preconditioning\n"));
2387     PetscCall(PetscViewerASCIIPrintf(viewer, "    max iterations %" PetscInt_FMT "\n", ex->its));
2388     PetscCall(PetscViewerASCIIPrintf(viewer, "    tolerance %g\n", ex->tol));
2389     PetscCall(PetscViewerASCIIPrintf(viewer, "    relax type %s\n", PFMGRelaxType[ex->relax_type]));
2390     PetscCall(PetscViewerASCIIPrintf(viewer, "    RAP type %s\n", PFMGRAPType[ex->rap_type]));
2391     PetscCall(PetscViewerASCIIPrintf(viewer, "    number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax));
2392     PetscCall(PetscViewerASCIIPrintf(viewer, "    max levels %" PetscInt_FMT "\n", ex->max_levels));
2393     PetscCall(PetscViewerASCIIPrintf(viewer, "    skip relax %" PetscInt_FMT "\n", ex->skip_relax));
2394   }
2395   PetscFunctionReturn(PETSC_SUCCESS);
2396 }
2397 
/*
   PCSetFromOptions_PFMG - processes the -pc_pfmg_* options

   Each value is stored in the PC_PFMG context (so PCView_PFMG() can report it) and is
   also pushed to the current hypre solver immediately; PCSetUp_PFMG() replays all of
   them because it destroys and recreates the solver object.
*/
PetscErrorCode PCSetFromOptions_PFMG(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_PFMG *ex = (PC_PFMG *)pc->data;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "PFMG options");
  /* print_statistics is only consumed in PCSetUp_PFMG(), where the solver is (re)created */
  PetscCall(PetscOptionsBool("-pc_pfmg_print_statistics", "Print statistics", "HYPRE_StructPFMGSetPrintLevel", ex->print_statistics, &ex->print_statistics, NULL));
  PetscCall(PetscOptionsInt("-pc_pfmg_its", "Number of iterations of PFMG to use as preconditioner", "HYPRE_StructPFMGSetMaxIter", ex->its, &ex->its, NULL));
  PetscCallExternal(HYPRE_StructPFMGSetMaxIter, ex->hsolver, ex->its);
  PetscCall(PetscOptionsInt("-pc_pfmg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_StructPFMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL));
  PetscCallExternal(HYPRE_StructPFMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax);
  PetscCall(PetscOptionsInt("-pc_pfmg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_StructPFMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL));
  PetscCallExternal(HYPRE_StructPFMGSetNumPostRelax, ex->hsolver, ex->num_post_relax);

  PetscCall(PetscOptionsInt("-pc_pfmg_max_levels", "Max Levels for MG hierarchy", "HYPRE_StructPFMGSetMaxLevels", ex->max_levels, &ex->max_levels, NULL));
  PetscCallExternal(HYPRE_StructPFMGSetMaxLevels, ex->hsolver, ex->max_levels);

  PetscCall(PetscOptionsReal("-pc_pfmg_tol", "Tolerance of PFMG", "HYPRE_StructPFMGSetTol", ex->tol, &ex->tol, NULL));
  PetscCallExternal(HYPRE_StructPFMGSetTol, ex->hsolver, ex->tol);
  /* PetscOptionsEList() converts the selected name into an index into the table */
  PetscCall(PetscOptionsEList("-pc_pfmg_relax_type", "Relax type for the up and down cycles", "HYPRE_StructPFMGSetRelaxType", PFMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(PFMGRelaxType), PFMGRelaxType[ex->relax_type], &ex->relax_type, NULL));
  PetscCallExternal(HYPRE_StructPFMGSetRelaxType, ex->hsolver, ex->relax_type);
  PetscCall(PetscOptionsEList("-pc_pfmg_rap_type", "RAP type", "HYPRE_StructPFMGSetRAPType", PFMGRAPType, PETSC_STATIC_ARRAY_LENGTH(PFMGRAPType), PFMGRAPType[ex->rap_type], &ex->rap_type, NULL));
  PetscCallExternal(HYPRE_StructPFMGSetRAPType, ex->hsolver, ex->rap_type);
  PetscCall(PetscOptionsInt("-pc_pfmg_skip_relax", "Skip relaxation on certain grids for isotropic problems. This can greatly improve efficiency by eliminating unnecessary relaxations when the underlying problem is isotropic", "HYPRE_StructPFMGSetSkipRelax", ex->skip_relax, &ex->skip_relax, NULL));
  PetscCallExternal(HYPRE_StructPFMGSetSkipRelax, ex->hsolver, ex->skip_relax);
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
2426 
2427 PetscErrorCode PCApply_PFMG(PC pc, Vec x, Vec y)
2428 {
2429   PC_PFMG           *ex = (PC_PFMG *)pc->data;
2430   PetscScalar       *yy;
2431   const PetscScalar *xx;
2432   PetscInt           ilower[3], iupper[3];
2433   HYPRE_Int          hlower[3], hupper[3];
2434   Mat_HYPREStruct   *mx = (Mat_HYPREStruct *)(pc->pmat->data);
2435 
2436   PetscFunctionBegin;
2437   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
2438   PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2]));
2439   /* when HYPRE_MIXEDINT is defined, sizeof(HYPRE_Int) == 32 */
2440   iupper[0] += ilower[0] - 1;
2441   iupper[1] += ilower[1] - 1;
2442   iupper[2] += ilower[2] - 1;
2443   hlower[0] = (HYPRE_Int)ilower[0];
2444   hlower[1] = (HYPRE_Int)ilower[1];
2445   hlower[2] = (HYPRE_Int)ilower[2];
2446   hupper[0] = (HYPRE_Int)iupper[0];
2447   hupper[1] = (HYPRE_Int)iupper[1];
2448   hupper[2] = (HYPRE_Int)iupper[2];
2449 
2450   /* copy x values over to hypre */
2451   PetscCallExternal(HYPRE_StructVectorSetConstantValues, mx->hb, 0.0);
2452   PetscCall(VecGetArrayRead(x, &xx));
2453   PetscCallExternal(HYPRE_StructVectorSetBoxValues, mx->hb, hlower, hupper, (HYPRE_Complex *)xx);
2454   PetscCall(VecRestoreArrayRead(x, &xx));
2455   PetscCallExternal(HYPRE_StructVectorAssemble, mx->hb);
2456   PetscCallExternal(HYPRE_StructPFMGSolve, ex->hsolver, mx->hmat, mx->hb, mx->hx);
2457 
2458   /* copy solution values back to PETSc */
2459   PetscCall(VecGetArray(y, &yy));
2460   PetscCallExternal(HYPRE_StructVectorGetBoxValues, mx->hx, hlower, hupper, (HYPRE_Complex *)yy);
2461   PetscCall(VecRestoreArray(y, &yy));
2462   PetscFunctionReturn(PETSC_SUCCESS);
2463 }
2464 
2465 static PetscErrorCode PCApplyRichardson_PFMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
2466 {
2467   PC_PFMG  *jac = (PC_PFMG *)pc->data;
2468   HYPRE_Int oits;
2469 
2470   PetscFunctionBegin;
2471   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
2472   PetscCallExternal(HYPRE_StructPFMGSetMaxIter, jac->hsolver, its * jac->its);
2473   PetscCallExternal(HYPRE_StructPFMGSetTol, jac->hsolver, rtol);
2474 
2475   PetscCall(PCApply_PFMG(pc, b, y));
2476   PetscCallExternal(HYPRE_StructPFMGGetNumIterations, jac->hsolver, &oits);
2477   *outits = oits;
2478   if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
2479   else *reason = PCRICHARDSON_CONVERGED_RTOL;
2480   PetscCallExternal(HYPRE_StructPFMGSetTol, jac->hsolver, jac->tol);
2481   PetscCallExternal(HYPRE_StructPFMGSetMaxIter, jac->hsolver, jac->its);
2482   PetscFunctionReturn(PETSC_SUCCESS);
2483 }
2484 
2485 PetscErrorCode PCSetUp_PFMG(PC pc)
2486 {
2487   PC_PFMG         *ex = (PC_PFMG *)pc->data;
2488   Mat_HYPREStruct *mx = (Mat_HYPREStruct *)(pc->pmat->data);
2489   PetscBool        flg;
2490 
2491   PetscFunctionBegin;
2492   PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESTRUCT, &flg));
2493   PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESTRUCT with this preconditioner");
2494 
2495   /* create the hypre solver object and set its information */
2496   if (ex->hsolver) PetscCallExternal(HYPRE_StructPFMGDestroy, ex->hsolver);
2497   PetscCallExternal(HYPRE_StructPFMGCreate, ex->hcomm, &ex->hsolver);
2498 
2499   // Print Hypre statistics about the solve process
2500   if (ex->print_statistics) PetscCallExternal(HYPRE_StructPFMGSetPrintLevel, ex->hsolver, 3);
2501 
2502   // The hypre options must be repeated here because the StructPFMG was destroyed and recreated
2503   PetscCallExternal(HYPRE_StructPFMGSetMaxIter, ex->hsolver, ex->its);
2504   PetscCallExternal(HYPRE_StructPFMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax);
2505   PetscCallExternal(HYPRE_StructPFMGSetNumPostRelax, ex->hsolver, ex->num_post_relax);
2506   PetscCallExternal(HYPRE_StructPFMGSetMaxLevels, ex->hsolver, ex->max_levels);
2507   PetscCallExternal(HYPRE_StructPFMGSetTol, ex->hsolver, ex->tol);
2508   PetscCallExternal(HYPRE_StructPFMGSetRelaxType, ex->hsolver, ex->relax_type);
2509   PetscCallExternal(HYPRE_StructPFMGSetRAPType, ex->hsolver, ex->rap_type);
2510 
2511   PetscCallExternal(HYPRE_StructPFMGSetup, ex->hsolver, mx->hmat, mx->hb, mx->hx);
2512   PetscCallExternal(HYPRE_StructPFMGSetZeroGuess, ex->hsolver);
2513   PetscFunctionReturn(PETSC_SUCCESS);
2514 }
2515 
2516 /*MC
2517      PCPFMG - the hypre PFMG multigrid solver
2518 
2519    Options Database Keys:
2520 + -pc_pfmg_its <its> - number of iterations of PFMG to use as preconditioner
2521 . -pc_pfmg_num_pre_relax <steps> - number of smoothing steps before coarse grid solve
2522 . -pc_pfmg_num_post_relax <steps> - number of smoothing steps after coarse grid solve
2523 . -pc_pfmg_tol <tol> - tolerance of PFMG
2524 . -pc_pfmg_relax_type - relaxation type for the up and down cycles, one of Jacobi,Weighted-Jacobi,symmetric-Red/Black-Gauss-Seidel,Red/Black-Gauss-Seidel
2525 . -pc_pfmg_rap_type - type of coarse matrix generation, one of Galerkin,non-Galerkin
2526 - -pc_pfmg_skip_relax - skip relaxation on certain grids for isotropic problems. This can greatly improve efficiency by eliminating unnecessary relaxations
2527                         when the underlying problem is isotropic, one of 0,1
2528 
2529    Level: advanced
2530 
2531    Notes:
   This is for CELL-centered discretizations
2533 
2534    See `PCSYSPFMG` for a version suitable for systems of PDEs, and `PCSMG`
2535 
2536    See `PCHYPRE` for hypre's BoomerAMG algebraic multigrid solver
2537 
2538    This must be used with the `MATHYPRESTRUCT` matrix type.
2539 
2540    This provides only some of the functionality of PFMG, it supports only one block per process defined by a PETSc `DMDA`.
2541 
2542 .seealso: `PCMG`, `MATHYPRESTRUCT`, `PCHYPRE`, `PCGAMG`, `PCSYSPFMG`, `PCSMG`
2543 M*/
2544 
2545 PETSC_EXTERN PetscErrorCode PCCreate_PFMG(PC pc)
2546 {
2547   PC_PFMG *ex;
2548 
2549   PetscFunctionBegin;
2550   PetscCall(PetscNew(&ex));
2551   pc->data = ex;
2552 
2553   ex->its              = 1;
2554   ex->tol              = 1.e-8;
2555   ex->relax_type       = 1;
2556   ex->rap_type         = 0;
2557   ex->num_pre_relax    = 1;
2558   ex->num_post_relax   = 1;
2559   ex->max_levels       = 0;
2560   ex->skip_relax       = 0;
2561   ex->print_statistics = PETSC_FALSE;
2562 
2563   pc->ops->setfromoptions  = PCSetFromOptions_PFMG;
2564   pc->ops->view            = PCView_PFMG;
2565   pc->ops->destroy         = PCDestroy_PFMG;
2566   pc->ops->apply           = PCApply_PFMG;
2567   pc->ops->applyrichardson = PCApplyRichardson_PFMG;
2568   pc->ops->setup           = PCSetUp_PFMG;
2569 
2570   PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
2571   PetscCallExternal(HYPRE_StructPFMGCreate, ex->hcomm, &ex->hsolver);
2572   PetscFunctionReturn(PETSC_SUCCESS);
2573 }
2574 
/* we know we are working with a HYPRE_SStructMatrix */
/* Private context for the hypre semi-struct SysPFMG preconditioner (PCSYSPFMG) */
typedef struct {
  MPI_Comm            hcomm; /* does not share comm with HYPRE_SStructMatrix because need to create solver before getting matrix */
  HYPRE_SStructSolver ss_solver; /* the hypre SysPFMG solver object; destroyed and recreated in PCSetUp_SysPFMG() */

  /* keep copy of SYSPFMG options used so may view them */
  PetscInt its;        /* maximum number of iterations */
  double   tol;        /* convergence tolerance */
  PetscInt relax_type; /* index into SysPFMGRelaxType[] */
  PetscInt num_pre_relax, num_post_relax; /* smoothing steps before/after the coarse grid */
} PC_SysPFMG;
2586 
2587 PetscErrorCode PCDestroy_SysPFMG(PC pc)
2588 {
2589   PC_SysPFMG *ex = (PC_SysPFMG *)pc->data;
2590 
2591   PetscFunctionBegin;
2592   if (ex->ss_solver) PetscCallExternal(HYPRE_SStructSysPFMGDestroy, ex->ss_solver);
2593   PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
2594   PetscCall(PetscFree(pc->data));
2595   PetscFunctionReturn(PETSC_SUCCESS);
2596 }
2597 
/* human-readable names for the SysPFMG relaxation options; indexed by PC_SysPFMG.relax_type */
static const char *SysPFMGRelaxType[] = {"Weighted-Jacobi", "Red/Black-Gauss-Seidel"};
2599 
2600 PetscErrorCode PCView_SysPFMG(PC pc, PetscViewer viewer)
2601 {
2602   PetscBool   iascii;
2603   PC_SysPFMG *ex = (PC_SysPFMG *)pc->data;
2604 
2605   PetscFunctionBegin;
2606   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
2607   if (iascii) {
2608     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE SysPFMG preconditioning\n"));
2609     PetscCall(PetscViewerASCIIPrintf(viewer, "  max iterations %" PetscInt_FMT "\n", ex->its));
2610     PetscCall(PetscViewerASCIIPrintf(viewer, "  tolerance %g\n", ex->tol));
2611     PetscCall(PetscViewerASCIIPrintf(viewer, "  relax type %s\n", PFMGRelaxType[ex->relax_type]));
2612     PetscCall(PetscViewerASCIIPrintf(viewer, "  number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax));
2613   }
2614   PetscFunctionReturn(PETSC_SUCCESS);
2615 }
2616 
2617 PetscErrorCode PCSetFromOptions_SysPFMG(PC pc, PetscOptionItems *PetscOptionsObject)
2618 {
2619   PC_SysPFMG *ex  = (PC_SysPFMG *)pc->data;
2620   PetscBool   flg = PETSC_FALSE;
2621 
2622   PetscFunctionBegin;
2623   PetscOptionsHeadBegin(PetscOptionsObject, "SysPFMG options");
2624   PetscCall(PetscOptionsBool("-pc_syspfmg_print_statistics", "Print statistics", "HYPRE_SStructSysPFMGSetPrintLevel", flg, &flg, NULL));
2625   if (flg) PetscCallExternal(HYPRE_SStructSysPFMGSetPrintLevel, ex->ss_solver, 3);
2626   PetscCall(PetscOptionsInt("-pc_syspfmg_its", "Number of iterations of SysPFMG to use as preconditioner", "HYPRE_SStructSysPFMGSetMaxIter", ex->its, &ex->its, NULL));
2627   PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, ex->ss_solver, ex->its);
2628   PetscCall(PetscOptionsInt("-pc_syspfmg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_SStructSysPFMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL));
2629   PetscCallExternal(HYPRE_SStructSysPFMGSetNumPreRelax, ex->ss_solver, ex->num_pre_relax);
2630   PetscCall(PetscOptionsInt("-pc_syspfmg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_SStructSysPFMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL));
2631   PetscCallExternal(HYPRE_SStructSysPFMGSetNumPostRelax, ex->ss_solver, ex->num_post_relax);
2632 
2633   PetscCall(PetscOptionsReal("-pc_syspfmg_tol", "Tolerance of SysPFMG", "HYPRE_SStructSysPFMGSetTol", ex->tol, &ex->tol, NULL));
2634   PetscCallExternal(HYPRE_SStructSysPFMGSetTol, ex->ss_solver, ex->tol);
2635   PetscCall(PetscOptionsEList("-pc_syspfmg_relax_type", "Relax type for the up and down cycles", "HYPRE_SStructSysPFMGSetRelaxType", SysPFMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(SysPFMGRelaxType), SysPFMGRelaxType[ex->relax_type], &ex->relax_type, NULL));
2636   PetscCallExternal(HYPRE_SStructSysPFMGSetRelaxType, ex->ss_solver, ex->relax_type);
2637   PetscOptionsHeadEnd();
2638   PetscFunctionReturn(PETSC_SUCCESS);
2639 }
2640 
/*
   PCApply_SysPFMG - applies hypre SysPFMG as a preconditioner: y = M^{-1} x

   The DMDA provides one box per process; the corners are converted to hypre's
   inclusive [hlower, hupper] index range.  hypre's SysPFMG stores the unknowns
   variable-by-variable, so when the PETSc vectors use nodal (interlaced) ordering
   the values are permuted through a scratch array before/after the solve.
*/
PetscErrorCode PCApply_SysPFMG(PC pc, Vec x, Vec y)
{
  PC_SysPFMG        *ex = (PC_SysPFMG *)pc->data;
  PetscScalar       *yy;
  const PetscScalar *xx;
  PetscInt           ilower[3], iupper[3];
  HYPRE_Int          hlower[3], hupper[3];
  Mat_HYPRESStruct  *mx       = (Mat_HYPRESStruct *)(pc->pmat->data);
  PetscInt           ordering = mx->dofs_order; /* nonzero => already variable-ordered */
  PetscInt           nvars    = mx->nvars;      /* number of unknowns per grid point */
  PetscInt           part     = 0;              /* only a single sstruct part is supported */
  PetscInt           size;
  PetscInt           i;

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(hypreCitation, &cite));
  PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2]));
  /* when HYPRE_MIXEDINT is defined HYPRE_Int is 32-bit, possibly narrower than PetscInt, hence the explicit casts */
  /* DMDAGetCorners() returns widths; convert to inclusive upper corners */
  iupper[0] += ilower[0] - 1;
  iupper[1] += ilower[1] - 1;
  iupper[2] += ilower[2] - 1;
  hlower[0] = (HYPRE_Int)ilower[0];
  hlower[1] = (HYPRE_Int)ilower[1];
  hlower[2] = (HYPRE_Int)ilower[2];
  hupper[0] = (HYPRE_Int)iupper[0];
  hupper[1] = (HYPRE_Int)iupper[1];
  hupper[2] = (HYPRE_Int)iupper[2];

  /* number of local grid points in the box */
  size = 1;
  for (i = 0; i < 3; i++) size *= (iupper[i] - ilower[i] + 1);

  /* copy x values over to hypre for variable ordering */
  if (ordering) {
    PetscCallExternal(HYPRE_SStructVectorSetConstantValues, mx->ss_b, 0.0);
    PetscCall(VecGetArrayRead(x, &xx));
    /* variable i occupies the contiguous slab xx[size*i .. size*(i+1)-1] */
    for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorSetBoxValues, mx->ss_b, part, hlower, hupper, i, (HYPRE_Complex *)(xx + (size * i)));
    PetscCall(VecRestoreArrayRead(x, &xx));
    PetscCallExternal(HYPRE_SStructVectorAssemble, mx->ss_b);
    /* ss_x is overwritten with 1.0*A*b + 0.0*ss_x here; NOTE(review): presumably this
       just (re)initializes ss_x before the solve -- confirm, the nodal branch below
       does not perform this matvec */
    PetscCallExternal(HYPRE_SStructMatrixMatvec, 1.0, mx->ss_mat, mx->ss_b, 0.0, mx->ss_x);
    PetscCallExternal(HYPRE_SStructSysPFMGSolve, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x);

    /* copy solution values back to PETSc */
    PetscCall(VecGetArray(y, &yy));
    for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorGetBoxValues, mx->ss_x, part, hlower, hupper, i, (HYPRE_Complex *)(yy + (size * i)));
    PetscCall(VecRestoreArray(y, &yy));
  } else { /* nodal ordering must be mapped to variable ordering for sys_pfmg */
    PetscScalar *z; /* scratch buffer holding the variable-ordered copy */
    PetscInt     j, k;

    PetscCall(PetscMalloc1(nvars * size, &z));
    PetscCallExternal(HYPRE_SStructVectorSetConstantValues, mx->ss_b, 0.0);
    PetscCall(VecGetArrayRead(x, &xx));

    /* transform nodal to hypre's variable ordering for sys_pfmg */
    for (i = 0; i < size; i++) {
      k = i * nvars; /* start of grid point i in the nodal layout */
      for (j = 0; j < nvars; j++) z[j * size + i] = xx[k + j];
    }
    for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorSetBoxValues, mx->ss_b, part, hlower, hupper, i, (HYPRE_Complex *)(z + (size * i)));
    PetscCall(VecRestoreArrayRead(x, &xx));
    PetscCallExternal(HYPRE_SStructVectorAssemble, mx->ss_b);
    PetscCallExternal(HYPRE_SStructSysPFMGSolve, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x);

    /* copy solution values back to PETSc, reusing z for the variable-ordered result */
    PetscCall(VecGetArray(y, &yy));
    for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorGetBoxValues, mx->ss_x, part, hlower, hupper, i, (HYPRE_Complex *)(z + (size * i)));
    /* transform hypre's variable ordering for sys_pfmg to nodal ordering */
    for (i = 0; i < size; i++) {
      k = i * nvars;
      for (j = 0; j < nvars; j++) yy[k + j] = z[j * size + i];
    }
    PetscCall(VecRestoreArray(y, &yy));
    PetscCall(PetscFree(z));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2717 
2718 static PetscErrorCode PCApplyRichardson_SysPFMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
2719 {
2720   PC_SysPFMG *jac = (PC_SysPFMG *)pc->data;
2721   HYPRE_Int   oits;
2722 
2723   PetscFunctionBegin;
2724   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
2725   PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, jac->ss_solver, its * jac->its);
2726   PetscCallExternal(HYPRE_SStructSysPFMGSetTol, jac->ss_solver, rtol);
2727   PetscCall(PCApply_SysPFMG(pc, b, y));
2728   PetscCallExternal(HYPRE_SStructSysPFMGGetNumIterations, jac->ss_solver, &oits);
2729   *outits = oits;
2730   if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
2731   else *reason = PCRICHARDSON_CONVERGED_RTOL;
2732   PetscCallExternal(HYPRE_SStructSysPFMGSetTol, jac->ss_solver, jac->tol);
2733   PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, jac->ss_solver, jac->its);
2734   PetscFunctionReturn(PETSC_SUCCESS);
2735 }
2736 
2737 PetscErrorCode PCSetUp_SysPFMG(PC pc)
2738 {
2739   PC_SysPFMG       *ex = (PC_SysPFMG *)pc->data;
2740   Mat_HYPRESStruct *mx = (Mat_HYPRESStruct *)(pc->pmat->data);
2741   PetscBool         flg;
2742 
2743   PetscFunctionBegin;
2744   PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESSTRUCT, &flg));
2745   PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESSTRUCT with this preconditioner");
2746 
2747   /* create the hypre sstruct solver object and set its information */
2748   if (ex->ss_solver) PetscCallExternal(HYPRE_SStructSysPFMGDestroy, ex->ss_solver);
2749   PetscCallExternal(HYPRE_SStructSysPFMGCreate, ex->hcomm, &ex->ss_solver);
2750   PetscCallExternal(HYPRE_SStructSysPFMGSetZeroGuess, ex->ss_solver);
2751   PetscCallExternal(HYPRE_SStructSysPFMGSetup, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x);
2752   PetscFunctionReturn(PETSC_SUCCESS);
2753 }
2754 
2755 /*MC
2756      PCSYSPFMG - the hypre SysPFMG multigrid solver
2757 
2758    Level: advanced
2759 
2760    Options Database Keys:
2761 + -pc_syspfmg_its <its> - number of iterations of SysPFMG to use as preconditioner
2762 . -pc_syspfmg_num_pre_relax <steps> - number of smoothing steps before coarse grid
2763 . -pc_syspfmg_num_post_relax <steps> - number of smoothing steps after coarse grid
2764 . -pc_syspfmg_tol <tol> - tolerance of SysPFMG
2765 - -pc_syspfmg_relax_type <Weighted-Jacobi,Red/Black-Gauss-Seidel> - relaxation type for the up and down cycles
2766 
2767    Notes:
2768    See `PCPFMG` for hypre's PFMG that works for a scalar PDE and `PCSMG`
2769 
2770    See `PCHYPRE` for hypre's BoomerAMG algebraic multigrid solver
2771 
   This is for CELL-centered discretizations
2773 
2774    This must be used with the `MATHYPRESSTRUCT` matrix type.
2775 
   This does not give access to all the functionality of hypre's SysPFMG, it supports only one part, and one block per process defined by a PETSc `DMDA`.
2777 
2778 .seealso: `PCMG`, `MATHYPRESSTRUCT`, `PCPFMG`, `PCHYPRE`, `PCGAMG`, `PCSMG`
2779 M*/
2780 
2781 PETSC_EXTERN PetscErrorCode PCCreate_SysPFMG(PC pc)
2782 {
2783   PC_SysPFMG *ex;
2784 
2785   PetscFunctionBegin;
2786   PetscCall(PetscNew(&ex));
2787   pc->data = ex;
2788 
2789   ex->its            = 1;
2790   ex->tol            = 1.e-8;
2791   ex->relax_type     = 1;
2792   ex->num_pre_relax  = 1;
2793   ex->num_post_relax = 1;
2794 
2795   pc->ops->setfromoptions  = PCSetFromOptions_SysPFMG;
2796   pc->ops->view            = PCView_SysPFMG;
2797   pc->ops->destroy         = PCDestroy_SysPFMG;
2798   pc->ops->apply           = PCApply_SysPFMG;
2799   pc->ops->applyrichardson = PCApplyRichardson_SysPFMG;
2800   pc->ops->setup           = PCSetUp_SysPFMG;
2801 
2802   PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
2803   PetscCallExternal(HYPRE_SStructSysPFMGCreate, ex->hcomm, &ex->ss_solver);
2804   PetscFunctionReturn(PETSC_SUCCESS);
2805 }
2806 
2807 /* PC SMG */
/* Private context for the hypre struct-grid SMG preconditioner (PCSMG) */
typedef struct {
  MPI_Comm           hcomm; /* does not share comm with HYPRE_StructMatrix because need to create solver before getting matrix */
  HYPRE_StructSolver hsolver; /* the hypre SMG solver object; destroyed and recreated in PCSetUp_SMG() */
  PetscInt           its; /* keep copy of SMG options used so may view them */
  double             tol; /* convergence tolerance */
  PetscBool          print_statistics; /* NOTE(review): never set or read by the visible SMG routines in this file -- confirm before relying on it */
  PetscInt           num_pre_relax, num_post_relax; /* smoothing steps before/after the coarse grid */
} PC_SMG;
2816 
2817 PetscErrorCode PCDestroy_SMG(PC pc)
2818 {
2819   PC_SMG *ex = (PC_SMG *)pc->data;
2820 
2821   PetscFunctionBegin;
2822   if (ex->hsolver) PetscCallExternal(HYPRE_StructSMGDestroy, ex->hsolver);
2823   PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
2824   PetscCall(PetscFree(pc->data));
2825   PetscFunctionReturn(PETSC_SUCCESS);
2826 }
2827 
2828 PetscErrorCode PCView_SMG(PC pc, PetscViewer viewer)
2829 {
2830   PetscBool iascii;
2831   PC_SMG   *ex = (PC_SMG *)pc->data;
2832 
2833   PetscFunctionBegin;
2834   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
2835   if (iascii) {
2836     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE SMG preconditioning\n"));
2837     PetscCall(PetscViewerASCIIPrintf(viewer, "    max iterations %" PetscInt_FMT "\n", ex->its));
2838     PetscCall(PetscViewerASCIIPrintf(viewer, "    tolerance %g\n", ex->tol));
2839     PetscCall(PetscViewerASCIIPrintf(viewer, "    number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax));
2840   }
2841   PetscFunctionReturn(PETSC_SUCCESS);
2842 }
2843 
2844 PetscErrorCode PCSetFromOptions_SMG(PC pc, PetscOptionItems *PetscOptionsObject)
2845 {
2846   PC_SMG *ex = (PC_SMG *)pc->data;
2847 
2848   PetscFunctionBegin;
2849   PetscOptionsHeadBegin(PetscOptionsObject, "SMG options");
2850 
2851   PetscCall(PetscOptionsInt("-pc_smg_its", "Number of iterations of SMG to use as preconditioner", "HYPRE_StructSMGSetMaxIter", ex->its, &ex->its, NULL));
2852   PetscCall(PetscOptionsInt("-pc_smg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_StructSMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL));
2853   PetscCall(PetscOptionsInt("-pc_smg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_StructSMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL));
2854   PetscCall(PetscOptionsReal("-pc_smg_tol", "Tolerance of SMG", "HYPRE_StructSMGSetTol", ex->tol, &ex->tol, NULL));
2855 
2856   PetscOptionsHeadEnd();
2857   PetscFunctionReturn(PETSC_SUCCESS);
2858 }
2859 
2860 PetscErrorCode PCApply_SMG(PC pc, Vec x, Vec y)
2861 {
2862   PC_SMG            *ex = (PC_SMG *)pc->data;
2863   PetscScalar       *yy;
2864   const PetscScalar *xx;
2865   PetscInt           ilower[3], iupper[3];
2866   HYPRE_Int          hlower[3], hupper[3];
2867   Mat_HYPREStruct   *mx = (Mat_HYPREStruct *)(pc->pmat->data);
2868 
2869   PetscFunctionBegin;
2870   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
2871   PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2]));
2872   /* when HYPRE_MIXEDINT is defined, sizeof(HYPRE_Int) == 32 */
2873   iupper[0] += ilower[0] - 1;
2874   iupper[1] += ilower[1] - 1;
2875   iupper[2] += ilower[2] - 1;
2876   hlower[0] = (HYPRE_Int)ilower[0];
2877   hlower[1] = (HYPRE_Int)ilower[1];
2878   hlower[2] = (HYPRE_Int)ilower[2];
2879   hupper[0] = (HYPRE_Int)iupper[0];
2880   hupper[1] = (HYPRE_Int)iupper[1];
2881   hupper[2] = (HYPRE_Int)iupper[2];
2882 
2883   /* copy x values over to hypre */
2884   PetscCallExternal(HYPRE_StructVectorSetConstantValues, mx->hb, 0.0);
2885   PetscCall(VecGetArrayRead(x, &xx));
2886   PetscCallExternal(HYPRE_StructVectorSetBoxValues, mx->hb, hlower, hupper, (HYPRE_Complex *)xx);
2887   PetscCall(VecRestoreArrayRead(x, &xx));
2888   PetscCallExternal(HYPRE_StructVectorAssemble, mx->hb);
2889   PetscCallExternal(HYPRE_StructSMGSolve, ex->hsolver, mx->hmat, mx->hb, mx->hx);
2890 
2891   /* copy solution values back to PETSc */
2892   PetscCall(VecGetArray(y, &yy));
2893   PetscCallExternal(HYPRE_StructVectorGetBoxValues, mx->hx, hlower, hupper, (HYPRE_Complex *)yy);
2894   PetscCall(VecRestoreArray(y, &yy));
2895   PetscFunctionReturn(PETSC_SUCCESS);
2896 }
2897 
2898 static PetscErrorCode PCApplyRichardson_SMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
2899 {
2900   PC_SMG   *jac = (PC_SMG *)pc->data;
2901   HYPRE_Int oits;
2902 
2903   PetscFunctionBegin;
2904   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
2905   PetscCallExternal(HYPRE_StructSMGSetMaxIter, jac->hsolver, its * jac->its);
2906   PetscCallExternal(HYPRE_StructSMGSetTol, jac->hsolver, rtol);
2907 
2908   PetscCall(PCApply_SMG(pc, b, y));
2909   PetscCallExternal(HYPRE_StructSMGGetNumIterations, jac->hsolver, &oits);
2910   *outits = oits;
2911   if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
2912   else *reason = PCRICHARDSON_CONVERGED_RTOL;
2913   PetscCallExternal(HYPRE_StructSMGSetTol, jac->hsolver, jac->tol);
2914   PetscCallExternal(HYPRE_StructSMGSetMaxIter, jac->hsolver, jac->its);
2915   PetscFunctionReturn(PETSC_SUCCESS);
2916 }
2917 
/*
   PCSetUp_SMG - builds (or rebuilds) the hypre SMG solver for the current
   MATHYPRESTRUCT operator

   SMG requires the number of grid points in any periodic direction to be a
   power of 2, which is checked here before the solver is created.
*/
PetscErrorCode PCSetUp_SMG(PC pc)
{
  PetscInt         i, dim;
  PC_SMG          *ex = (PC_SMG *)pc->data;
  Mat_HYPREStruct *mx = (Mat_HYPREStruct *)(pc->pmat->data);
  PetscBool        flg;
  DMBoundaryType   p[3];
  PetscInt         M[3];

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESTRUCT, &flg));
  PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESTRUCT with this preconditioner");

  /* only the global sizes M[] and the boundary types p[] are needed; the 0s skip the other outputs */
  PetscCall(DMDAGetInfo(mx->da, &dim, &M[0], &M[1], &M[2], 0, 0, 0, 0, 0, &p[0], &p[1], &p[2], 0));
  // Check if power of 2 in periodic directions
  for (i = 0; i < dim; i++) {
    /* M[i] & (M[i]-1) == 0 iff M[i] is a power of 2 */
    if (((M[i] & (M[i] - 1)) != 0) && (p[i] == DM_BOUNDARY_PERIODIC)) {
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "With SMG, the number of points in a periodic direction must be a power of 2, but is here %" PetscInt_FMT ".", M[i]);
    }
  }

  /* create the hypre solver object and set its information */
  if (ex->hsolver) PetscCallExternal(HYPRE_StructSMGDestroy, (ex->hsolver));
  PetscCallExternal(HYPRE_StructSMGCreate, ex->hcomm, &ex->hsolver);
  // The hypre options must be set here and not in SetFromOptions because it is created here!
  PetscCallExternal(HYPRE_StructSMGSetMaxIter, ex->hsolver, ex->its);
  PetscCallExternal(HYPRE_StructSMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax);
  PetscCallExternal(HYPRE_StructSMGSetNumPostRelax, ex->hsolver, ex->num_post_relax);
  PetscCallExternal(HYPRE_StructSMGSetTol, ex->hsolver, ex->tol);

  PetscCallExternal(HYPRE_StructSMGSetup, ex->hsolver, mx->hmat, mx->hb, mx->hx);
  PetscCallExternal(HYPRE_StructSMGSetZeroGuess, ex->hsolver);
  PetscFunctionReturn(PETSC_SUCCESS);
}
2952 
2953 /*MC
2954      PCSMG - the hypre (structured grid) SMG multigrid solver
2955 
2956    Level: advanced
2957 
2958    Options Database Keys:
2959 + -pc_smg_its <its> - number of iterations of SMG to use as preconditioner
2960 . -pc_smg_num_pre_relax <steps> - number of smoothing steps before coarse grid
2961 . -pc_smg_num_post_relax <steps> - number of smoothing steps after coarse grid
2962 - -pc_smg_tol <tol> - tolerance of SMG
2963 
2964    Notes:
2965   This is for CELL-centered discretizations
2966 
2967    This must be used with the `MATHYPRESTRUCT` `MatType`.
2968 
2969   This does not provide all the functionality of hypre's SMG solver, it supports only one block per process defined by a PETSc `DMDA`.
2970 
2971    See `PCSYSPFMG`, `PCSMG`, `PCPFMG`, and `PCHYPRE` for access to hypre's other preconditioners
2972 
2973 .seealso:  `PCMG`, `MATHYPRESTRUCT`, `PCPFMG`, `PCSYSPFMG`, `PCHYPRE`, `PCGAMG`
2974 M*/
2975 
2976 PETSC_EXTERN PetscErrorCode PCCreate_SMG(PC pc)
2977 {
2978   PC_SMG *ex;
2979 
2980   PetscFunctionBegin;
2981   PetscCall(PetscNew(&ex));
2982   pc->data = ex;
2983 
2984   ex->its            = 1;
2985   ex->tol            = 1.e-8;
2986   ex->num_pre_relax  = 1;
2987   ex->num_post_relax = 1;
2988 
2989   pc->ops->setfromoptions  = PCSetFromOptions_SMG;
2990   pc->ops->view            = PCView_SMG;
2991   pc->ops->destroy         = PCDestroy_SMG;
2992   pc->ops->apply           = PCApply_SMG;
2993   pc->ops->applyrichardson = PCApplyRichardson_SMG;
2994   pc->ops->setup           = PCSetUp_SMG;
2995 
2996   PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
2997   PetscCallExternal(HYPRE_StructSMGCreate, ex->hcomm, &ex->hsolver);
2998   PetscFunctionReturn(PETSC_SUCCESS);
2999 }
3000