xref: /petsc/src/ksp/pc/impls/hypre/hypre.c (revision bcd3bd92eda2d5998e2f14c4bbfb33bd936bdc3e)
1 /*
2    Provides an interface to the LLNL package hypre
3 */
4 
5 #include <petscpkg_version.h>
6 #include <petsc/private/pcimpl.h> /*I "petscpc.h" I*/
7 /* this include is needed ONLY to allow access to the private data inside the Mat object specific to hypre */
8 #include <petsc/private/matimpl.h>
9 #include <petsc/private/vecimpl.h>
10 #include <../src/vec/vec/impls/hypre/vhyp.h>
11 #include <../src/mat/impls/hypre/mhypre.h>
12 #include <../src/dm/impls/da/hypre/mhyp.h>
13 #include <_hypre_parcsr_ls.h>
14 #include <petscmathypre.h>
15 
16 #if defined(PETSC_HAVE_HYPRE_DEVICE)
17   #include <petsc/private/deviceimpl.h>
18 #endif
19 
20 static PetscBool  cite            = PETSC_FALSE;
21 static const char hypreCitation[] = "@manual{hypre-web-page,\n  title  = {{\\sl hypre}: High Performance Preconditioners},\n  organization = {Lawrence Livermore National Laboratory},\n  note  = "
22                                     "{\\url{https://www.llnl.gov/casc/hypre}}\n}\n";
23 
24 /*
25    Private context (data structure) for the  preconditioner.
26 */
typedef struct {
  HYPRE_Solver hsolver; /* handle to the underlying hypre solver object */
  Mat          hpmat;   /* MatHYPRE: hypre-format copy of (or reference to) pc->pmat, built in PCSetUp_HYPRE() */

  /* function pointers for the selected hypre solver; destroy is called in PCDestroy_HYPRE(),
     solve in PCApply_HYPRE()/PCMatApply_HYPRE_BoomerAMG(), setup in PCSetUp_HYPRE() */
  HYPRE_Int (*destroy)(HYPRE_Solver);
  HYPRE_Int (*solve)(HYPRE_Solver, HYPRE_ParCSRMatrix, HYPRE_ParVector, HYPRE_ParVector);
  HYPRE_Int (*setup)(HYPRE_Solver, HYPRE_ParCSRMatrix, HYPRE_ParVector, HYPRE_ParVector);

  MPI_Comm comm_hypre; /* communicator handed to hypre; returned via PetscCommRestoreComm() in PCDestroy_HYPRE() */
  char    *hypre_type; /* name of the hypre solver in use (e.g. "boomeramg"); freed in PCDestroy_HYPRE() */

  /* options for Pilut and BoomerAMG*/
  PetscInt  maxiter;
  PetscReal tol;

  /* options for Pilut */
  PetscInt factorrowsize;

  /* options for ParaSails */
  PetscInt  nlevels;
  PetscReal threshold;
  PetscReal filter;
  PetscReal loadbal;
  PetscInt  logging;
  PetscInt  ruse;
  PetscInt  symt;

  /* diagnostics flag — NOTE(review): section header duplicated "options for BoomerAMG" in the original */
  PetscBool printstatistics;

  /* options for BoomerAMG */
  PetscInt  cycletype;
  PetscInt  maxlevels;
  PetscReal strongthreshold;
  PetscReal maxrowsum;
  PetscInt  gridsweeps[3];
  PetscInt  coarsentype;
  PetscInt  measuretype;
  PetscInt  smoothtype;
  PetscInt  smoothnumlevels;
  PetscInt  eu_level;         /* Number of levels for ILU(k) in Euclid */
  PetscReal eu_droptolerance; /* Drop tolerance for ILU(k) in Euclid */
  PetscInt  eu_bj;            /* Defines use of Block Jacobi ILU in Euclid */
  PetscInt  relaxtype[3];
  PetscReal relaxweight;
  PetscReal outerrelaxweight;
  PetscInt  relaxorder;
  PetscReal truncfactor;
  PetscBool applyrichardson; /* if set, PCApply_HYPRE() keeps x as the initial guess instead of zeroing it */
  PetscInt  pmax;
  PetscInt  interptype;
  PetscInt  maxc;
  PetscInt  minc;
#if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
  char *spgemm_type; // this is a global hypre parameter but is closely associated with BoomerAMG
#endif
  /* GPU */
  PetscBool keeptranspose;
  PetscInt  rap2;
  PetscInt  mod_rap2;

  /* AIR (approximate ideal restriction) options — presumably BoomerAMG AIR variant; confirm against hypre docs */
  PetscInt  Rtype;
  PetscReal Rstrongthreshold;
  PetscReal Rfilterthreshold;
  PetscInt  Adroptype;
  PetscReal Adroptol;

  PetscInt  agg_nl;
  PetscInt  agg_interptype;
  PetscInt  agg_num_paths;
  PetscBool nodal_relax;
  PetscInt  nodal_relax_levels;

  PetscInt  nodal_coarsening;
  PetscInt  nodal_coarsening_diag;
  PetscInt  vec_interp_variant;
  PetscInt  vec_interp_qmax;
  PetscBool vec_interp_smooth;
  PetscInt  interp_refine;

  /* NearNullSpace support (fed to BoomerAMG as interpolation vectors in PCSetUp_HYPRE()) */
  VecHYPRE_IJVector *hmnull;          /* hypre copies of the near null space vectors */
  HYPRE_ParVector   *phmnull;         /* raw hypre handles extracted from hmnull[] */
  PetscInt           n_hmnull;        /* number of vectors currently stored */
  Vec                hmnull_constant; /* normalized constant vector, used when the null space has_const */

  /* options for AS (Auxiliary Space preconditioners) */
  PetscInt  as_print;
  PetscInt  as_max_iter;
  PetscReal as_tol;
  PetscInt  as_relax_type;
  PetscInt  as_relax_times;
  PetscReal as_relax_weight;
  PetscReal as_omega;
  PetscInt  as_amg_alpha_opts[5]; /* AMG coarsen type, agg_levels, relax_type, interp_type, Pmax for vector Poisson (AMS) or Curl problem (ADS) */
  PetscReal as_amg_alpha_theta;   /* AMG strength for vector Poisson (AMS) or Curl problem (ADS) */
  PetscInt  as_amg_beta_opts[5];  /* AMG coarsen type, agg_levels, relax_type, interp_type, Pmax for scalar Poisson (AMS) or vector Poisson (ADS) */
  PetscReal as_amg_beta_theta;    /* AMG strength for scalar Poisson (AMS) or vector Poisson (ADS)  */
  PetscInt  ams_cycle_type;
  PetscInt  ads_cycle_type;

  /* additional data */
  Mat G;             /* MatHYPRE: discrete gradient (required by AMS and ADS) */
  Mat C;             /* MatHYPRE: discrete curl (required by ADS) */
  Mat alpha_Poisson; /* MatHYPRE */
  Mat beta_Poisson;  /* MatHYPRE */

  /* extra information for AMS */
  PetscInt          dim; /* geometrical dimension */
  VecHYPRE_IJVector coords[3];    /* nodal coordinate vectors (PCSetCoordinates) */
  VecHYPRE_IJVector constants[3]; /* edge constant vectors (PCHYPRESetEdgeConstantVectors) */
  VecHYPRE_IJVector interior;     /* marker for interior nodes, used when beta is zero on part of the domain */
  Mat               RT_PiFull, RT_Pi[3]; /* Raviart-Thomas interpolations for ADS */
  Mat               ND_PiFull, ND_Pi[3]; /* Nedelec interpolations for AMS/ADS */
  PetscBool         ams_beta_is_zero;
  PetscBool         ams_beta_is_zero_part;
  PetscInt          ams_proj_freq;
} PC_HYPRE;
146 
147 PetscErrorCode PCHYPREGetSolver(PC pc, HYPRE_Solver *hsolver)
148 {
149   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
150 
151   PetscFunctionBegin;
152   *hsolver = jac->hsolver;
153   PetscFunctionReturn(PETSC_SUCCESS);
154 }
155 
156 /*
157   Matrices with AIJ format are created IN PLACE with using (I,J,data) from BoomerAMG. Since the data format in hypre_ParCSRMatrix
158   is different from that used in PETSc, the original hypre_ParCSRMatrix can not be used any more after call this routine.
159   It is used in PCHMG. Other users should avoid using this function.
160 */
161 static PetscErrorCode PCGetCoarseOperators_BoomerAMG(PC pc, PetscInt *nlevels, Mat *operators[])
162 {
163   PC_HYPRE            *jac  = (PC_HYPRE *)pc->data;
164   PetscBool            same = PETSC_FALSE;
165   PetscInt             num_levels, l;
166   Mat                 *mattmp;
167   hypre_ParCSRMatrix **A_array;
168 
169   PetscFunctionBegin;
170   PetscCall(PetscStrcmp(jac->hypre_type, "boomeramg", &same));
171   PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_NOTSAMETYPE, "Hypre type is not BoomerAMG ");
172   num_levels = hypre_ParAMGDataNumLevels((hypre_ParAMGData *)(jac->hsolver));
173   PetscCall(PetscMalloc1(num_levels, &mattmp));
174   A_array = hypre_ParAMGDataAArray((hypre_ParAMGData *)(jac->hsolver));
175   for (l = 1; l < num_levels; l++) {
176     PetscCall(MatCreateFromParCSR(A_array[l], MATAIJ, PETSC_OWN_POINTER, &(mattmp[num_levels - 1 - l])));
177     /* We want to own the data, and HYPRE can not touch this matrix any more */
178     A_array[l] = NULL;
179   }
180   *nlevels   = num_levels;
181   *operators = mattmp;
182   PetscFunctionReturn(PETSC_SUCCESS);
183 }
184 
185 /*
186   Matrices with AIJ format are created IN PLACE with using (I,J,data) from BoomerAMG. Since the data format in hypre_ParCSRMatrix
187   is different from that used in PETSc, the original hypre_ParCSRMatrix can not be used any more after call this routine.
188   It is used in PCHMG. Other users should avoid using this function.
189 */
190 static PetscErrorCode PCGetInterpolations_BoomerAMG(PC pc, PetscInt *nlevels, Mat *interpolations[])
191 {
192   PC_HYPRE            *jac  = (PC_HYPRE *)pc->data;
193   PetscBool            same = PETSC_FALSE;
194   PetscInt             num_levels, l;
195   Mat                 *mattmp;
196   hypre_ParCSRMatrix **P_array;
197 
198   PetscFunctionBegin;
199   PetscCall(PetscStrcmp(jac->hypre_type, "boomeramg", &same));
200   PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_NOTSAMETYPE, "Hypre type is not BoomerAMG ");
201   num_levels = hypre_ParAMGDataNumLevels((hypre_ParAMGData *)(jac->hsolver));
202   PetscCall(PetscMalloc1(num_levels, &mattmp));
203   P_array = hypre_ParAMGDataPArray((hypre_ParAMGData *)(jac->hsolver));
204   for (l = 1; l < num_levels; l++) {
205     PetscCall(MatCreateFromParCSR(P_array[num_levels - 1 - l], MATAIJ, PETSC_OWN_POINTER, &(mattmp[l - 1])));
206     /* We want to own the data, and HYPRE can not touch this matrix any more */
207     P_array[num_levels - 1 - l] = NULL;
208   }
209   *nlevels        = num_levels;
210   *interpolations = mattmp;
211   PetscFunctionReturn(PETSC_SUCCESS);
212 }
213 
214 /* Resets (frees) Hypre's representation of the near null space */
215 static PetscErrorCode PCHYPREResetNearNullSpace_Private(PC pc)
216 {
217   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
218   PetscInt  i;
219 
220   PetscFunctionBegin;
221   for (i = 0; i < jac->n_hmnull; i++) PetscCall(VecHYPRE_IJVectorDestroy(&jac->hmnull[i]));
222   PetscCall(PetscFree(jac->hmnull));
223   PetscCall(PetscFree(jac->phmnull));
224   PetscCall(VecDestroy(&jac->hmnull_constant));
225   jac->n_hmnull = 0;
226   PetscFunctionReturn(PETSC_SUCCESS);
227 }
228 
/*
  Sets up the hypre preconditioner:
    1. ensures a hypre solver type is selected (default "boomeramg"),
    2. obtains/refreshes a MATHYPRE version of pc->pmat,
    3. feeds solver-specific auxiliary data (near null space for BoomerAMG;
       gradients/coordinates/interpolations for AMS; gradient/curl/coordinates/
       interpolations for ADS),
    4. calls the hypre setup routine stored in jac->setup.
*/
static PetscErrorCode PCSetUp_HYPRE(PC pc)
{
  PC_HYPRE          *jac = (PC_HYPRE *)pc->data;
  Mat_HYPRE         *hjac;
  HYPRE_ParCSRMatrix hmat;
  HYPRE_ParVector    bv, xv;
  PetscBool          ishypre;

  PetscFunctionBegin;
  /* default type is boomerAMG */
  if (!jac->hypre_type) PetscCall(PCHYPRESetType(pc, "boomeramg"));

  /* get hypre matrix; a changed nonzero pattern forces a fresh conversion */
  if (pc->flag == DIFFERENT_NONZERO_PATTERN) PetscCall(MatDestroy(&jac->hpmat));
  PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRE, &ishypre));
  if (!ishypre) {
    /* Temporary fix since we do not support MAT_REUSE_MATRIX with HYPRE device */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
    PetscBool iscuda, iship, iskokkos;

    PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &iscuda, MATSEQAIJCUSPARSE, MATMPIAIJCUSPARSE, ""));
    PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &iship, MATSEQAIJHIPSPARSE, MATMPIAIJHIPSPARSE, ""));
    PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &iskokkos, MATSEQAIJKOKKOS, MATMPIAIJKOKKOS, ""));
    if (iscuda || iship || iskokkos) PetscCall(MatDestroy(&jac->hpmat));
#endif
    PetscCall(MatConvert(pc->pmat, MATHYPRE, jac->hpmat ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX, &jac->hpmat));
  } else {
    /* pmat is already MATHYPRE: take a reference instead of converting */
    PetscCall(PetscObjectReference((PetscObject)pc->pmat));
    PetscCall(MatDestroy(&jac->hpmat));
    jac->hpmat = pc->pmat;
  }

  /* allow debug */
  PetscCall(MatViewFromOptions(jac->hpmat, NULL, "-pc_hypre_mat_view"));
  hjac = (Mat_HYPRE *)(jac->hpmat->data);

  /* special case for BoomerAMG: pass block size and near null space information */
  if (jac->setup == HYPRE_BoomerAMGSetup) {
    MatNullSpace mnull;
    PetscBool    has_const;
    PetscInt     bs, nvec, i;
    const Vec   *vecs;

    PetscCall(MatGetBlockSize(pc->pmat, &bs));
    if (bs > 1) PetscCallExternal(HYPRE_BoomerAMGSetNumFunctions, jac->hsolver, bs);
    PetscCall(MatGetNearNullSpace(pc->mat, &mnull));
    if (mnull) {
      /* copy the near null space vectors into hypre IJ vectors; one extra slot
         is reserved for the (normalized) constant vector when has_const is set */
      PetscCall(PCHYPREResetNearNullSpace_Private(pc));
      PetscCall(MatNullSpaceGetVecs(mnull, &has_const, &nvec, &vecs));
      PetscCall(PetscMalloc1(nvec + 1, &jac->hmnull));
      PetscCall(PetscMalloc1(nvec + 1, &jac->phmnull));
      for (i = 0; i < nvec; i++) {
        PetscCall(VecHYPRE_IJVectorCreate(vecs[i]->map, &jac->hmnull[i]));
        PetscCall(VecHYPRE_IJVectorCopy(vecs[i], jac->hmnull[i]));
        PetscCallExternal(HYPRE_IJVectorGetObject, jac->hmnull[i]->ij, (void **)&jac->phmnull[i]);
      }
      if (has_const) {
        PetscCall(MatCreateVecs(pc->pmat, &jac->hmnull_constant, NULL));
        PetscCall(VecSet(jac->hmnull_constant, 1));
        PetscCall(VecNormalize(jac->hmnull_constant, NULL));
        PetscCall(VecHYPRE_IJVectorCreate(jac->hmnull_constant->map, &jac->hmnull[nvec]));
        PetscCall(VecHYPRE_IJVectorCopy(jac->hmnull_constant, jac->hmnull[nvec]));
        PetscCallExternal(HYPRE_IJVectorGetObject, jac->hmnull[nvec]->ij, (void **)&jac->phmnull[nvec]);
        nvec++;
      }
      PetscCallExternal(HYPRE_BoomerAMGSetInterpVectors, jac->hsolver, nvec, jac->phmnull);
      jac->n_hmnull = nvec;
    }
  }

  /* special case for AMS: requires the discrete gradient plus one of
     coordinates, edge constant vectors, or Nedelec interpolations */
  if (jac->setup == HYPRE_AMSSetup) {
    Mat_HYPRE         *hm;
    HYPRE_ParCSRMatrix parcsr;
    if (!jac->coords[0] && !jac->constants[0] && !(jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1]))) {
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE AMS preconditioner needs either the coordinate vectors via PCSetCoordinates() or the edge constant vectors via PCHYPRESetEdgeConstantVectors() or the interpolation matrix via PCHYPRESetInterpolations()");
    }
    if (jac->dim) PetscCallExternal(HYPRE_AMSSetDimension, jac->hsolver, jac->dim);
    if (jac->constants[0]) {
      /* the third edge constant vector is optional (2D problems leave it NULL) */
      HYPRE_ParVector ozz, zoz, zzo = NULL;
      PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[0]->ij, (void **)(&ozz));
      PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[1]->ij, (void **)(&zoz));
      if (jac->constants[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[2]->ij, (void **)(&zzo));
      PetscCallExternal(HYPRE_AMSSetEdgeConstantVectors, jac->hsolver, ozz, zoz, zzo);
    }
    if (jac->coords[0]) {
      /* extract whichever coordinate components are available; missing ones stay NULL */
      HYPRE_ParVector coords[3];
      coords[0] = NULL;
      coords[1] = NULL;
      coords[2] = NULL;
      if (jac->coords[0]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[0]->ij, (void **)(&coords[0]));
      if (jac->coords[1]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[1]->ij, (void **)(&coords[1]));
      if (jac->coords[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[2]->ij, (void **)(&coords[2]));
      PetscCallExternal(HYPRE_AMSSetCoordinateVectors, jac->hsolver, coords[0], coords[1], coords[2]);
    }
    PetscCheck(jac->G, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE AMS preconditioner needs the discrete gradient operator via PCHYPRESetDiscreteGradient");
    hm = (Mat_HYPRE *)(jac->G->data);
    PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
    PetscCallExternal(HYPRE_AMSSetDiscreteGradient, jac->hsolver, parcsr);
    if (jac->alpha_Poisson) {
      hm = (Mat_HYPRE *)(jac->alpha_Poisson->data);
      PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
      PetscCallExternal(HYPRE_AMSSetAlphaPoissonMatrix, jac->hsolver, parcsr);
    }
    if (jac->ams_beta_is_zero) {
      /* NULL tells hypre the beta Poisson matrix is identically zero */
      PetscCallExternal(HYPRE_AMSSetBetaPoissonMatrix, jac->hsolver, NULL);
    } else if (jac->beta_Poisson) {
      hm = (Mat_HYPRE *)(jac->beta_Poisson->data);
      PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
      PetscCallExternal(HYPRE_AMSSetBetaPoissonMatrix, jac->hsolver, parcsr);
    } else if (jac->ams_beta_is_zero_part) {
      if (jac->interior) {
        HYPRE_ParVector interior = NULL;
        PetscCallExternal(HYPRE_IJVectorGetObject, jac->interior->ij, (void **)(&interior));
        PetscCallExternal(HYPRE_AMSSetInteriorNodes, jac->hsolver, interior);
      } else {
        /* no interior-node marker supplied: disable the partial-zero-beta path */
        jac->ams_beta_is_zero_part = PETSC_FALSE;
      }
    }
    if (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1])) {
      PetscInt           i;
      HYPRE_ParCSRMatrix nd_parcsrfull, nd_parcsr[3];
      if (jac->ND_PiFull) {
        hm = (Mat_HYPRE *)(jac->ND_PiFull->data);
        PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsrfull));
      } else {
        nd_parcsrfull = NULL;
      }
      for (i = 0; i < 3; ++i) {
        if (jac->ND_Pi[i]) {
          hm = (Mat_HYPRE *)(jac->ND_Pi[i]->data);
          PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsr[i]));
        } else {
          nd_parcsr[i] = NULL;
        }
      }
      PetscCallExternal(HYPRE_AMSSetInterpolations, jac->hsolver, nd_parcsrfull, nd_parcsr[0], nd_parcsr[1], nd_parcsr[2]);
    }
  }
  /* special case for ADS: requires discrete gradient AND curl, plus either
     all three coordinate vectors or both RT and ND interpolation sets */
  if (jac->setup == HYPRE_ADSSetup) {
    Mat_HYPRE         *hm;
    HYPRE_ParCSRMatrix parcsr;
    if (!jac->coords[0] && !((jac->RT_PiFull || (jac->RT_Pi[0] && jac->RT_Pi[1])) && (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1])))) {
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs either the coordinate vectors via PCSetCoordinates() or the interpolation matrices via PCHYPRESetInterpolations");
    } else PetscCheck(jac->coords[1] && jac->coords[2], PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner has been designed for three dimensional problems! For two dimensional problems, use HYPRE AMS instead");
    PetscCheck(jac->G, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs the discrete gradient operator via PCHYPRESetDiscreteGradient");
    PetscCheck(jac->C, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs the discrete curl operator via PCHYPRESetDiscreteGradient");
    if (jac->coords[0]) {
      HYPRE_ParVector coords[3];
      coords[0] = NULL;
      coords[1] = NULL;
      coords[2] = NULL;
      if (jac->coords[0]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[0]->ij, (void **)(&coords[0]));
      if (jac->coords[1]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[1]->ij, (void **)(&coords[1]));
      if (jac->coords[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[2]->ij, (void **)(&coords[2]));
      PetscCallExternal(HYPRE_ADSSetCoordinateVectors, jac->hsolver, coords[0], coords[1], coords[2]);
    }
    hm = (Mat_HYPRE *)(jac->G->data);
    PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
    PetscCallExternal(HYPRE_ADSSetDiscreteGradient, jac->hsolver, parcsr);
    hm = (Mat_HYPRE *)(jac->C->data);
    PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
    PetscCallExternal(HYPRE_ADSSetDiscreteCurl, jac->hsolver, parcsr);
    if ((jac->RT_PiFull || (jac->RT_Pi[0] && jac->RT_Pi[1])) && (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1]))) {
      PetscInt           i;
      HYPRE_ParCSRMatrix rt_parcsrfull, rt_parcsr[3];
      HYPRE_ParCSRMatrix nd_parcsrfull, nd_parcsr[3];
      /* collect the Raviart-Thomas interpolations (full and/or per-component) */
      if (jac->RT_PiFull) {
        hm = (Mat_HYPRE *)(jac->RT_PiFull->data);
        PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&rt_parcsrfull));
      } else {
        rt_parcsrfull = NULL;
      }
      for (i = 0; i < 3; ++i) {
        if (jac->RT_Pi[i]) {
          hm = (Mat_HYPRE *)(jac->RT_Pi[i]->data);
          PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&rt_parcsr[i]));
        } else {
          rt_parcsr[i] = NULL;
        }
      }
      /* collect the Nedelec interpolations (full and/or per-component) */
      if (jac->ND_PiFull) {
        hm = (Mat_HYPRE *)(jac->ND_PiFull->data);
        PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsrfull));
      } else {
        nd_parcsrfull = NULL;
      }
      for (i = 0; i < 3; ++i) {
        if (jac->ND_Pi[i]) {
          hm = (Mat_HYPRE *)(jac->ND_Pi[i]->data);
          PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsr[i]));
        } else {
          nd_parcsr[i] = NULL;
        }
      }
      PetscCallExternal(HYPRE_ADSSetInterpolations, jac->hsolver, rt_parcsrfull, rt_parcsr[0], rt_parcsr[1], rt_parcsr[2], nd_parcsrfull, nd_parcsr[0], nd_parcsr[1], nd_parcsr[2]);
    }
  }
  /* run the hypre setup with floating point traps disabled (hypre may generate
     benign FP exceptions internally) */
  PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat);
  PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&bv);
  PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&xv);
  PetscCall(PetscFPTrapPush(PETSC_FP_TRAP_OFF));
  PetscCallExternal(jac->setup, jac->hsolver, hmat, bv, xv);
  PetscCall(PetscFPTrapPop());
  PetscFunctionReturn(PETSC_SUCCESS);
}
436 
/*
  Applies the hypre preconditioner: x = M^{-1} b. When jac->applyrichardson is
  set, x also carries the initial guess, so it is not zeroed and is pushed
  read-write instead of write-only.
*/
static PetscErrorCode PCApply_HYPRE(PC pc, Vec b, Vec x)
{
  PC_HYPRE          *jac  = (PC_HYPRE *)pc->data;
  Mat_HYPRE         *hjac = (Mat_HYPRE *)(jac->hpmat->data);
  HYPRE_ParCSRMatrix hmat;
  HYPRE_ParVector    jbv, jxv;

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(hypreCitation, &cite));
  /* zero initial guess unless x is a Richardson iterate */
  if (!jac->applyrichardson) PetscCall(VecSet(x, 0.0));
  /* wrap the PETSc vectors as hypre vectors (Push/Pop pairs; presumably borrows storage — see vhyp.h) */
  PetscCall(VecHYPRE_IJVectorPushVecRead(hjac->b, b));
  if (jac->applyrichardson) PetscCall(VecHYPRE_IJVectorPushVec(hjac->x, x));
  else PetscCall(VecHYPRE_IJVectorPushVecWrite(hjac->x, x));
  PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat);
  PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&jbv);
  PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&jxv);
  PetscStackCallExternalVoid(
    "Hypre solve", do {
      HYPRE_Int hierr = (*jac->solve)(jac->hsolver, hmat, jbv, jxv);
      if (hierr) {
        /* HYPRE_ERROR_CONV only signals non-convergence; any other code is fatal */
        PetscCheck(hierr == HYPRE_ERROR_CONV, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr);
        HYPRE_ClearAllErrors();
      }
    } while (0));

  /* AMS with partially-zero beta: remove the gradient component from the solution */
  if (jac->setup == HYPRE_AMSSetup && jac->ams_beta_is_zero_part) PetscCallExternal(HYPRE_AMSProjectOutGradients, jac->hsolver, jxv);
  PetscCall(VecHYPRE_IJVectorPopVec(hjac->x));
  PetscCall(VecHYPRE_IJVectorPopVec(hjac->b));
  PetscFunctionReturn(PETSC_SUCCESS);
}
467 
468 static PetscErrorCode PCMatApply_HYPRE_BoomerAMG(PC pc, Mat B, Mat X)
469 {
470   PC_HYPRE           *jac  = (PC_HYPRE *)pc->data;
471   Mat_HYPRE          *hjac = (Mat_HYPRE *)(jac->hpmat->data);
472   hypre_ParCSRMatrix *par_matrix;
473   HYPRE_ParVector     hb, hx;
474   const PetscScalar  *b;
475   PetscScalar        *x;
476   PetscInt            m, N, lda;
477   hypre_Vector       *x_local;
478   PetscMemType        type;
479 
480   PetscFunctionBegin;
481   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
482   PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&par_matrix);
483   PetscCall(MatGetLocalSize(B, &m, NULL));
484   PetscCall(MatGetSize(B, NULL, &N));
485   PetscCallExternal(HYPRE_ParMultiVectorCreate, hypre_ParCSRMatrixComm(par_matrix), hypre_ParCSRMatrixGlobalNumRows(par_matrix), hypre_ParCSRMatrixRowStarts(par_matrix), N, &hb);
486   PetscCallExternal(HYPRE_ParMultiVectorCreate, hypre_ParCSRMatrixComm(par_matrix), hypre_ParCSRMatrixGlobalNumRows(par_matrix), hypre_ParCSRMatrixRowStarts(par_matrix), N, &hx);
487   PetscCall(MatZeroEntries(X));
488   PetscCall(MatDenseGetArrayReadAndMemType(B, &b, &type));
489   PetscCall(MatDenseGetLDA(B, &lda));
490   PetscCheck(lda == m, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use a LDA different than the number of local rows: % " PetscInt_FMT " != % " PetscInt_FMT, lda, m);
491   PetscCall(MatDenseGetLDA(X, &lda));
492   PetscCheck(lda == m, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use a LDA different than the number of local rows: % " PetscInt_FMT " != % " PetscInt_FMT, lda, m);
493   x_local = hypre_ParVectorLocalVector(hb);
494   PetscCallExternal(hypre_SeqVectorSetDataOwner, x_local, 0);
495   hypre_VectorData(x_local) = (HYPRE_Complex *)b;
496   PetscCall(MatDenseGetArrayWriteAndMemType(X, &x, NULL));
497   x_local = hypre_ParVectorLocalVector(hx);
498   PetscCallExternal(hypre_SeqVectorSetDataOwner, x_local, 0);
499   hypre_VectorData(x_local) = (HYPRE_Complex *)x;
500   PetscCallExternal(hypre_ParVectorInitialize_v2, hb, type == PETSC_MEMTYPE_HOST ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE);
501   PetscCallExternal(hypre_ParVectorInitialize_v2, hx, type == PETSC_MEMTYPE_HOST ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE);
502   PetscStackCallExternalVoid(
503     "Hypre solve", do {
504       HYPRE_Int hierr = (*jac->solve)(jac->hsolver, par_matrix, hb, hx);
505       if (hierr) {
506         PetscCheck(hierr == HYPRE_ERROR_CONV, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr);
507         HYPRE_ClearAllErrors();
508       }
509     } while (0));
510   PetscCallExternal(HYPRE_ParVectorDestroy, hb);
511   PetscCallExternal(HYPRE_ParVectorDestroy, hx);
512   PetscCall(MatDenseRestoreArrayReadAndMemType(B, &b));
513   PetscCall(MatDenseRestoreArrayWriteAndMemType(X, &x));
514   PetscFunctionReturn(PETSC_SUCCESS);
515 }
516 
517 static PetscErrorCode PCReset_HYPRE(PC pc)
518 {
519   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
520 
521   PetscFunctionBegin;
522   PetscCall(MatDestroy(&jac->hpmat));
523   PetscCall(MatDestroy(&jac->G));
524   PetscCall(MatDestroy(&jac->C));
525   PetscCall(MatDestroy(&jac->alpha_Poisson));
526   PetscCall(MatDestroy(&jac->beta_Poisson));
527   PetscCall(MatDestroy(&jac->RT_PiFull));
528   PetscCall(MatDestroy(&jac->RT_Pi[0]));
529   PetscCall(MatDestroy(&jac->RT_Pi[1]));
530   PetscCall(MatDestroy(&jac->RT_Pi[2]));
531   PetscCall(MatDestroy(&jac->ND_PiFull));
532   PetscCall(MatDestroy(&jac->ND_Pi[0]));
533   PetscCall(MatDestroy(&jac->ND_Pi[1]));
534   PetscCall(MatDestroy(&jac->ND_Pi[2]));
535   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[0]));
536   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[1]));
537   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[2]));
538   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[0]));
539   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[1]));
540   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[2]));
541   PetscCall(VecHYPRE_IJVectorDestroy(&jac->interior));
542   PetscCall(PCHYPREResetNearNullSpace_Private(pc));
543   jac->ams_beta_is_zero      = PETSC_FALSE;
544   jac->ams_beta_is_zero_part = PETSC_FALSE;
545   jac->dim                   = 0;
546   PetscFunctionReturn(PETSC_SUCCESS);
547 }
548 
549 static PetscErrorCode PCDestroy_HYPRE(PC pc)
550 {
551   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
552 
553   PetscFunctionBegin;
554   PetscCall(PCReset_HYPRE(pc));
555   if (jac->destroy) PetscCallExternal(jac->destroy, jac->hsolver);
556   PetscCall(PetscFree(jac->hypre_type));
557 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
558   PetscCall(PetscFree(jac->spgemm_type));
559 #endif
560   if (jac->comm_hypre != MPI_COMM_NULL) PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre));
561   PetscCall(PetscFree(pc->data));
562 
563   PetscCall(PetscObjectChangeTypeName((PetscObject)pc, 0));
564   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetType_C", NULL));
565   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetType_C", NULL));
566   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteGradient_C", NULL));
567   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteCurl_C", NULL));
568   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetInterpolations_C", NULL));
569   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetConstantEdgeVectors_C", NULL));
570   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetPoissonMatrix_C", NULL));
571   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetEdgeConstantVectors_C", NULL));
572   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREAMSSetInteriorNodes_C", NULL));
573   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL));
574   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL));
575   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinSetMatProductAlgorithm_C", NULL));
576   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinGetMatProductAlgorithm_C", NULL));
577   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
578   PetscFunctionReturn(PETSC_SUCCESS);
579 }
580 
581 static PetscErrorCode PCSetFromOptions_HYPRE_Pilut(PC pc, PetscOptionItems *PetscOptionsObject)
582 {
583   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
584   PetscBool flag;
585 
586   PetscFunctionBegin;
587   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE Pilut Options");
588   PetscCall(PetscOptionsInt("-pc_hypre_pilut_maxiter", "Number of iterations", "None", jac->maxiter, &jac->maxiter, &flag));
589   if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetMaxIter, jac->hsolver, jac->maxiter);
590   PetscCall(PetscOptionsReal("-pc_hypre_pilut_tol", "Drop tolerance", "None", jac->tol, &jac->tol, &flag));
591   if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetDropTolerance, jac->hsolver, jac->tol);
592   PetscCall(PetscOptionsInt("-pc_hypre_pilut_factorrowsize", "FactorRowSize", "None", jac->factorrowsize, &jac->factorrowsize, &flag));
593   if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetFactorRowSize, jac->hsolver, jac->factorrowsize);
594   PetscOptionsHeadEnd();
595   PetscFunctionReturn(PETSC_SUCCESS);
596 }
597 
598 static PetscErrorCode PCView_HYPRE_Pilut(PC pc, PetscViewer viewer)
599 {
600   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
601   PetscBool iascii;
602 
603   PetscFunctionBegin;
604   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
605   if (iascii) {
606     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE Pilut preconditioning\n"));
607     if (jac->maxiter != PETSC_DEFAULT) {
608       PetscCall(PetscViewerASCIIPrintf(viewer, "    maximum number of iterations %" PetscInt_FMT "\n", jac->maxiter));
609     } else {
610       PetscCall(PetscViewerASCIIPrintf(viewer, "    default maximum number of iterations \n"));
611     }
612     if (jac->tol != PETSC_DEFAULT) {
613       PetscCall(PetscViewerASCIIPrintf(viewer, "    drop tolerance %g\n", (double)jac->tol));
614     } else {
615       PetscCall(PetscViewerASCIIPrintf(viewer, "    default drop tolerance \n"));
616     }
617     if (jac->factorrowsize != PETSC_DEFAULT) {
618       PetscCall(PetscViewerASCIIPrintf(viewer, "    factor row size %" PetscInt_FMT "\n", jac->factorrowsize));
619     } else {
620       PetscCall(PetscViewerASCIIPrintf(viewer, "    default factor row size \n"));
621     }
622   }
623   PetscFunctionReturn(PETSC_SUCCESS);
624 }
625 
626 static PetscErrorCode PCSetFromOptions_HYPRE_Euclid(PC pc, PetscOptionItems *PetscOptionsObject)
627 {
628   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
629   PetscBool flag, eu_bj = jac->eu_bj ? PETSC_TRUE : PETSC_FALSE;
630 
631   PetscFunctionBegin;
632   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE Euclid Options");
633   PetscCall(PetscOptionsInt("-pc_hypre_euclid_level", "Factorization levels", "None", jac->eu_level, &jac->eu_level, &flag));
634   if (flag) PetscCallExternal(HYPRE_EuclidSetLevel, jac->hsolver, jac->eu_level);
635 
636   PetscCall(PetscOptionsReal("-pc_hypre_euclid_droptolerance", "Drop tolerance for ILU(k) in Euclid", "None", jac->eu_droptolerance, &jac->eu_droptolerance, &flag));
637   if (flag) {
638     PetscMPIInt size;
639 
640     PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
641     PetscCheck(size == 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "hypre's Euclid does not support a parallel drop tolerance");
642     PetscCallExternal(HYPRE_EuclidSetILUT, jac->hsolver, jac->eu_droptolerance);
643   }
644 
645   PetscCall(PetscOptionsBool("-pc_hypre_euclid_bj", "Use Block Jacobi for ILU in Euclid", "None", eu_bj, &eu_bj, &flag));
646   if (flag) {
647     jac->eu_bj = eu_bj ? 1 : 0;
648     PetscCallExternal(HYPRE_EuclidSetBJ, jac->hsolver, jac->eu_bj);
649   }
650   PetscOptionsHeadEnd();
651   PetscFunctionReturn(PETSC_SUCCESS);
652 }
653 
654 static PetscErrorCode PCView_HYPRE_Euclid(PC pc, PetscViewer viewer)
655 {
656   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
657   PetscBool iascii;
658 
659   PetscFunctionBegin;
660   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
661   if (iascii) {
662     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE Euclid preconditioning\n"));
663     if (jac->eu_level != PETSC_DEFAULT) {
664       PetscCall(PetscViewerASCIIPrintf(viewer, "    factorization levels %" PetscInt_FMT "\n", jac->eu_level));
665     } else {
666       PetscCall(PetscViewerASCIIPrintf(viewer, "    default factorization levels \n"));
667     }
668     PetscCall(PetscViewerASCIIPrintf(viewer, "    drop tolerance %g\n", (double)jac->eu_droptolerance));
669     PetscCall(PetscViewerASCIIPrintf(viewer, "    use Block-Jacobi? %" PetscInt_FMT "\n", jac->eu_bj));
670   }
671   PetscFunctionReturn(PETSC_SUCCESS);
672 }
673 
674 static PetscErrorCode PCApplyTranspose_HYPRE_BoomerAMG(PC pc, Vec b, Vec x)
675 {
676   PC_HYPRE          *jac  = (PC_HYPRE *)pc->data;
677   Mat_HYPRE         *hjac = (Mat_HYPRE *)(jac->hpmat->data);
678   HYPRE_ParCSRMatrix hmat;
679   HYPRE_ParVector    jbv, jxv;
680 
681   PetscFunctionBegin;
682   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
683   PetscCall(VecSet(x, 0.0));
684   PetscCall(VecHYPRE_IJVectorPushVecRead(hjac->x, b));
685   PetscCall(VecHYPRE_IJVectorPushVecWrite(hjac->b, x));
686 
687   PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat);
688   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&jbv);
689   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&jxv);
690 
691   PetscStackCallExternalVoid(
692     "Hypre Transpose solve", do {
693       HYPRE_Int hierr = HYPRE_BoomerAMGSolveT(jac->hsolver, hmat, jbv, jxv);
694       if (hierr) {
695         /* error code of 1 in BoomerAMG merely means convergence not achieved */
696         PetscCheck(hierr == 1, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr);
697         HYPRE_ClearAllErrors();
698       }
699     } while (0));
700 
701   PetscCall(VecHYPRE_IJVectorPopVec(hjac->x));
702   PetscCall(VecHYPRE_IJVectorPopVec(hjac->b));
703   PetscFunctionReturn(PETSC_SUCCESS);
704 }
705 
706 static PetscErrorCode PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(PC pc, const char name[])
707 {
708   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
709   PetscBool flag;
710 
711 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
712   PetscFunctionBegin;
713   if (jac->spgemm_type) {
714     PetscCall(PetscStrcmp(jac->spgemm_type, name, &flag));
715     PetscCheck(flag, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Cannot reset the HYPRE SpGEMM (really we can)");
716     PetscFunctionReturn(PETSC_SUCCESS);
717   } else {
718     PetscCall(PetscStrallocpy(name, &jac->spgemm_type));
719   }
720   PetscCall(PetscStrcmp("cusparse", jac->spgemm_type, &flag));
721   if (flag) {
722     PetscCallExternal(HYPRE_SetSpGemmUseCusparse, 1);
723     PetscFunctionReturn(PETSC_SUCCESS);
724   }
725   PetscCall(PetscStrcmp("hypre", jac->spgemm_type, &flag));
726   if (flag) {
727     PetscCallExternal(HYPRE_SetSpGemmUseCusparse, 0);
728     PetscFunctionReturn(PETSC_SUCCESS);
729   }
730   jac->spgemm_type = NULL;
731   SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown HYPRE SpGEMM type %s; Choices are cusparse, hypre", name);
732 #endif
733 }
734 
735 static PetscErrorCode PCMGGalerkinGetMatProductAlgorithm_HYPRE_BoomerAMG(PC pc, const char *spgemm[])
736 {
737   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
738 
739   PetscFunctionBegin;
740   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
741 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
742   *spgemm = jac->spgemm_type;
743 #endif
744   PetscFunctionReturn(PETSC_SUCCESS);
745 }
746 
/* Human-readable names for BoomerAMG enums; the array index is the hypre enum
   value, so gaps in hypre's numbering appear as empty strings. Used both for
   option parsing (PetscOptionsEList) and for PCView output. */
static const char *HYPREBoomerAMGCycleType[]   = {"", "V", "W"};
static const char *HYPREBoomerAMGCoarsenType[] = {"CLJP", "Ruge-Stueben", "", "modifiedRuge-Stueben", "", "", "Falgout", "", "PMIS", "", "HMIS"};
static const char *HYPREBoomerAMGMeasureType[] = {"local", "global"};
/* The following corresponds to HYPRE_BoomerAMGSetRelaxType which has many missing numbers in the enum */
static const char *HYPREBoomerAMGSmoothType[] = {"Schwarz-smoothers", "Pilut", "ParaSails", "Euclid"};
static const char *HYPREBoomerAMGRelaxType[] = {"Jacobi", "sequential-Gauss-Seidel", "seqboundary-Gauss-Seidel", "SOR/Jacobi", "backward-SOR/Jacobi", "" /* [5] hybrid chaotic Gauss-Seidel (works only with OpenMP) */, "symmetric-SOR/Jacobi", "" /* 7 */, "l1scaled-SOR/Jacobi", "Gaussian-elimination", "" /* 10 */, "" /* 11 */, "" /* 12 */, "l1-Gauss-Seidel" /* nonsymmetric */, "backward-l1-Gauss-Seidel" /* nonsymmetric */, "CG" /* non-stationary */, "Chebyshev", "FCF-Jacobi", "l1scaled-Jacobi"};
static const char    *HYPREBoomerAMGInterpType[] = {"classical", "", "", "direct", "multipass", "multipass-wts", "ext+i", "ext+i-cc", "standard", "standard-wts", "block", "block-wtd", "FF", "FF1", "ext", "ad-wts", "ext-mm", "ext+i-mm", "ext+e-mm"};
754 static PetscErrorCode PCSetFromOptions_HYPRE_BoomerAMG(PC pc, PetscOptionItems *PetscOptionsObject)
755 {
756   PC_HYPRE   *jac = (PC_HYPRE *)pc->data;
757   PetscInt    bs, n, indx, level;
758   PetscBool   flg, tmp_truth;
759   double      tmpdbl, twodbl[2];
760   const char *symtlist[]           = {"nonsymmetric", "SPD", "nonsymmetric,SPD"};
761   const char *PCHYPRESpgemmTypes[] = {"cusparse", "hypre"};
762 
763   PetscFunctionBegin;
764   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE BoomerAMG Options");
765   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_cycle_type", "Cycle type", "None", HYPREBoomerAMGCycleType + 1, 2, HYPREBoomerAMGCycleType[jac->cycletype], &indx, &flg));
766   if (flg) {
767     jac->cycletype = indx + 1;
768     PetscCallExternal(HYPRE_BoomerAMGSetCycleType, jac->hsolver, jac->cycletype);
769   }
770   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_max_levels", "Number of levels (of grids) allowed", "None", jac->maxlevels, &jac->maxlevels, &flg));
771   if (flg) {
772     PetscCheck(jac->maxlevels >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of levels %" PetscInt_FMT " must be at least two", jac->maxlevels);
773     PetscCallExternal(HYPRE_BoomerAMGSetMaxLevels, jac->hsolver, jac->maxlevels);
774   }
775   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_max_iter", "Maximum iterations used PER hypre call", "None", jac->maxiter, &jac->maxiter, &flg));
776   if (flg) {
777     PetscCheck(jac->maxiter >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of iterations %" PetscInt_FMT " must be at least one", jac->maxiter);
778     PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter);
779   }
780   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_tol", "Convergence tolerance PER hypre call (0.0 = use a fixed number of iterations)", "None", jac->tol, &jac->tol, &flg));
781   if (flg) {
782     PetscCheck(jac->tol >= 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Tolerance %g must be greater than or equal to zero", (double)jac->tol);
783     PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol);
784   }
785   bs = 1;
786   if (pc->pmat) PetscCall(MatGetBlockSize(pc->pmat, &bs));
787   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_numfunctions", "Number of functions", "HYPRE_BoomerAMGSetNumFunctions", bs, &bs, &flg));
788   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNumFunctions, jac->hsolver, bs);
789 
790   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_truncfactor", "Truncation factor for interpolation (0=no truncation)", "None", jac->truncfactor, &jac->truncfactor, &flg));
791   if (flg) {
792     PetscCheck(jac->truncfactor >= 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Truncation factor %g must be great than or equal zero", (double)jac->truncfactor);
793     PetscCallExternal(HYPRE_BoomerAMGSetTruncFactor, jac->hsolver, jac->truncfactor);
794   }
795 
796   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_P_max", "Max elements per row for interpolation operator (0=unlimited)", "None", jac->pmax, &jac->pmax, &flg));
797   if (flg) {
798     PetscCheck(jac->pmax >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "P_max %" PetscInt_FMT " must be greater than or equal to zero", jac->pmax);
799     PetscCallExternal(HYPRE_BoomerAMGSetPMaxElmts, jac->hsolver, jac->pmax);
800   }
801 
802   PetscCall(PetscOptionsRangeInt("-pc_hypre_boomeramg_agg_nl", "Number of levels of aggressive coarsening", "None", jac->agg_nl, &jac->agg_nl, &flg, 0, jac->maxlevels));
803   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetAggNumLevels, jac->hsolver, jac->agg_nl);
804 
805   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_agg_num_paths", "Number of paths for aggressive coarsening", "None", jac->agg_num_paths, &jac->agg_num_paths, &flg));
806   if (flg) {
807     PetscCheck(jac->agg_num_paths >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of paths %" PetscInt_FMT " must be greater than or equal to 1", jac->agg_num_paths);
808     PetscCallExternal(HYPRE_BoomerAMGSetNumPaths, jac->hsolver, jac->agg_num_paths);
809   }
810 
811   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_strong_threshold", "Threshold for being strongly connected", "None", jac->strongthreshold, &jac->strongthreshold, &flg));
812   if (flg) {
813     PetscCheck(jac->strongthreshold >= 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Strong threshold %g must be great than or equal zero", (double)jac->strongthreshold);
814     PetscCallExternal(HYPRE_BoomerAMGSetStrongThreshold, jac->hsolver, jac->strongthreshold);
815   }
816   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_max_row_sum", "Maximum row sum", "None", jac->maxrowsum, &jac->maxrowsum, &flg));
817   if (flg) {
818     PetscCheck(jac->maxrowsum >= 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Maximum row sum %g must be greater than zero", (double)jac->maxrowsum);
819     PetscCheck(jac->maxrowsum <= 1.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Maximum row sum %g must be less than or equal one", (double)jac->maxrowsum);
820     PetscCallExternal(HYPRE_BoomerAMGSetMaxRowSum, jac->hsolver, jac->maxrowsum);
821   }
822 
823   /* Grid sweeps */
824   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_all", "Number of sweeps for the up and down grid levels", "None", jac->gridsweeps[0], &indx, &flg));
825   if (flg) {
826     PetscCallExternal(HYPRE_BoomerAMGSetNumSweeps, jac->hsolver, indx);
827     /* modify the jac structure so we can view the updated options with PC_View */
828     jac->gridsweeps[0] = indx;
829     jac->gridsweeps[1] = indx;
830     /*defaults coarse to 1 */
831     jac->gridsweeps[2] = 1;
832   }
833   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_coarsen", "Use a nodal based coarsening 1-6", "HYPRE_BoomerAMGSetNodal", jac->nodal_coarsening, &jac->nodal_coarsening, &flg));
834   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNodal, jac->hsolver, jac->nodal_coarsening);
835   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_coarsen_diag", "Diagonal in strength matrix for nodal based coarsening 0-2", "HYPRE_BoomerAMGSetNodalDiag", jac->nodal_coarsening_diag, &jac->nodal_coarsening_diag, &flg));
836   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNodalDiag, jac->hsolver, jac->nodal_coarsening_diag);
837   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_vec_interp_variant", "Variant of algorithm 1-3", "HYPRE_BoomerAMGSetInterpVecVariant", jac->vec_interp_variant, &jac->vec_interp_variant, &flg));
838   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpVecVariant, jac->hsolver, jac->vec_interp_variant);
839   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_vec_interp_qmax", "Max elements per row for each Q", "HYPRE_BoomerAMGSetInterpVecQMax", jac->vec_interp_qmax, &jac->vec_interp_qmax, &flg));
840   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpVecQMax, jac->hsolver, jac->vec_interp_qmax);
841   PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_vec_interp_smooth", "Whether to smooth the interpolation vectors", "HYPRE_BoomerAMGSetSmoothInterpVectors", jac->vec_interp_smooth, &jac->vec_interp_smooth, &flg));
842   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetSmoothInterpVectors, jac->hsolver, jac->vec_interp_smooth);
843   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_interp_refine", "Preprocess the interpolation matrix through iterative weight refinement", "HYPRE_BoomerAMGSetInterpRefine", jac->interp_refine, &jac->interp_refine, &flg));
844   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpRefine, jac->hsolver, jac->interp_refine);
845   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_down", "Number of sweeps for the down cycles", "None", jac->gridsweeps[0], &indx, &flg));
846   if (flg) {
847     PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 1);
848     jac->gridsweeps[0] = indx;
849   }
850   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_up", "Number of sweeps for the up cycles", "None", jac->gridsweeps[1], &indx, &flg));
851   if (flg) {
852     PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 2);
853     jac->gridsweeps[1] = indx;
854   }
855   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_coarse", "Number of sweeps for the coarse level", "None", jac->gridsweeps[2], &indx, &flg));
856   if (flg) {
857     PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 3);
858     jac->gridsweeps[2] = indx;
859   }
860 
861   /* Smooth type */
862   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_smooth_type", "Enable more complex smoothers", "None", HYPREBoomerAMGSmoothType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGSmoothType), HYPREBoomerAMGSmoothType[0], &indx, &flg));
863   if (flg) {
864     jac->smoothtype = indx;
865     PetscCallExternal(HYPRE_BoomerAMGSetSmoothType, jac->hsolver, indx + 6);
866     jac->smoothnumlevels = 25;
867     PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, 25);
868   }
869 
870   /* Number of smoothing levels */
871   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_smooth_num_levels", "Number of levels on which more complex smoothers are used", "None", 25, &indx, &flg));
872   if (flg && (jac->smoothtype != -1)) {
873     jac->smoothnumlevels = indx;
874     PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, indx);
875   }
876 
877   /* Number of levels for ILU(k) for Euclid */
878   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_eu_level", "Number of levels for ILU(k) in Euclid smoother", "None", 0, &indx, &flg));
879   if (flg && (jac->smoothtype == 3)) {
880     jac->eu_level = indx;
881     PetscCallExternal(HYPRE_BoomerAMGSetEuLevel, jac->hsolver, indx);
882   }
883 
884   /* Filter for ILU(k) for Euclid */
885   double droptolerance;
886   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_eu_droptolerance", "Drop tolerance for ILU(k) in Euclid smoother", "None", 0, &droptolerance, &flg));
887   if (flg && (jac->smoothtype == 3)) {
888     jac->eu_droptolerance = droptolerance;
889     PetscCallExternal(HYPRE_BoomerAMGSetEuLevel, jac->hsolver, droptolerance);
890   }
891 
892   /* Use Block Jacobi ILUT for Euclid */
893   PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_eu_bj", "Use Block Jacobi for ILU in Euclid smoother?", "None", PETSC_FALSE, &tmp_truth, &flg));
894   if (flg && (jac->smoothtype == 3)) {
895     jac->eu_bj = tmp_truth;
896     PetscCallExternal(HYPRE_BoomerAMGSetEuBJ, jac->hsolver, jac->eu_bj);
897   }
898 
899   /* Relax type */
900   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_all", "Relax type for the up and down cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg));
901   if (flg) {
902     jac->relaxtype[0] = jac->relaxtype[1] = indx;
903     PetscCallExternal(HYPRE_BoomerAMGSetRelaxType, jac->hsolver, indx);
904     /* by default, coarse type set to 9 */
905     jac->relaxtype[2] = 9;
906     PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, 9, 3);
907   }
908   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_down", "Relax type for the down cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg));
909   if (flg) {
910     jac->relaxtype[0] = indx;
911     PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 1);
912   }
913   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_up", "Relax type for the up cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg));
914   if (flg) {
915     jac->relaxtype[1] = indx;
916     PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 2);
917   }
918   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_coarse", "Relax type on coarse grid", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[9], &indx, &flg));
919   if (flg) {
920     jac->relaxtype[2] = indx;
921     PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 3);
922   }
923 
924   /* Relaxation Weight */
925   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_relax_weight_all", "Relaxation weight for all levels (0 = hypre estimates, -k = determined with k CG steps)", "None", jac->relaxweight, &tmpdbl, &flg));
926   if (flg) {
927     PetscCallExternal(HYPRE_BoomerAMGSetRelaxWt, jac->hsolver, tmpdbl);
928     jac->relaxweight = tmpdbl;
929   }
930 
931   n         = 2;
932   twodbl[0] = twodbl[1] = 1.0;
933   PetscCall(PetscOptionsRealArray("-pc_hypre_boomeramg_relax_weight_level", "Set the relaxation weight for a particular level (weight,level)", "None", twodbl, &n, &flg));
934   if (flg) {
935     PetscCheck(n == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Relax weight level: you must provide 2 values separated by a comma (and no space), you provided %" PetscInt_FMT, n);
936     indx = (int)PetscAbsReal(twodbl[1]);
937     PetscCallExternal(HYPRE_BoomerAMGSetLevelRelaxWt, jac->hsolver, twodbl[0], indx);
938   }
939 
940   /* Outer relaxation Weight */
941   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_outer_relax_weight_all", "Outer relaxation weight for all levels (-k = determined with k CG steps)", "None", jac->outerrelaxweight, &tmpdbl, &flg));
942   if (flg) {
943     PetscCallExternal(HYPRE_BoomerAMGSetOuterWt, jac->hsolver, tmpdbl);
944     jac->outerrelaxweight = tmpdbl;
945   }
946 
947   n         = 2;
948   twodbl[0] = twodbl[1] = 1.0;
949   PetscCall(PetscOptionsRealArray("-pc_hypre_boomeramg_outer_relax_weight_level", "Set the outer relaxation weight for a particular level (weight,level)", "None", twodbl, &n, &flg));
950   if (flg) {
951     PetscCheck(n == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Relax weight outer level: You must provide 2 values separated by a comma (and no space), you provided %" PetscInt_FMT, n);
952     indx = (int)PetscAbsReal(twodbl[1]);
953     PetscCallExternal(HYPRE_BoomerAMGSetLevelOuterWt, jac->hsolver, twodbl[0], indx);
954   }
955 
956   /* the Relax Order */
957   PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_no_CF", "Do not use CF-relaxation", "None", PETSC_FALSE, &tmp_truth, &flg));
958 
959   if (flg && tmp_truth) {
960     jac->relaxorder = 0;
961     PetscCallExternal(HYPRE_BoomerAMGSetRelaxOrder, jac->hsolver, jac->relaxorder);
962   }
963   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_measure_type", "Measure type", "None", HYPREBoomerAMGMeasureType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGMeasureType), HYPREBoomerAMGMeasureType[0], &indx, &flg));
964   if (flg) {
965     jac->measuretype = indx;
966     PetscCallExternal(HYPRE_BoomerAMGSetMeasureType, jac->hsolver, jac->measuretype);
967   }
968   /* update list length 3/07 */
969   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_coarsen_type", "Coarsen type", "None", HYPREBoomerAMGCoarsenType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGCoarsenType), HYPREBoomerAMGCoarsenType[6], &indx, &flg));
970   if (flg) {
971     jac->coarsentype = indx;
972     PetscCallExternal(HYPRE_BoomerAMGSetCoarsenType, jac->hsolver, jac->coarsentype);
973   }
974 
975   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_max_coarse_size", "Maximum size of coarsest grid", "None", jac->maxc, &jac->maxc, &flg));
976   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMaxCoarseSize, jac->hsolver, jac->maxc);
977   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_min_coarse_size", "Minimum size of coarsest grid", "None", jac->minc, &jac->minc, &flg));
978   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMinCoarseSize, jac->hsolver, jac->minc);
979 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
980   // global parameter but is closely associated with BoomerAMG
981   PetscCall(PetscOptionsEList("-pc_mg_galerkin_mat_product_algorithm", "Type of SpGEMM to use in hypre (only for now)", "PCMGGalerkinSetMatProductAlgorithm", PCHYPRESpgemmTypes, PETSC_STATIC_ARRAY_LENGTH(PCHYPRESpgemmTypes), PCHYPRESpgemmTypes[0], &indx, &flg));
982   #if defined(PETSC_HAVE_HYPRE_DEVICE)
983   if (!flg) indx = 0;
984   PetscCall(PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(pc, PCHYPRESpgemmTypes[indx]));
985   #else
986   PetscCall(PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(pc, "hypre"));
987   #endif
988 #endif
989   /* AIR */
990 #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0)
991   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_restriction_type", "Type of AIR method (distance 1 or 2, 0 means no AIR)", "None", jac->Rtype, &jac->Rtype, NULL));
992   PetscCallExternal(HYPRE_BoomerAMGSetRestriction, jac->hsolver, jac->Rtype);
993   if (jac->Rtype) {
994     HYPRE_Int **grid_relax_points = hypre_TAlloc(HYPRE_Int *, 4, HYPRE_MEMORY_HOST);
995     char       *prerelax[256];
996     char       *postrelax[256];
997     char        stringF[2] = "F", stringC[2] = "C", stringA[2] = "A";
998     PetscInt    ns_down = 256, ns_up = 256;
999     PetscBool   matchF, matchC, matchA;
1000 
1001     jac->interptype = 100; /* no way we can pass this with strings... Set it as default as in MFEM, then users can still customize it back to a different one */
1002 
1003     PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_strongthresholdR", "Threshold for R", "None", jac->Rstrongthreshold, &jac->Rstrongthreshold, NULL));
1004     PetscCallExternal(HYPRE_BoomerAMGSetStrongThresholdR, jac->hsolver, jac->Rstrongthreshold);
1005 
1006     PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_filterthresholdR", "Filter threshold for R", "None", jac->Rfilterthreshold, &jac->Rfilterthreshold, NULL));
1007     PetscCallExternal(HYPRE_BoomerAMGSetFilterThresholdR, jac->hsolver, jac->Rfilterthreshold);
1008 
1009     PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_Adroptol", "Defines the drop tolerance for the A-matrices from the 2nd level of AMG", "None", jac->Adroptol, &jac->Adroptol, NULL));
1010     PetscCallExternal(HYPRE_BoomerAMGSetADropTol, jac->hsolver, jac->Adroptol);
1011 
1012     PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_Adroptype", "Drops the entries that are not on the diagonal and smaller than its row norm: type 1: 1-norm, 2: 2-norm, -1: infinity norm", "None", jac->Adroptype, &jac->Adroptype, NULL));
1013     PetscCallExternal(HYPRE_BoomerAMGSetADropType, jac->hsolver, jac->Adroptype);
1014     PetscCall(PetscOptionsStringArray("-pc_hypre_boomeramg_prerelax", "Defines prerelax scheme", "None", prerelax, &ns_down, NULL));
1015     PetscCall(PetscOptionsStringArray("-pc_hypre_boomeramg_postrelax", "Defines postrelax scheme", "None", postrelax, &ns_up, NULL));
1016     PetscCheck(ns_down == jac->gridsweeps[0], PetscObjectComm((PetscObject)jac), PETSC_ERR_ARG_SIZ, "The number of arguments passed to -pc_hypre_boomeramg_prerelax must match the number passed to -pc_hypre_bomeramg_grid_sweeps_down");
1017     PetscCheck(ns_up == jac->gridsweeps[1], PetscObjectComm((PetscObject)jac), PETSC_ERR_ARG_SIZ, "The number of arguments passed to -pc_hypre_boomeramg_postrelax must match the number passed to -pc_hypre_bomeramg_grid_sweeps_up");
1018 
1019     grid_relax_points[0]    = NULL;
1020     grid_relax_points[1]    = hypre_TAlloc(HYPRE_Int, ns_down, HYPRE_MEMORY_HOST);
1021     grid_relax_points[2]    = hypre_TAlloc(HYPRE_Int, ns_up, HYPRE_MEMORY_HOST);
1022     grid_relax_points[3]    = hypre_TAlloc(HYPRE_Int, jac->gridsweeps[2], HYPRE_MEMORY_HOST);
1023     grid_relax_points[3][0] = 0;
1024 
1025     // set down relax scheme
1026     for (PetscInt i = 0; i < ns_down; i++) {
1027       PetscCall(PetscStrcasecmp(prerelax[i], stringF, &matchF));
1028       PetscCall(PetscStrcasecmp(prerelax[i], stringC, &matchC));
1029       PetscCall(PetscStrcasecmp(prerelax[i], stringA, &matchA));
1030       PetscCheck(matchF || matchC || matchA, PetscObjectComm((PetscObject)jac), PETSC_ERR_ARG_WRONG, "Valid argument options for -pc_hypre_boomeramg_prerelax are C, F, and A");
1031       if (matchF) grid_relax_points[1][i] = -1;
1032       else if (matchC) grid_relax_points[1][i] = 1;
1033       else if (matchA) grid_relax_points[1][i] = 0;
1034     }
1035 
1036     // set up relax scheme
1037     for (PetscInt i = 0; i < ns_up; i++) {
1038       PetscCall(PetscStrcasecmp(postrelax[i], stringF, &matchF));
1039       PetscCall(PetscStrcasecmp(postrelax[i], stringC, &matchC));
1040       PetscCall(PetscStrcasecmp(postrelax[i], stringA, &matchA));
1041       PetscCheck(matchF || matchC || matchA, PetscObjectComm((PetscObject)jac), PETSC_ERR_ARG_WRONG, "Valid argument options for -pc_hypre_boomeramg_postrelax are C, F, and A");
1042       if (matchF) grid_relax_points[2][i] = -1;
1043       else if (matchC) grid_relax_points[2][i] = 1;
1044       else if (matchA) grid_relax_points[2][i] = 0;
1045     }
1046 
1047     // set coarse relax scheme
1048     for (PetscInt i = 0; i < jac->gridsweeps[2]; i++) grid_relax_points[3][i] = 0;
1049 
1050     // Pass relax schemes to hypre
1051     PetscCallExternal(HYPRE_BoomerAMGSetGridRelaxPoints, jac->hsolver, grid_relax_points);
1052 
1053     // cleanup memory
1054     for (PetscInt i = 0; i < ns_down; i++) PetscCall(PetscFree(prerelax[i]));
1055     for (PetscInt i = 0; i < ns_up; i++) PetscCall(PetscFree(postrelax[i]));
1056   }
1057 #endif
1058 
1059 #if PETSC_PKG_HYPRE_VERSION_LE(9, 9, 9)
1060   PetscCheck(!jac->Rtype || !jac->agg_nl, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "-pc_hypre_boomeramg_restriction_type (%" PetscInt_FMT ") and -pc_hypre_boomeramg_agg_nl (%" PetscInt_FMT ")", jac->Rtype, jac->agg_nl);
1061 #endif
1062 
1063   /* new 3/07 */
1064   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_interp_type", "Interpolation type", "None", HYPREBoomerAMGInterpType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGInterpType), HYPREBoomerAMGInterpType[0], &indx, &flg));
1065   if (flg || jac->Rtype) {
1066     if (flg) jac->interptype = indx;
1067     PetscCallExternal(HYPRE_BoomerAMGSetInterpType, jac->hsolver, jac->interptype);
1068   }
1069 
1070   PetscCall(PetscOptionsName("-pc_hypre_boomeramg_print_statistics", "Print statistics", "None", &flg));
1071   if (flg) {
1072     level = 3;
1073     PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_print_statistics", "Print statistics", "None", level, &level, NULL));
1074 
1075     jac->printstatistics = PETSC_TRUE;
1076     PetscCallExternal(HYPRE_BoomerAMGSetPrintLevel, jac->hsolver, level);
1077   }
1078 
1079   PetscCall(PetscOptionsName("-pc_hypre_boomeramg_print_debug", "Print debug information", "None", &flg));
1080   if (flg) {
1081     level = 3;
1082     PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_print_debug", "Print debug information", "None", level, &level, NULL));
1083 
1084     jac->printstatistics = PETSC_TRUE;
1085     PetscCallExternal(HYPRE_BoomerAMGSetDebugFlag, jac->hsolver, level);
1086   }
1087 
1088   PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_nodal_relaxation", "Nodal relaxation via Schwarz", "None", PETSC_FALSE, &tmp_truth, &flg));
1089   if (flg && tmp_truth) {
1090     PetscInt tmp_int;
1091     PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_relaxation", "Nodal relaxation via Schwarz", "None", jac->nodal_relax_levels, &tmp_int, &flg));
1092     if (flg) jac->nodal_relax_levels = tmp_int;
1093     PetscCallExternal(HYPRE_BoomerAMGSetSmoothType, jac->hsolver, 6);
1094     PetscCallExternal(HYPRE_BoomerAMGSetDomainType, jac->hsolver, 1);
1095     PetscCallExternal(HYPRE_BoomerAMGSetOverlap, jac->hsolver, 0);
1096     PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, jac->nodal_relax_levels);
1097   }
1098 
1099   PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_keeptranspose", "Avoid transpose matvecs in preconditioner application", "None", jac->keeptranspose, &jac->keeptranspose, NULL));
1100   PetscCallExternal(HYPRE_BoomerAMGSetKeepTranspose, jac->hsolver, jac->keeptranspose ? 1 : 0);
1101 
1102   /* options for ParaSails solvers */
1103   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_parasails_sym", "Symmetry of matrix and preconditioner", "None", symtlist, PETSC_STATIC_ARRAY_LENGTH(symtlist), symtlist[0], &indx, &flg));
1104   if (flg) {
1105     jac->symt = indx;
1106     PetscCallExternal(HYPRE_BoomerAMGSetSym, jac->hsolver, jac->symt);
1107   }
1108 
1109   PetscOptionsHeadEnd();
1110   PetscFunctionReturn(PETSC_SUCCESS);
1111 }
1112 
1113 static PetscErrorCode PCApplyRichardson_HYPRE_BoomerAMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
1114 {
1115   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1116   HYPRE_Int oits;
1117 
1118   PetscFunctionBegin;
1119   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
1120   PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, its * jac->maxiter);
1121   PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, rtol);
1122   jac->applyrichardson = PETSC_TRUE;
1123   PetscCall(PCApply_HYPRE(pc, b, y));
1124   jac->applyrichardson = PETSC_FALSE;
1125   PetscCallExternal(HYPRE_BoomerAMGGetNumIterations, jac->hsolver, &oits);
1126   *outits = oits;
1127   if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
1128   else *reason = PCRICHARDSON_CONVERGED_RTOL;
1129   PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol);
1130   PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter);
1131   PetscFunctionReturn(PETSC_SUCCESS);
1132 }
1133 
/*
  PCView_HYPRE_BoomerAMG - Print the BoomerAMG settings currently stored in the PC.

  Only ASCII viewers are handled; any other viewer type is silently ignored.
  The values printed are the PETSc-side copies of the options (set via
  PCSetFromOptions or the PCHYPRE API), not values queried back from hypre.
*/
static PetscErrorCode PCView_HYPRE_BoomerAMG(PC pc, PetscViewer viewer)
{
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;
  PetscBool iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE BoomerAMG preconditioning\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Cycle type %s\n", HYPREBoomerAMGCycleType[jac->cycletype]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Maximum number of levels %" PetscInt_FMT "\n", jac->maxlevels));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Maximum number of iterations PER hypre call %" PetscInt_FMT "\n", jac->maxiter));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Convergence tolerance PER hypre call %g\n", (double)jac->tol));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Threshold for strong coupling %g\n", (double)jac->strongthreshold));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Interpolation truncation factor %g\n", (double)jac->truncfactor));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Interpolation: max elements per row %" PetscInt_FMT "\n", jac->pmax));
    /* interp_refine == 0 means no weighted refinement, so nothing is printed */
    if (jac->interp_refine) PetscCall(PetscViewerASCIIPrintf(viewer, "    Interpolation: number of steps of weighted refinement %" PetscInt_FMT "\n", jac->interp_refine));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Number of levels of aggressive coarsening %" PetscInt_FMT "\n", jac->agg_nl));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Number of paths for aggressive coarsening %" PetscInt_FMT "\n", jac->agg_num_paths));

    PetscCall(PetscViewerASCIIPrintf(viewer, "    Maximum row sums %g\n", (double)jac->maxrowsum));

    PetscCall(PetscViewerASCIIPrintf(viewer, "    Sweeps down         %" PetscInt_FMT "\n", jac->gridsweeps[0]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Sweeps up           %" PetscInt_FMT "\n", jac->gridsweeps[1]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Sweeps on coarse    %" PetscInt_FMT "\n", jac->gridsweeps[2]));

    PetscCall(PetscViewerASCIIPrintf(viewer, "    Relax down          %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[0]]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Relax up            %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[1]]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Relax on coarse     %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[2]]));

    PetscCall(PetscViewerASCIIPrintf(viewer, "    Relax weight  (all)      %g\n", (double)jac->relaxweight));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Outer relax weight (all) %g\n", (double)jac->outerrelaxweight));

    PetscCall(PetscViewerASCIIPrintf(viewer, "    Maximum size of coarsest grid %" PetscInt_FMT "\n", jac->maxc));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Minimum size of coarsest grid %" PetscInt_FMT "\n", jac->minc));

    if (jac->relaxorder) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "    Using CF-relaxation\n"));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "    Not using CF-relaxation\n"));
    }
    /* smoothtype == -1 indicates only plain relaxation; any other value selects a complex smoother */
    if (jac->smoothtype != -1) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "    Smooth type          %s\n", HYPREBoomerAMGSmoothType[jac->smoothtype]));
      PetscCall(PetscViewerASCIIPrintf(viewer, "    Smooth num levels    %" PetscInt_FMT "\n", jac->smoothnumlevels));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "    Not using more complex smoothers.\n"));
    }
    /* smoothtype == 3 (Euclid) carries extra ILU parameters worth reporting */
    if (jac->smoothtype == 3) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "    Euclid ILU(k) levels %" PetscInt_FMT "\n", jac->eu_level));
      PetscCall(PetscViewerASCIIPrintf(viewer, "    Euclid ILU(k) drop tolerance %g\n", (double)jac->eu_droptolerance));
      PetscCall(PetscViewerASCIIPrintf(viewer, "    Euclid ILU use Block-Jacobi? %" PetscInt_FMT "\n", jac->eu_bj));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Measure type        %s\n", HYPREBoomerAMGMeasureType[jac->measuretype]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Coarsen type        %s\n", HYPREBoomerAMGCoarsenType[jac->coarsentype]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Interpolation type  %s\n", jac->interptype != 100 ? HYPREBoomerAMGInterpType[jac->interptype] : "1pt"));
    if (jac->nodal_coarsening) PetscCall(PetscViewerASCIIPrintf(viewer, "    Using nodal coarsening with HYPRE_BOOMERAMGSetNodal() %" PetscInt_FMT "\n", jac->nodal_coarsening));
    if (jac->vec_interp_variant) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "    HYPRE_BoomerAMGSetInterpVecVariant() %" PetscInt_FMT "\n", jac->vec_interp_variant));
      PetscCall(PetscViewerASCIIPrintf(viewer, "    HYPRE_BoomerAMGSetInterpVecQMax() %" PetscInt_FMT "\n", jac->vec_interp_qmax));
      PetscCall(PetscViewerASCIIPrintf(viewer, "    HYPRE_BoomerAMGSetSmoothInterpVectors() %d\n", jac->vec_interp_smooth));
    }
    if (jac->nodal_relax) PetscCall(PetscViewerASCIIPrintf(viewer, "    Using nodal relaxation via Schwarz smoothing on levels %" PetscInt_FMT "\n", jac->nodal_relax_levels));
    /* the SpGEMM implementation is only selectable with hypre >= 2.23; older versions always use hypre's own */
#if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
    PetscCall(PetscViewerASCIIPrintf(viewer, "    SpGEMM type         %s\n", jac->spgemm_type));
#else
    PetscCall(PetscViewerASCIIPrintf(viewer, "    SpGEMM type         %s\n", "hypre"));
#endif
    /* AIR (approximate ideal restriction) parameters, only shown when enabled */
    if (jac->Rtype) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "    Using approximate ideal restriction type %" PetscInt_FMT "\n", jac->Rtype));
      PetscCall(PetscViewerASCIIPrintf(viewer, "      Threshold for R %g\n", (double)jac->Rstrongthreshold));
      PetscCall(PetscViewerASCIIPrintf(viewer, "      Filter for R %g\n", (double)jac->Rfilterthreshold));
      PetscCall(PetscViewerASCIIPrintf(viewer, "      A drop tolerance %g\n", (double)jac->Adroptol));
      PetscCall(PetscViewerASCIIPrintf(viewer, "      A drop type %" PetscInt_FMT "\n", jac->Adroptype));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1212 
1213 static PetscErrorCode PCSetFromOptions_HYPRE_ParaSails(PC pc, PetscOptionItems *PetscOptionsObject)
1214 {
1215   PC_HYPRE   *jac = (PC_HYPRE *)pc->data;
1216   PetscInt    indx;
1217   PetscBool   flag;
1218   const char *symtlist[] = {"nonsymmetric", "SPD", "nonsymmetric,SPD"};
1219 
1220   PetscFunctionBegin;
1221   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ParaSails Options");
1222   PetscCall(PetscOptionsInt("-pc_hypre_parasails_nlevels", "Number of number of levels", "None", jac->nlevels, &jac->nlevels, 0));
1223   PetscCall(PetscOptionsReal("-pc_hypre_parasails_thresh", "Threshold", "None", jac->threshold, &jac->threshold, &flag));
1224   if (flag) PetscCallExternal(HYPRE_ParaSailsSetParams, jac->hsolver, jac->threshold, jac->nlevels);
1225 
1226   PetscCall(PetscOptionsReal("-pc_hypre_parasails_filter", "filter", "None", jac->filter, &jac->filter, &flag));
1227   if (flag) PetscCallExternal(HYPRE_ParaSailsSetFilter, jac->hsolver, jac->filter);
1228 
1229   PetscCall(PetscOptionsReal("-pc_hypre_parasails_loadbal", "Load balance", "None", jac->loadbal, &jac->loadbal, &flag));
1230   if (flag) PetscCallExternal(HYPRE_ParaSailsSetLoadbal, jac->hsolver, jac->loadbal);
1231 
1232   PetscCall(PetscOptionsBool("-pc_hypre_parasails_logging", "Print info to screen", "None", (PetscBool)jac->logging, (PetscBool *)&jac->logging, &flag));
1233   if (flag) PetscCallExternal(HYPRE_ParaSailsSetLogging, jac->hsolver, jac->logging);
1234 
1235   PetscCall(PetscOptionsBool("-pc_hypre_parasails_reuse", "Reuse nonzero pattern in preconditioner", "None", (PetscBool)jac->ruse, (PetscBool *)&jac->ruse, &flag));
1236   if (flag) PetscCallExternal(HYPRE_ParaSailsSetReuse, jac->hsolver, jac->ruse);
1237 
1238   PetscCall(PetscOptionsEList("-pc_hypre_parasails_sym", "Symmetry of matrix and preconditioner", "None", symtlist, PETSC_STATIC_ARRAY_LENGTH(symtlist), symtlist[0], &indx, &flag));
1239   if (flag) {
1240     jac->symt = indx;
1241     PetscCallExternal(HYPRE_ParaSailsSetSym, jac->hsolver, jac->symt);
1242   }
1243 
1244   PetscOptionsHeadEnd();
1245   PetscFunctionReturn(PETSC_SUCCESS);
1246 }
1247 
1248 static PetscErrorCode PCView_HYPRE_ParaSails(PC pc, PetscViewer viewer)
1249 {
1250   PC_HYPRE   *jac = (PC_HYPRE *)pc->data;
1251   PetscBool   iascii;
1252   const char *symt = 0;
1253 
1254   PetscFunctionBegin;
1255   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
1256   if (iascii) {
1257     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE ParaSails preconditioning\n"));
1258     PetscCall(PetscViewerASCIIPrintf(viewer, "    nlevels %" PetscInt_FMT "\n", jac->nlevels));
1259     PetscCall(PetscViewerASCIIPrintf(viewer, "    threshold %g\n", (double)jac->threshold));
1260     PetscCall(PetscViewerASCIIPrintf(viewer, "    filter %g\n", (double)jac->filter));
1261     PetscCall(PetscViewerASCIIPrintf(viewer, "    load balance %g\n", (double)jac->loadbal));
1262     PetscCall(PetscViewerASCIIPrintf(viewer, "    reuse nonzero structure %s\n", PetscBools[jac->ruse]));
1263     PetscCall(PetscViewerASCIIPrintf(viewer, "    print info to screen %s\n", PetscBools[jac->logging]));
1264     if (!jac->symt) symt = "nonsymmetric matrix and preconditioner";
1265     else if (jac->symt == 1) symt = "SPD matrix and preconditioner";
1266     else if (jac->symt == 2) symt = "nonsymmetric matrix but SPD preconditioner";
1267     else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Unknown HYPRE ParaSails symmetric option %" PetscInt_FMT, jac->symt);
1268     PetscCall(PetscViewerASCIIPrintf(viewer, "    %s\n", symt));
1269   }
1270   PetscFunctionReturn(PETSC_SUCCESS);
1271 }
1272 
/*
  PCSetFromOptions_HYPRE_AMS - Read AMS options from the PETSc options database;
  each value the user explicitly provided is pushed to the hypre AMS solver.
*/
static PetscErrorCode PCSetFromOptions_HYPRE_AMS(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;
  PetscInt  n;
  PetscBool flag, flag2, flag3, flag4;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE AMS Options");
  PetscCall(PetscOptionsInt("-pc_hypre_ams_print_level", "Debugging output level for AMS", "None", jac->as_print, &jac->as_print, &flag));
  if (flag) PetscCallExternal(HYPRE_AMSSetPrintLevel, jac->hsolver, jac->as_print);
  PetscCall(PetscOptionsInt("-pc_hypre_ams_max_iter", "Maximum number of AMS multigrid iterations within PCApply", "None", jac->as_max_iter, &jac->as_max_iter, &flag));
  if (flag) PetscCallExternal(HYPRE_AMSSetMaxIter, jac->hsolver, jac->as_max_iter);
  PetscCall(PetscOptionsInt("-pc_hypre_ams_cycle_type", "Cycle type for AMS multigrid", "None", jac->ams_cycle_type, &jac->ams_cycle_type, &flag));
  if (flag) PetscCallExternal(HYPRE_AMSSetCycleType, jac->hsolver, jac->ams_cycle_type);
  PetscCall(PetscOptionsReal("-pc_hypre_ams_tol", "Error tolerance for AMS multigrid", "None", jac->as_tol, &jac->as_tol, &flag));
  if (flag) PetscCallExternal(HYPRE_AMSSetTol, jac->hsolver, jac->as_tol);
  /* hypre sets all four smoothing parameters with one call, so push them together if any one of them changed */
  PetscCall(PetscOptionsInt("-pc_hypre_ams_relax_type", "Relaxation type for AMS smoother", "None", jac->as_relax_type, &jac->as_relax_type, &flag));
  PetscCall(PetscOptionsInt("-pc_hypre_ams_relax_times", "Number of relaxation steps for AMS smoother", "None", jac->as_relax_times, &jac->as_relax_times, &flag2));
  PetscCall(PetscOptionsReal("-pc_hypre_ams_relax_weight", "Relaxation weight for AMS smoother", "None", jac->as_relax_weight, &jac->as_relax_weight, &flag3));
  PetscCall(PetscOptionsReal("-pc_hypre_ams_omega", "SSOR coefficient for AMS smoother", "None", jac->as_omega, &jac->as_omega, &flag4));
  if (flag || flag2 || flag3 || flag4) PetscCallExternal(HYPRE_AMSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
  /* AMG parameters for the vector Poisson (alpha) auxiliary solver; the options array supplies 5 values */
  PetscCall(PetscOptionsReal("-pc_hypre_ams_amg_alpha_theta", "Threshold for strong coupling of vector Poisson AMG solver", "None", jac->as_amg_alpha_theta, &jac->as_amg_alpha_theta, &flag));
  n = 5;
  PetscCall(PetscOptionsIntArray("-pc_hypre_ams_amg_alpha_options", "AMG options for vector Poisson", "None", jac->as_amg_alpha_opts, &n, &flag2));
  if (flag || flag2) {
    PetscCallExternal(HYPRE_AMSSetAlphaAMGOptions, jac->hsolver, jac->as_amg_alpha_opts[0], /* AMG coarsen type */
                      jac->as_amg_alpha_opts[1],                                            /* AMG agg_levels */
                      jac->as_amg_alpha_opts[2],                                            /* AMG relax_type */
                      jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3],                   /* AMG interp_type */
                      jac->as_amg_alpha_opts[4]);                                           /* AMG Pmax */
  }
  /* AMG parameters for the scalar Poisson (beta) auxiliary solver, same 5-value layout as alpha */
  PetscCall(PetscOptionsReal("-pc_hypre_ams_amg_beta_theta", "Threshold for strong coupling of scalar Poisson AMG solver", "None", jac->as_amg_beta_theta, &jac->as_amg_beta_theta, &flag));
  n = 5;
  PetscCall(PetscOptionsIntArray("-pc_hypre_ams_amg_beta_options", "AMG options for scalar Poisson solver", "None", jac->as_amg_beta_opts, &n, &flag2));
  if (flag || flag2) {
    PetscCallExternal(HYPRE_AMSSetBetaAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */
                      jac->as_amg_beta_opts[1],                                           /* AMG agg_levels */
                      jac->as_amg_beta_opts[2],                                           /* AMG relax_type */
                      jac->as_amg_beta_theta, jac->as_amg_beta_opts[3],                   /* AMG interp_type */
                      jac->as_amg_beta_opts[4]);                                          /* AMG Pmax */
  }
  PetscCall(PetscOptionsInt("-pc_hypre_ams_projection_frequency", "Frequency at which a projection onto the compatible subspace for problems with zero conductivity regions is performed", "None", jac->ams_proj_freq, &jac->ams_proj_freq, &flag));
  if (flag) { /* override HYPRE's default only if the options is used */
    PetscCallExternal(HYPRE_AMSSetProjectionFrequency, jac->hsolver, jac->ams_proj_freq);
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
1321 
/*
  PCView_HYPRE_AMS - Print the AMS settings stored in the PC to an ASCII viewer;
  non-ASCII viewers are ignored.
*/
static PetscErrorCode PCView_HYPRE_AMS(PC pc, PetscViewer viewer)
{
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;
  PetscBool iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE AMS preconditioning\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace iterations per application %" PetscInt_FMT "\n", jac->as_max_iter));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace cycle type %" PetscInt_FMT "\n", jac->ams_cycle_type));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace iteration tolerance %g\n", (double)jac->as_tol));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother type %" PetscInt_FMT "\n", jac->as_relax_type));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    number of smoothing steps %" PetscInt_FMT "\n", jac->as_relax_times));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother weight %g\n", (double)jac->as_relax_weight));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother omega %g\n", (double)jac->as_omega));
    /* the vector Poisson (alpha) solver is always present: either user-supplied or built internally */
    if (jac->alpha_Poisson) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "    vector Poisson solver (passed in by user)\n"));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "    vector Poisson solver (computed) \n"));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG coarsening type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[0]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[1]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG relaxation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[2]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG interpolation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[3]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[4]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG strength threshold %g\n", (double)jac->as_amg_alpha_theta));
    /* the scalar Poisson (beta) solver is skipped entirely when the beta matrix is identically zero */
    if (!jac->ams_beta_is_zero) {
      if (jac->beta_Poisson) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "    scalar Poisson solver (passed in by user)\n"));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "    scalar Poisson solver (computed) \n"));
      }
      PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG coarsening type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[0]));
      PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_beta_opts[1]));
      PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG relaxation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[2]));
      PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG interpolation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[3]));
      PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_beta_opts[4]));
      PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG strength threshold %g\n", (double)jac->as_amg_beta_theta));
      if (jac->ams_beta_is_zero_part) PetscCall(PetscViewerASCIIPrintf(viewer, "        compatible subspace projection frequency %" PetscInt_FMT " (-1 HYPRE uses default)\n", jac->ams_proj_freq));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "    scalar Poisson solver not used (zero-conductivity everywhere) \n"));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1368 
/*
  PCSetFromOptions_HYPRE_ADS - Read ADS options from the PETSc options database;
  each value the user explicitly provided is pushed to the hypre ADS solver.
*/
static PetscErrorCode PCSetFromOptions_HYPRE_ADS(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;
  PetscInt  n;
  PetscBool flag, flag2, flag3, flag4;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ADS Options");
  PetscCall(PetscOptionsInt("-pc_hypre_ads_print_level", "Debugging output level for ADS", "None", jac->as_print, &jac->as_print, &flag));
  if (flag) PetscCallExternal(HYPRE_ADSSetPrintLevel, jac->hsolver, jac->as_print);
  PetscCall(PetscOptionsInt("-pc_hypre_ads_max_iter", "Maximum number of ADS multigrid iterations within PCApply", "None", jac->as_max_iter, &jac->as_max_iter, &flag));
  if (flag) PetscCallExternal(HYPRE_ADSSetMaxIter, jac->hsolver, jac->as_max_iter);
  PetscCall(PetscOptionsInt("-pc_hypre_ads_cycle_type", "Cycle type for ADS multigrid", "None", jac->ads_cycle_type, &jac->ads_cycle_type, &flag));
  if (flag) PetscCallExternal(HYPRE_ADSSetCycleType, jac->hsolver, jac->ads_cycle_type);
  PetscCall(PetscOptionsReal("-pc_hypre_ads_tol", "Error tolerance for ADS multigrid", "None", jac->as_tol, &jac->as_tol, &flag));
  if (flag) PetscCallExternal(HYPRE_ADSSetTol, jac->hsolver, jac->as_tol);
  /* hypre sets all four smoothing parameters with one call, so push them together if any one of them changed */
  PetscCall(PetscOptionsInt("-pc_hypre_ads_relax_type", "Relaxation type for ADS smoother", "None", jac->as_relax_type, &jac->as_relax_type, &flag));
  PetscCall(PetscOptionsInt("-pc_hypre_ads_relax_times", "Number of relaxation steps for ADS smoother", "None", jac->as_relax_times, &jac->as_relax_times, &flag2));
  PetscCall(PetscOptionsReal("-pc_hypre_ads_relax_weight", "Relaxation weight for ADS smoother", "None", jac->as_relax_weight, &jac->as_relax_weight, &flag3));
  PetscCall(PetscOptionsReal("-pc_hypre_ads_omega", "SSOR coefficient for ADS smoother", "None", jac->as_omega, &jac->as_omega, &flag4));
  if (flag || flag2 || flag3 || flag4) PetscCallExternal(HYPRE_ADSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
  /* parameters for the AMS solver used inside ADS (shares the alpha options array with the AMS code path) */
  PetscCall(PetscOptionsReal("-pc_hypre_ads_ams_theta", "Threshold for strong coupling of AMS solver inside ADS", "None", jac->as_amg_alpha_theta, &jac->as_amg_alpha_theta, &flag));
  n = 5;
  PetscCall(PetscOptionsIntArray("-pc_hypre_ads_ams_options", "AMG options for AMS solver inside ADS", "None", jac->as_amg_alpha_opts, &n, &flag2));
  PetscCall(PetscOptionsInt("-pc_hypre_ads_ams_cycle_type", "Cycle type for AMS solver inside ADS", "None", jac->ams_cycle_type, &jac->ams_cycle_type, &flag3));
  if (flag || flag2 || flag3) {
    PetscCallExternal(HYPRE_ADSSetAMSOptions, jac->hsolver, jac->ams_cycle_type, /* AMS cycle type */
                      jac->as_amg_alpha_opts[0],                                 /* AMG coarsen type */
                      jac->as_amg_alpha_opts[1],                                 /* AMG agg_levels */
                      jac->as_amg_alpha_opts[2],                                 /* AMG relax_type */
                      jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3],        /* AMG interp_type */
                      jac->as_amg_alpha_opts[4]);                                /* AMG Pmax */
  }
  /* parameters for the vector AMG solver used inside ADS (shares the beta options array with the AMS code path) */
  PetscCall(PetscOptionsReal("-pc_hypre_ads_amg_theta", "Threshold for strong coupling of vector AMG solver inside ADS", "None", jac->as_amg_beta_theta, &jac->as_amg_beta_theta, &flag));
  n = 5;
  PetscCall(PetscOptionsIntArray("-pc_hypre_ads_amg_options", "AMG options for vector AMG solver inside ADS", "None", jac->as_amg_beta_opts, &n, &flag2));
  if (flag || flag2) {
    PetscCallExternal(HYPRE_ADSSetAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */
                      jac->as_amg_beta_opts[1],                                       /* AMG agg_levels */
                      jac->as_amg_beta_opts[2],                                       /* AMG relax_type */
                      jac->as_amg_beta_theta, jac->as_amg_beta_opts[3],               /* AMG interp_type */
                      jac->as_amg_beta_opts[4]);                                      /* AMG Pmax */
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
1415 
/*
  PCView_HYPRE_ADS - Print the ADS settings stored in the PC to an ASCII viewer;
  non-ASCII viewers are ignored.
*/
static PetscErrorCode PCView_HYPRE_ADS(PC pc, PetscViewer viewer)
{
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;
  PetscBool iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE ADS preconditioning\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace iterations per application %" PetscInt_FMT "\n", jac->as_max_iter));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace cycle type %" PetscInt_FMT "\n", jac->ads_cycle_type));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace iteration tolerance %g\n", (double)jac->as_tol));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother type %" PetscInt_FMT "\n", jac->as_relax_type));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    number of smoothing steps %" PetscInt_FMT "\n", jac->as_relax_times));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother weight %g\n", (double)jac->as_relax_weight));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother omega %g\n", (double)jac->as_omega));
    /* the inner AMS solver reuses the alpha AMG option array */
    PetscCall(PetscViewerASCIIPrintf(viewer, "    AMS solver using boomerAMG\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "        subspace cycle type %" PetscInt_FMT "\n", jac->ams_cycle_type));
    PetscCall(PetscViewerASCIIPrintf(viewer, "        coarsening type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[0]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "        levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[1]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "        relaxation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[2]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "        interpolation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[3]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "        max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[4]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "        strength threshold %g\n", (double)jac->as_amg_alpha_theta));
    /* the inner vector Poisson AMG solver reuses the beta AMG option array */
    PetscCall(PetscViewerASCIIPrintf(viewer, "    vector Poisson solver using boomerAMG\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "        coarsening type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[0]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "        levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_beta_opts[1]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "        relaxation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[2]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "        interpolation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[3]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "        max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_beta_opts[4]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "        strength threshold %g\n", (double)jac->as_amg_beta_theta));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1450 
1451 static PetscErrorCode PCHYPRESetDiscreteGradient_HYPRE(PC pc, Mat G)
1452 {
1453   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1454   PetscBool ishypre;
1455 
1456   PetscFunctionBegin;
1457   PetscCall(PetscObjectTypeCompare((PetscObject)G, MATHYPRE, &ishypre));
1458   if (ishypre) {
1459     PetscCall(PetscObjectReference((PetscObject)G));
1460     PetscCall(MatDestroy(&jac->G));
1461     jac->G = G;
1462   } else {
1463     PetscCall(MatDestroy(&jac->G));
1464     PetscCall(MatConvert(G, MATHYPRE, MAT_INITIAL_MATRIX, &jac->G));
1465   }
1466   PetscFunctionReturn(PETSC_SUCCESS);
1467 }
1468 
1469 /*@
1470   PCHYPRESetDiscreteGradient - Set discrete gradient matrix for `PCHYPRE` type of ams or ads
1471 
1472   Collective
1473 
1474   Input Parameters:
1475 + pc - the preconditioning context
1476 - G  - the discrete gradient
1477 
1478   Level: intermediate
1479 
1480   Notes:
1481   G should have as many rows as the number of edges and as many columns as the number of vertices in the mesh
1482 
1483   Each row of G has 2 nonzeros, with column indexes being the global indexes of edge's endpoints: matrix entries are +1 and -1 depending on edge orientation
1484 
1485   Developer Notes:
1486   This automatically converts the matrix to `MATHYPRE` if it is not already of that type
1487 
1488 .seealso: `PCHYPRE`, `PCHYPRESetDiscreteCurl()`
1489 @*/
1490 PetscErrorCode PCHYPRESetDiscreteGradient(PC pc, Mat G)
1491 {
1492   PetscFunctionBegin;
1493   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1494   PetscValidHeaderSpecific(G, MAT_CLASSID, 2);
1495   PetscCheckSameComm(pc, 1, G, 2);
1496   PetscTryMethod(pc, "PCHYPRESetDiscreteGradient_C", (PC, Mat), (pc, G));
1497   PetscFunctionReturn(PETSC_SUCCESS);
1498 }
1499 
1500 static PetscErrorCode PCHYPRESetDiscreteCurl_HYPRE(PC pc, Mat C)
1501 {
1502   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1503   PetscBool ishypre;
1504 
1505   PetscFunctionBegin;
1506   PetscCall(PetscObjectTypeCompare((PetscObject)C, MATHYPRE, &ishypre));
1507   if (ishypre) {
1508     PetscCall(PetscObjectReference((PetscObject)C));
1509     PetscCall(MatDestroy(&jac->C));
1510     jac->C = C;
1511   } else {
1512     PetscCall(MatDestroy(&jac->C));
1513     PetscCall(MatConvert(C, MATHYPRE, MAT_INITIAL_MATRIX, &jac->C));
1514   }
1515   PetscFunctionReturn(PETSC_SUCCESS);
1516 }
1517 
1518 /*@
1519   PCHYPRESetDiscreteCurl - Set discrete curl matrx for `PCHYPRE` type of ads
1520 
1521   Collective
1522 
1523   Input Parameters:
1524 + pc - the preconditioning context
1525 - C  - the discrete curl
1526 
1527   Level: intermediate
1528 
1529   Notes:
1530   C should have as many rows as the number of faces and as many columns as the number of edges in the mesh
1531 
1532   Each row of G has as many nonzeros as the number of edges of a face, with column indexes being the global indexes of the corresponding edge: matrix entries are +1 and -1 depending on edge orientation with respect to the face orientation
1533 
1534   Developer Notes:
1535   This automatically converts the matrix to `MATHYPRE` if it is not already of that type
1536 
1537   If this is only for  `PCHYPRE` type of ads it should be called `PCHYPREADSSetDiscreteCurl()`
1538 
1539 .seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`
1540 @*/
1541 PetscErrorCode PCHYPRESetDiscreteCurl(PC pc, Mat C)
1542 {
1543   PetscFunctionBegin;
1544   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1545   PetscValidHeaderSpecific(C, MAT_CLASSID, 2);
1546   PetscCheckSameComm(pc, 1, C, 2);
1547   PetscTryMethod(pc, "PCHYPRESetDiscreteCurl_C", (PC, Mat), (pc, C));
1548   PetscFunctionReturn(PETSC_SUCCESS);
1549 }
1550 
1551 static PetscErrorCode PCHYPRESetInterpolations_HYPRE(PC pc, PetscInt dim, Mat RT_PiFull, Mat RT_Pi[], Mat ND_PiFull, Mat ND_Pi[])
1552 {
1553   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1554   PetscBool ishypre;
1555   PetscInt  i;
1556   PetscFunctionBegin;
1557 
1558   PetscCall(MatDestroy(&jac->RT_PiFull));
1559   PetscCall(MatDestroy(&jac->ND_PiFull));
1560   for (i = 0; i < 3; ++i) {
1561     PetscCall(MatDestroy(&jac->RT_Pi[i]));
1562     PetscCall(MatDestroy(&jac->ND_Pi[i]));
1563   }
1564 
1565   jac->dim = dim;
1566   if (RT_PiFull) {
1567     PetscCall(PetscObjectTypeCompare((PetscObject)RT_PiFull, MATHYPRE, &ishypre));
1568     if (ishypre) {
1569       PetscCall(PetscObjectReference((PetscObject)RT_PiFull));
1570       jac->RT_PiFull = RT_PiFull;
1571     } else {
1572       PetscCall(MatConvert(RT_PiFull, MATHYPRE, MAT_INITIAL_MATRIX, &jac->RT_PiFull));
1573     }
1574   }
1575   if (RT_Pi) {
1576     for (i = 0; i < dim; ++i) {
1577       if (RT_Pi[i]) {
1578         PetscCall(PetscObjectTypeCompare((PetscObject)RT_Pi[i], MATHYPRE, &ishypre));
1579         if (ishypre) {
1580           PetscCall(PetscObjectReference((PetscObject)RT_Pi[i]));
1581           jac->RT_Pi[i] = RT_Pi[i];
1582         } else {
1583           PetscCall(MatConvert(RT_Pi[i], MATHYPRE, MAT_INITIAL_MATRIX, &jac->RT_Pi[i]));
1584         }
1585       }
1586     }
1587   }
1588   if (ND_PiFull) {
1589     PetscCall(PetscObjectTypeCompare((PetscObject)ND_PiFull, MATHYPRE, &ishypre));
1590     if (ishypre) {
1591       PetscCall(PetscObjectReference((PetscObject)ND_PiFull));
1592       jac->ND_PiFull = ND_PiFull;
1593     } else {
1594       PetscCall(MatConvert(ND_PiFull, MATHYPRE, MAT_INITIAL_MATRIX, &jac->ND_PiFull));
1595     }
1596   }
1597   if (ND_Pi) {
1598     for (i = 0; i < dim; ++i) {
1599       if (ND_Pi[i]) {
1600         PetscCall(PetscObjectTypeCompare((PetscObject)ND_Pi[i], MATHYPRE, &ishypre));
1601         if (ishypre) {
1602           PetscCall(PetscObjectReference((PetscObject)ND_Pi[i]));
1603           jac->ND_Pi[i] = ND_Pi[i];
1604         } else {
1605           PetscCall(MatConvert(ND_Pi[i], MATHYPRE, MAT_INITIAL_MATRIX, &jac->ND_Pi[i]));
1606         }
1607       }
1608     }
1609   }
1610 
1611   PetscFunctionReturn(PETSC_SUCCESS);
1612 }
1613 
1614 /*@
1615   PCHYPRESetInterpolations - Set interpolation matrices for `PCHYPRE` type of ams or ads
1616 
1617   Collective
1618 
1619   Input Parameters:
1620 + pc        - the preconditioning context
1621 . dim       - the dimension of the problem, only used in AMS
1622 . RT_PiFull - Raviart-Thomas interpolation matrix
1623 . RT_Pi     - x/y/z component of Raviart-Thomas interpolation matrix
1624 . ND_PiFull - Nedelec interpolation matrix
1625 - ND_Pi     - x/y/z component of Nedelec interpolation matrix
1626 
1627   Level: intermediate
1628 
1629   Notes:
1630   For AMS, only Nedelec interpolation matrices are needed, the Raviart-Thomas interpolation matrices can be set to NULL.
1631 
1632   For ADS, both type of interpolation matrices are needed.
1633 
1634   Developer Notes:
1635   This automatically converts the matrix to `MATHYPRE` if it is not already of that type
1636 
1637 .seealso: `PCHYPRE`
1638 @*/
1639 PetscErrorCode PCHYPRESetInterpolations(PC pc, PetscInt dim, Mat RT_PiFull, Mat RT_Pi[], Mat ND_PiFull, Mat ND_Pi[])
1640 {
1641   PetscInt i;
1642 
1643   PetscFunctionBegin;
1644   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1645   if (RT_PiFull) {
1646     PetscValidHeaderSpecific(RT_PiFull, MAT_CLASSID, 3);
1647     PetscCheckSameComm(pc, 1, RT_PiFull, 3);
1648   }
1649   if (RT_Pi) {
1650     PetscAssertPointer(RT_Pi, 4);
1651     for (i = 0; i < dim; ++i) {
1652       if (RT_Pi[i]) {
1653         PetscValidHeaderSpecific(RT_Pi[i], MAT_CLASSID, 4);
1654         PetscCheckSameComm(pc, 1, RT_Pi[i], 4);
1655       }
1656     }
1657   }
1658   if (ND_PiFull) {
1659     PetscValidHeaderSpecific(ND_PiFull, MAT_CLASSID, 5);
1660     PetscCheckSameComm(pc, 1, ND_PiFull, 5);
1661   }
1662   if (ND_Pi) {
1663     PetscAssertPointer(ND_Pi, 6);
1664     for (i = 0; i < dim; ++i) {
1665       if (ND_Pi[i]) {
1666         PetscValidHeaderSpecific(ND_Pi[i], MAT_CLASSID, 6);
1667         PetscCheckSameComm(pc, 1, ND_Pi[i], 6);
1668       }
1669     }
1670   }
1671   PetscTryMethod(pc, "PCHYPRESetInterpolations_C", (PC, PetscInt, Mat, Mat[], Mat, Mat[]), (pc, dim, RT_PiFull, RT_Pi, ND_PiFull, ND_Pi));
1672   PetscFunctionReturn(PETSC_SUCCESS);
1673 }
1674 
static PetscErrorCode PCHYPRESetPoissonMatrix_HYPRE(PC pc, Mat A, PetscBool isalpha)
{
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;
  PetscBool ishypre;

  PetscFunctionBegin;
  /* Store the Poisson matrix in the PC context as a MATHYPRE matrix.
     isalpha selects the alpha (vector Poisson) slot; otherwise the beta slot is used,
     and a NULL A marks the beta term as identically zero. */
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
  if (ishypre) {
    /* already MATHYPRE: just take a reference instead of converting */
    if (isalpha) {
      /* reference A before destroying the old matrix, in case A is the stored matrix itself */
      PetscCall(PetscObjectReference((PetscObject)A));
      PetscCall(MatDestroy(&jac->alpha_Poisson));
      jac->alpha_Poisson = A;
    } else {
      if (A) {
        PetscCall(PetscObjectReference((PetscObject)A));
      } else {
        /* NOTE(review): presumably PetscObjectTypeCompare() reports PETSC_FALSE for a NULL A,
           which would make this branch unreachable (NULL is handled in the else-branch below) — confirm */
        jac->ams_beta_is_zero = PETSC_TRUE;
      }
      PetscCall(MatDestroy(&jac->beta_Poisson));
      jac->beta_Poisson = A;
    }
  } else {
    /* not MATHYPRE: convert into a fresh MATHYPRE copy */
    if (isalpha) {
      PetscCall(MatDestroy(&jac->alpha_Poisson));
      PetscCall(MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &jac->alpha_Poisson));
    } else {
      if (A) {
        PetscCall(MatDestroy(&jac->beta_Poisson));
        PetscCall(MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &jac->beta_Poisson));
      } else {
        /* NULL beta matrix: drop any stored matrix and flag the beta term as zero */
        PetscCall(MatDestroy(&jac->beta_Poisson));
        jac->ams_beta_is_zero = PETSC_TRUE;
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1712 
1713 /*@
1714   PCHYPRESetAlphaPoissonMatrix - Set vector Poisson matrix for `PCHYPRE` of type ams
1715 
1716   Collective
1717 
1718   Input Parameters:
1719 + pc - the preconditioning context
1720 - A  - the matrix
1721 
1722   Level: intermediate
1723 
1724   Note:
1725   A should be obtained by discretizing the vector valued Poisson problem with linear finite elements
1726 
1727   Developer Notes:
1728   This automatically converts the matrix to `MATHYPRE` if it is not already of that type
1729 
1730   If this is only for  `PCHYPRE` type of ams it should be called `PCHYPREAMSSetAlphaPoissonMatrix()`
1731 
1732 .seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetBetaPoissonMatrix()`
1733 @*/
PetscErrorCode PCHYPRESetAlphaPoissonMatrix(PC pc, Mat A)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(A, MAT_CLASSID, 2); /* unlike the beta matrix, the alpha matrix may not be NULL */
  PetscCheckSameComm(pc, 1, A, 2);
  /* PETSC_TRUE selects the alpha (vector Poisson) slot in the shared implementation */
  PetscTryMethod(pc, "PCHYPRESetPoissonMatrix_C", (PC, Mat, PetscBool), (pc, A, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1743 
1744 /*@
1745   PCHYPRESetBetaPoissonMatrix - Set Poisson matrix for `PCHYPRE` of type ams
1746 
1747   Collective
1748 
1749   Input Parameters:
1750 + pc - the preconditioning context
1751 - A  - the matrix, or NULL to turn it off
1752 
1753   Level: intermediate
1754 
1755   Note:
1756   A should be obtained by discretizing the Poisson problem with linear finite elements.
1757 
1758   Developer Notes:
1759   This automatically converts the matrix to `MATHYPRE` if it is not already of that type
1760 
1761   If this is only for  `PCHYPRE` type of ams it should be called `PCHYPREAMSSetBetaPoissonMatrix()`
1762 
1763 .seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()`
1764 @*/
PetscErrorCode PCHYPRESetBetaPoissonMatrix(PC pc, Mat A)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* A may be NULL, meaning the beta term is identically zero */
  if (A) {
    PetscValidHeaderSpecific(A, MAT_CLASSID, 2);
    PetscCheckSameComm(pc, 1, A, 2);
  }
  /* PETSC_FALSE selects the beta (scalar Poisson) slot in the shared implementation */
  PetscTryMethod(pc, "PCHYPRESetPoissonMatrix_C", (PC, Mat, PetscBool), (pc, A, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1776 
1777 static PetscErrorCode PCHYPRESetEdgeConstantVectors_HYPRE(PC pc, Vec ozz, Vec zoz, Vec zzo)
1778 {
1779   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1780 
1781   PetscFunctionBegin;
1782   /* throw away any vector if already set */
1783   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[0]));
1784   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[1]));
1785   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[2]));
1786   PetscCall(VecHYPRE_IJVectorCreate(ozz->map, &jac->constants[0]));
1787   PetscCall(VecHYPRE_IJVectorCopy(ozz, jac->constants[0]));
1788   PetscCall(VecHYPRE_IJVectorCreate(zoz->map, &jac->constants[1]));
1789   PetscCall(VecHYPRE_IJVectorCopy(zoz, jac->constants[1]));
1790   jac->dim = 2;
1791   if (zzo) {
1792     PetscCall(VecHYPRE_IJVectorCreate(zzo->map, &jac->constants[2]));
1793     PetscCall(VecHYPRE_IJVectorCopy(zzo, jac->constants[2]));
1794     jac->dim++;
1795   }
1796   PetscFunctionReturn(PETSC_SUCCESS);
1797 }
1798 
1799 /*@
1800   PCHYPRESetEdgeConstantVectors - Set the representation of the constant vector fields in the edge element basis for `PCHYPRE` of type ams
1801 
1802   Collective
1803 
1804   Input Parameters:
1805 + pc  - the preconditioning context
1806 . ozz - vector representing (1,0,0) (or (1,0) in 2D)
1807 . zoz - vector representing (0,1,0) (or (0,1) in 2D)
1808 - zzo - vector representing (0,0,1) (use NULL in 2D)
1809 
1810   Level: intermediate
1811 
1812   Developer Notes:
1813   If this is only for  `PCHYPRE` type of ams it should be called `PCHYPREAMSSetEdgeConstantVectors()`
1814 
1815 .seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()`
1816 @*/
1817 PetscErrorCode PCHYPRESetEdgeConstantVectors(PC pc, Vec ozz, Vec zoz, Vec zzo)
1818 {
1819   PetscFunctionBegin;
1820   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1821   PetscValidHeaderSpecific(ozz, VEC_CLASSID, 2);
1822   PetscValidHeaderSpecific(zoz, VEC_CLASSID, 3);
1823   if (zzo) PetscValidHeaderSpecific(zzo, VEC_CLASSID, 4);
1824   PetscCheckSameComm(pc, 1, ozz, 2);
1825   PetscCheckSameComm(pc, 1, zoz, 3);
1826   if (zzo) PetscCheckSameComm(pc, 1, zzo, 4);
1827   PetscTryMethod(pc, "PCHYPRESetEdgeConstantVectors_C", (PC, Vec, Vec, Vec), (pc, ozz, zoz, zzo));
1828   PetscFunctionReturn(PETSC_SUCCESS);
1829 }
1830 
static PetscErrorCode PCHYPREAMSSetInteriorNodes_HYPRE(PC pc, Vec interior)
{
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;

  PetscFunctionBegin;
  /* replace any previously stored interior-node vector with a hypre IJ copy of the new one */
  PetscCall(VecHYPRE_IJVectorDestroy(&jac->interior));
  PetscCall(VecHYPRE_IJVectorCreate(interior->map, &jac->interior));
  PetscCall(VecHYPRE_IJVectorCopy(interior, jac->interior));
  /* flag that the beta coefficient vanishes on part of the domain */
  jac->ams_beta_is_zero_part = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1842 
1843 /*@
1844   PCHYPREAMSSetInteriorNodes - Set the list of interior nodes to a zero-conductivity region for `PCHYPRE` of type ams
1845 
1846   Collective
1847 
1848   Input Parameters:
1849 + pc       - the preconditioning context
1850 - interior - vector; a node is interior if its entry in the vector is 1.0
1851 
1852   Level: intermediate
1853 
1854   Note:
1855   This calls `HYPRE_AMSSetInteriorNodes()`
1856 
1857   Developer Notes:
1858   Unlike some of the other AMS-specific routines in this file, this function already follows the `PCHYPREAMS` naming convention
1859 
1860 .seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()`
1861 @*/
PetscErrorCode PCHYPREAMSSetInteriorNodes(PC pc, Vec interior)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(interior, VEC_CLASSID, 2);
  PetscCheckSameComm(pc, 1, interior, 2);
  /* dispatch to the PCHYPRE implementation if this PC provides it; silently ignore otherwise */
  PetscTryMethod(pc, "PCHYPREAMSSetInteriorNodes_C", (PC, Vec), (pc, interior));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1871 
1872 static PetscErrorCode PCSetCoordinates_HYPRE(PC pc, PetscInt dim, PetscInt nloc, PetscReal *coords)
1873 {
1874   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1875   Vec       tv;
1876   PetscInt  i;
1877 
1878   PetscFunctionBegin;
1879   /* throw away any coordinate vector if already set */
1880   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[0]));
1881   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[1]));
1882   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[2]));
1883   jac->dim = dim;
1884 
1885   /* compute IJ vector for coordinates */
1886   PetscCall(VecCreate(PetscObjectComm((PetscObject)pc), &tv));
1887   PetscCall(VecSetType(tv, VECSTANDARD));
1888   PetscCall(VecSetSizes(tv, nloc, PETSC_DECIDE));
1889   for (i = 0; i < dim; i++) {
1890     PetscScalar *array;
1891     PetscInt     j;
1892 
1893     PetscCall(VecHYPRE_IJVectorCreate(tv->map, &jac->coords[i]));
1894     PetscCall(VecGetArrayWrite(tv, &array));
1895     for (j = 0; j < nloc; j++) array[j] = coords[j * dim + i];
1896     PetscCall(VecRestoreArrayWrite(tv, &array));
1897     PetscCall(VecHYPRE_IJVectorCopy(tv, jac->coords[i]));
1898   }
1899   PetscCall(VecDestroy(&tv));
1900   PetscFunctionReturn(PETSC_SUCCESS);
1901 }
1902 
static PetscErrorCode PCHYPREGetType_HYPRE(PC pc, const char *name[])
{
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;

  PetscFunctionBegin;
  /* return a pointer to the stored type string (not a copy); NULL if the type has not been set yet */
  *name = jac->hypre_type;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1911 
1912 static PetscErrorCode PCHYPRESetType_HYPRE(PC pc, const char name[])
1913 {
1914   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1915   PetscBool flag;
1916 
1917   PetscFunctionBegin;
1918   if (jac->hypre_type) {
1919     PetscCall(PetscStrcmp(jac->hypre_type, name, &flag));
1920     PetscCheck(flag, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Cannot reset the HYPRE preconditioner type once it has been set");
1921     PetscFunctionReturn(PETSC_SUCCESS);
1922   } else {
1923     PetscCall(PetscStrallocpy(name, &jac->hypre_type));
1924   }
1925 
1926   jac->maxiter         = PETSC_DEFAULT;
1927   jac->tol             = PETSC_DEFAULT;
1928   jac->printstatistics = PetscLogPrintInfo;
1929 
1930   PetscCall(PetscStrcmp("pilut", jac->hypre_type, &flag));
1931   if (flag) {
1932     PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre));
1933     PetscCallExternal(HYPRE_ParCSRPilutCreate, jac->comm_hypre, &jac->hsolver);
1934     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_Pilut;
1935     pc->ops->view           = PCView_HYPRE_Pilut;
1936     jac->destroy            = HYPRE_ParCSRPilutDestroy;
1937     jac->setup              = HYPRE_ParCSRPilutSetup;
1938     jac->solve              = HYPRE_ParCSRPilutSolve;
1939     jac->factorrowsize      = PETSC_DEFAULT;
1940     PetscFunctionReturn(PETSC_SUCCESS);
1941   }
1942   PetscCall(PetscStrcmp("euclid", jac->hypre_type, &flag));
1943   if (flag) {
1944 #if defined(PETSC_USE_64BIT_INDICES)
1945     SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Hypre Euclid does not support 64-bit indices");
1946 #endif
1947     PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre));
1948     PetscCallExternal(HYPRE_EuclidCreate, jac->comm_hypre, &jac->hsolver);
1949     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_Euclid;
1950     pc->ops->view           = PCView_HYPRE_Euclid;
1951     jac->destroy            = HYPRE_EuclidDestroy;
1952     jac->setup              = HYPRE_EuclidSetup;
1953     jac->solve              = HYPRE_EuclidSolve;
1954     jac->factorrowsize      = PETSC_DEFAULT;
1955     jac->eu_level           = PETSC_DEFAULT; /* default */
1956     PetscFunctionReturn(PETSC_SUCCESS);
1957   }
1958   PetscCall(PetscStrcmp("parasails", jac->hypre_type, &flag));
1959   if (flag) {
1960     PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre));
1961     PetscCallExternal(HYPRE_ParaSailsCreate, jac->comm_hypre, &jac->hsolver);
1962     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ParaSails;
1963     pc->ops->view           = PCView_HYPRE_ParaSails;
1964     jac->destroy            = HYPRE_ParaSailsDestroy;
1965     jac->setup              = HYPRE_ParaSailsSetup;
1966     jac->solve              = HYPRE_ParaSailsSolve;
1967     /* initialize */
1968     jac->nlevels   = 1;
1969     jac->threshold = .1;
1970     jac->filter    = .1;
1971     jac->loadbal   = 0;
1972     if (PetscLogPrintInfo) jac->logging = (int)PETSC_TRUE;
1973     else jac->logging = (int)PETSC_FALSE;
1974 
1975     jac->ruse = (int)PETSC_FALSE;
1976     jac->symt = 0;
1977     PetscCallExternal(HYPRE_ParaSailsSetParams, jac->hsolver, jac->threshold, jac->nlevels);
1978     PetscCallExternal(HYPRE_ParaSailsSetFilter, jac->hsolver, jac->filter);
1979     PetscCallExternal(HYPRE_ParaSailsSetLoadbal, jac->hsolver, jac->loadbal);
1980     PetscCallExternal(HYPRE_ParaSailsSetLogging, jac->hsolver, jac->logging);
1981     PetscCallExternal(HYPRE_ParaSailsSetReuse, jac->hsolver, jac->ruse);
1982     PetscCallExternal(HYPRE_ParaSailsSetSym, jac->hsolver, jac->symt);
1983     PetscFunctionReturn(PETSC_SUCCESS);
1984   }
1985   PetscCall(PetscStrcmp("boomeramg", jac->hypre_type, &flag));
1986   if (flag) {
1987     PetscCallExternal(HYPRE_BoomerAMGCreate, &jac->hsolver);
1988     pc->ops->setfromoptions  = PCSetFromOptions_HYPRE_BoomerAMG;
1989     pc->ops->view            = PCView_HYPRE_BoomerAMG;
1990     pc->ops->applytranspose  = PCApplyTranspose_HYPRE_BoomerAMG;
1991     pc->ops->applyrichardson = PCApplyRichardson_HYPRE_BoomerAMG;
1992     pc->ops->matapply        = PCMatApply_HYPRE_BoomerAMG;
1993     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", PCGetInterpolations_BoomerAMG));
1994     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", PCGetCoarseOperators_BoomerAMG));
1995     jac->destroy         = HYPRE_BoomerAMGDestroy;
1996     jac->setup           = HYPRE_BoomerAMGSetup;
1997     jac->solve           = HYPRE_BoomerAMGSolve;
1998     jac->applyrichardson = PETSC_FALSE;
1999     /* these defaults match the hypre defaults */
2000     jac->cycletype       = 1;
2001     jac->maxlevels       = 25;
2002     jac->maxiter         = 1;
2003     jac->tol             = 0.0; /* tolerance of zero indicates use as preconditioner (suppresses convergence errors) */
2004     jac->truncfactor     = 0.0;
2005     jac->strongthreshold = .25;
2006     jac->maxrowsum       = .9;
2007     jac->coarsentype     = 6;
2008     jac->measuretype     = 0;
2009     jac->gridsweeps[0] = jac->gridsweeps[1] = jac->gridsweeps[2] = 1;
2010     jac->smoothtype                                              = -1; /* Not set by default */
2011     jac->smoothnumlevels                                         = 25;
2012     jac->eu_level                                                = 0;
2013     jac->eu_droptolerance                                        = 0;
2014     jac->eu_bj                                                   = 0;
2015     jac->relaxtype[0] = jac->relaxtype[1] = 6; /* Defaults to SYMMETRIC since in PETSc we are using a PC - most likely with CG */
2016     jac->relaxtype[2]                     = 9; /*G.E. */
2017     jac->relaxweight                      = 1.0;
2018     jac->outerrelaxweight                 = 1.0;
2019     jac->relaxorder                       = 1;
2020     jac->interptype                       = 0;
2021     jac->Rtype                            = 0;
2022     jac->Rstrongthreshold                 = 0.25;
2023     jac->Rfilterthreshold                 = 0.0;
2024     jac->Adroptype                        = -1;
2025     jac->Adroptol                         = 0.0;
2026     jac->agg_nl                           = 0;
2027     jac->agg_interptype                   = 4;
2028     jac->pmax                             = 0;
2029     jac->truncfactor                      = 0.0;
2030     jac->agg_num_paths                    = 1;
2031     jac->maxc                             = 9;
2032     jac->minc                             = 1;
2033     jac->nodal_coarsening                 = 0;
2034     jac->nodal_coarsening_diag            = 0;
2035     jac->vec_interp_variant               = 0;
2036     jac->vec_interp_qmax                  = 0;
2037     jac->vec_interp_smooth                = PETSC_FALSE;
2038     jac->interp_refine                    = 0;
2039     jac->nodal_relax                      = PETSC_FALSE;
2040     jac->nodal_relax_levels               = 1;
2041     jac->rap2                             = 0;
2042 
2043     /* GPU defaults
2044          from https://hypre.readthedocs.io/en/latest/solvers-boomeramg.html#gpu-supported-options
2045          and /src/parcsr_ls/par_amg.c */
2046 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2047     jac->keeptranspose  = PETSC_TRUE;
2048     jac->mod_rap2       = 1;
2049     jac->coarsentype    = 8;
2050     jac->relaxorder     = 0;
2051     jac->interptype     = 6;
2052     jac->relaxtype[0]   = 18;
2053     jac->relaxtype[1]   = 18;
2054     jac->agg_interptype = 7;
2055 #else
2056     jac->keeptranspose = PETSC_FALSE;
2057     jac->mod_rap2      = 0;
2058 #endif
2059     PetscCallExternal(HYPRE_BoomerAMGSetCycleType, jac->hsolver, jac->cycletype);
2060     PetscCallExternal(HYPRE_BoomerAMGSetMaxLevels, jac->hsolver, jac->maxlevels);
2061     PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter);
2062     PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol);
2063     PetscCallExternal(HYPRE_BoomerAMGSetTruncFactor, jac->hsolver, jac->truncfactor);
2064     PetscCallExternal(HYPRE_BoomerAMGSetStrongThreshold, jac->hsolver, jac->strongthreshold);
2065     PetscCallExternal(HYPRE_BoomerAMGSetMaxRowSum, jac->hsolver, jac->maxrowsum);
2066     PetscCallExternal(HYPRE_BoomerAMGSetCoarsenType, jac->hsolver, jac->coarsentype);
2067     PetscCallExternal(HYPRE_BoomerAMGSetMeasureType, jac->hsolver, jac->measuretype);
2068     PetscCallExternal(HYPRE_BoomerAMGSetRelaxOrder, jac->hsolver, jac->relaxorder);
2069     PetscCallExternal(HYPRE_BoomerAMGSetInterpType, jac->hsolver, jac->interptype);
2070     PetscCallExternal(HYPRE_BoomerAMGSetAggNumLevels, jac->hsolver, jac->agg_nl);
2071     PetscCallExternal(HYPRE_BoomerAMGSetAggInterpType, jac->hsolver, jac->agg_interptype);
2072     PetscCallExternal(HYPRE_BoomerAMGSetPMaxElmts, jac->hsolver, jac->pmax);
2073     PetscCallExternal(HYPRE_BoomerAMGSetNumPaths, jac->hsolver, jac->agg_num_paths);
2074     PetscCallExternal(HYPRE_BoomerAMGSetRelaxType, jac->hsolver, jac->relaxtype[0]);  /* defaults coarse to 9 */
2075     PetscCallExternal(HYPRE_BoomerAMGSetNumSweeps, jac->hsolver, jac->gridsweeps[0]); /* defaults coarse to 1 */
2076     PetscCallExternal(HYPRE_BoomerAMGSetMaxCoarseSize, jac->hsolver, jac->maxc);
2077     PetscCallExternal(HYPRE_BoomerAMGSetMinCoarseSize, jac->hsolver, jac->minc);
2078     /* GPU */
2079 #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0)
2080     PetscCallExternal(HYPRE_BoomerAMGSetKeepTranspose, jac->hsolver, jac->keeptranspose ? 1 : 0);
2081     PetscCallExternal(HYPRE_BoomerAMGSetRAP2, jac->hsolver, jac->rap2);
2082     PetscCallExternal(HYPRE_BoomerAMGSetModuleRAP2, jac->hsolver, jac->mod_rap2);
2083 #endif
2084 
2085     /* AIR */
2086 #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0)
2087     PetscCallExternal(HYPRE_BoomerAMGSetRestriction, jac->hsolver, jac->Rtype);
2088     PetscCallExternal(HYPRE_BoomerAMGSetStrongThresholdR, jac->hsolver, jac->Rstrongthreshold);
2089     PetscCallExternal(HYPRE_BoomerAMGSetFilterThresholdR, jac->hsolver, jac->Rfilterthreshold);
2090     PetscCallExternal(HYPRE_BoomerAMGSetADropTol, jac->hsolver, jac->Adroptol);
2091     PetscCallExternal(HYPRE_BoomerAMGSetADropType, jac->hsolver, jac->Adroptype);
2092 #endif
2093     PetscFunctionReturn(PETSC_SUCCESS);
2094   }
2095   PetscCall(PetscStrcmp("ams", jac->hypre_type, &flag));
2096   if (flag) {
2097     PetscCallExternal(HYPRE_AMSCreate, &jac->hsolver);
2098     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_AMS;
2099     pc->ops->view           = PCView_HYPRE_AMS;
2100     jac->destroy            = HYPRE_AMSDestroy;
2101     jac->setup              = HYPRE_AMSSetup;
2102     jac->solve              = HYPRE_AMSSolve;
2103     jac->coords[0]          = NULL;
2104     jac->coords[1]          = NULL;
2105     jac->coords[2]          = NULL;
2106     jac->interior           = NULL;
2107     /* solver parameters: these are borrowed from mfem package, and they are not the default values from HYPRE AMS */
2108     jac->as_print       = 0;
2109     jac->as_max_iter    = 1;  /* used as a preconditioner */
2110     jac->as_tol         = 0.; /* used as a preconditioner */
2111     jac->ams_cycle_type = 13;
2112     /* Smoothing options */
2113     jac->as_relax_type   = 2;
2114     jac->as_relax_times  = 1;
2115     jac->as_relax_weight = 1.0;
2116     jac->as_omega        = 1.0;
2117     /* Vector valued Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */
2118     jac->as_amg_alpha_opts[0] = 10;
2119     jac->as_amg_alpha_opts[1] = 1;
2120     jac->as_amg_alpha_opts[2] = 6;
2121     jac->as_amg_alpha_opts[3] = 6;
2122     jac->as_amg_alpha_opts[4] = 4;
2123     jac->as_amg_alpha_theta   = 0.25;
2124     /* Scalar Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */
2125     jac->as_amg_beta_opts[0] = 10;
2126     jac->as_amg_beta_opts[1] = 1;
2127     jac->as_amg_beta_opts[2] = 6;
2128     jac->as_amg_beta_opts[3] = 6;
2129     jac->as_amg_beta_opts[4] = 4;
2130     jac->as_amg_beta_theta   = 0.25;
2131     PetscCallExternal(HYPRE_AMSSetPrintLevel, jac->hsolver, jac->as_print);
2132     PetscCallExternal(HYPRE_AMSSetMaxIter, jac->hsolver, jac->as_max_iter);
2133     PetscCallExternal(HYPRE_AMSSetCycleType, jac->hsolver, jac->ams_cycle_type);
2134     PetscCallExternal(HYPRE_AMSSetTol, jac->hsolver, jac->as_tol);
2135     PetscCallExternal(HYPRE_AMSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
2136     PetscCallExternal(HYPRE_AMSSetAlphaAMGOptions, jac->hsolver, jac->as_amg_alpha_opts[0], /* AMG coarsen type */
2137                       jac->as_amg_alpha_opts[1],                                            /* AMG agg_levels */
2138                       jac->as_amg_alpha_opts[2],                                            /* AMG relax_type */
2139                       jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3],                   /* AMG interp_type */
2140                       jac->as_amg_alpha_opts[4]);                                           /* AMG Pmax */
2141     PetscCallExternal(HYPRE_AMSSetBetaAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0],   /* AMG coarsen type */
2142                       jac->as_amg_beta_opts[1],                                             /* AMG agg_levels */
2143                       jac->as_amg_beta_opts[2],                                             /* AMG relax_type */
2144                       jac->as_amg_beta_theta, jac->as_amg_beta_opts[3],                     /* AMG interp_type */
2145                       jac->as_amg_beta_opts[4]);                                            /* AMG Pmax */
2146     /* Zero conductivity */
2147     jac->ams_beta_is_zero      = PETSC_FALSE;
2148     jac->ams_beta_is_zero_part = PETSC_FALSE;
2149     PetscFunctionReturn(PETSC_SUCCESS);
2150   }
2151   PetscCall(PetscStrcmp("ads", jac->hypre_type, &flag));
2152   if (flag) {
2153     PetscCallExternal(HYPRE_ADSCreate, &jac->hsolver);
2154     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ADS;
2155     pc->ops->view           = PCView_HYPRE_ADS;
2156     jac->destroy            = HYPRE_ADSDestroy;
2157     jac->setup              = HYPRE_ADSSetup;
2158     jac->solve              = HYPRE_ADSSolve;
2159     jac->coords[0]          = NULL;
2160     jac->coords[1]          = NULL;
2161     jac->coords[2]          = NULL;
2162     /* solver parameters: these are borrowed from mfem package, and they are not the default values from HYPRE ADS */
2163     jac->as_print       = 0;
2164     jac->as_max_iter    = 1;  /* used as a preconditioner */
2165     jac->as_tol         = 0.; /* used as a preconditioner */
2166     jac->ads_cycle_type = 13;
2167     /* Smoothing options */
2168     jac->as_relax_type   = 2;
2169     jac->as_relax_times  = 1;
2170     jac->as_relax_weight = 1.0;
2171     jac->as_omega        = 1.0;
2172     /* AMS solver parameters: cycle_type, coarsen type, agg_levels, relax_type, interp_type, Pmax */
2173     jac->ams_cycle_type       = 14;
2174     jac->as_amg_alpha_opts[0] = 10;
2175     jac->as_amg_alpha_opts[1] = 1;
2176     jac->as_amg_alpha_opts[2] = 6;
2177     jac->as_amg_alpha_opts[3] = 6;
2178     jac->as_amg_alpha_opts[4] = 4;
2179     jac->as_amg_alpha_theta   = 0.25;
2180     /* Vector Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */
2181     jac->as_amg_beta_opts[0] = 10;
2182     jac->as_amg_beta_opts[1] = 1;
2183     jac->as_amg_beta_opts[2] = 6;
2184     jac->as_amg_beta_opts[3] = 6;
2185     jac->as_amg_beta_opts[4] = 4;
2186     jac->as_amg_beta_theta   = 0.25;
2187     PetscCallExternal(HYPRE_ADSSetPrintLevel, jac->hsolver, jac->as_print);
2188     PetscCallExternal(HYPRE_ADSSetMaxIter, jac->hsolver, jac->as_max_iter);
2189     PetscCallExternal(HYPRE_ADSSetCycleType, jac->hsolver, jac->ams_cycle_type);
2190     PetscCallExternal(HYPRE_ADSSetTol, jac->hsolver, jac->as_tol);
2191     PetscCallExternal(HYPRE_ADSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
2192     PetscCallExternal(HYPRE_ADSSetAMSOptions, jac->hsolver, jac->ams_cycle_type,      /* AMG coarsen type */
2193                       jac->as_amg_alpha_opts[0],                                      /* AMG coarsen type */
2194                       jac->as_amg_alpha_opts[1],                                      /* AMG agg_levels */
2195                       jac->as_amg_alpha_opts[2],                                      /* AMG relax_type */
2196                       jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3],             /* AMG interp_type */
2197                       jac->as_amg_alpha_opts[4]);                                     /* AMG Pmax */
2198     PetscCallExternal(HYPRE_ADSSetAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */
2199                       jac->as_amg_beta_opts[1],                                       /* AMG agg_levels */
2200                       jac->as_amg_beta_opts[2],                                       /* AMG relax_type */
2201                       jac->as_amg_beta_theta, jac->as_amg_beta_opts[3],               /* AMG interp_type */
2202                       jac->as_amg_beta_opts[4]);                                      /* AMG Pmax */
2203     PetscFunctionReturn(PETSC_SUCCESS);
2204   }
2205   PetscCall(PetscFree(jac->hypre_type));
2206 
2207   jac->hypre_type = NULL;
2208   SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown HYPRE preconditioner %s; Choices are euclid, pilut, parasails, boomeramg, ams", name);
2209 }
2210 
2211 /*
2212     It only gets here if the HYPRE type has not been set before the call to
2213    ...SetFromOptions() which actually is most of the time
2214 */
2215 PetscErrorCode PCSetFromOptions_HYPRE(PC pc, PetscOptionItems *PetscOptionsObject)
2216 {
2217   PetscInt    indx;
2218   const char *type[] = {"euclid", "pilut", "parasails", "boomeramg", "ams", "ads"};
2219   PetscBool   flg;
2220 
2221   PetscFunctionBegin;
2222   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE preconditioner options");
2223   PetscCall(PetscOptionsEList("-pc_hypre_type", "HYPRE preconditioner type", "PCHYPRESetType", type, PETSC_STATIC_ARRAY_LENGTH(type), "boomeramg", &indx, &flg));
2224   if (flg) {
2225     PetscCall(PCHYPRESetType_HYPRE(pc, type[indx]));
2226   } else {
2227     PetscCall(PCHYPRESetType_HYPRE(pc, "boomeramg"));
2228   }
2229   PetscTryTypeMethod(pc, setfromoptions, PetscOptionsObject);
2230   PetscOptionsHeadEnd();
2231   PetscFunctionReturn(PETSC_SUCCESS);
2232 }
2233 
2234 /*@C
2235   PCHYPRESetType - Sets which hypre preconditioner you wish to use
2236 
2237   Input Parameters:
2238 + pc   - the preconditioner context
2239 - name - either  euclid, pilut, parasails, boomeramg, ams, ads
2240 
2241   Options Database Key:
2242 . pc_hypre_type - One of euclid, pilut, parasails, boomeramg, ams, ads
2243 
2244   Level: intermediate
2245 
2246 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHYPRE`
2247 @*/
PetscErrorCode PCHYPRESetType(PC pc, const char name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(name, 2);
  /* dispatch to PCHYPRESetType_HYPRE() when the PC is of type PCHYPRE; silently ignore otherwise */
  PetscTryMethod(pc, "PCHYPRESetType_C", (PC, const char[]), (pc, name));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2256 
2257 /*@C
2258   PCHYPREGetType - Gets which hypre preconditioner you are using
2259 
2260   Input Parameter:
2261 . pc - the preconditioner context
2262 
2263   Output Parameter:
2264 . name - either  euclid, pilut, parasails, boomeramg, ams, ads
2265 
2266   Level: intermediate
2267 
2268 .seealso: `PCCreate()`, `PCHYPRESetType()`, `PCType`, `PC`, `PCHYPRE`
2269 @*/
PetscErrorCode PCHYPREGetType(PC pc, const char *name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(name, 2);
  /* dispatch to PCHYPREGetType_HYPRE() when the PC is of type PCHYPRE; silently ignore otherwise */
  PetscTryMethod(pc, "PCHYPREGetType_C", (PC, const char *[]), (pc, name));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2278 
2279 /*@C
2280   PCMGGalerkinSetMatProductAlgorithm - Set type of SpGEMM for hypre to use on GPUs
2281 
2282   Logically Collective
2283 
2284   Input Parameters:
2285 + pc   - the hypre context
2286 - name - one of 'cusparse', 'hypre'
2287 
2288   Options Database Key:
2289 . -pc_mg_galerkin_mat_product_algorithm <cusparse,hypre> - Type of SpGEMM to use in hypre
2290 
2291   Level: intermediate
2292 
2293   Developer Notes:
2294   Since the name starts with `PCMG`, should it not be `PCHYPREBoomerAMG`?
2295 
2296 .seealso: `PCHYPRE`, `PCMGGalerkinGetMatProductAlgorithm()`
2297 @*/
PetscErrorCode PCMGGalerkinSetMatProductAlgorithm(PC pc, const char name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* dispatch to the PCHYPRE implementation if this PC provides it; silently ignore otherwise */
  PetscTryMethod(pc, "PCMGGalerkinSetMatProductAlgorithm_C", (PC, const char[]), (pc, name));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2305 
2306 /*@C
2307   PCMGGalerkinGetMatProductAlgorithm - Get type of SpGEMM for hypre to use on GPUs
2308 
2309   Not Collective
2310 
2311   Input Parameter:
2312 . pc - the multigrid context
2313 
2314   Output Parameter:
2315 . name - one of 'cusparse', 'hypre'
2316 
2317   Level: intermediate
2318 
2319 .seealso: `PCHYPRE`, `PCMGGalerkinSetMatProductAlgorithm()`
2320 @*/
PetscErrorCode PCMGGalerkinGetMatProductAlgorithm(PC pc, const char *name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* dispatch to the PCHYPRE implementation if this PC provides it; silently ignore otherwise */
  PetscTryMethod(pc, "PCMGGalerkinGetMatProductAlgorithm_C", (PC, const char *[]), (pc, name));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2328 
2329 /*MC
2330      PCHYPRE - Allows you to use the matrix element based preconditioners in the LLNL package hypre as PETSc `PC`
2331 
2332    Options Database Keys:
2333 +   -pc_hypre_type - One of `euclid`, `pilut`, `parasails`, `boomeramg`, `ams`, or `ads`
2334 .   -pc_hypre_boomeramg_nodal_coarsen <n> - where n is from 1 to 6 (see `HYPRE_BoomerAMGSetNodal()`)
2335 .   -pc_hypre_boomeramg_vec_interp_variant <v> - where v is from 1 to 3 (see `HYPRE_BoomerAMGSetInterpVecVariant()`)
2336 -   Many others, run with `-pc_type hypre` `-pc_hypre_type XXX` `-help` to see options for the XXX preconditioner
2337 
2338    Level: intermediate
2339 
2340    Notes:
2341     Apart from `-pc_hypre_type` (for which there is `PCHYPRESetType()`),
2342           the many hypre options can ONLY be set via the options database (e.g. the command line
2343           or with `PetscOptionsSetValue()`, there are no functions to set them)
2344 
2345           The options `-pc_hypre_boomeramg_max_iter` and `-pc_hypre_boomeramg_tol` refer to the number of iterations
2346           (V-cycles) and tolerance that boomerAMG does EACH time it is called. So for example, if
2347           `-pc_hypre_boomeramg_max_iter` is set to 2 then 2-V-cycles are being used to define the preconditioner
2348           (`-pc_hypre_boomeramg_tol` should be set to 0.0 - the default - to strictly use a fixed number of
2349           iterations per hypre call). `-ksp_max_it` and `-ksp_rtol` STILL determine the total number of iterations
2350           and tolerance for the Krylov solver. For example, if `-pc_hypre_boomeramg_max_iter` is 2 and `-ksp_max_it` is 10
2351           then AT MOST twenty V-cycles of boomeramg will be used.
2352 
2353            Note that the option `-pc_hypre_boomeramg_relax_type_all` defaults to symmetric relaxation
2354            (symmetric-SOR/Jacobi), which is required for Krylov solvers like CG that expect symmetry.
2355            Otherwise, you may want to use `-pc_hypre_boomeramg_relax_type_all SOR/Jacobi`.
2356 
2357           `MatSetNearNullSpace()` - if you provide a near null space to your matrix it is ignored by hypre UNLESS you also use
2358           the following two options: `-pc_hypre_boomeramg_nodal_coarsen <n> -pc_hypre_boomeramg_vec_interp_variant <v>`
2359 
2360           See `PCPFMG`, `PCSMG`, and `PCSYSPFMG` for access to hypre's other (nonalgebraic) multigrid solvers
2361 
2362           For `PCHYPRE` type of `ams` or `ads` auxiliary data must be provided to the preconditioner with `PCHYPRESetDiscreteGradient()`,
2363           `PCHYPRESetDiscreteCurl()`, `PCHYPRESetInterpolations()`, `PCHYPRESetAlphaPoissonMatrix()`, `PCHYPRESetBetaPoissonMatrix()`, `PCHYPRESetEdgeConstantVectors()`,
2364           `PCHYPREAMSSetInteriorNodes()`
2365 
2366   Sometimes people want to try algebraic multigrid as a "standalone" solver, that is not accelerating it with a Krylov method. Though we generally do not recommend this
2367   since it is usually slower, one should use a `KSPType` of `KSPRICHARDSON`
2368   (or equivalently `-ksp_type richardson`) to achieve this. Using `KSPPREONLY` will not work since it only applies a single cycle of multigrid.
2369 
2370    PETSc provides its own geometric and algebraic multigrid solvers `PCMG` and `PCGAMG`, also see `PCHMG` which is useful for certain multicomponent problems
2371 
2372    GPU Notes:
2373      To configure hypre BoomerAMG so that it can utilize NVIDIA GPUs run ./configure --download-hypre --with-cuda
2374      Then pass `VECCUDA` vectors and `MATAIJCUSPARSE` matrices to the solvers and PETSc will automatically utilize hypre's GPU solvers.
2375 
2376      To configure hypre BoomerAMG so that it can utilize AMD GPUs run ./configure --download-hypre --with-hip
2377      Then pass `VECHIP` vectors to the solvers and PETSc will automatically utilize hypre's GPU solvers.
2378 
2379 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHYPRESetType()`, `PCPFMG`, `PCGAMG`, `PCSYSPFMG`, `PCSMG`, `PCHYPRESetDiscreteGradient()`,
2380           `PCHYPRESetDiscreteCurl()`, `PCHYPRESetInterpolations()`, `PCHYPRESetAlphaPoissonMatrix()`, `PCHYPRESetBetaPoissonMatrix()`, `PCHYPRESetEdgeConstantVectors()`,
          `PCHYPREAMSSetInteriorNodes()`
2382 M*/
2383 
/*
   PCCreate_HYPRE - Creates the PCHYPRE preconditioner context.

   Allocates the PC_HYPRE data, installs the PC method table, and composes the
   type-specific setter/getter functions so the generic PCHYPRE* wrappers can
   dispatch to this implementation; the concrete hypre solver is chosen later
   through the composed "PCHYPRESetType_C" function.
*/
PETSC_EXTERN PetscErrorCode PCCreate_HYPRE(PC pc)
{
  PC_HYPRE *jac;

  PetscFunctionBegin;
  PetscCall(PetscNew(&jac));

  pc->data                = jac;
  pc->ops->reset          = PCReset_HYPRE;
  pc->ops->destroy        = PCDestroy_HYPRE;
  pc->ops->setfromoptions = PCSetFromOptions_HYPRE;
  pc->ops->setup          = PCSetUp_HYPRE;
  pc->ops->apply          = PCApply_HYPRE;
  jac->comm_hypre         = MPI_COMM_NULL; /* no duplicated communicator yet */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetType_C", PCHYPRESetType_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetType_C", PCHYPREGetType_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteGradient_C", PCHYPRESetDiscreteGradient_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteCurl_C", PCHYPRESetDiscreteCurl_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetInterpolations_C", PCHYPRESetInterpolations_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetEdgeConstantVectors_C", PCHYPRESetEdgeConstantVectors_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREAMSSetInteriorNodes_C", PCHYPREAMSSetInteriorNodes_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetPoissonMatrix_C", PCHYPRESetPoissonMatrix_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinSetMatProductAlgorithm_C", PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinGetMatProductAlgorithm_C", PCMGGalerkinGetMatProductAlgorithm_HYPRE_BoomerAMG));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  /* initialize the matching PETSc device backend before hypre is initialized below */
  #if defined(HYPRE_USING_HIP)
  PetscCall(PetscDeviceInitialize(PETSC_DEVICE_HIP));
  #endif
  #if defined(HYPRE_USING_CUDA)
  PetscCall(PetscDeviceInitialize(PETSC_DEVICE_CUDA));
  #endif
#endif
  PetscHYPREInitialize();
  PetscFunctionReturn(PETSC_SUCCESS);
}
2420 
/* Private context for the PCPFMG preconditioner (hypre's struct-grid PFMG) */
typedef struct {
  MPI_Comm           hcomm; /* does not share comm with HYPRE_StructMatrix because need to create solver before getting matrix */
  HYPRE_StructSolver hsolver;

  /* keep copy of PFMG options used so may view them */
  PetscInt  its;        /* maximum PFMG iterations per apply */
  double    tol;        /* convergence tolerance handed to hypre */
  PetscInt  relax_type; /* index into PFMGRelaxType[] */
  PetscInt  rap_type;   /* index into PFMGRAPType[] */
  PetscInt  num_pre_relax, num_post_relax; /* smoothing steps before/after the coarse grid */
  PetscInt  max_levels; /* maximum depth of the multigrid hierarchy */
  PetscInt  skip_relax; /* skip relaxation on certain grids for isotropic problems */
  PetscBool print_statistics;
} PC_PFMG;
2435 
2436 PetscErrorCode PCDestroy_PFMG(PC pc)
2437 {
2438   PC_PFMG *ex = (PC_PFMG *)pc->data;
2439 
2440   PetscFunctionBegin;
2441   if (ex->hsolver) PetscCallExternal(HYPRE_StructPFMGDestroy, ex->hsolver);
2442   PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
2443   PetscCall(PetscFree(pc->data));
2444   PetscFunctionReturn(PETSC_SUCCESS);
2445 }
2446 
/* option-value names for -pc_pfmg_relax_type / -pc_pfmg_rap_type; the stored index is passed straight to the hypre setters */
static const char *PFMGRelaxType[] = {"Jacobi", "Weighted-Jacobi", "symmetric-Red/Black-Gauss-Seidel", "Red/Black-Gauss-Seidel"};
static const char *PFMGRAPType[]   = {"Galerkin", "non-Galerkin"};
2449 
/* PCView_PFMG - Prints (ASCII viewers only) the PFMG options cached in the PC_PFMG context */
PetscErrorCode PCView_PFMG(PC pc, PetscViewer viewer)
{
  PetscBool iascii;
  PC_PFMG  *ex = (PC_PFMG *)pc->data;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE PFMG preconditioning\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    max iterations %" PetscInt_FMT "\n", ex->its));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    tolerance %g\n", ex->tol));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    relax type %s\n", PFMGRelaxType[ex->relax_type]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    RAP type %s\n", PFMGRAPType[ex->rap_type]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    max levels %" PetscInt_FMT "\n", ex->max_levels));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    skip relax %" PetscInt_FMT "\n", ex->skip_relax));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2469 
/*
   PCSetFromOptions_PFMG - Processes the -pc_pfmg_* options database entries.

   Each value is cached in PC_PFMG and also pushed to the live hypre solver
   immediately; the cached copies are reapplied in PCSetUp_PFMG() after the
   solver object is destroyed and recreated there.
*/
PetscErrorCode PCSetFromOptions_PFMG(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_PFMG *ex = (PC_PFMG *)pc->data;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "PFMG options");
  PetscCall(PetscOptionsBool("-pc_pfmg_print_statistics", "Print statistics", "HYPRE_StructPFMGSetPrintLevel", ex->print_statistics, &ex->print_statistics, NULL));
  PetscCall(PetscOptionsInt("-pc_pfmg_its", "Number of iterations of PFMG to use as preconditioner", "HYPRE_StructPFMGSetMaxIter", ex->its, &ex->its, NULL));
  PetscCallExternal(HYPRE_StructPFMGSetMaxIter, ex->hsolver, ex->its);
  PetscCall(PetscOptionsInt("-pc_pfmg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_StructPFMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL));
  PetscCallExternal(HYPRE_StructPFMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax);
  PetscCall(PetscOptionsInt("-pc_pfmg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_StructPFMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL));
  PetscCallExternal(HYPRE_StructPFMGSetNumPostRelax, ex->hsolver, ex->num_post_relax);

  PetscCall(PetscOptionsInt("-pc_pfmg_max_levels", "Max Levels for MG hierarchy", "HYPRE_StructPFMGSetMaxLevels", ex->max_levels, &ex->max_levels, NULL));
  PetscCallExternal(HYPRE_StructPFMGSetMaxLevels, ex->hsolver, ex->max_levels);

  PetscCall(PetscOptionsReal("-pc_pfmg_tol", "Tolerance of PFMG", "HYPRE_StructPFMGSetTol", ex->tol, &ex->tol, NULL));
  PetscCallExternal(HYPRE_StructPFMGSetTol, ex->hsolver, ex->tol);
  PetscCall(PetscOptionsEList("-pc_pfmg_relax_type", "Relax type for the up and down cycles", "HYPRE_StructPFMGSetRelaxType", PFMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(PFMGRelaxType), PFMGRelaxType[ex->relax_type], &ex->relax_type, NULL));
  PetscCallExternal(HYPRE_StructPFMGSetRelaxType, ex->hsolver, ex->relax_type);
  PetscCall(PetscOptionsEList("-pc_pfmg_rap_type", "RAP type", "HYPRE_StructPFMGSetRAPType", PFMGRAPType, PETSC_STATIC_ARRAY_LENGTH(PFMGRAPType), PFMGRAPType[ex->rap_type], &ex->rap_type, NULL));
  PetscCallExternal(HYPRE_StructPFMGSetRAPType, ex->hsolver, ex->rap_type);
  PetscCall(PetscOptionsInt("-pc_pfmg_skip_relax", "Skip relaxation on certain grids for isotropic problems. This can greatly improve efficiency by eliminating unnecessary relaxations when the underlying problem is isotropic", "HYPRE_StructPFMGSetSkipRelax", ex->skip_relax, &ex->skip_relax, NULL));
  PetscCallExternal(HYPRE_StructPFMGSetSkipRelax, ex->hsolver, ex->skip_relax);
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
2498 
2499 PetscErrorCode PCApply_PFMG(PC pc, Vec x, Vec y)
2500 {
2501   PC_PFMG           *ex = (PC_PFMG *)pc->data;
2502   PetscScalar       *yy;
2503   const PetscScalar *xx;
2504   PetscInt           ilower[3], iupper[3];
2505   HYPRE_Int          hlower[3], hupper[3];
2506   Mat_HYPREStruct   *mx = (Mat_HYPREStruct *)(pc->pmat->data);
2507 
2508   PetscFunctionBegin;
2509   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
2510   PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2]));
2511   /* when HYPRE_MIXEDINT is defined, sizeof(HYPRE_Int) == 32 */
2512   iupper[0] += ilower[0] - 1;
2513   iupper[1] += ilower[1] - 1;
2514   iupper[2] += ilower[2] - 1;
2515   hlower[0] = (HYPRE_Int)ilower[0];
2516   hlower[1] = (HYPRE_Int)ilower[1];
2517   hlower[2] = (HYPRE_Int)ilower[2];
2518   hupper[0] = (HYPRE_Int)iupper[0];
2519   hupper[1] = (HYPRE_Int)iupper[1];
2520   hupper[2] = (HYPRE_Int)iupper[2];
2521 
2522   /* copy x values over to hypre */
2523   PetscCallExternal(HYPRE_StructVectorSetConstantValues, mx->hb, 0.0);
2524   PetscCall(VecGetArrayRead(x, &xx));
2525   PetscCallExternal(HYPRE_StructVectorSetBoxValues, mx->hb, hlower, hupper, (HYPRE_Complex *)xx);
2526   PetscCall(VecRestoreArrayRead(x, &xx));
2527   PetscCallExternal(HYPRE_StructVectorAssemble, mx->hb);
2528   PetscCallExternal(HYPRE_StructPFMGSolve, ex->hsolver, mx->hmat, mx->hb, mx->hx);
2529 
2530   /* copy solution values back to PETSc */
2531   PetscCall(VecGetArray(y, &yy));
2532   PetscCallExternal(HYPRE_StructVectorGetBoxValues, mx->hx, hlower, hupper, (HYPRE_Complex *)yy);
2533   PetscCall(VecRestoreArray(y, &yy));
2534   PetscFunctionReturn(PETSC_SUCCESS);
2535 }
2536 
2537 static PetscErrorCode PCApplyRichardson_PFMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
2538 {
2539   PC_PFMG  *jac = (PC_PFMG *)pc->data;
2540   HYPRE_Int oits;
2541 
2542   PetscFunctionBegin;
2543   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
2544   PetscCallExternal(HYPRE_StructPFMGSetMaxIter, jac->hsolver, its * jac->its);
2545   PetscCallExternal(HYPRE_StructPFMGSetTol, jac->hsolver, rtol);
2546 
2547   PetscCall(PCApply_PFMG(pc, b, y));
2548   PetscCallExternal(HYPRE_StructPFMGGetNumIterations, jac->hsolver, &oits);
2549   *outits = oits;
2550   if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
2551   else *reason = PCRICHARDSON_CONVERGED_RTOL;
2552   PetscCallExternal(HYPRE_StructPFMGSetTol, jac->hsolver, jac->tol);
2553   PetscCallExternal(HYPRE_StructPFMGSetMaxIter, jac->hsolver, jac->its);
2554   PetscFunctionReturn(PETSC_SUCCESS);
2555 }
2556 
/*
   PCSetUp_PFMG - (Re)creates the hypre PFMG solver object, reapplies the
   options cached in PC_PFMG, and runs hypre's setup phase on the
   MATHYPRESTRUCT operator.
*/
PetscErrorCode PCSetUp_PFMG(PC pc)
{
  PC_PFMG         *ex = (PC_PFMG *)pc->data;
  Mat_HYPREStruct *mx = (Mat_HYPREStruct *)(pc->pmat->data);
  PetscBool        flg;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESTRUCT, &flg));
  PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESTRUCT with this preconditioner");

  /* create the hypre solver object and set its information */
  if (ex->hsolver) PetscCallExternal(HYPRE_StructPFMGDestroy, ex->hsolver);
  PetscCallExternal(HYPRE_StructPFMGCreate, ex->hcomm, &ex->hsolver);

  // Print Hypre statistics about the solve process
  if (ex->print_statistics) PetscCallExternal(HYPRE_StructPFMGSetPrintLevel, ex->hsolver, 3);

  // The hypre options must be repeated here because the StructPFMG was destroyed and recreated
  PetscCallExternal(HYPRE_StructPFMGSetMaxIter, ex->hsolver, ex->its);
  PetscCallExternal(HYPRE_StructPFMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax);
  PetscCallExternal(HYPRE_StructPFMGSetNumPostRelax, ex->hsolver, ex->num_post_relax);
  PetscCallExternal(HYPRE_StructPFMGSetMaxLevels, ex->hsolver, ex->max_levels);
  PetscCallExternal(HYPRE_StructPFMGSetTol, ex->hsolver, ex->tol);
  PetscCallExternal(HYPRE_StructPFMGSetRelaxType, ex->hsolver, ex->relax_type);
  PetscCallExternal(HYPRE_StructPFMGSetRAPType, ex->hsolver, ex->rap_type);

  PetscCallExternal(HYPRE_StructPFMGSetup, ex->hsolver, mx->hmat, mx->hb, mx->hx);
  /* each apply starts from a zero initial guess */
  PetscCallExternal(HYPRE_StructPFMGSetZeroGuess, ex->hsolver);
  PetscFunctionReturn(PETSC_SUCCESS);
}
2587 
2588 /*MC
2589      PCPFMG - the hypre PFMG multigrid solver
2590 
2591    Options Database Keys:
2592 + -pc_pfmg_its <its> - number of iterations of PFMG to use as preconditioner
2593 . -pc_pfmg_num_pre_relax <steps> - number of smoothing steps before coarse grid solve
2594 . -pc_pfmg_num_post_relax <steps> - number of smoothing steps after coarse grid solve
2595 . -pc_pfmg_tol <tol> - tolerance of PFMG
2596 . -pc_pfmg_relax_type - relaxation type for the up and down cycles, one of Jacobi,Weighted-Jacobi,symmetric-Red/Black-Gauss-Seidel,Red/Black-Gauss-Seidel
2597 . -pc_pfmg_rap_type - type of coarse matrix generation, one of Galerkin,non-Galerkin
2598 - -pc_pfmg_skip_relax - skip relaxation on certain grids for isotropic problems. This can greatly improve efficiency by eliminating unnecessary relaxations
2599                         when the underlying problem is isotropic, one of 0,1
2600 
2601    Level: advanced
2602 
2603    Notes:
   This is for CELL-centered discretizations
2605 
2606    See `PCSYSPFMG` for a version suitable for systems of PDEs, and `PCSMG`
2607 
2608    See `PCHYPRE` for hypre's BoomerAMG algebraic multigrid solver
2609 
2610    This must be used with the `MATHYPRESTRUCT` matrix type.
2611 
2612    This provides only some of the functionality of PFMG, it supports only one block per process defined by a PETSc `DMDA`.
2613 
2614 .seealso: `PCMG`, `MATHYPRESTRUCT`, `PCHYPRE`, `PCGAMG`, `PCSYSPFMG`, `PCSMG`
2615 M*/
2616 
/*
   PCCreate_PFMG - Creates the PCPFMG context with default option values and a
   hypre PFMG solver object; the solver is created here so that
   PCSetFromOptions_PFMG() can push options onto it before PCSetUp_PFMG() runs.
*/
PETSC_EXTERN PetscErrorCode PCCreate_PFMG(PC pc)
{
  PC_PFMG *ex;

  PetscFunctionBegin;
  PetscCall(PetscNew(&ex));
  pc->data = ex;

  ex->its              = 1;
  ex->tol              = 1.e-8;
  ex->relax_type       = 1; /* Weighted-Jacobi, see PFMGRelaxType[] */
  ex->rap_type         = 0; /* Galerkin, see PFMGRAPType[] */
  ex->num_pre_relax    = 1;
  ex->num_post_relax   = 1;
  ex->max_levels       = 0;
  ex->skip_relax       = 0;
  ex->print_statistics = PETSC_FALSE;

  pc->ops->setfromoptions  = PCSetFromOptions_PFMG;
  pc->ops->view            = PCView_PFMG;
  pc->ops->destroy         = PCDestroy_PFMG;
  pc->ops->apply           = PCApply_PFMG;
  pc->ops->applyrichardson = PCApplyRichardson_PFMG;
  pc->ops->setup           = PCSetUp_PFMG;

  PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
  PetscHYPREInitialize();
  PetscCallExternal(HYPRE_StructPFMGCreate, ex->hcomm, &ex->hsolver);
  PetscFunctionReturn(PETSC_SUCCESS);
}
2647 
/* we know we are working with a HYPRE_SStructMatrix */
typedef struct {
  MPI_Comm            hcomm; /* does not share comm with HYPRE_SStructMatrix because need to create solver before getting matrix */
  HYPRE_SStructSolver ss_solver;

  /* keep copy of SYSPFMG options used so may view them */
  PetscInt its;        /* maximum SysPFMG iterations per apply */
  double   tol;        /* convergence tolerance handed to hypre */
  PetscInt relax_type; /* index into SysPFMGRelaxType[] */
  PetscInt num_pre_relax, num_post_relax; /* smoothing steps before/after the coarse grid */
} PC_SysPFMG;
2659 
2660 PetscErrorCode PCDestroy_SysPFMG(PC pc)
2661 {
2662   PC_SysPFMG *ex = (PC_SysPFMG *)pc->data;
2663 
2664   PetscFunctionBegin;
2665   if (ex->ss_solver) PetscCallExternal(HYPRE_SStructSysPFMGDestroy, ex->ss_solver);
2666   PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
2667   PetscCall(PetscFree(pc->data));
2668   PetscFunctionReturn(PETSC_SUCCESS);
2669 }
2670 
/* option-value names for -pc_syspfmg_relax_type; the stored index is passed to HYPRE_SStructSysPFMGSetRelaxType() */
static const char *SysPFMGRelaxType[] = {"Weighted-Jacobi", "Red/Black-Gauss-Seidel"};
2672 
2673 PetscErrorCode PCView_SysPFMG(PC pc, PetscViewer viewer)
2674 {
2675   PetscBool   iascii;
2676   PC_SysPFMG *ex = (PC_SysPFMG *)pc->data;
2677 
2678   PetscFunctionBegin;
2679   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
2680   if (iascii) {
2681     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE SysPFMG preconditioning\n"));
2682     PetscCall(PetscViewerASCIIPrintf(viewer, "  max iterations %" PetscInt_FMT "\n", ex->its));
2683     PetscCall(PetscViewerASCIIPrintf(viewer, "  tolerance %g\n", ex->tol));
2684     PetscCall(PetscViewerASCIIPrintf(viewer, "  relax type %s\n", PFMGRelaxType[ex->relax_type]));
2685     PetscCall(PetscViewerASCIIPrintf(viewer, "  number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax));
2686   }
2687   PetscFunctionReturn(PETSC_SUCCESS);
2688 }
2689 
/*
   PCSetFromOptions_SysPFMG - Processes the -pc_syspfmg_* options database
   entries; each value is cached in PC_SysPFMG and also pushed to the current
   hypre solver object.
*/
PetscErrorCode PCSetFromOptions_SysPFMG(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_SysPFMG *ex  = (PC_SysPFMG *)pc->data;
  PetscBool   flg = PETSC_FALSE;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "SysPFMG options");
  PetscCall(PetscOptionsBool("-pc_syspfmg_print_statistics", "Print statistics", "HYPRE_SStructSysPFMGSetPrintLevel", flg, &flg, NULL));
  if (flg) PetscCallExternal(HYPRE_SStructSysPFMGSetPrintLevel, ex->ss_solver, 3);
  PetscCall(PetscOptionsInt("-pc_syspfmg_its", "Number of iterations of SysPFMG to use as preconditioner", "HYPRE_SStructSysPFMGSetMaxIter", ex->its, &ex->its, NULL));
  PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, ex->ss_solver, ex->its);
  PetscCall(PetscOptionsInt("-pc_syspfmg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_SStructSysPFMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL));
  PetscCallExternal(HYPRE_SStructSysPFMGSetNumPreRelax, ex->ss_solver, ex->num_pre_relax);
  PetscCall(PetscOptionsInt("-pc_syspfmg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_SStructSysPFMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL));
  PetscCallExternal(HYPRE_SStructSysPFMGSetNumPostRelax, ex->ss_solver, ex->num_post_relax);

  PetscCall(PetscOptionsReal("-pc_syspfmg_tol", "Tolerance of SysPFMG", "HYPRE_SStructSysPFMGSetTol", ex->tol, &ex->tol, NULL));
  PetscCallExternal(HYPRE_SStructSysPFMGSetTol, ex->ss_solver, ex->tol);
  PetscCall(PetscOptionsEList("-pc_syspfmg_relax_type", "Relax type for the up and down cycles", "HYPRE_SStructSysPFMGSetRelaxType", SysPFMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(SysPFMGRelaxType), SysPFMGRelaxType[ex->relax_type], &ex->relax_type, NULL));
  PetscCallExternal(HYPRE_SStructSysPFMGSetRelaxType, ex->ss_solver, ex->relax_type);
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
2713 
/*
   PCApply_SysPFMG - y = SysPFMG(x) for a MATHYPRESSTRUCT operator.

   The matrix stores whether the PETSc dofs are already in hypre's
   variable-major ordering (mx->dofs_order nonzero) or in nodal ordering; in
   the nodal case the input/output are permuted through the scratch array z so
   hypre sees each variable's values contiguously.
*/
PetscErrorCode PCApply_SysPFMG(PC pc, Vec x, Vec y)
{
  PC_SysPFMG        *ex = (PC_SysPFMG *)pc->data;
  PetscScalar       *yy;
  const PetscScalar *xx;
  PetscInt           ilower[3], iupper[3];
  HYPRE_Int          hlower[3], hupper[3];
  Mat_HYPRESStruct  *mx       = (Mat_HYPRESStruct *)(pc->pmat->data);
  PetscInt           ordering = mx->dofs_order;
  PetscInt           nvars    = mx->nvars;
  PetscInt           part     = 0;
  PetscInt           size;
  PetscInt           i;

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(hypreCitation, &cite));
  PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2]));
  /* convert (corner, width) to inclusive bounds; explicit casts because HYPRE_Int is a 32-bit type when HYPRE_MIXEDINT is defined */
  iupper[0] += ilower[0] - 1;
  iupper[1] += ilower[1] - 1;
  iupper[2] += ilower[2] - 1;
  hlower[0] = (HYPRE_Int)ilower[0];
  hlower[1] = (HYPRE_Int)ilower[1];
  hlower[2] = (HYPRE_Int)ilower[2];
  hupper[0] = (HYPRE_Int)iupper[0];
  hupper[1] = (HYPRE_Int)iupper[1];
  hupper[2] = (HYPRE_Int)iupper[2];

  /* number of local grid points; each variable contributes one value per point */
  size = 1;
  for (i = 0; i < 3; i++) size *= (iupper[i] - ilower[i] + 1);

  /* copy x values over to hypre for variable ordering */
  if (ordering) {
    PetscCallExternal(HYPRE_SStructVectorSetConstantValues, mx->ss_b, 0.0);
    PetscCall(VecGetArrayRead(x, &xx));
    for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorSetBoxValues, mx->ss_b, part, hlower, hupper, i, (HYPRE_Complex *)(xx + (size * i)));
    PetscCall(VecRestoreArrayRead(x, &xx));
    PetscCallExternal(HYPRE_SStructVectorAssemble, mx->ss_b);
    /* NOTE(review): this overwrites ss_x with A*b before the solve; intent unclear (initialization of ss_x?) — confirm whether it is required */
    PetscCallExternal(HYPRE_SStructMatrixMatvec, 1.0, mx->ss_mat, mx->ss_b, 0.0, mx->ss_x);
    PetscCallExternal(HYPRE_SStructSysPFMGSolve, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x);

    /* copy solution values back to PETSc */
    PetscCall(VecGetArray(y, &yy));
    for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorGetBoxValues, mx->ss_x, part, hlower, hupper, i, (HYPRE_Complex *)(yy + (size * i)));
    PetscCall(VecRestoreArray(y, &yy));
  } else { /* nodal ordering must be mapped to variable ordering for sys_pfmg */
    PetscScalar *z;
    PetscInt     j, k;

    PetscCall(PetscMalloc1(nvars * size, &z));
    PetscCallExternal(HYPRE_SStructVectorSetConstantValues, mx->ss_b, 0.0);
    PetscCall(VecGetArrayRead(x, &xx));

    /* transform nodal to hypre's variable ordering for sys_pfmg */
    for (i = 0; i < size; i++) {
      k = i * nvars;
      for (j = 0; j < nvars; j++) z[j * size + i] = xx[k + j];
    }
    for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorSetBoxValues, mx->ss_b, part, hlower, hupper, i, (HYPRE_Complex *)(z + (size * i)));
    PetscCall(VecRestoreArrayRead(x, &xx));
    PetscCallExternal(HYPRE_SStructVectorAssemble, mx->ss_b);
    PetscCallExternal(HYPRE_SStructSysPFMGSolve, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x);

    /* copy solution values back to PETSc, reusing z as the staging buffer */
    PetscCall(VecGetArray(y, &yy));
    for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorGetBoxValues, mx->ss_x, part, hlower, hupper, i, (HYPRE_Complex *)(z + (size * i)));
    /* transform hypre's variable ordering for sys_pfmg to nodal ordering */
    for (i = 0; i < size; i++) {
      k = i * nvars;
      for (j = 0; j < nvars; j++) yy[k + j] = z[j * size + i];
    }
    PetscCall(VecRestoreArray(y, &yy));
    PetscCall(PetscFree(z));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2790 
2791 static PetscErrorCode PCApplyRichardson_SysPFMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
2792 {
2793   PC_SysPFMG *jac = (PC_SysPFMG *)pc->data;
2794   HYPRE_Int   oits;
2795 
2796   PetscFunctionBegin;
2797   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
2798   PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, jac->ss_solver, its * jac->its);
2799   PetscCallExternal(HYPRE_SStructSysPFMGSetTol, jac->ss_solver, rtol);
2800   PetscCall(PCApply_SysPFMG(pc, b, y));
2801   PetscCallExternal(HYPRE_SStructSysPFMGGetNumIterations, jac->ss_solver, &oits);
2802   *outits = oits;
2803   if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
2804   else *reason = PCRICHARDSON_CONVERGED_RTOL;
2805   PetscCallExternal(HYPRE_SStructSysPFMGSetTol, jac->ss_solver, jac->tol);
2806   PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, jac->ss_solver, jac->its);
2807   PetscFunctionReturn(PETSC_SUCCESS);
2808 }
2809 
2810 PetscErrorCode PCSetUp_SysPFMG(PC pc)
2811 {
2812   PC_SysPFMG       *ex = (PC_SysPFMG *)pc->data;
2813   Mat_HYPRESStruct *mx = (Mat_HYPRESStruct *)(pc->pmat->data);
2814   PetscBool         flg;
2815 
2816   PetscFunctionBegin;
2817   PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESSTRUCT, &flg));
2818   PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESSTRUCT with this preconditioner");
2819 
2820   /* create the hypre sstruct solver object and set its information */
2821   if (ex->ss_solver) PetscCallExternal(HYPRE_SStructSysPFMGDestroy, ex->ss_solver);
2822   PetscCallExternal(HYPRE_SStructSysPFMGCreate, ex->hcomm, &ex->ss_solver);
2823   PetscCallExternal(HYPRE_SStructSysPFMGSetZeroGuess, ex->ss_solver);
2824   PetscCallExternal(HYPRE_SStructSysPFMGSetup, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x);
2825   PetscFunctionReturn(PETSC_SUCCESS);
2826 }
2827 
2828 /*MC
2829      PCSYSPFMG - the hypre SysPFMG multigrid solver
2830 
2831    Level: advanced
2832 
2833    Options Database Keys:
2834 + -pc_syspfmg_its <its> - number of iterations of SysPFMG to use as preconditioner
2835 . -pc_syspfmg_num_pre_relax <steps> - number of smoothing steps before coarse grid
2836 . -pc_syspfmg_num_post_relax <steps> - number of smoothing steps after coarse grid
2837 . -pc_syspfmg_tol <tol> - tolerance of SysPFMG
2838 - -pc_syspfmg_relax_type <Weighted-Jacobi,Red/Black-Gauss-Seidel> - relaxation type for the up and down cycles
2839 
2840    Notes:
2841    See `PCPFMG` for hypre's PFMG that works for a scalar PDE and `PCSMG`
2842 
2843    See `PCHYPRE` for hypre's BoomerAMG algebraic multigrid solver
2844 
   This is for CELL-centered discretizations
2846 
2847    This must be used with the `MATHYPRESSTRUCT` matrix type.
2848 
   This does not give access to all the functionality of hypre's SysPFMG, it supports only one part, and one block per process defined by a PETSc `DMDA`.
2850 
2851 .seealso: `PCMG`, `MATHYPRESSTRUCT`, `PCPFMG`, `PCHYPRE`, `PCGAMG`, `PCSMG`
2852 M*/
2853 
/*
   PCCreate_SysPFMG - Creates the PCSYSPFMG context with default option values
   and a hypre SysPFMG solver object; the solver is created here so that
   PCSetFromOptions_SysPFMG() can push options onto it before PCSetUp_SysPFMG()
   runs.
*/
PETSC_EXTERN PetscErrorCode PCCreate_SysPFMG(PC pc)
{
  PC_SysPFMG *ex;

  PetscFunctionBegin;
  PetscCall(PetscNew(&ex));
  pc->data = ex;

  ex->its            = 1;
  ex->tol            = 1.e-8;
  ex->relax_type     = 1; /* Red/Black-Gauss-Seidel, see SysPFMGRelaxType[] */
  ex->num_pre_relax  = 1;
  ex->num_post_relax = 1;

  pc->ops->setfromoptions  = PCSetFromOptions_SysPFMG;
  pc->ops->view            = PCView_SysPFMG;
  pc->ops->destroy         = PCDestroy_SysPFMG;
  pc->ops->apply           = PCApply_SysPFMG;
  pc->ops->applyrichardson = PCApplyRichardson_SysPFMG;
  pc->ops->setup           = PCSetUp_SysPFMG;

  PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
  PetscHYPREInitialize();
  PetscCallExternal(HYPRE_SStructSysPFMGCreate, ex->hcomm, &ex->ss_solver);
  PetscFunctionReturn(PETSC_SUCCESS);
}
2880 
/* Private context for the PCSMG preconditioner (hypre's struct-grid SMG) */
typedef struct {
  MPI_Comm           hcomm; /* does not share comm with HYPRE_StructMatrix because need to create solver before getting matrix */
  HYPRE_StructSolver hsolver;
  PetscInt           its; /* keep copy of SMG options used so may view them */
  double             tol;
  PetscBool          print_statistics;
  PetscInt           num_pre_relax, num_post_relax; /* smoothing steps before/after the coarse grid */
} PC_SMG;
2890 
2891 PetscErrorCode PCDestroy_SMG(PC pc)
2892 {
2893   PC_SMG *ex = (PC_SMG *)pc->data;
2894 
2895   PetscFunctionBegin;
2896   if (ex->hsolver) PetscCallExternal(HYPRE_StructSMGDestroy, ex->hsolver);
2897   PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
2898   PetscCall(PetscFree(pc->data));
2899   PetscFunctionReturn(PETSC_SUCCESS);
2900 }
2901 
/* PCView_SMG - Prints (ASCII viewers only) the SMG options cached in the PC_SMG context */
PetscErrorCode PCView_SMG(PC pc, PetscViewer viewer)
{
  PetscBool iascii;
  PC_SMG   *ex = (PC_SMG *)pc->data;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE SMG preconditioning\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    max iterations %" PetscInt_FMT "\n", ex->its));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    tolerance %g\n", ex->tol));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2917 
/*
   PCSetFromOptions_SMG - Processes the -pc_smg_* options database entries.

   NOTE(review): unlike PCSetFromOptions_PFMG(), the values are only cached in
   PC_SMG here and not pushed to the hypre solver; presumably the setup routine
   applies them when the solver is (re)created — confirm against PCSetUp_SMG().
*/
PetscErrorCode PCSetFromOptions_SMG(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_SMG *ex = (PC_SMG *)pc->data;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "SMG options");

  PetscCall(PetscOptionsInt("-pc_smg_its", "Number of iterations of SMG to use as preconditioner", "HYPRE_StructSMGSetMaxIter", ex->its, &ex->its, NULL));
  PetscCall(PetscOptionsInt("-pc_smg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_StructSMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL));
  PetscCall(PetscOptionsInt("-pc_smg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_StructSMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL));
  PetscCall(PetscOptionsReal("-pc_smg_tol", "Tolerance of SMG", "HYPRE_StructSMGSetTol", ex->tol, &ex->tol, NULL));

  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
2933 
/*
    PCApply_SMG - applies hypre's struct SMG solver: copies x into the hypre
    right-hand-side vector, runs HYPRE_StructSMGSolve(), and copies the hypre
    solution back into y.
*/
PetscErrorCode PCApply_SMG(PC pc, Vec x, Vec y)
{
  PC_SMG            *ex = (PC_SMG *)pc->data;
  PetscScalar       *yy;
  const PetscScalar *xx;
  PetscInt           ilower[3], iupper[3];
  HYPRE_Int          hlower[3], hupper[3];
  Mat_HYPREStruct   *mx = (Mat_HYPREStruct *)(pc->pmat->data);

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(hypreCitation, &cite));
  /* DMDAGetCorners() returns the lower corner plus the widths of the local box;
     convert the widths into inclusive upper-corner indices */
  PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2]));
  iupper[0] += ilower[0] - 1;
  iupper[1] += ilower[1] - 1;
  iupper[2] += ilower[2] - 1;
  /* when HYPRE_MIXEDINT is defined, HYPRE_Int is 32-bit while PetscInt may be
     64-bit, so cast the corner indices explicitly */
  hlower[0] = (HYPRE_Int)ilower[0];
  hlower[1] = (HYPRE_Int)ilower[1];
  hlower[2] = (HYPRE_Int)ilower[2];
  hupper[0] = (HYPRE_Int)iupper[0];
  hupper[1] = (HYPRE_Int)iupper[1];
  hupper[2] = (HYPRE_Int)iupper[2];

  /* copy x values over to hypre */
  PetscCallExternal(HYPRE_StructVectorSetConstantValues, mx->hb, 0.0);
  PetscCall(VecGetArrayRead(x, &xx));
  PetscCallExternal(HYPRE_StructVectorSetBoxValues, mx->hb, hlower, hupper, (HYPRE_Complex *)xx);
  PetscCall(VecRestoreArrayRead(x, &xx));
  PetscCallExternal(HYPRE_StructVectorAssemble, mx->hb);
  PetscCallExternal(HYPRE_StructSMGSolve, ex->hsolver, mx->hmat, mx->hb, mx->hx);

  /* copy solution values back to PETSc */
  PetscCall(VecGetArray(y, &yy));
  PetscCallExternal(HYPRE_StructVectorGetBoxValues, mx->hx, hlower, hupper, (HYPRE_Complex *)yy);
  PetscCall(VecRestoreArray(y, &yy));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2971 
2972 static PetscErrorCode PCApplyRichardson_SMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
2973 {
2974   PC_SMG   *jac = (PC_SMG *)pc->data;
2975   HYPRE_Int oits;
2976 
2977   PetscFunctionBegin;
2978   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
2979   PetscCallExternal(HYPRE_StructSMGSetMaxIter, jac->hsolver, its * jac->its);
2980   PetscCallExternal(HYPRE_StructSMGSetTol, jac->hsolver, rtol);
2981 
2982   PetscCall(PCApply_SMG(pc, b, y));
2983   PetscCallExternal(HYPRE_StructSMGGetNumIterations, jac->hsolver, &oits);
2984   *outits = oits;
2985   if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
2986   else *reason = PCRICHARDSON_CONVERGED_RTOL;
2987   PetscCallExternal(HYPRE_StructSMGSetTol, jac->hsolver, jac->tol);
2988   PetscCallExternal(HYPRE_StructSMGSetMaxIter, jac->hsolver, jac->its);
2989   PetscFunctionReturn(PETSC_SUCCESS);
2990 }
2991 
2992 PetscErrorCode PCSetUp_SMG(PC pc)
2993 {
2994   PetscInt         i, dim;
2995   PC_SMG          *ex = (PC_SMG *)pc->data;
2996   Mat_HYPREStruct *mx = (Mat_HYPREStruct *)(pc->pmat->data);
2997   PetscBool        flg;
2998   DMBoundaryType   p[3];
2999   PetscInt         M[3];
3000 
3001   PetscFunctionBegin;
3002   PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESTRUCT, &flg));
3003   PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESTRUCT with this preconditioner");
3004 
3005   PetscCall(DMDAGetInfo(mx->da, &dim, &M[0], &M[1], &M[2], 0, 0, 0, 0, 0, &p[0], &p[1], &p[2], 0));
3006   // Check if power of 2 in periodic directions
3007   for (i = 0; i < dim; i++) {
3008     if (((M[i] & (M[i] - 1)) != 0) && (p[i] == DM_BOUNDARY_PERIODIC)) {
3009       SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "With SMG, the number of points in a periodic direction must be a power of 2, but is here %" PetscInt_FMT ".", M[i]);
3010     }
3011   }
3012 
3013   /* create the hypre solver object and set its information */
3014   if (ex->hsolver) PetscCallExternal(HYPRE_StructSMGDestroy, (ex->hsolver));
3015   PetscCallExternal(HYPRE_StructSMGCreate, ex->hcomm, &ex->hsolver);
3016   // The hypre options must be set here and not in SetFromOptions because it is created here!
3017   PetscCallExternal(HYPRE_StructSMGSetMaxIter, ex->hsolver, ex->its);
3018   PetscCallExternal(HYPRE_StructSMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax);
3019   PetscCallExternal(HYPRE_StructSMGSetNumPostRelax, ex->hsolver, ex->num_post_relax);
3020   PetscCallExternal(HYPRE_StructSMGSetTol, ex->hsolver, ex->tol);
3021 
3022   PetscCallExternal(HYPRE_StructSMGSetup, ex->hsolver, mx->hmat, mx->hb, mx->hx);
3023   PetscCallExternal(HYPRE_StructSMGSetZeroGuess, ex->hsolver);
3024   PetscFunctionReturn(PETSC_SUCCESS);
3025 }
3026 
3027 /*MC
3028      PCSMG - the hypre (structured grid) SMG multigrid solver
3029 
3030    Level: advanced
3031 
3032    Options Database Keys:
3033 + -pc_smg_its <its> - number of iterations of SMG to use as preconditioner
3034 . -pc_smg_num_pre_relax <steps> - number of smoothing steps before coarse grid
3035 . -pc_smg_num_post_relax <steps> - number of smoothing steps after coarse grid
3036 - -pc_smg_tol <tol> - tolerance of SMG
3037 
3038    Notes:
3039    This is for CELL-centered discretizations
3040 
3041    This must be used with the `MATHYPRESTRUCT` `MatType`.
3042 
3043    This does not provide all the functionality of  hypre's SMG solver, it supports only one block per process defined by a PETSc `DMDA`.
3044 
3045    See `PCSYSPFMG`, `PCSMG`, `PCPFMG`, and `PCHYPRE` for access to hypre's other preconditioners
3046 
3047 .seealso:  `PCMG`, `MATHYPRESTRUCT`, `PCPFMG`, `PCSYSPFMG`, `PCHYPRE`, `PCGAMG`
3048 M*/
3049 
3050 PETSC_EXTERN PetscErrorCode PCCreate_SMG(PC pc)
3051 {
3052   PC_SMG *ex;
3053 
3054   PetscFunctionBegin;
3055   PetscCall(PetscNew(&ex));
3056   pc->data = ex;
3057 
3058   ex->its            = 1;
3059   ex->tol            = 1.e-8;
3060   ex->num_pre_relax  = 1;
3061   ex->num_post_relax = 1;
3062 
3063   pc->ops->setfromoptions  = PCSetFromOptions_SMG;
3064   pc->ops->view            = PCView_SMG;
3065   pc->ops->destroy         = PCDestroy_SMG;
3066   pc->ops->apply           = PCApply_SMG;
3067   pc->ops->applyrichardson = PCApplyRichardson_SMG;
3068   pc->ops->setup           = PCSetUp_SMG;
3069 
3070   PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
3071   PetscHYPREInitialize();
3072   PetscCallExternal(HYPRE_StructSMGCreate, ex->hcomm, &ex->hsolver);
3073   PetscFunctionReturn(PETSC_SUCCESS);
3074 }
3075