1 /* 2 Provides an interface to the LLNL package hypre 3 */ 4 5 #include <petscpkg_version.h> 6 #include <petsc/private/pcimpl.h> /*I "petscpc.h" I*/ 7 /* this include is needed ONLY to allow access to the private data inside the Mat object specific to hypre */ 8 #include <petsc/private/matimpl.h> 9 #include <petsc/private/vecimpl.h> 10 #include <../src/vec/vec/impls/hypre/vhyp.h> 11 #include <../src/mat/impls/hypre/mhypre.h> 12 #include <../src/dm/impls/da/hypre/mhyp.h> 13 #include <_hypre_parcsr_ls.h> 14 #include <petscmathypre.h> 15 16 #if defined(PETSC_HAVE_HYPRE_DEVICE) 17 #include <petsc/private/deviceimpl.h> 18 #endif 19 20 static PetscBool cite = PETSC_FALSE; 21 static const char hypreCitation[] = "@manual{hypre-web-page,\n title = {{\\sl hypre}: High Performance Preconditioners},\n organization = {Lawrence Livermore National Laboratory},\n note = " 22 "{\\url{https://www.llnl.gov/casc/hypre}}\n}\n"; 23 24 /* 25 Private context (data structure) for the preconditioner. 26 */ 27 typedef struct { 28 HYPRE_Solver hsolver; 29 Mat hpmat; /* MatHYPRE */ 30 31 HYPRE_Int (*destroy)(HYPRE_Solver); 32 HYPRE_Int (*solve)(HYPRE_Solver, HYPRE_ParCSRMatrix, HYPRE_ParVector, HYPRE_ParVector); 33 HYPRE_Int (*setup)(HYPRE_Solver, HYPRE_ParCSRMatrix, HYPRE_ParVector, HYPRE_ParVector); 34 35 MPI_Comm comm_hypre; 36 char *hypre_type; 37 38 /* options for Pilut and BoomerAMG*/ 39 PetscInt maxiter; 40 PetscReal tol; 41 42 /* options for Pilut */ 43 PetscInt factorrowsize; 44 45 /* options for ParaSails */ 46 PetscInt nlevels; 47 PetscReal threshold; 48 PetscReal filter; 49 PetscReal loadbal; 50 PetscInt logging; 51 PetscInt ruse; 52 PetscInt symt; 53 54 /* options for BoomerAMG */ 55 PetscBool printstatistics; 56 57 /* options for BoomerAMG */ 58 PetscInt cycletype; 59 PetscInt maxlevels; 60 PetscReal strongthreshold; 61 PetscReal maxrowsum; 62 PetscInt gridsweeps[3]; 63 PetscInt coarsentype; 64 PetscInt measuretype; 65 PetscInt smoothtype; 66 PetscInt smoothsweeps; 67 PetscInt smoothnumlevels; 68 PetscInt eu_level; /* Number of levels for ILU(k) in Euclid */ 69 PetscReal eu_droptolerance; /* Drop tolerance for ILU(k) in Euclid */ 70 PetscInt eu_bj; /* Defines use of Block Jacobi ILU in Euclid */ 71 PetscInt relaxtype[3]; 72 PetscReal relaxweight; 73 PetscReal outerrelaxweight; 74 PetscInt relaxorder; 75 PetscReal truncfactor; 76 PetscBool applyrichardson; 77 PetscInt pmax; 78 PetscInt interptype; 79 PetscInt maxc; 80 PetscInt minc; 81 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0) 82 char *spgemm_type; // this is a global hypre parameter but is closely associated with BoomerAMG 83 #endif 84 /* GPU */ 85 PetscBool keeptranspose; 86 PetscInt rap2; 87 PetscInt mod_rap2; 88 89 /* AIR */ 90 PetscInt Rtype; 91 PetscReal Rstrongthreshold; 92 PetscReal Rfilterthreshold; 93 PetscInt Adroptype; 94 PetscReal Adroptol; 95 96 PetscInt agg_nl; 97 PetscInt agg_interptype; 98 PetscInt agg_num_paths; 99 PetscBool nodal_relax; 100 PetscInt nodal_relax_levels; 101 102 PetscInt nodal_coarsening; 103 PetscInt nodal_coarsening_diag; 104 PetscInt vec_interp_variant; 105 PetscInt vec_interp_qmax; 106 PetscBool vec_interp_smooth; 107 PetscInt interp_refine; 108 109 /* NearNullSpace support */ 110 VecHYPRE_IJVector *hmnull; 111 HYPRE_ParVector *phmnull; 112 PetscInt n_hmnull; 113 Vec hmnull_constant; 114 115 /* options for AS (Auxiliary Space preconditioners) */ 116 PetscInt as_print; 117 PetscInt as_max_iter; 118 PetscReal as_tol; 119 PetscInt as_relax_type; 120 PetscInt as_relax_times; 121 PetscReal as_relax_weight; 122 PetscReal as_omega; 123 PetscInt as_amg_alpha_opts[5]; /* AMG coarsen type, agg_levels, relax_type, interp_type, Pmax for vector Poisson (AMS) or Curl problem (ADS) */ 124 PetscReal as_amg_alpha_theta; /* AMG strength for vector Poisson (AMS) or Curl problem (ADS) */ 125 PetscInt as_amg_beta_opts[5]; /* AMG coarsen type, agg_levels, relax_type, interp_type, Pmax for scalar Poisson (AMS) or vector Poisson (ADS) */ 126 PetscReal as_amg_beta_theta; /* AMG strength for scalar Poisson (AMS) or vector Poisson (ADS) */ 127 PetscInt ams_cycle_type; 128 PetscInt ads_cycle_type; 129 130 /* additional data */ 131 Mat G; /* MatHYPRE */ 132 Mat C; /* MatHYPRE */ 133 Mat alpha_Poisson; /* MatHYPRE */ 134 Mat beta_Poisson; /* MatHYPRE */ 135 136 /* extra information for AMS */ 137 PetscInt dim; /* geometrical dimension */ 138 VecHYPRE_IJVector coords[3]; 139 VecHYPRE_IJVector constants[3]; 140 VecHYPRE_IJVector interior; 141 Mat RT_PiFull, RT_Pi[3]; 142 Mat ND_PiFull, ND_Pi[3]; 143 PetscBool ams_beta_is_zero; 144 PetscBool ams_beta_is_zero_part; 145 PetscInt ams_proj_freq; 146 } PC_HYPRE; 147 148 /* 149 Matrices with AIJ format are created IN PLACE with using (I,J,data) from BoomerAMG. Since the data format in hypre_ParCSRMatrix 150 is different from that used in PETSc, the original hypre_ParCSRMatrix can not be used any more after call this routine. 151 It is used in PCHMG. Other users should avoid using this function. 152 */ 153 static PetscErrorCode PCGetCoarseOperators_BoomerAMG(PC pc, PetscInt *nlevels, Mat *operators[]) 154 { 155 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 156 PetscBool same; 157 PetscInt num_levels, l; 158 Mat *mattmp; 159 hypre_ParCSRMatrix **A_array; 160 161 PetscFunctionBegin; 162 PetscCall(PetscStrcmp(jac->hypre_type, "boomeramg", &same)); 163 PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_NOTSAMETYPE, "Hypre type is not BoomerAMG"); 164 num_levels = hypre_ParAMGDataNumLevels((hypre_ParAMGData *)jac->hsolver); 165 PetscCall(PetscMalloc1(num_levels, &mattmp)); 166 A_array = hypre_ParAMGDataAArray((hypre_ParAMGData *)jac->hsolver); 167 for (l = 1; l < num_levels; l++) { 168 PetscCall(MatCreateFromParCSR(A_array[l], MATAIJ, PETSC_OWN_POINTER, &mattmp[num_levels - 1 - l])); 169 /* We want to own the data, and HYPRE can not touch this matrix any more */ 170 A_array[l] = NULL; 171 } 172 *nlevels = num_levels; 173 *operators = mattmp; 174 PetscFunctionReturn(PETSC_SUCCESS); 175 } 176 177 /* 178 Matrices with AIJ format are created IN PLACE with using (I,J,data) from BoomerAMG. Since the data format in hypre_ParCSRMatrix 179 is different from that used in PETSc, the original hypre_ParCSRMatrix can not be used any more after call this routine. 180 It is used in PCHMG. Other users should avoid using this function. 181 */ 182 static PetscErrorCode PCGetInterpolations_BoomerAMG(PC pc, PetscInt *nlevels, Mat *interpolations[]) 183 { 184 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 185 PetscBool same; 186 PetscInt num_levels, l; 187 Mat *mattmp; 188 hypre_ParCSRMatrix **P_array; 189 190 PetscFunctionBegin; 191 PetscCall(PetscStrcmp(jac->hypre_type, "boomeramg", &same)); 192 PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_NOTSAMETYPE, "Hypre type is not BoomerAMG"); 193 num_levels = hypre_ParAMGDataNumLevels((hypre_ParAMGData *)jac->hsolver); 194 PetscCall(PetscMalloc1(num_levels, &mattmp)); 195 P_array = hypre_ParAMGDataPArray((hypre_ParAMGData *)jac->hsolver); 196 for (l = 1; l < num_levels; l++) { 197 PetscCall(MatCreateFromParCSR(P_array[num_levels - 1 - l], MATAIJ, PETSC_OWN_POINTER, &mattmp[l - 1])); 198 /* We want to own the data, and HYPRE can not touch this matrix any more */ 199 P_array[num_levels - 1 - l] = NULL; 200 } 201 *nlevels = num_levels; 202 *interpolations = mattmp; 203 PetscFunctionReturn(PETSC_SUCCESS); 204 } 205 206 /* 207 Boolean Vecs are created IN PLACE with using data from BoomerAMG. 208 */ 209 static PetscErrorCode PCHYPREGetCFMarkers_BoomerAMG(PC pc, PetscInt *n_per_level[], PetscBT *CFMarkers[]) 210 { 211 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 212 PetscBool same; 213 PetscInt num_levels, fine_nodes = 0, coarse_nodes; 214 PetscInt *n_per_temp; 215 PetscBT *markertmp; 216 hypre_IntArray **CF_marker_array; 217 218 PetscFunctionBegin; 219 PetscCall(PetscStrcmp(jac->hypre_type, "boomeramg", &same)); 220 PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_NOTSAMETYPE, "Hypre type is not BoomerAMG"); 221 num_levels = hypre_ParAMGDataNumLevels((hypre_ParAMGData *)jac->hsolver); 222 PetscCall(PetscMalloc1(num_levels, &n_per_temp)); 223 PetscCall(PetscMalloc1(num_levels - 1, &markertmp)); 224 CF_marker_array = hypre_ParAMGDataCFMarkerArray((hypre_ParAMGData *)jac->hsolver); 225 for (PetscInt l = 0, CFMaxIndex = num_levels - 2; CFMaxIndex >= 0; l++, CFMaxIndex--) { 226 fine_nodes = hypre_IntArraySize(CF_marker_array[CFMaxIndex]); 227 coarse_nodes = 0; 228 PetscCall(PetscBTCreate(fine_nodes, &markertmp[l])); 229 for (PetscInt k = 0; k < fine_nodes; k++) { 230 if (hypre_IntArrayDataI(CF_marker_array[CFMaxIndex], k) > 0) { 231 PetscCall(PetscBTSet(markertmp[l], k)); 232 coarse_nodes++; 233 } 234 } 235 n_per_temp[l] = coarse_nodes; 236 } 237 n_per_temp[num_levels - 1] = fine_nodes; 238 *n_per_level = n_per_temp; 239 *CFMarkers = markertmp; 240 PetscFunctionReturn(PETSC_SUCCESS); 241 } 242 243 /* Resets (frees) Hypre's representation of the near null space */ 244 static PetscErrorCode PCHYPREResetNearNullSpace_Private(PC pc) 245 { 246 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 247 PetscInt i; 248 249 PetscFunctionBegin; 250 for (i = 0; i < jac->n_hmnull; i++) PetscCall(VecHYPRE_IJVectorDestroy(&jac->hmnull[i])); 251 PetscCall(PetscFree(jac->hmnull)); 252 PetscCall(PetscFree(jac->phmnull)); 253 PetscCall(VecDestroy(&jac->hmnull_constant)); 254 jac->n_hmnull = 0; 255 PetscFunctionReturn(PETSC_SUCCESS); 256 } 257 258 static PetscErrorCode PCSetUp_HYPRE(PC pc) 259 { 260 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 261 Mat_HYPRE *hjac; 262 HYPRE_ParCSRMatrix hmat; 263 HYPRE_ParVector bv, xv; 264 PetscBool ishypre; 265 266 PetscFunctionBegin; 267 /* default type is boomerAMG */ 268 if (!jac->hypre_type) PetscCall(PCHYPRESetType(pc, "boomeramg")); 269 270 /* get hypre matrix */ 271 if (pc->flag == DIFFERENT_NONZERO_PATTERN) PetscCall(MatDestroy(&jac->hpmat)); 272 PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRE, &ishypre)); 273 if (!ishypre) { 274 #if defined(PETSC_HAVE_HYPRE_DEVICE) && PETSC_PKG_HYPRE_VERSION_LE(2, 30, 0) 275 /* Temporary fix since we do not support MAT_REUSE_MATRIX with HYPRE device */ 276 PetscBool iscuda, iship, iskokkos; 277 278 PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &iscuda, MATSEQAIJCUSPARSE, MATMPIAIJCUSPARSE, "")); 279 PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &iship, MATSEQAIJHIPSPARSE, MATMPIAIJHIPSPARSE, "")); 280 PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &iskokkos, MATSEQAIJKOKKOS, MATMPIAIJKOKKOS, "")); 281 if (iscuda || iship || iskokkos) PetscCall(MatDestroy(&jac->hpmat)); 282 #endif 283 PetscCall(MatConvert(pc->pmat, MATHYPRE, jac->hpmat ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX, &jac->hpmat)); 284 } else { 285 PetscCall(PetscObjectReference((PetscObject)pc->pmat)); 286 PetscCall(MatDestroy(&jac->hpmat)); 287 jac->hpmat = pc->pmat; 288 } 289 290 /* allow debug */ 291 PetscCall(MatViewFromOptions(jac->hpmat, NULL, "-pc_hypre_mat_view")); 292 hjac = (Mat_HYPRE *)jac->hpmat->data; 293 294 /* special case for BoomerAMG */ 295 if (jac->setup == HYPRE_BoomerAMGSetup) { 296 MatNullSpace mnull; 297 PetscBool has_const; 298 PetscInt bs, nvec, i; 299 const Vec *vecs; 300 301 PetscCall(MatGetBlockSize(pc->pmat, &bs)); 302 if (bs > 1) PetscCallExternal(HYPRE_BoomerAMGSetNumFunctions, jac->hsolver, bs); 303 PetscCall(MatGetNearNullSpace(pc->mat, &mnull)); 304 if (mnull) { 305 PetscCall(PCHYPREResetNearNullSpace_Private(pc)); 306 PetscCall(MatNullSpaceGetVecs(mnull, &has_const, &nvec, &vecs)); 307 PetscCall(PetscMalloc1(nvec + 1, &jac->hmnull)); 308 PetscCall(PetscMalloc1(nvec + 1, &jac->phmnull)); 309 for (i = 0; i < nvec; i++) { 310 PetscCall(VecHYPRE_IJVectorCreate(vecs[i]->map, &jac->hmnull[i])); 311 PetscCall(VecHYPRE_IJVectorCopy(vecs[i], jac->hmnull[i])); 312 PetscCallExternal(HYPRE_IJVectorGetObject, jac->hmnull[i]->ij, (void **)&jac->phmnull[i]); 313 } 314 if (has_const) { 315 PetscCall(MatCreateVecs(pc->pmat, &jac->hmnull_constant, NULL)); 316 PetscCall(VecSet(jac->hmnull_constant, 1)); 317 PetscCall(VecNormalize(jac->hmnull_constant, NULL)); 318 PetscCall(VecHYPRE_IJVectorCreate(jac->hmnull_constant->map, &jac->hmnull[nvec])); 319 PetscCall(VecHYPRE_IJVectorCopy(jac->hmnull_constant, jac->hmnull[nvec])); 320 PetscCallExternal(HYPRE_IJVectorGetObject, jac->hmnull[nvec]->ij, (void **)&jac->phmnull[nvec]); 321 nvec++; 322 } 323 PetscCallExternal(HYPRE_BoomerAMGSetInterpVectors, jac->hsolver, nvec, jac->phmnull); 324 jac->n_hmnull = nvec; 325 } 326 } 327 328 /* special case for AMS */ 329 if (jac->setup == HYPRE_AMSSetup) { 330 Mat_HYPRE *hm; 331 HYPRE_ParCSRMatrix parcsr; 332 if (!jac->coords[0] && !jac->constants[0] && !(jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1]))) { 333 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE AMS preconditioner needs either the coordinate vectors via PCSetCoordinates() or the edge constant vectors via PCHYPRESetEdgeConstantVectors() or the interpolation matrix via PCHYPRESetInterpolations()"); 334 } 335 if (jac->dim) PetscCallExternal(HYPRE_AMSSetDimension, jac->hsolver, jac->dim); 336 if (jac->constants[0]) { 337 HYPRE_ParVector ozz, zoz, zzo = NULL; 338 PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[0]->ij, (void **)(&ozz)); 339 PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[1]->ij, (void **)(&zoz)); 340 if (jac->constants[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[2]->ij, (void **)(&zzo)); 341 PetscCallExternal(HYPRE_AMSSetEdgeConstantVectors, jac->hsolver, ozz, zoz, zzo); 342 } 343 if (jac->coords[0]) { 344 HYPRE_ParVector coords[3]; 345 coords[0] = NULL; 346 coords[1] = NULL; 347 coords[2] = NULL; 348 if (jac->coords[0]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[0]->ij, (void **)(&coords[0])); 349 if (jac->coords[1]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[1]->ij, (void **)(&coords[1])); 350 if (jac->coords[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[2]->ij, (void **)(&coords[2])); 351 PetscCallExternal(HYPRE_AMSSetCoordinateVectors, jac->hsolver, coords[0], coords[1], coords[2]); 352 } 353 PetscCheck(jac->G, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE AMS preconditioner needs the discrete gradient operator via PCHYPRESetDiscreteGradient"); 354 hm = (Mat_HYPRE *)jac->G->data; 355 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr)); 356 PetscCallExternal(HYPRE_AMSSetDiscreteGradient, jac->hsolver, parcsr); 357 if (jac->alpha_Poisson) { 358 hm = (Mat_HYPRE *)jac->alpha_Poisson->data; 359 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr)); 360 PetscCallExternal(HYPRE_AMSSetAlphaPoissonMatrix, jac->hsolver, parcsr); 361 } 362 if (jac->ams_beta_is_zero) { 363 PetscCallExternal(HYPRE_AMSSetBetaPoissonMatrix, jac->hsolver, NULL); 364 } else if (jac->beta_Poisson) { 365 hm = (Mat_HYPRE *)jac->beta_Poisson->data; 366 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr)); 367 PetscCallExternal(HYPRE_AMSSetBetaPoissonMatrix, jac->hsolver, parcsr); 368 } else if (jac->ams_beta_is_zero_part) { 369 if (jac->interior) { 370 HYPRE_ParVector interior = NULL; 371 PetscCallExternal(HYPRE_IJVectorGetObject, jac->interior->ij, (void **)(&interior)); 372 PetscCallExternal(HYPRE_AMSSetInteriorNodes, jac->hsolver, interior); 373 } else { 374 jac->ams_beta_is_zero_part = PETSC_FALSE; 375 } 376 } 377 if (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1])) { 378 PetscInt i; 379 HYPRE_ParCSRMatrix nd_parcsrfull, nd_parcsr[3]; 380 if (jac->ND_PiFull) { 381 hm = (Mat_HYPRE *)jac->ND_PiFull->data; 382 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsrfull)); 383 } else { 384 nd_parcsrfull = NULL; 385 } 386 for (i = 0; i < 3; ++i) { 387 if (jac->ND_Pi[i]) { 388 hm = (Mat_HYPRE *)jac->ND_Pi[i]->data; 389 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsr[i])); 390 } else { 391 nd_parcsr[i] = NULL; 392 } 393 } 394 PetscCallExternal(HYPRE_AMSSetInterpolations, jac->hsolver, nd_parcsrfull, nd_parcsr[0], nd_parcsr[1], nd_parcsr[2]); 395 } 396 } 397 /* special case for ADS */ 398 if (jac->setup == HYPRE_ADSSetup) { 399 Mat_HYPRE *hm; 400 HYPRE_ParCSRMatrix parcsr; 401 if (!jac->coords[0] && !((jac->RT_PiFull || (jac->RT_Pi[0] && jac->RT_Pi[1])) && (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1])))) { 402 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs either the coordinate vectors via PCSetCoordinates() or the interpolation matrices via PCHYPRESetInterpolations"); 403 } else PetscCheck(jac->coords[1] && jac->coords[2], PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner has been designed for three dimensional problems! For two dimensional problems, use HYPRE AMS instead"); 404 PetscCheck(jac->G, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs the discrete gradient operator via PCHYPRESetDiscreteGradient"); 405 PetscCheck(jac->C, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs the discrete curl operator via PCHYPRESetDiscreteGradient"); 406 if (jac->coords[0]) { 407 HYPRE_ParVector coords[3]; 408 coords[0] = NULL; 409 coords[1] = NULL; 410 coords[2] = NULL; 411 if (jac->coords[0]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[0]->ij, (void **)(&coords[0])); 412 if (jac->coords[1]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[1]->ij, (void **)(&coords[1])); 413 if (jac->coords[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[2]->ij, (void **)(&coords[2])); 414 PetscCallExternal(HYPRE_ADSSetCoordinateVectors, jac->hsolver, coords[0], coords[1], coords[2]); 415 } 416 hm = (Mat_HYPRE *)jac->G->data; 417 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr)); 418 PetscCallExternal(HYPRE_ADSSetDiscreteGradient, jac->hsolver, parcsr); 419 hm = (Mat_HYPRE *)jac->C->data; 420 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr)); 421 PetscCallExternal(HYPRE_ADSSetDiscreteCurl, jac->hsolver, parcsr); 422 if ((jac->RT_PiFull || (jac->RT_Pi[0] && jac->RT_Pi[1])) && (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1]))) { 423 PetscInt i; 424 HYPRE_ParCSRMatrix rt_parcsrfull, rt_parcsr[3]; 425 HYPRE_ParCSRMatrix nd_parcsrfull, nd_parcsr[3]; 426 if (jac->RT_PiFull) { 427 hm = (Mat_HYPRE *)jac->RT_PiFull->data; 428 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&rt_parcsrfull)); 429 } else { 430 rt_parcsrfull = NULL; 431 } 432 for (i = 0; i < 3; ++i) { 433 if (jac->RT_Pi[i]) { 434 hm = (Mat_HYPRE *)jac->RT_Pi[i]->data; 435 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&rt_parcsr[i])); 436 } else { 437 rt_parcsr[i] = NULL; 438 } 439 } 440 if (jac->ND_PiFull) { 441 hm = (Mat_HYPRE *)jac->ND_PiFull->data; 442 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsrfull)); 443 } else { 444 nd_parcsrfull = NULL; 445 } 446 for (i = 0; i < 3; ++i) { 447 if (jac->ND_Pi[i]) { 448 hm = (Mat_HYPRE *)jac->ND_Pi[i]->data; 449 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsr[i])); 450 } else { 451 nd_parcsr[i] = NULL; 452 } 453 } 454 PetscCallExternal(HYPRE_ADSSetInterpolations, jac->hsolver, rt_parcsrfull, rt_parcsr[0], rt_parcsr[1], rt_parcsr[2], nd_parcsrfull, nd_parcsr[0], nd_parcsr[1], nd_parcsr[2]); 455 } 456 } 457 PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat); 458 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&bv); 459 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&xv); 460 PetscCall(PetscFPTrapPush(PETSC_FP_TRAP_OFF)); 461 PetscCallExternal(jac->setup, jac->hsolver, hmat, bv, xv); 462 PetscCall(PetscFPTrapPop()); 463 PetscFunctionReturn(PETSC_SUCCESS); 464 } 465 466 static PetscErrorCode PCApply_HYPRE(PC pc, Vec b, Vec x) 467 { 468 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 469 Mat_HYPRE *hjac = (Mat_HYPRE *)jac->hpmat->data; 470 HYPRE_ParCSRMatrix hmat; 471 HYPRE_ParVector jbv, jxv; 472 473 PetscFunctionBegin; 474 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 475 if (!jac->applyrichardson) PetscCall(VecSet(x, 0.0)); 476 PetscCall(VecHYPRE_IJVectorPushVecRead(hjac->b, b)); 477 if (jac->applyrichardson) PetscCall(VecHYPRE_IJVectorPushVec(hjac->x, x)); 478 else PetscCall(VecHYPRE_IJVectorPushVecWrite(hjac->x, x)); 479 PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat); 480 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&jbv); 481 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&jxv); 482 PetscStackCallExternalVoid( 483 "Hypre solve", do { 484 HYPRE_Int hierr = (*jac->solve)(jac->hsolver, hmat, jbv, jxv); 485 if (hierr) { 486 PetscCheck(hierr == HYPRE_ERROR_CONV, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr); 487 HYPRE_ClearAllErrors(); 488 } 489 } while (0)); 490 491 if (jac->setup == HYPRE_AMSSetup && jac->ams_beta_is_zero_part) PetscCallExternal(HYPRE_AMSProjectOutGradients, jac->hsolver, jxv); 492 PetscCall(VecHYPRE_IJVectorPopVec(hjac->x)); 493 PetscCall(VecHYPRE_IJVectorPopVec(hjac->b)); 494 PetscFunctionReturn(PETSC_SUCCESS); 495 } 496 497 static PetscErrorCode PCMatApply_HYPRE_BoomerAMG(PC pc, Mat B, Mat X) 498 { 499 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 500 Mat_HYPRE *hjac = (Mat_HYPRE *)jac->hpmat->data; 501 hypre_ParCSRMatrix *par_matrix; 502 HYPRE_ParVector hb, hx; 503 const PetscScalar *b; 504 PetscScalar *x; 505 PetscInt m, N, lda; 506 hypre_Vector *x_local; 507 PetscMemType type; 508 509 PetscFunctionBegin; 510 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 511 PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&par_matrix); 512 PetscCall(MatGetLocalSize(B, &m, NULL)); 513 PetscCall(MatGetSize(B, NULL, &N)); 514 PetscCallExternal(HYPRE_ParMultiVectorCreate, hypre_ParCSRMatrixComm(par_matrix), hypre_ParCSRMatrixGlobalNumRows(par_matrix), hypre_ParCSRMatrixRowStarts(par_matrix), N, &hb); 515 PetscCallExternal(HYPRE_ParMultiVectorCreate, hypre_ParCSRMatrixComm(par_matrix), hypre_ParCSRMatrixGlobalNumRows(par_matrix), hypre_ParCSRMatrixRowStarts(par_matrix), N, &hx); 516 PetscCall(MatZeroEntries(X)); 517 PetscCall(MatDenseGetArrayReadAndMemType(B, &b, &type)); 518 PetscCall(MatDenseGetLDA(B, &lda)); 519 PetscCheck(lda == m, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use a LDA different than the number of local rows: % " PetscInt_FMT " != % " PetscInt_FMT, lda, m); 520 PetscCall(MatDenseGetLDA(X, &lda)); 521 PetscCheck(lda == m, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use a LDA different than the number of local rows: % " PetscInt_FMT " != % " PetscInt_FMT, lda, m); 522 x_local = hypre_ParVectorLocalVector(hb); 523 PetscCallExternal(hypre_SeqVectorSetDataOwner, x_local, 0); 524 hypre_VectorData(x_local) = (HYPRE_Complex *)b; 525 PetscCall(MatDenseGetArrayWriteAndMemType(X, &x, NULL)); 526 x_local = hypre_ParVectorLocalVector(hx); 527 PetscCallExternal(hypre_SeqVectorSetDataOwner, x_local, 0); 528 hypre_VectorData(x_local) = (HYPRE_Complex *)x; 529 PetscCallExternal(hypre_ParVectorInitialize_v2, hb, type == PETSC_MEMTYPE_HOST ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE); 530 PetscCallExternal(hypre_ParVectorInitialize_v2, hx, type == PETSC_MEMTYPE_HOST ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE); 531 PetscStackCallExternalVoid( 532 "Hypre solve", do { 533 HYPRE_Int hierr = (*jac->solve)(jac->hsolver, par_matrix, hb, hx); 534 if (hierr) { 535 PetscCheck(hierr == HYPRE_ERROR_CONV, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr); 536 HYPRE_ClearAllErrors(); 537 } 538 } while (0)); 539 PetscCallExternal(HYPRE_ParVectorDestroy, hb); 540 PetscCallExternal(HYPRE_ParVectorDestroy, hx); 541 PetscCall(MatDenseRestoreArrayReadAndMemType(B, &b)); 542 PetscCall(MatDenseRestoreArrayWriteAndMemType(X, &x)); 543 PetscFunctionReturn(PETSC_SUCCESS); 544 } 545 546 static PetscErrorCode PCReset_HYPRE(PC pc) 547 { 548 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 549 550 PetscFunctionBegin; 551 PetscCall(MatDestroy(&jac->hpmat)); 552 PetscCall(MatDestroy(&jac->G)); 553 PetscCall(MatDestroy(&jac->C)); 554 PetscCall(MatDestroy(&jac->alpha_Poisson)); 555 PetscCall(MatDestroy(&jac->beta_Poisson)); 556 PetscCall(MatDestroy(&jac->RT_PiFull)); 557 PetscCall(MatDestroy(&jac->RT_Pi[0])); 558 PetscCall(MatDestroy(&jac->RT_Pi[1])); 559 PetscCall(MatDestroy(&jac->RT_Pi[2])); 560 PetscCall(MatDestroy(&jac->ND_PiFull)); 561 PetscCall(MatDestroy(&jac->ND_Pi[0])); 562 PetscCall(MatDestroy(&jac->ND_Pi[1])); 563 PetscCall(MatDestroy(&jac->ND_Pi[2])); 564 PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[0])); 565 PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[1])); 566 PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[2])); 567 PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[0])); 568 PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[1])); 569 PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[2])); 570 PetscCall(VecHYPRE_IJVectorDestroy(&jac->interior)); 571 PetscCall(PCHYPREResetNearNullSpace_Private(pc)); 572 jac->ams_beta_is_zero = PETSC_FALSE; 573 jac->ams_beta_is_zero_part = PETSC_FALSE; 574 jac->dim = 0; 575 PetscFunctionReturn(PETSC_SUCCESS); 576 } 577 578 static PetscErrorCode PCDestroy_HYPRE(PC pc) 579 { 580 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 581 582 PetscFunctionBegin; 583 PetscCall(PCReset_HYPRE(pc)); 584 if (jac->destroy) PetscCallExternal(jac->destroy, jac->hsolver); 585 PetscCall(PetscFree(jac->hypre_type)); 586 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0) 587 PetscCall(PetscFree(jac->spgemm_type)); 588 #endif 589 if (jac->comm_hypre != MPI_COMM_NULL) PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre)); 590 PetscCall(PetscFree(pc->data)); 591 592 PetscCall(PetscObjectChangeTypeName((PetscObject)pc, 0)); 593 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetType_C", NULL)); 594 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetType_C", NULL)); 595 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteGradient_C", NULL)); 596 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteCurl_C", NULL)); 597 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetInterpolations_C", NULL)); 598 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetConstantEdgeVectors_C", NULL)); 599 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetPoissonMatrix_C", NULL)); 600 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetEdgeConstantVectors_C", NULL)); 601 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREAMSSetInteriorNodes_C", NULL)); 602 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL)); 603 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL)); 604 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetCFMarkers_C", NULL)); 605 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinSetMatProductAlgorithm_C", NULL)); 606 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinGetMatProductAlgorithm_C", NULL)); 607 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL)); 608 PetscFunctionReturn(PETSC_SUCCESS); 609 } 610 611 static PetscErrorCode PCSetFromOptions_HYPRE_Pilut(PC pc, PetscOptionItems PetscOptionsObject) 612 { 613 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 614 PetscBool flag; 615 616 PetscFunctionBegin; 617 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE Pilut Options"); 618 PetscCall(PetscOptionsInt("-pc_hypre_pilut_maxiter", "Number of iterations", "None", jac->maxiter, &jac->maxiter, &flag)); 619 if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetMaxIter, jac->hsolver, jac->maxiter); 620 PetscCall(PetscOptionsReal("-pc_hypre_pilut_tol", "Drop tolerance", "None", jac->tol, &jac->tol, &flag)); 621 if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetDropTolerance, jac->hsolver, jac->tol); 622 PetscCall(PetscOptionsInt("-pc_hypre_pilut_factorrowsize", "FactorRowSize", "None", jac->factorrowsize, &jac->factorrowsize, &flag)); 623 if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetFactorRowSize, jac->hsolver, jac->factorrowsize); 624 PetscOptionsHeadEnd(); 625 PetscFunctionReturn(PETSC_SUCCESS); 626 } 627 628 static PetscErrorCode PCView_HYPRE_Pilut(PC pc, PetscViewer viewer) 629 { 630 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 631 PetscBool iascii; 632 633 PetscFunctionBegin; 634 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 635 if (iascii) { 636 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE Pilut preconditioning\n")); 637 if (jac->maxiter != PETSC_DEFAULT) { 638 PetscCall(PetscViewerASCIIPrintf(viewer, " maximum number of iterations %" PetscInt_FMT "\n", jac->maxiter)); 639 } else { 640 PetscCall(PetscViewerASCIIPrintf(viewer, " default maximum number of iterations \n")); 641 } 642 if (jac->tol != PETSC_DEFAULT) { 643 PetscCall(PetscViewerASCIIPrintf(viewer, " drop tolerance %g\n", (double)jac->tol)); 644 } else { 645 PetscCall(PetscViewerASCIIPrintf(viewer, " default drop tolerance \n")); 646 } 647 if (jac->factorrowsize != PETSC_DEFAULT) { 648 PetscCall(PetscViewerASCIIPrintf(viewer, " factor row size %" PetscInt_FMT "\n", jac->factorrowsize)); 649 } else { 650 PetscCall(PetscViewerASCIIPrintf(viewer, " default factor row size \n")); 651 } 652 } 653 PetscFunctionReturn(PETSC_SUCCESS); 654 } 655 656 static const char *HYPREILUType[] = { 657 "Block-Jacobi-ILUk", "Block-Jacobi-ILUT", "", "", "", "", "", "", "", "", /* 0-9 */ 658 "GMRES-ILUk", "GMRES-ILUT", "", "", "", "", "", "", "", "", /* 10-19 */ 659 "NSH-ILUk", "NSH-ILUT", "", "", "", "", "", "", "", "", /* 20-29 */ 660 "RAS-ILUk", "RAS-ILUT", "", "", "", "", "", "", "", "", /* 30-39 */ 661 "ddPQ-GMRES-ILUk", "ddPQ-GMRES-ILUT", "", "", "", "", "", "", "", "", /* 40-49 */ 662 "GMRES-ILU0" /* 50 */ 663 }; 664 665 static const char *HYPREILUIterSetup[] = {"default", "async-in-place", "async-explicit", "sync-explicit", "semisync-explicit"}; 666 667 static PetscErrorCode PCSetFromOptions_HYPRE_ILU(PC pc, PetscOptionItems PetscOptionsObject) 668 { 669 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 670 PetscBool flg; 671 PetscInt indx; 672 PetscReal tmpdbl; 673 PetscBool tmp_truth; 674 675 PetscFunctionBegin; 676 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ILU Options"); 677 678 /* ILU: ILU Type */ 679 PetscCall(PetscOptionsEList("-pc_hypre_ilu_type", "Choose ILU Type", "None", HYPREILUType, PETSC_STATIC_ARRAY_LENGTH(HYPREILUType), HYPREILUType[0], &indx, &flg)); 680 if (flg) { PetscCallExternal(HYPRE_ILUSetType, jac->hsolver, indx); } 681 682 /* ILU: ILU iterative setup type*/ 683 PetscCall(PetscOptionsEList("-pc_hypre_ilu_iterative_setup_type", "Set ILU iterative setup type", "None", HYPREILUIterSetup, PETSC_STATIC_ARRAY_LENGTH(HYPREILUIterSetup), HYPREILUIterSetup[0], &indx, &flg)); 684 if (flg) { PetscCallExternal(HYPRE_ILUSetIterativeSetupType, jac->hsolver, indx); } 685 686 /* ILU: ILU iterative setup option*/ 687 PetscCall(PetscOptionsInt("-pc_hypre_ilu_iterative_setup_option", "Set ILU iterative setup option", "None", 0, &indx, &flg)); 688 if (flg) { PetscCallExternal(HYPRE_ILUSetIterativeSetupOption, jac->hsolver, indx); } 689 690 /* ILU: ILU iterative setup maxiter */ 691 PetscCall(PetscOptionsInt("-pc_hypre_ilu_iterative_setup_maxiter", "Set ILU iterative setup maximum iteration count", "None", 0, &indx, &flg)); 692 if (flg) { PetscCallExternal(HYPRE_ILUSetIterativeSetupMaxIter, jac->hsolver, indx); } 693 694 /* ILU: ILU iterative setup tolerance */ 695 PetscCall(PetscOptionsReal("-pc_hypre_ilu_iterative_setup_tolerance", "Set ILU iterative setup tolerance", "None", 0, &tmpdbl, &flg)); 696 if (flg) { PetscCallExternal(HYPRE_ILUSetIterativeSetupTolerance, jac->hsolver, tmpdbl); } 697 698 /* ILU: ILU Print Level */ 699 PetscCall(PetscOptionsInt("-pc_hypre_ilu_print_level", "Set ILU print level", "None", 0, &indx, &flg)); 700 if (flg) { PetscCallExternal(HYPRE_ILUSetPrintLevel, jac->hsolver, indx); } 701 702 /* ILU: Logging */ 703 PetscCall(PetscOptionsInt("-pc_hypre_ilu_logging", "Set ILU logging level", "None", 0, &indx, &flg)); 704 if (flg) { PetscCallExternal(HYPRE_ILUSetLogging, jac->hsolver, indx); } 705 706 /* ILU: ILU Level */ 707 PetscCall(PetscOptionsInt("-pc_hypre_ilu_level", "Set ILU level", "None", 0, &indx, &flg)); 708 if (flg) { PetscCallExternal(HYPRE_ILUSetLevelOfFill, jac->hsolver, indx); } 709 710 /* ILU: ILU Max NNZ per row */ 711 PetscCall(PetscOptionsInt("-pc_hypre_ilu_max_nnz_per_row", "Set maximum NNZ per row", "None", 0, &indx, &flg)); 712 if (flg) { PetscCallExternal(HYPRE_ILUSetMaxNnzPerRow, jac->hsolver, indx); } 713 714 /* ILU: tolerance */ 715 PetscCall(PetscOptionsReal("-pc_hypre_ilu_tol", "Tolerance for ILU", "None", 0, &tmpdbl, &flg)); 716 if (flg) { PetscCallExternal(HYPRE_ILUSetTol, jac->hsolver, tmpdbl); } 717 718 /* ILU: maximum iteration count */ 719 PetscCall(PetscOptionsInt("-pc_hypre_ilu_maxiter", "Set ILU max iterations", "None", 0, &indx, &flg)); 720 if (flg) { PetscCallExternal(HYPRE_ILUSetMaxIter, jac->hsolver, indx); } 721 722 /* ILU: drop threshold */ 723 PetscCall(PetscOptionsReal("-pc_hypre_ilu_drop_threshold", "Drop threshold for ILU", "None", 0, &tmpdbl, &flg)); 724 if (flg) { PetscCallExternal(HYPRE_ILUSetDropThreshold, jac->hsolver, tmpdbl); } 725 726 /* ILU: Triangular Solve */ 727 PetscCall(PetscOptionsBool("-pc_hypre_ilu_tri_solve", "Enable triangular solve", "None", PETSC_FALSE, &tmp_truth, &flg)); 728 if (flg) { PetscCallExternal(HYPRE_ILUSetTriSolve, jac->hsolver, tmp_truth); } 729 730 /* ILU: Lower Jacobi iteration */ 731 PetscCall(PetscOptionsInt("-pc_hypre_ilu_lower_jacobi_iters", "Set lower Jacobi iteration count", "None", 0, &indx, &flg)); 732 if (flg) { PetscCallExternal(HYPRE_ILUSetLowerJacobiIters, jac->hsolver, indx); } 733 734 /* ILU: Upper Jacobi iteration */ 735 PetscCall(PetscOptionsInt("-pc_hypre_ilu_upper_jacobi_iters", "Set upper Jacobi iteration count", "None", 0, &indx, &flg)); 736 if (flg) { PetscCallExternal(HYPRE_ILUSetUpperJacobiIters, jac->hsolver, indx); } 737 738 /* ILU: local reordering */ 739 PetscCall(PetscOptionsBool("-pc_hypre_ilu_local_reordering", "Enable local reordering", "None", PETSC_FALSE, &tmp_truth, &flg)); 740 if (flg) { PetscCallExternal(HYPRE_ILUSetLocalReordering, jac->hsolver, tmp_truth); } 741 742 PetscOptionsHeadEnd(); 743 PetscFunctionReturn(PETSC_SUCCESS); 744 } 745 746 static PetscErrorCode PCView_HYPRE_ILU(PC pc, PetscViewer viewer) 747 { 748 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 749 hypre_ParILUData *ilu_data = (hypre_ParILUData *)jac->hsolver; 750 PetscBool iascii; 751 PetscInt indx; 752 PetscReal tmpdbl; 753 PetscReal *tmpdbl3; 754 755 PetscFunctionBegin; 756 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 757 if (iascii) { 758 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE ILU preconditioning\n")); 759 PetscStackCallExternalVoid("hypre_ParILUDataIluType", indx = hypre_ParILUDataIluType(ilu_data)); 760 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU type %s (%" PetscInt_FMT ")\n", HYPREILUType[indx], indx)); 761 PetscStackCallExternalVoid("hypre_ParILUDataLfil", indx = hypre_ParILUDataLfil(ilu_data)); 762 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU level %" PetscInt_FMT "\n", indx)); 763 PetscStackCallExternalVoid("hypre_ParILUDataMaxIter", indx = hypre_ParILUDataMaxIter(ilu_data)); 764 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU max iterations %" PetscInt_FMT "\n", indx)); 765 PetscStackCallExternalVoid("hypre_ParILUDataMaxRowNnz", indx = hypre_ParILUDataMaxRowNnz(ilu_data)); 766 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU max NNZ per row %" PetscInt_FMT "\n", indx)); 767 PetscStackCallExternalVoid("hypre_ParILUDataTriSolve", indx = hypre_ParILUDataTriSolve(ilu_data)); 768 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU triangular solve %" PetscInt_FMT "\n", indx)); 769 PetscStackCallExternalVoid("hypre_ParILUDataTol", tmpdbl = hypre_ParILUDataTol(ilu_data)); 770 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU tolerance %e\n", tmpdbl)); 771 PetscStackCallExternalVoid("hypre_ParILUDataDroptol", tmpdbl3 = hypre_ParILUDataDroptol(ilu_data)); 772 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU drop tolerance %e / %e / %e\n", tmpdbl3[0], tmpdbl3[1], tmpdbl3[2])); 773 PetscStackCallExternalVoid("hypre_ParILUDataReorderingType", indx = hypre_ParILUDataReorderingType(ilu_data)); 774 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU local reordering %" PetscInt_FMT "\n", indx)); 775 PetscStackCallExternalVoid("hypre_ParILUDataLowerJacobiIters", indx = hypre_ParILUDataLowerJacobiIters(ilu_data)); 776 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU lower Jacobi iterations %" PetscInt_FMT "\n", indx)); 777 PetscStackCallExternalVoid("hypre_ParILUDataUpperJacobiIters", indx = hypre_ParILUDataUpperJacobiIters(ilu_data)); 778 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU upper Jacobi iterations %" PetscInt_FMT "\n", indx)); 779 PetscStackCallExternalVoid("hypre_ParILUDataPrintLevel", indx = hypre_ParILUDataPrintLevel(ilu_data)); 780 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU print level %" PetscInt_FMT "\n", indx)); 781 PetscStackCallExternalVoid("hypre_ParILUDataLogging", indx = hypre_ParILUDataLogging(ilu_data)); 782 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU logging level %" PetscInt_FMT "\n", indx)); 783 PetscStackCallExternalVoid("hypre_ParILUDataIterativeSetupType", indx = hypre_ParILUDataIterativeSetupType(ilu_data)); 784 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU iterative setup type %s (%" PetscInt_FMT ")\n", HYPREILUIterSetup[indx], indx)); 785 PetscStackCallExternalVoid("hypre_ParILUDataIterativeSetupOption", indx = hypre_ParILUDataIterativeSetupOption(ilu_data)); 786 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU iterative setup option %" PetscInt_FMT "\n", indx)); 787 PetscStackCallExternalVoid("hypre_ParILUDataIterativeSetupMaxIter", indx = hypre_ParILUDataIterativeSetupMaxIter(ilu_data)); 788 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU iterative setup max iterations %" PetscInt_FMT "\n", indx)); 789 PetscStackCallExternalVoid("hypre_ParILUDataIterativeSetupTolerance", tmpdbl = hypre_ParILUDataIterativeSetupTolerance(ilu_data)); 790 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU iterative setup tolerance %e\n", tmpdbl)); 791 } 792 PetscFunctionReturn(PETSC_SUCCESS); 793 } 794 795 static PetscErrorCode PCSetFromOptions_HYPRE_Euclid(PC pc, PetscOptionItems PetscOptionsObject) 796 { 797 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 798 PetscBool flag, eu_bj = jac->eu_bj ? PETSC_TRUE : PETSC_FALSE; 799 800 PetscFunctionBegin; 801 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE Euclid Options"); 802 PetscCall(PetscOptionsInt("-pc_hypre_euclid_level", "Factorization levels", "None", jac->eu_level, &jac->eu_level, &flag)); 803 if (flag) PetscCallExternal(HYPRE_EuclidSetLevel, jac->hsolver, jac->eu_level); 804 805 PetscCall(PetscOptionsReal("-pc_hypre_euclid_droptolerance", "Drop tolerance for ILU(k) in Euclid", "None", jac->eu_droptolerance, &jac->eu_droptolerance, &flag)); 806 if (flag) { 807 PetscMPIInt size; 808 809 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size)); 810 PetscCheck(size == 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "hypre's Euclid does not support a parallel drop tolerance"); 811 PetscCallExternal(HYPRE_EuclidSetILUT, jac->hsolver, jac->eu_droptolerance); 812 } 813 814 PetscCall(PetscOptionsBool("-pc_hypre_euclid_bj", "Use Block Jacobi for ILU in Euclid", "None", eu_bj, &eu_bj, &flag)); 815 if (flag) { 816 jac->eu_bj = eu_bj ? 1 : 0; 817 PetscCallExternal(HYPRE_EuclidSetBJ, jac->hsolver, jac->eu_bj); 818 } 819 PetscOptionsHeadEnd(); 820 PetscFunctionReturn(PETSC_SUCCESS); 821 } 822 823 static PetscErrorCode PCView_HYPRE_Euclid(PC pc, PetscViewer viewer) 824 { 825 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 826 PetscBool iascii; 827 828 PetscFunctionBegin; 829 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 830 if (iascii) { 831 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE Euclid preconditioning\n")); 832 if (jac->eu_level != PETSC_DEFAULT) { 833 PetscCall(PetscViewerASCIIPrintf(viewer, " factorization levels %" PetscInt_FMT "\n", jac->eu_level)); 834 } else { 835 PetscCall(PetscViewerASCIIPrintf(viewer, " default factorization levels \n")); 836 } 837 PetscCall(PetscViewerASCIIPrintf(viewer, " drop tolerance %g\n", (double)jac->eu_droptolerance)); 838 PetscCall(PetscViewerASCIIPrintf(viewer, " use Block-Jacobi? %" PetscInt_FMT "\n", jac->eu_bj)); 839 } 840 PetscFunctionReturn(PETSC_SUCCESS); 841 } 842 843 static PetscErrorCode PCApplyTranspose_HYPRE_BoomerAMG(PC pc, Vec b, Vec x) 844 { 845 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 846 Mat_HYPRE *hjac = (Mat_HYPRE *)jac->hpmat->data; 847 HYPRE_ParCSRMatrix hmat; 848 HYPRE_ParVector jbv, jxv; 849 850 PetscFunctionBegin; 851 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 852 PetscCall(VecSet(x, 0.0)); 853 PetscCall(VecHYPRE_IJVectorPushVecRead(hjac->x, b)); 854 PetscCall(VecHYPRE_IJVectorPushVecWrite(hjac->b, x)); 855 856 PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat); 857 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&jbv); 858 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&jxv); 859 860 PetscStackCallExternalVoid( 861 "Hypre Transpose solve", do { 862 HYPRE_Int hierr = HYPRE_BoomerAMGSolveT(jac->hsolver, hmat, jbv, jxv); 863 if (hierr) { 864 /* error code of 1 in BoomerAMG merely means convergence not achieved */ 865 PetscCheck(hierr == 1, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr); 866 HYPRE_ClearAllErrors(); 867 } 868 } while (0)); 869 870 PetscCall(VecHYPRE_IJVectorPopVec(hjac->x)); 871 PetscCall(VecHYPRE_IJVectorPopVec(hjac->b)); 872 PetscFunctionReturn(PETSC_SUCCESS); 873 } 874 875 static PetscErrorCode PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(PC pc, const char name[]) 876 { 877 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 878 PetscBool flag; 879 880 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0) 881 PetscFunctionBegin; 882 if (jac->spgemm_type) { 883 PetscCall(PetscStrcmp(jac->spgemm_type, name, &flag)); 884 PetscCheck(flag, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Cannot reset the HYPRE SpGEMM (really we can)"); 885 PetscFunctionReturn(PETSC_SUCCESS); 886 } else { 887 PetscCall(PetscStrallocpy(name, &jac->spgemm_type)); 888 } 889 PetscCall(PetscStrcmp("cusparse", jac->spgemm_type, &flag)); 890 if (flag) { 891 PetscCallExternal(HYPRE_SetSpGemmUseCusparse, 1); 892 PetscFunctionReturn(PETSC_SUCCESS); 893 } 894 PetscCall(PetscStrcmp("hypre", jac->spgemm_type, &flag)); 895 if (flag) { 896 PetscCallExternal(HYPRE_SetSpGemmUseCusparse, 0); 897 PetscFunctionReturn(PETSC_SUCCESS); 898 } 899 jac->spgemm_type = NULL; 900 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown HYPRE SpGEMM type %s; Choices are cusparse, hypre", name); 901 #endif 902 } 903 904 static PetscErrorCode PCMGGalerkinGetMatProductAlgorithm_HYPRE_BoomerAMG(PC pc, const char *spgemm[]) 905 { 906 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 907 908 PetscFunctionBegin; 909 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 910 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0) 911 *spgemm = jac->spgemm_type; 912 #endif 913 PetscFunctionReturn(PETSC_SUCCESS); 914 } 915 916 static const char *HYPREBoomerAMGCycleType[] = {"", "V", "W"}; 917 static const char *HYPREBoomerAMGCoarsenType[] = {"CLJP", "Ruge-Stueben", "", "modifiedRuge-Stueben", "", "", "Falgout", "", "PMIS", "", "HMIS"}; 918 static const char *HYPREBoomerAMGMeasureType[] = {"local", "global"}; 919 /* The following corresponds to HYPRE_BoomerAMGSetRelaxType which has many missing numbers in the enum */ 920 static const char *HYPREBoomerAMGSmoothType[] = {"ILU", "Schwarz-smoothers", "Pilut", "ParaSails", "Euclid"}; 921 static const char *HYPREBoomerAMGRelaxType[] = {"Jacobi", "sequential-Gauss-Seidel", "seqboundary-Gauss-Seidel", "SOR/Jacobi", "backward-SOR/Jacobi", "" /* [5] hybrid chaotic Gauss-Seidel (works only with OpenMP) */, "symmetric-SOR/Jacobi", "" /* 7 */, "l1scaled-SOR/Jacobi", "Gaussian-elimination", "" /* 10 */, "" /* 11 */, "" /* 12 */, "l1-Gauss-Seidel" /* nonsymmetric */, "backward-l1-Gauss-Seidel" /* nonsymmetric */, "CG" /* non-stationary */, "Chebyshev", "FCF-Jacobi", "l1scaled-Jacobi"}; 922 static const char *HYPREBoomerAMGInterpType[] = {"classical", "", "", "direct", "multipass", "multipass-wts", "ext+i", "ext+i-cc", "standard", "standard-wts", "block", "block-wtd", "FF", "FF1", "ext", "ad-wts", "ext-mm", "ext+i-mm", "ext+e-mm"}; 923 static PetscErrorCode PCSetFromOptions_HYPRE_BoomerAMG(PC pc, PetscOptionItems PetscOptionsObject) 924 { 925 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 926 PetscInt bs, n, indx, level; 927 PetscBool flg, tmp_truth; 928 PetscReal tmpdbl, twodbl[2]; 929 const char *symtlist[] = {"nonsymmetric", "SPD", "nonsymmetric,SPD"}; 930 const char *PCHYPRESpgemmTypes[] = {"cusparse", "hypre"}; 931 932 PetscFunctionBegin; 933 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE BoomerAMG Options"); 934 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_cycle_type", "Cycle type", "None", HYPREBoomerAMGCycleType + 1, 2, HYPREBoomerAMGCycleType[jac->cycletype], &indx, &flg)); 935 if (flg) { 936 jac->cycletype = indx + 1; 937 PetscCallExternal(HYPRE_BoomerAMGSetCycleType, jac->hsolver, jac->cycletype); 938 } 939 PetscCall(PetscOptionsBoundedInt("-pc_hypre_boomeramg_max_levels", "Number of levels (of grids) allowed", "None", jac->maxlevels, &jac->maxlevels, &flg, 2)); 940 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMaxLevels, jac->hsolver, jac->maxlevels); 941 PetscCall(PetscOptionsBoundedInt("-pc_hypre_boomeramg_max_iter", "Maximum iterations used PER hypre call", "None", jac->maxiter, &jac->maxiter, &flg, 1)); 942 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter); 943 PetscCall(PetscOptionsBoundedReal("-pc_hypre_boomeramg_tol", "Convergence tolerance PER hypre call (0.0 = use a fixed number of iterations)", "None", jac->tol, &jac->tol, &flg, 0.0)); 944 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol); 945 bs = 1; 946 if (pc->pmat) PetscCall(MatGetBlockSize(pc->pmat, &bs)); 947 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_numfunctions", "Number of functions", "HYPRE_BoomerAMGSetNumFunctions", bs, &bs, &flg)); 948 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNumFunctions, jac->hsolver, bs); 949 950 PetscCall(PetscOptionsBoundedReal("-pc_hypre_boomeramg_truncfactor", "Truncation factor for interpolation (0=no truncation)", "None", jac->truncfactor, &jac->truncfactor, &flg, 0.0)); 951 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetTruncFactor, jac->hsolver, jac->truncfactor); 952 953 PetscCall(PetscOptionsBoundedInt("-pc_hypre_boomeramg_P_max", "Max elements per row for interpolation operator (0=unlimited)", "None", jac->pmax, &jac->pmax, &flg, 0)); 954 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetPMaxElmts, jac->hsolver, jac->pmax); 955 956 PetscCall(PetscOptionsRangeInt("-pc_hypre_boomeramg_agg_nl", "Number of levels of aggressive coarsening", "None", jac->agg_nl, &jac->agg_nl, &flg, 0, jac->maxlevels)); 957 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetAggNumLevels, jac->hsolver, jac->agg_nl); 958 959 PetscCall(PetscOptionsBoundedInt("-pc_hypre_boomeramg_agg_num_paths", "Number of paths for aggressive coarsening", "None", jac->agg_num_paths, &jac->agg_num_paths, &flg, 1)); 960 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNumPaths, jac->hsolver, jac->agg_num_paths); 961 962 PetscCall(PetscOptionsBoundedReal("-pc_hypre_boomeramg_strong_threshold", "Threshold for being strongly connected", "None", jac->strongthreshold, &jac->strongthreshold, &flg, 0.0)); 963 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetStrongThreshold, jac->hsolver, jac->strongthreshold); 964 PetscCall(PetscOptionsRangeReal("-pc_hypre_boomeramg_max_row_sum", "Maximum row sum", "None", jac->maxrowsum, &jac->maxrowsum, &flg, 0.0, 1.0)); 965 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMaxRowSum, jac->hsolver, jac->maxrowsum); 966 967 /* Grid sweeps */ 968 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_all", "Number of sweeps for the up and down grid levels", "None", jac->gridsweeps[0], &indx, &flg)); 969 if (flg) { 970 PetscCallExternal(HYPRE_BoomerAMGSetNumSweeps, jac->hsolver, indx); 971 /* modify the jac structure so we can view the updated options with PC_View */ 972 jac->gridsweeps[0] = indx; 973 jac->gridsweeps[1] = indx; 974 /*defaults coarse to 1 */ 975 jac->gridsweeps[2] = 1; 976 } 977 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_coarsen", "Use a nodal based coarsening 1-6", "HYPRE_BoomerAMGSetNodal", jac->nodal_coarsening, &jac->nodal_coarsening, &flg)); 978 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNodal, jac->hsolver, jac->nodal_coarsening); 979 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_coarsen_diag", "Diagonal in strength matrix for nodal based coarsening 0-2", "HYPRE_BoomerAMGSetNodalDiag", jac->nodal_coarsening_diag, &jac->nodal_coarsening_diag, &flg)); 980 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNodalDiag, jac->hsolver, jac->nodal_coarsening_diag); 981 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_vec_interp_variant", "Variant of algorithm 1-3", "HYPRE_BoomerAMGSetInterpVecVariant", jac->vec_interp_variant, &jac->vec_interp_variant, &flg)); 982 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpVecVariant, jac->hsolver, jac->vec_interp_variant); 983 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_vec_interp_qmax", "Max elements per row for each Q", "HYPRE_BoomerAMGSetInterpVecQMax", jac->vec_interp_qmax, &jac->vec_interp_qmax, &flg)); 984 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpVecQMax, jac->hsolver, jac->vec_interp_qmax); 985 PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_vec_interp_smooth", "Whether to smooth the interpolation vectors", "HYPRE_BoomerAMGSetSmoothInterpVectors", jac->vec_interp_smooth, &jac->vec_interp_smooth, &flg)); 986 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetSmoothInterpVectors, jac->hsolver, jac->vec_interp_smooth); 987 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_interp_refine", "Preprocess the interpolation matrix through iterative weight refinement", "HYPRE_BoomerAMGSetInterpRefine", jac->interp_refine, &jac->interp_refine, &flg)); 988 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpRefine, jac->hsolver, jac->interp_refine); 989 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_down", "Number of sweeps for the down cycles", "None", jac->gridsweeps[0], &indx, &flg)); 990 if (flg) { 991 PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 1); 992 jac->gridsweeps[0] = indx; 993 } 994 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_up", "Number of sweeps for the up cycles", "None", jac->gridsweeps[1], &indx, &flg)); 995 if (flg) { 996 PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 2); 997 jac->gridsweeps[1] = indx; 998 } 999 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_coarse", "Number of sweeps for the coarse level", "None", jac->gridsweeps[2], &indx, &flg)); 1000 if (flg) { 1001 PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 3); 1002 jac->gridsweeps[2] = indx; 1003 } 1004 1005 /* Smooth type */ 1006 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_smooth_type", "Enable more complex smoothers", "None", HYPREBoomerAMGSmoothType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGSmoothType), HYPREBoomerAMGSmoothType[0], &indx, &flg)); 1007 if (flg) { 1008 jac->smoothtype = indx; 1009 PetscCallExternal(HYPRE_BoomerAMGSetSmoothType, jac->hsolver, indx + 5); 1010 jac->smoothnumlevels = 25; 1011 PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, 25); 1012 } 1013 1014 /* Number of smoothing levels */ 1015 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_smooth_num_levels", "Number of levels on which more complex smoothers are used", "None", 25, &indx, &flg)); 1016 if (flg && (jac->smoothtype != -1)) { 1017 jac->smoothnumlevels = indx; 1018 PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, indx); 1019 } 1020 1021 /* Smooth num sweeps */ 1022 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_smooth_num_sweeps", "Set number of smoother sweeps", "None", 1, &indx, &flg)); 1023 if (flg && indx > 0) { 1024 jac->smoothsweeps = indx; 1025 PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumSweeps, jac->hsolver, indx); 1026 } 1027 1028 /* ILU: ILU Type */ 1029 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_ilu_type", "Choose ILU Type", "None", HYPREILUType, PETSC_STATIC_ARRAY_LENGTH(HYPREILUType), HYPREILUType[0], &indx, &flg)); 1030 if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUType, jac->hsolver, indx); } 1031 1032 /* ILU: ILU iterative setup type*/ 1033 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_ilu_iterative_setup_type", "Set ILU iterative setup type", "None", HYPREILUIterSetup, PETSC_STATIC_ARRAY_LENGTH(HYPREILUIterSetup), HYPREILUIterSetup[0], &indx, &flg)); 1034 if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUIterSetupType, jac->hsolver, indx); } 1035 1036 /* ILU: ILU iterative setup option*/ 1037 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_iterative_setup_option", "Set ILU iterative setup option", "None", 0, &indx, &flg)); 1038 if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUIterSetupOption, jac->hsolver, indx); } 1039 1040 /* ILU: ILU iterative setup maxiter */ 1041 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_iterative_setup_maxiter", "Set ILU iterative setup maximum iteration count", "None", 0, &indx, &flg)); 1042 if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUIterSetupMaxIter, jac->hsolver, indx); } 1043 1044 /* ILU: ILU iterative setup tolerance */ 1045 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_ilu_iterative_setup_tolerance", "Set ILU iterative setup tolerance", "None", 0, &tmpdbl, &flg)); 1046 if (flg) { PetscCallExternal(hypre_BoomerAMGSetILUIterSetupTolerance, jac->hsolver, tmpdbl); } 1047 1048 /* ILU: ILU Print Level */ 1049 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_print_level", "Set ILU print level", "None", 0, &indx, &flg)); 1050 if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetPrintLevel, jac->hsolver, indx); } 1051 1052 /* ILU: Logging */ 1053 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_logging", "Set ILU logging level", "None", 0, &indx, &flg)); 1054 if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetLogging, jac->hsolver, indx); } 1055 1056 /* ILU: ILU Level */ 1057 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_level", "Set ILU level", "None", 0, &indx, &flg)); 1058 if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILULevel, jac->hsolver, indx); } 1059 1060 /* ILU: ILU Max NNZ per row */ 1061 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_max_nnz_per_row", "Set maximum NNZ per row", "None", 0, &indx, &flg)); 1062 if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUMaxRowNnz, jac->hsolver, indx); } 1063 1064 /* ILU: maximum iteration count */ 1065 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_maxiter", "Set ILU max iterations", "None", 0, &indx, &flg)); 1066 if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUMaxIter, jac->hsolver, indx); } 1067 1068 /* ILU: drop threshold */ 1069 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_ilu_drop_tol", "Drop tolerance for ILU", "None", 0, &tmpdbl, &flg)); 1070 if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUDroptol, jac->hsolver, tmpdbl); } 1071 1072 /* ILU: Triangular Solve */ 1073 PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_ilu_tri_solve", "Enable triangular solve", "None", PETSC_FALSE, &tmp_truth, &flg)); 1074 if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUTriSolve, jac->hsolver, tmp_truth); } 1075 1076 /* ILU: Lower Jacobi iteration */ 1077 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_lower_jacobi_iters", "Set lower Jacobi iteration count", "None", 0, &indx, &flg)); 1078 if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILULowerJacobiIters, jac->hsolver, indx); } 1079 1080 /* ILU: Upper Jacobi iteration */ 1081 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_upper_jacobi_iters", "Set upper Jacobi iteration count", "None", 0, &indx, &flg)); 1082 if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUUpperJacobiIters, jac->hsolver, indx); } 1083 1084 /* ILU: local reordering */ 1085 PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_ilu_local_reordering", "Enable local reordering", "None", PETSC_FALSE, &tmp_truth, &flg)); 1086 if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILULocalReordering, jac->hsolver, tmp_truth); } 1087 1088 /* Number of levels for ILU(k) for Euclid */ 1089 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_eu_level", "Number of levels for ILU(k) in Euclid smoother", "None", 0, &indx, &flg)); 1090 if (flg && (jac->smoothtype == 4)) { 1091 jac->eu_level = indx; 1092 PetscCallExternal(HYPRE_BoomerAMGSetEuLevel, jac->hsolver, indx); 1093 } 1094 1095 /* Filter for ILU(k) for Euclid */ 1096 PetscReal droptolerance; 1097 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_eu_droptolerance", "Drop tolerance for ILU(k) in Euclid smoother", "None", 0, &droptolerance, &flg)); 1098 if (flg && (jac->smoothtype == 4)) { 1099 jac->eu_droptolerance = droptolerance; 1100 PetscCallExternal(HYPRE_BoomerAMGSetEuLevel, jac->hsolver, droptolerance); 1101 } 1102 1103 /* Use Block Jacobi ILUT for Euclid */ 1104 PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_eu_bj", "Use Block Jacobi for ILU in Euclid smoother?", "None", PETSC_FALSE, &tmp_truth, &flg)); 1105 if (flg && (jac->smoothtype == 4)) { 1106 jac->eu_bj = tmp_truth; 1107 PetscCallExternal(HYPRE_BoomerAMGSetEuBJ, jac->hsolver, jac->eu_bj); 1108 } 1109 1110 /* Relax type */ 1111 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_all", "Relax type for the up and down cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg)); 1112 if (flg) { 1113 jac->relaxtype[0] = jac->relaxtype[1] = indx; 1114 PetscCallExternal(HYPRE_BoomerAMGSetRelaxType, jac->hsolver, indx); 1115 /* by default, coarse type set to 9 */ 1116 jac->relaxtype[2] = 9; 1117 PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, 9, 3); 1118 } 1119 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_down", "Relax type for the down cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg)); 1120 if (flg) { 1121 jac->relaxtype[0] = indx; 1122 PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 1); 1123 } 1124 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_up", "Relax type for the up cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg)); 1125 if (flg) { 1126 jac->relaxtype[1] = indx; 1127 PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 2); 1128 } 1129 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_coarse", "Relax type on coarse grid", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[9], &indx, &flg)); 1130 if (flg) { 1131 jac->relaxtype[2] = indx; 1132 PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 3); 1133 } 1134 1135 /* Relaxation Weight */ 1136 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_relax_weight_all", "Relaxation weight for all levels (0 = hypre estimates, -k = determined with k CG steps)", "None", jac->relaxweight, &tmpdbl, &flg)); 1137 if (flg) { 1138 PetscCallExternal(HYPRE_BoomerAMGSetRelaxWt, jac->hsolver, tmpdbl); 1139 jac->relaxweight = tmpdbl; 1140 } 1141 1142 n = 2; 1143 twodbl[0] = twodbl[1] = 1.0; 1144 PetscCall(PetscOptionsRealArray("-pc_hypre_boomeramg_relax_weight_level", "Set the relaxation weight for a particular level (weight,level)", "None", twodbl, &n, &flg)); 1145 if (flg) { 1146 PetscCheck(n == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Relax weight level: you must provide 2 values separated by a comma (and no space), you provided %" PetscInt_FMT, n); 1147 indx = (int)PetscAbsReal(twodbl[1]); 1148 PetscCallExternal(HYPRE_BoomerAMGSetLevelRelaxWt, jac->hsolver, twodbl[0], indx); 1149 } 1150 1151 /* Outer relaxation Weight */ 1152 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_outer_relax_weight_all", "Outer relaxation weight for all levels (-k = determined with k CG steps)", "None", jac->outerrelaxweight, &tmpdbl, &flg)); 1153 if (flg) { 1154 PetscCallExternal(HYPRE_BoomerAMGSetOuterWt, jac->hsolver, tmpdbl); 1155 jac->outerrelaxweight = tmpdbl; 1156 } 1157 1158 n = 2; 1159 twodbl[0] = twodbl[1] = 1.0; 1160 PetscCall(PetscOptionsRealArray("-pc_hypre_boomeramg_outer_relax_weight_level", "Set the outer relaxation weight for a particular level (weight,level)", "None", twodbl, &n, &flg)); 1161 if (flg) { 1162 PetscCheck(n == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Relax weight outer level: You must provide 2 values separated by a comma (and no space), you provided %" PetscInt_FMT, n); 1163 indx = (int)PetscAbsReal(twodbl[1]); 1164 PetscCallExternal(HYPRE_BoomerAMGSetLevelOuterWt, jac->hsolver, twodbl[0], indx); 1165 } 1166 1167 /* the Relax Order */ 1168 PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_no_CF", "Do not use CF-relaxation", "None", PETSC_FALSE, &tmp_truth, &flg)); 1169 1170 if (flg && tmp_truth) { 1171 jac->relaxorder = 0; 1172 PetscCallExternal(HYPRE_BoomerAMGSetRelaxOrder, jac->hsolver, jac->relaxorder); 1173 } 1174 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_measure_type", "Measure type", "None", HYPREBoomerAMGMeasureType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGMeasureType), HYPREBoomerAMGMeasureType[0], &indx, &flg)); 1175 if (flg) { 1176 jac->measuretype = indx; 1177 PetscCallExternal(HYPRE_BoomerAMGSetMeasureType, jac->hsolver, jac->measuretype); 1178 } 1179 /* update list length 3/07 */ 1180 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_coarsen_type", "Coarsen type", "None", HYPREBoomerAMGCoarsenType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGCoarsenType), HYPREBoomerAMGCoarsenType[6], &indx, &flg)); 1181 if (flg) { 1182 jac->coarsentype = indx; 1183 PetscCallExternal(HYPRE_BoomerAMGSetCoarsenType, jac->hsolver, jac->coarsentype); 1184 } 1185 1186 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_max_coarse_size", "Maximum size of coarsest grid", "None", jac->maxc, &jac->maxc, &flg)); 1187 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMaxCoarseSize, jac->hsolver, jac->maxc); 1188 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_min_coarse_size", "Minimum size of coarsest grid", "None", jac->minc, &jac->minc, &flg)); 1189 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMinCoarseSize, jac->hsolver, jac->minc); 1190 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0) 1191 // global parameter but is closely associated with BoomerAMG 1192 PetscCall(PetscOptionsEList("-pc_mg_galerkin_mat_product_algorithm", "Type of SpGEMM to use in hypre (only for now)", "PCMGGalerkinSetMatProductAlgorithm", PCHYPRESpgemmTypes, PETSC_STATIC_ARRAY_LENGTH(PCHYPRESpgemmTypes), PCHYPRESpgemmTypes[0], &indx, &flg)); 1193 #if defined(PETSC_HAVE_HYPRE_DEVICE) 1194 if (!flg) indx = 0; 1195 PetscCall(PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(pc, PCHYPRESpgemmTypes[indx])); 1196 #else 1197 PetscCall(PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(pc, "hypre")); 1198 #endif 1199 #endif 1200 /* AIR */ 1201 #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0) 1202 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_restriction_type", "Type of AIR method (distance 1 or 2, 0 means no AIR)", "None", jac->Rtype, &jac->Rtype, NULL)); 1203 PetscCallExternal(HYPRE_BoomerAMGSetRestriction, jac->hsolver, jac->Rtype); 1204 if (jac->Rtype) { 1205 HYPRE_Int **grid_relax_points = hypre_TAlloc(HYPRE_Int *, 4, HYPRE_MEMORY_HOST); 1206 char *prerelax[256]; 1207 char *postrelax[256]; 1208 char stringF[2] = "F", stringC[2] = "C", stringA[2] = "A"; 1209 PetscInt ns_down = 256, ns_up = 256; 1210 PetscBool matchF, matchC, matchA; 1211 1212 jac->interptype = 100; /* no way we can pass this with strings... Set it as default as in MFEM, then users can still customize it back to a different one */ 1213 1214 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_strongthresholdR", "Threshold for R", "None", jac->Rstrongthreshold, &jac->Rstrongthreshold, NULL)); 1215 PetscCallExternal(HYPRE_BoomerAMGSetStrongThresholdR, jac->hsolver, jac->Rstrongthreshold); 1216 1217 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_filterthresholdR", "Filter threshold for R", "None", jac->Rfilterthreshold, &jac->Rfilterthreshold, NULL)); 1218 PetscCallExternal(HYPRE_BoomerAMGSetFilterThresholdR, jac->hsolver, jac->Rfilterthreshold); 1219 1220 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_Adroptol", "Defines the drop tolerance for the A-matrices from the 2nd level of AMG", "None", jac->Adroptol, &jac->Adroptol, NULL)); 1221 PetscCallExternal(HYPRE_BoomerAMGSetADropTol, jac->hsolver, jac->Adroptol); 1222 1223 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_Adroptype", "Drops the entries that are not on the diagonal and smaller than its row norm: type 1: 1-norm, 2: 2-norm, -1: infinity norm", "None", jac->Adroptype, &jac->Adroptype, NULL)); 1224 PetscCallExternal(HYPRE_BoomerAMGSetADropType, jac->hsolver, jac->Adroptype); 1225 PetscCall(PetscOptionsStringArray("-pc_hypre_boomeramg_prerelax", "Defines prerelax scheme", "None", prerelax, &ns_down, NULL)); 1226 PetscCall(PetscOptionsStringArray("-pc_hypre_boomeramg_postrelax", "Defines postrelax scheme", "None", postrelax, &ns_up, NULL)); 1227 PetscCheck(ns_down == jac->gridsweeps[0], PetscObjectComm((PetscObject)jac), PETSC_ERR_ARG_SIZ, "The number of arguments passed to -pc_hypre_boomeramg_prerelax must match the number passed to -pc_hypre_bomeramg_grid_sweeps_down"); 1228 PetscCheck(ns_up == jac->gridsweeps[1], PetscObjectComm((PetscObject)jac), PETSC_ERR_ARG_SIZ, "The number of arguments passed to -pc_hypre_boomeramg_postrelax must match the number passed to -pc_hypre_bomeramg_grid_sweeps_up"); 1229 1230 grid_relax_points[0] = NULL; 1231 grid_relax_points[1] = hypre_TAlloc(HYPRE_Int, ns_down, HYPRE_MEMORY_HOST); 1232 grid_relax_points[2] = hypre_TAlloc(HYPRE_Int, ns_up, HYPRE_MEMORY_HOST); 1233 grid_relax_points[3] = hypre_TAlloc(HYPRE_Int, jac->gridsweeps[2], HYPRE_MEMORY_HOST); 1234 grid_relax_points[3][0] = 0; 1235 1236 // set down relax scheme 1237 for (PetscInt i = 0; i < ns_down; i++) { 1238 PetscCall(PetscStrcasecmp(prerelax[i], stringF, &matchF)); 1239 PetscCall(PetscStrcasecmp(prerelax[i], stringC, &matchC)); 1240 PetscCall(PetscStrcasecmp(prerelax[i], stringA, &matchA)); 1241 PetscCheck(matchF || matchC || matchA, PetscObjectComm((PetscObject)jac), PETSC_ERR_ARG_WRONG, "Valid argument options for -pc_hypre_boomeramg_prerelax are C, F, and A"); 1242 if (matchF) grid_relax_points[1][i] = -1; 1243 else if (matchC) grid_relax_points[1][i] = 1; 1244 else if (matchA) grid_relax_points[1][i] = 0; 1245 } 1246 1247 // set up relax scheme 1248 for (PetscInt i = 0; i < ns_up; i++) { 1249 PetscCall(PetscStrcasecmp(postrelax[i], stringF, &matchF)); 1250 PetscCall(PetscStrcasecmp(postrelax[i], stringC, &matchC)); 1251 PetscCall(PetscStrcasecmp(postrelax[i], stringA, &matchA)); 1252 PetscCheck(matchF || matchC || matchA, PetscObjectComm((PetscObject)jac), PETSC_ERR_ARG_WRONG, "Valid argument options for -pc_hypre_boomeramg_postrelax are C, F, and A"); 1253 if (matchF) grid_relax_points[2][i] = -1; 1254 else if (matchC) grid_relax_points[2][i] = 1; 1255 else if (matchA) grid_relax_points[2][i] = 0; 1256 } 1257 1258 // set coarse relax scheme 1259 for (PetscInt i = 0; i < jac->gridsweeps[2]; i++) grid_relax_points[3][i] = 0; 1260 1261 // Pass relax schemes to hypre 1262 PetscCallExternal(HYPRE_BoomerAMGSetGridRelaxPoints, jac->hsolver, grid_relax_points); 1263 1264 // cleanup memory 1265 for (PetscInt i = 0; i < ns_down; i++) PetscCall(PetscFree(prerelax[i])); 1266 for (PetscInt i = 0; i < ns_up; i++) PetscCall(PetscFree(postrelax[i])); 1267 } 1268 #endif 1269 1270 #if PETSC_PKG_HYPRE_VERSION_LE(9, 9, 9) 1271 PetscCheck(!jac->Rtype || !jac->agg_nl, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "-pc_hypre_boomeramg_restriction_type (%" PetscInt_FMT ") and -pc_hypre_boomeramg_agg_nl (%" PetscInt_FMT ")", jac->Rtype, jac->agg_nl); 1272 #endif 1273 1274 /* new 3/07 */ 1275 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_interp_type", "Interpolation type", "None", HYPREBoomerAMGInterpType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGInterpType), HYPREBoomerAMGInterpType[0], &indx, &flg)); 1276 if (flg || jac->Rtype) { 1277 if (flg) jac->interptype = indx; 1278 PetscCallExternal(HYPRE_BoomerAMGSetInterpType, jac->hsolver, jac->interptype); 1279 } 1280 1281 PetscCall(PetscOptionsName("-pc_hypre_boomeramg_print_statistics", "Print statistics", "None", &flg)); 1282 if (flg) { 1283 level = 3; 1284 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_print_statistics", "Print statistics", "None", level, &level, NULL)); 1285 1286 jac->printstatistics = PETSC_TRUE; 1287 PetscCallExternal(HYPRE_BoomerAMGSetPrintLevel, jac->hsolver, level); 1288 } 1289 1290 PetscCall(PetscOptionsName("-pc_hypre_boomeramg_print_debug", "Print debug information", "None", &flg)); 1291 if (flg) { 1292 level = 3; 1293 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_print_debug", "Print debug information", "None", level, &level, NULL)); 1294 1295 jac->printstatistics = PETSC_TRUE; 1296 PetscCallExternal(HYPRE_BoomerAMGSetDebugFlag, jac->hsolver, level); 1297 } 1298 1299 PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_nodal_relaxation", "Nodal relaxation via Schwarz", "None", PETSC_FALSE, &tmp_truth, &flg)); 1300 if (flg && tmp_truth) { 1301 PetscInt tmp_int; 1302 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_relaxation", "Nodal relaxation via Schwarz", "None", jac->nodal_relax_levels, &tmp_int, &flg)); 1303 if (flg) jac->nodal_relax_levels = tmp_int; 1304 PetscCallExternal(HYPRE_BoomerAMGSetSmoothType, jac->hsolver, 6); 1305 PetscCallExternal(HYPRE_BoomerAMGSetDomainType, jac->hsolver, 1); 1306 PetscCallExternal(HYPRE_BoomerAMGSetOverlap, jac->hsolver, 0); 1307 PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, jac->nodal_relax_levels); 1308 } 1309 1310 PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_keeptranspose", "Avoid transpose matvecs in preconditioner application", "None", jac->keeptranspose, &jac->keeptranspose, NULL)); 1311 PetscCallExternal(HYPRE_BoomerAMGSetKeepTranspose, jac->hsolver, jac->keeptranspose ? 1 : 0); 1312 1313 /* options for ParaSails solvers */ 1314 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_parasails_sym", "Symmetry of matrix and preconditioner", "None", symtlist, PETSC_STATIC_ARRAY_LENGTH(symtlist), symtlist[0], &indx, &flg)); 1315 if (flg) { 1316 jac->symt = indx; 1317 PetscCallExternal(HYPRE_BoomerAMGSetSym, jac->hsolver, jac->symt); 1318 } 1319 1320 PetscOptionsHeadEnd(); 1321 PetscFunctionReturn(PETSC_SUCCESS); 1322 } 1323 1324 static PetscErrorCode PCApplyRichardson_HYPRE_BoomerAMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason) 1325 { 1326 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1327 HYPRE_Int oits; 1328 1329 PetscFunctionBegin; 1330 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 1331 PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, its * jac->maxiter); 1332 PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, rtol); 1333 jac->applyrichardson = PETSC_TRUE; 1334 PetscCall(PCApply_HYPRE(pc, b, y)); 1335 jac->applyrichardson = PETSC_FALSE; 1336 PetscCallExternal(HYPRE_BoomerAMGGetNumIterations, jac->hsolver, &oits); 1337 *outits = oits; 1338 if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS; 1339 else *reason = PCRICHARDSON_CONVERGED_RTOL; 1340 PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol); 1341 PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter); 1342 PetscFunctionReturn(PETSC_SUCCESS); 1343 } 1344 1345 static PetscErrorCode PCView_HYPRE_BoomerAMG(PC pc, PetscViewer viewer) 1346 { 1347 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1348 hypre_ParAMGData *amg_data = (hypre_ParAMGData *)jac->hsolver; 1349 PetscBool iascii; 1350 PetscInt indx; 1351 PetscReal val; 1352 1353 PetscFunctionBegin; 1354 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 1355 if (iascii) { 1356 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE BoomerAMG preconditioning\n")); 1357 PetscCall(PetscViewerASCIIPrintf(viewer, " Cycle type %s\n", HYPREBoomerAMGCycleType[jac->cycletype])); 1358 PetscCall(PetscViewerASCIIPrintf(viewer, " Maximum number of levels %" PetscInt_FMT "\n", jac->maxlevels)); 1359 PetscCall(PetscViewerASCIIPrintf(viewer, " Maximum number of iterations PER hypre call %" PetscInt_FMT "\n", jac->maxiter)); 1360 PetscCall(PetscViewerASCIIPrintf(viewer, " Convergence tolerance PER hypre call %g\n", (double)jac->tol)); 1361 PetscCall(PetscViewerASCIIPrintf(viewer, " Threshold for strong coupling %g\n", (double)jac->strongthreshold)); 1362 PetscCall(PetscViewerASCIIPrintf(viewer, " Interpolation truncation factor %g\n", (double)jac->truncfactor)); 1363 PetscCall(PetscViewerASCIIPrintf(viewer, " Interpolation: max elements per row %" PetscInt_FMT "\n", jac->pmax)); 1364 if (jac->interp_refine) PetscCall(PetscViewerASCIIPrintf(viewer, " Interpolation: number of steps of weighted refinement %" PetscInt_FMT "\n", jac->interp_refine)); 1365 PetscCall(PetscViewerASCIIPrintf(viewer, " Number of levels of aggressive coarsening %" PetscInt_FMT "\n", jac->agg_nl)); 1366 PetscCall(PetscViewerASCIIPrintf(viewer, " Number of paths for aggressive coarsening %" PetscInt_FMT "\n", jac->agg_num_paths)); 1367 1368 PetscCall(PetscViewerASCIIPrintf(viewer, " Maximum row sums %g\n", (double)jac->maxrowsum)); 1369 1370 PetscCall(PetscViewerASCIIPrintf(viewer, " Sweeps down %" PetscInt_FMT "\n", jac->gridsweeps[0])); 1371 PetscCall(PetscViewerASCIIPrintf(viewer, " Sweeps up %" PetscInt_FMT "\n", jac->gridsweeps[1])); 1372 PetscCall(PetscViewerASCIIPrintf(viewer, " Sweeps on coarse %" PetscInt_FMT "\n", jac->gridsweeps[2])); 1373 1374 PetscCall(PetscViewerASCIIPrintf(viewer, " Relax down %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[0]])); 1375 PetscCall(PetscViewerASCIIPrintf(viewer, " Relax up %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[1]])); 1376 PetscCall(PetscViewerASCIIPrintf(viewer, " Relax on coarse %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[2]])); 1377 1378 PetscCall(PetscViewerASCIIPrintf(viewer, " Relax weight (all) %g\n", (double)jac->relaxweight)); 1379 PetscCall(PetscViewerASCIIPrintf(viewer, " Outer relax weight (all) %g\n", (double)jac->outerrelaxweight)); 1380 1381 PetscCall(PetscViewerASCIIPrintf(viewer, " Maximum size of coarsest grid %" PetscInt_FMT "\n", jac->maxc)); 1382 PetscCall(PetscViewerASCIIPrintf(viewer, " Minimum size of coarsest grid %" PetscInt_FMT "\n", jac->minc)); 1383 1384 if (jac->relaxorder) { 1385 PetscCall(PetscViewerASCIIPrintf(viewer, " Using CF-relaxation\n")); 1386 } else { 1387 PetscCall(PetscViewerASCIIPrintf(viewer, " Not using CF-relaxation\n")); 1388 } 1389 if (jac->smoothtype != -1) { 1390 PetscCall(PetscViewerASCIIPrintf(viewer, " Smooth type %s\n", HYPREBoomerAMGSmoothType[jac->smoothtype])); 1391 PetscCall(PetscViewerASCIIPrintf(viewer, " Smooth num levels %" PetscInt_FMT "\n", jac->smoothnumlevels)); 1392 PetscCall(PetscViewerASCIIPrintf(viewer, " Smooth num sweeps %" PetscInt_FMT "\n", jac->smoothsweeps)); 1393 if (jac->smoothtype == 0) { 1394 PetscStackCallExternalVoid("hypre_ParAMGDataILUType", indx = hypre_ParAMGDataILUType(amg_data)); 1395 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU type %s (%" PetscInt_FMT ")\n", HYPREILUType[indx], indx)); 1396 PetscStackCallExternalVoid("hypre_ParAMGDataILULevel", indx = hypre_ParAMGDataILULevel(amg_data)); 1397 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU level %" PetscInt_FMT "\n", indx)); 1398 PetscStackCallExternalVoid("hypre_ParAMGDataILUMaxIter", indx = hypre_ParAMGDataILUMaxIter(amg_data)); 1399 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU max iterations %" PetscInt_FMT "\n", indx)); 1400 PetscStackCallExternalVoid("hypre_ParAMGDataILUMaxRowNnz", indx = hypre_ParAMGDataILUMaxRowNnz(amg_data)); 1401 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU max NNZ per row %" PetscInt_FMT "\n", indx)); 1402 PetscStackCallExternalVoid("hypre_ParAMGDataILUTriSolve", indx = hypre_ParAMGDataILUTriSolve(amg_data)); 1403 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU triangular solve %" PetscInt_FMT "\n", indx)); 1404 PetscStackCallExternalVoid("hypre_ParAMGDataTol", val = hypre_ParAMGDataTol(amg_data)); 1405 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU tolerance %e\n", val)); 1406 PetscStackCallExternalVoid("hypre_ParAMGDataILUDroptol", val = hypre_ParAMGDataILUDroptol(amg_data)); 1407 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU drop tolerance %e\n", val)); 1408 PetscStackCallExternalVoid("hypre_ParAMGDataILULocalReordering", indx = hypre_ParAMGDataILULocalReordering(amg_data)); 1409 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU local reordering %" PetscInt_FMT "\n", indx)); 1410 PetscStackCallExternalVoid("hypre_ParAMGDataILULowerJacobiIters", indx = hypre_ParAMGDataILULowerJacobiIters(amg_data)); 1411 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU lower Jacobi iterations %" PetscInt_FMT "\n", indx)); 1412 PetscStackCallExternalVoid("hypre_ParAMGDataILUUpperJacobiIters", indx = hypre_ParAMGDataILUUpperJacobiIters(amg_data)); 1413 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU upper Jacobi iterations %" PetscInt_FMT "\n", indx)); 1414 PetscStackCallExternalVoid("hypre_ParAMGDataPrintLevel", indx = hypre_ParAMGDataPrintLevel(amg_data)); 1415 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU print level %" PetscInt_FMT "\n", indx)); 1416 PetscStackCallExternalVoid("hypre_ParAMGDataLogging", indx = hypre_ParAMGDataLogging(amg_data)); 1417 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU logging level %" PetscInt_FMT "\n", indx)); 1418 PetscStackCallExternalVoid("hypre_ParAMGDataILUIterSetupType", indx = hypre_ParAMGDataILUIterSetupType(amg_data)); 1419 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU iterative setup type %s (%" PetscInt_FMT ")\n", HYPREILUIterSetup[indx], indx)); 1420 PetscStackCallExternalVoid("hypre_ParAMGDataILUIterSetupOption", indx = hypre_ParAMGDataILUIterSetupOption(amg_data)); 1421 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU iterative setup option %" PetscInt_FMT "\n", indx)); 1422 PetscStackCallExternalVoid("hypre_ParAMGDataILUIterSetupMaxIter", indx = hypre_ParAMGDataILUIterSetupMaxIter(amg_data)); 1423 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU iterative setup max iterations %" PetscInt_FMT "\n", indx)); 1424 PetscStackCallExternalVoid("hypre_ParAMGDataILUIterSetupTolerance", val = hypre_ParAMGDataILUIterSetupTolerance(amg_data)); 1425 PetscCall(PetscViewerASCIIPrintf(viewer, " ILU iterative setup tolerance %e\n", val)); 1426 } 1427 } else { 1428 PetscCall(PetscViewerASCIIPrintf(viewer, " Not using more complex smoothers.\n")); 1429 } 1430 if (jac->smoothtype == 3) { 1431 PetscCall(PetscViewerASCIIPrintf(viewer, " Euclid ILU(k) levels %" PetscInt_FMT "\n", jac->eu_level)); 1432 PetscCall(PetscViewerASCIIPrintf(viewer, " Euclid ILU(k) drop tolerance %g\n", (double)jac->eu_droptolerance)); 1433 PetscCall(PetscViewerASCIIPrintf(viewer, " Euclid ILU use Block-Jacobi? %" PetscInt_FMT "\n", jac->eu_bj)); 1434 } 1435 PetscCall(PetscViewerASCIIPrintf(viewer, " Measure type %s\n", HYPREBoomerAMGMeasureType[jac->measuretype])); 1436 PetscCall(PetscViewerASCIIPrintf(viewer, " Coarsen type %s\n", HYPREBoomerAMGCoarsenType[jac->coarsentype])); 1437 PetscCall(PetscViewerASCIIPrintf(viewer, " Interpolation type %s\n", jac->interptype != 100 ? HYPREBoomerAMGInterpType[jac->interptype] : "1pt")); 1438 if (jac->nodal_coarsening) PetscCall(PetscViewerASCIIPrintf(viewer, " Using nodal coarsening with HYPRE_BOOMERAMGSetNodal() %" PetscInt_FMT "\n", jac->nodal_coarsening)); 1439 if (jac->vec_interp_variant) { 1440 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE_BoomerAMGSetInterpVecVariant() %" PetscInt_FMT "\n", jac->vec_interp_variant)); 1441 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE_BoomerAMGSetInterpVecQMax() %" PetscInt_FMT "\n", jac->vec_interp_qmax)); 1442 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE_BoomerAMGSetSmoothInterpVectors() %d\n", jac->vec_interp_smooth)); 1443 } 1444 if (jac->nodal_relax) PetscCall(PetscViewerASCIIPrintf(viewer, " Using nodal relaxation via Schwarz smoothing on levels %" PetscInt_FMT "\n", jac->nodal_relax_levels)); 1445 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0) 1446 PetscCall(PetscViewerASCIIPrintf(viewer, " SpGEMM type %s\n", jac->spgemm_type)); 1447 #else 1448 PetscCall(PetscViewerASCIIPrintf(viewer, " SpGEMM type %s\n", "hypre")); 1449 #endif 1450 /* AIR */ 1451 if (jac->Rtype) { 1452 PetscCall(PetscViewerASCIIPrintf(viewer, " Using approximate ideal restriction type %" PetscInt_FMT "\n", jac->Rtype)); 1453 PetscCall(PetscViewerASCIIPrintf(viewer, " Threshold for R %g\n", (double)jac->Rstrongthreshold)); 1454 PetscCall(PetscViewerASCIIPrintf(viewer, " Filter for R %g\n", (double)jac->Rfilterthreshold)); 1455 PetscCall(PetscViewerASCIIPrintf(viewer, " A drop tolerance %g\n", (double)jac->Adroptol)); 1456 PetscCall(PetscViewerASCIIPrintf(viewer, " A drop type %" PetscInt_FMT "\n", jac->Adroptype)); 1457 } 1458 } 1459 PetscFunctionReturn(PETSC_SUCCESS); 1460 } 1461 1462 static PetscErrorCode PCSetFromOptions_HYPRE_ParaSails(PC pc, PetscOptionItems PetscOptionsObject) 1463 { 1464 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1465 PetscInt indx; 1466 PetscBool flag; 1467 const char *symtlist[] = {"nonsymmetric", "SPD", "nonsymmetric,SPD"}; 1468 1469 PetscFunctionBegin; 1470 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ParaSails Options"); 1471 PetscCall(PetscOptionsInt("-pc_hypre_parasails_nlevels", "Number of number of levels", "None", jac->nlevels, &jac->nlevels, 0)); 1472 PetscCall(PetscOptionsReal("-pc_hypre_parasails_thresh", "Threshold", "None", jac->threshold, &jac->threshold, &flag)); 1473 if (flag) PetscCallExternal(HYPRE_ParaSailsSetParams, jac->hsolver, jac->threshold, jac->nlevels); 1474 1475 PetscCall(PetscOptionsReal("-pc_hypre_parasails_filter", "filter", "None", jac->filter, &jac->filter, &flag)); 1476 if (flag) PetscCallExternal(HYPRE_ParaSailsSetFilter, jac->hsolver, jac->filter); 1477 1478 PetscCall(PetscOptionsReal("-pc_hypre_parasails_loadbal", "Load balance", "None", jac->loadbal, &jac->loadbal, &flag)); 1479 if (flag) PetscCallExternal(HYPRE_ParaSailsSetLoadbal, jac->hsolver, jac->loadbal); 1480 1481 PetscCall(PetscOptionsBool("-pc_hypre_parasails_logging", "Print info to screen", "None", (PetscBool)jac->logging, (PetscBool *)&jac->logging, &flag)); 1482 if (flag) PetscCallExternal(HYPRE_ParaSailsSetLogging, jac->hsolver, jac->logging); 1483 1484 PetscCall(PetscOptionsBool("-pc_hypre_parasails_reuse", "Reuse nonzero pattern in preconditioner", "None", (PetscBool)jac->ruse, (PetscBool *)&jac->ruse, &flag)); 1485 if (flag) PetscCallExternal(HYPRE_ParaSailsSetReuse, jac->hsolver, jac->ruse); 1486 1487 PetscCall(PetscOptionsEList("-pc_hypre_parasails_sym", "Symmetry of matrix and preconditioner", "None", symtlist, PETSC_STATIC_ARRAY_LENGTH(symtlist), symtlist[0], &indx, &flag)); 1488 if (flag) { 1489 jac->symt = indx; 1490 PetscCallExternal(HYPRE_ParaSailsSetSym, jac->hsolver, jac->symt); 1491 } 1492 1493 PetscOptionsHeadEnd(); 1494 PetscFunctionReturn(PETSC_SUCCESS); 1495 } 1496 1497 static PetscErrorCode PCView_HYPRE_ParaSails(PC pc, PetscViewer viewer) 1498 { 1499 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1500 PetscBool iascii; 1501 const char *symt = 0; 1502 1503 PetscFunctionBegin; 1504 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 1505 if (iascii) { 1506 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE ParaSails preconditioning\n")); 1507 PetscCall(PetscViewerASCIIPrintf(viewer, " nlevels %" PetscInt_FMT "\n", jac->nlevels)); 1508 PetscCall(PetscViewerASCIIPrintf(viewer, " threshold %g\n", (double)jac->threshold)); 1509 PetscCall(PetscViewerASCIIPrintf(viewer, " filter %g\n", (double)jac->filter)); 1510 PetscCall(PetscViewerASCIIPrintf(viewer, " load balance %g\n", (double)jac->loadbal)); 1511 PetscCall(PetscViewerASCIIPrintf(viewer, " reuse nonzero structure %s\n", PetscBools[jac->ruse])); 1512 PetscCall(PetscViewerASCIIPrintf(viewer, " print info to screen %s\n", PetscBools[jac->logging])); 1513 if (!jac->symt) symt = "nonsymmetric matrix and preconditioner"; 1514 else if (jac->symt == 1) symt = "SPD matrix and preconditioner"; 1515 else if (jac->symt == 2) symt = "nonsymmetric matrix but SPD preconditioner"; 1516 else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Unknown HYPRE ParaSails symmetric option %" PetscInt_FMT, jac->symt); 1517 PetscCall(PetscViewerASCIIPrintf(viewer, " %s\n", symt)); 1518 } 1519 PetscFunctionReturn(PETSC_SUCCESS); 1520 } 1521 1522 static PetscErrorCode PCSetFromOptions_HYPRE_AMS(PC pc, PetscOptionItems PetscOptionsObject) 1523 { 1524 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1525 PetscInt n; 1526 PetscBool flag, flag2, flag3, flag4; 1527 1528 PetscFunctionBegin; 1529 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE AMS Options"); 1530 PetscCall(PetscOptionsInt("-pc_hypre_ams_print_level", "Debugging output level for AMS", "None", jac->as_print, &jac->as_print, &flag)); 1531 if (flag) PetscCallExternal(HYPRE_AMSSetPrintLevel, jac->hsolver, jac->as_print); 1532 PetscCall(PetscOptionsInt("-pc_hypre_ams_max_iter", "Maximum number of AMS multigrid iterations within PCApply", "None", jac->as_max_iter, &jac->as_max_iter, &flag)); 1533 if (flag) PetscCallExternal(HYPRE_AMSSetMaxIter, jac->hsolver, jac->as_max_iter); 1534 PetscCall(PetscOptionsInt("-pc_hypre_ams_cycle_type", "Cycle type for AMS multigrid", "None", jac->ams_cycle_type, &jac->ams_cycle_type, &flag)); 1535 if (flag) PetscCallExternal(HYPRE_AMSSetCycleType, jac->hsolver, jac->ams_cycle_type); 1536 PetscCall(PetscOptionsReal("-pc_hypre_ams_tol", "Error tolerance for AMS multigrid", "None", jac->as_tol, &jac->as_tol, &flag)); 1537 if (flag) PetscCallExternal(HYPRE_AMSSetTol, jac->hsolver, jac->as_tol); 1538 PetscCall(PetscOptionsInt("-pc_hypre_ams_relax_type", "Relaxation type for AMS smoother", "None", jac->as_relax_type, &jac->as_relax_type, &flag)); 1539 PetscCall(PetscOptionsInt("-pc_hypre_ams_relax_times", "Number of relaxation steps for AMS smoother", "None", jac->as_relax_times, &jac->as_relax_times, &flag2)); 1540 PetscCall(PetscOptionsReal("-pc_hypre_ams_relax_weight", "Relaxation weight for AMS smoother", "None", jac->as_relax_weight, &jac->as_relax_weight, &flag3)); 1541 PetscCall(PetscOptionsReal("-pc_hypre_ams_omega", "SSOR coefficient for AMS smoother", "None", jac->as_omega, &jac->as_omega, &flag4)); 1542 if (flag || flag2 || flag3 || flag4) PetscCallExternal(HYPRE_AMSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega); 1543 PetscCall(PetscOptionsReal("-pc_hypre_ams_amg_alpha_theta", "Threshold for strong coupling of vector Poisson AMG solver", "None", jac->as_amg_alpha_theta, &jac->as_amg_alpha_theta, &flag)); 1544 n = 5; 1545 PetscCall(PetscOptionsIntArray("-pc_hypre_ams_amg_alpha_options", "AMG options for vector Poisson", "None", jac->as_amg_alpha_opts, &n, &flag2)); 1546 if (flag || flag2) { 1547 PetscCallExternal(HYPRE_AMSSetAlphaAMGOptions, jac->hsolver, jac->as_amg_alpha_opts[0], /* AMG coarsen type */ 1548 jac->as_amg_alpha_opts[1], /* AMG agg_levels */ 1549 jac->as_amg_alpha_opts[2], /* AMG relax_type */ 1550 jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3], /* AMG interp_type */ 1551 jac->as_amg_alpha_opts[4]); /* AMG Pmax */ 1552 } 1553 PetscCall(PetscOptionsReal("-pc_hypre_ams_amg_beta_theta", "Threshold for strong coupling of scalar Poisson AMG solver", "None", jac->as_amg_beta_theta, &jac->as_amg_beta_theta, &flag)); 1554 n = 5; 1555 PetscCall(PetscOptionsIntArray("-pc_hypre_ams_amg_beta_options", "AMG options for scalar Poisson solver", "None", jac->as_amg_beta_opts, &n, &flag2)); 1556 if (flag || flag2) { 1557 PetscCallExternal(HYPRE_AMSSetBetaAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */ 1558 jac->as_amg_beta_opts[1], /* AMG agg_levels */ 1559 jac->as_amg_beta_opts[2], /* AMG relax_type */ 1560 jac->as_amg_beta_theta, jac->as_amg_beta_opts[3], /* AMG interp_type */ 1561 jac->as_amg_beta_opts[4]); /* AMG Pmax */ 1562 } 1563 PetscCall(PetscOptionsInt("-pc_hypre_ams_projection_frequency", "Frequency at which a projection onto the compatible subspace for problems with zero conductivity regions is performed", "None", jac->ams_proj_freq, &jac->ams_proj_freq, &flag)); 1564 if (flag) { /* override HYPRE's default only if the options is used */ 1565 PetscCallExternal(HYPRE_AMSSetProjectionFrequency, jac->hsolver, jac->ams_proj_freq); 1566 } 1567 PetscOptionsHeadEnd(); 1568 PetscFunctionReturn(PETSC_SUCCESS); 1569 } 1570 1571 static PetscErrorCode PCView_HYPRE_AMS(PC pc, PetscViewer viewer) 1572 { 1573 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1574 PetscBool iascii; 1575 1576 PetscFunctionBegin; 1577 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 1578 if (iascii) { 1579 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE AMS preconditioning\n")); 1580 PetscCall(PetscViewerASCIIPrintf(viewer, " subspace iterations per application %" PetscInt_FMT "\n", jac->as_max_iter)); 1581 PetscCall(PetscViewerASCIIPrintf(viewer, " subspace cycle type %" PetscInt_FMT "\n", jac->ams_cycle_type)); 1582 PetscCall(PetscViewerASCIIPrintf(viewer, " subspace iteration tolerance %g\n", (double)jac->as_tol)); 1583 PetscCall(PetscViewerASCIIPrintf(viewer, " smoother type %" PetscInt_FMT "\n", jac->as_relax_type)); 1584 PetscCall(PetscViewerASCIIPrintf(viewer, " number of smoothing steps %" PetscInt_FMT "\n", jac->as_relax_times)); 1585 PetscCall(PetscViewerASCIIPrintf(viewer, " smoother weight %g\n", (double)jac->as_relax_weight)); 1586 PetscCall(PetscViewerASCIIPrintf(viewer, " smoother omega %g\n", (double)jac->as_omega)); 1587 if (jac->alpha_Poisson) { 1588 PetscCall(PetscViewerASCIIPrintf(viewer, " vector Poisson solver (passed in by user)\n")); 1589 } else { 1590 PetscCall(PetscViewerASCIIPrintf(viewer, " vector Poisson solver (computed) \n")); 1591 } 1592 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG coarsening type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[0])); 1593 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[1])); 1594 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG relaxation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[2])); 1595 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG interpolation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[3])); 1596 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[4])); 1597 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG strength threshold %g\n", (double)jac->as_amg_alpha_theta)); 1598 if (!jac->ams_beta_is_zero) { 1599 if (jac->beta_Poisson) { 1600 PetscCall(PetscViewerASCIIPrintf(viewer, " scalar Poisson solver (passed in by user)\n")); 1601 } else { 1602 PetscCall(PetscViewerASCIIPrintf(viewer, " scalar Poisson solver (computed) \n")); 1603 } 1604 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG coarsening type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[0])); 1605 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_beta_opts[1])); 1606 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG relaxation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[2])); 1607 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG interpolation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[3])); 1608 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_beta_opts[4])); 1609 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG strength threshold %g\n", (double)jac->as_amg_beta_theta)); 1610 if (jac->ams_beta_is_zero_part) PetscCall(PetscViewerASCIIPrintf(viewer, " compatible subspace projection frequency %" PetscInt_FMT " (-1 HYPRE uses default)\n", jac->ams_proj_freq)); 1611 } else { 1612 PetscCall(PetscViewerASCIIPrintf(viewer, " scalar Poisson solver not used (zero-conductivity everywhere) \n")); 1613 } 1614 } 1615 PetscFunctionReturn(PETSC_SUCCESS); 1616 } 1617 1618 static PetscErrorCode PCSetFromOptions_HYPRE_ADS(PC pc, PetscOptionItems PetscOptionsObject) 1619 { 1620 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1621 PetscInt n; 1622 PetscBool flag, flag2, flag3, flag4; 1623 1624 PetscFunctionBegin; 1625 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ADS Options"); 1626 PetscCall(PetscOptionsInt("-pc_hypre_ads_print_level", "Debugging output level for ADS", "None", jac->as_print, &jac->as_print, &flag)); 1627 if (flag) PetscCallExternal(HYPRE_ADSSetPrintLevel, jac->hsolver, jac->as_print); 1628 PetscCall(PetscOptionsInt("-pc_hypre_ads_max_iter", "Maximum number of ADS multigrid iterations within PCApply", "None", jac->as_max_iter, &jac->as_max_iter, &flag)); 1629 if (flag) PetscCallExternal(HYPRE_ADSSetMaxIter, jac->hsolver, jac->as_max_iter); 1630 PetscCall(PetscOptionsInt("-pc_hypre_ads_cycle_type", "Cycle type for ADS multigrid", "None", jac->ads_cycle_type, &jac->ads_cycle_type, &flag)); 1631 if (flag) PetscCallExternal(HYPRE_ADSSetCycleType, jac->hsolver, jac->ads_cycle_type); 1632 PetscCall(PetscOptionsReal("-pc_hypre_ads_tol", "Error tolerance for ADS multigrid", "None", jac->as_tol, &jac->as_tol, &flag)); 1633 if (flag) PetscCallExternal(HYPRE_ADSSetTol, jac->hsolver, jac->as_tol); 1634 PetscCall(PetscOptionsInt("-pc_hypre_ads_relax_type", "Relaxation type for ADS smoother", "None", jac->as_relax_type, &jac->as_relax_type, &flag)); 1635 PetscCall(PetscOptionsInt("-pc_hypre_ads_relax_times", "Number of relaxation steps for ADS smoother", "None", jac->as_relax_times, &jac->as_relax_times, &flag2)); 1636 PetscCall(PetscOptionsReal("-pc_hypre_ads_relax_weight", "Relaxation weight for ADS smoother", "None", jac->as_relax_weight, &jac->as_relax_weight, &flag3)); 1637 PetscCall(PetscOptionsReal("-pc_hypre_ads_omega", "SSOR coefficient for ADS smoother", "None", jac->as_omega, &jac->as_omega, &flag4)); 1638 if (flag || flag2 || flag3 || flag4) PetscCallExternal(HYPRE_ADSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega); 1639 PetscCall(PetscOptionsReal("-pc_hypre_ads_ams_theta", "Threshold for strong coupling of AMS solver inside ADS", "None", jac->as_amg_alpha_theta, &jac->as_amg_alpha_theta, &flag)); 1640 n = 5; 1641 PetscCall(PetscOptionsIntArray("-pc_hypre_ads_ams_options", "AMG options for AMS solver inside ADS", "None", jac->as_amg_alpha_opts, &n, &flag2)); 1642 PetscCall(PetscOptionsInt("-pc_hypre_ads_ams_cycle_type", "Cycle type for AMS solver inside ADS", "None", jac->ams_cycle_type, &jac->ams_cycle_type, &flag3)); 1643 if (flag || flag2 || flag3) { 1644 PetscCallExternal(HYPRE_ADSSetAMSOptions, jac->hsolver, jac->ams_cycle_type, /* AMS cycle type */ 1645 jac->as_amg_alpha_opts[0], /* AMG coarsen type */ 1646 jac->as_amg_alpha_opts[1], /* AMG agg_levels */ 1647 jac->as_amg_alpha_opts[2], /* AMG relax_type */ 1648 jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3], /* AMG interp_type */ 1649 jac->as_amg_alpha_opts[4]); /* AMG Pmax */ 1650 } 1651 PetscCall(PetscOptionsReal("-pc_hypre_ads_amg_theta", "Threshold for strong coupling of vector AMG solver inside ADS", "None", jac->as_amg_beta_theta, &jac->as_amg_beta_theta, &flag)); 1652 n = 5; 1653 PetscCall(PetscOptionsIntArray("-pc_hypre_ads_amg_options", "AMG options for vector AMG solver inside ADS", "None", jac->as_amg_beta_opts, &n, &flag2)); 1654 if (flag || flag2) { 1655 PetscCallExternal(HYPRE_ADSSetAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */ 1656 jac->as_amg_beta_opts[1], /* AMG agg_levels */ 1657 jac->as_amg_beta_opts[2], /* AMG relax_type */ 1658 jac->as_amg_beta_theta, jac->as_amg_beta_opts[3], /* AMG interp_type */ 1659 jac->as_amg_beta_opts[4]); /* AMG Pmax */ 1660 } 1661 PetscOptionsHeadEnd(); 1662 PetscFunctionReturn(PETSC_SUCCESS); 1663 } 1664 1665 static PetscErrorCode PCView_HYPRE_ADS(PC pc, PetscViewer viewer) 1666 { 1667 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1668 PetscBool iascii; 1669 1670 PetscFunctionBegin; 1671 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 1672 if (iascii) { 1673 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE ADS preconditioning\n")); 1674 PetscCall(PetscViewerASCIIPrintf(viewer, " subspace iterations per application %" PetscInt_FMT "\n", jac->as_max_iter)); 1675 PetscCall(PetscViewerASCIIPrintf(viewer, " subspace cycle type %" PetscInt_FMT "\n", jac->ads_cycle_type)); 1676 PetscCall(PetscViewerASCIIPrintf(viewer, " subspace iteration tolerance %g\n", (double)jac->as_tol)); 1677 PetscCall(PetscViewerASCIIPrintf(viewer, " smoother type %" PetscInt_FMT "\n", jac->as_relax_type)); 1678 PetscCall(PetscViewerASCIIPrintf(viewer, " number of smoothing steps %" PetscInt_FMT "\n", jac->as_relax_times)); 1679 PetscCall(PetscViewerASCIIPrintf(viewer, " smoother weight %g\n", (double)jac->as_relax_weight)); 1680 PetscCall(PetscViewerASCIIPrintf(viewer, " smoother omega %g\n", (double)jac->as_omega)); 1681 PetscCall(PetscViewerASCIIPrintf(viewer, " AMS solver using boomerAMG\n")); 1682 PetscCall(PetscViewerASCIIPrintf(viewer, " subspace cycle type %" PetscInt_FMT "\n", jac->ams_cycle_type)); 1683 PetscCall(PetscViewerASCIIPrintf(viewer, " coarsening type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[0])); 1684 PetscCall(PetscViewerASCIIPrintf(viewer, " levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[1])); 1685 PetscCall(PetscViewerASCIIPrintf(viewer, " relaxation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[2])); 1686 PetscCall(PetscViewerASCIIPrintf(viewer, " interpolation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[3])); 1687 PetscCall(PetscViewerASCIIPrintf(viewer, " max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[4])); 1688 PetscCall(PetscViewerASCIIPrintf(viewer, " strength threshold %g\n", (double)jac->as_amg_alpha_theta)); 1689 PetscCall(PetscViewerASCIIPrintf(viewer, " vector Poisson solver using boomerAMG\n")); 1690 PetscCall(PetscViewerASCIIPrintf(viewer, " coarsening type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[0])); 1691 PetscCall(PetscViewerASCIIPrintf(viewer, " levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_beta_opts[1])); 1692 PetscCall(PetscViewerASCIIPrintf(viewer, " relaxation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[2])); 1693 PetscCall(PetscViewerASCIIPrintf(viewer, " interpolation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[3])); 1694 PetscCall(PetscViewerASCIIPrintf(viewer, " max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_beta_opts[4])); 1695 PetscCall(PetscViewerASCIIPrintf(viewer, " strength threshold %g\n", (double)jac->as_amg_beta_theta)); 1696 } 1697 PetscFunctionReturn(PETSC_SUCCESS); 1698 } 1699 1700 static PetscErrorCode PCHYPRESetDiscreteGradient_HYPRE(PC pc, Mat G) 1701 { 1702 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1703 PetscBool ishypre; 1704 1705 PetscFunctionBegin; 1706 PetscCall(PetscObjectTypeCompare((PetscObject)G, MATHYPRE, &ishypre)); 1707 if (ishypre) { 1708 PetscCall(PetscObjectReference((PetscObject)G)); 1709 PetscCall(MatDestroy(&jac->G)); 1710 jac->G = G; 1711 } else { 1712 PetscCall(MatDestroy(&jac->G)); 1713 PetscCall(MatConvert(G, MATHYPRE, MAT_INITIAL_MATRIX, &jac->G)); 1714 } 1715 PetscFunctionReturn(PETSC_SUCCESS); 1716 } 1717 1718 /*@ 1719 PCHYPRESetDiscreteGradient - Set discrete gradient matrix for `PCHYPRE` type of ams or ads 1720 1721 Collective 1722 1723 Input Parameters: 1724 + pc - the preconditioning context 1725 - G - the discrete gradient 1726 1727 Level: intermediate 1728 1729 Notes: 1730 G should have as many rows as the number of edges and as many columns as the number of vertices in the mesh 1731 1732 Each row of G has 2 nonzeros, with column indexes being the global indexes of edge's endpoints: matrix entries are +1 and -1 depending on edge orientation 1733 1734 Developer Notes: 1735 This automatically converts the matrix to `MATHYPRE` if it is not already of that type 1736 1737 .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteCurl()` 1738 @*/ 1739 PetscErrorCode PCHYPRESetDiscreteGradient(PC pc, Mat G) 1740 { 1741 PetscFunctionBegin; 1742 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1743 PetscValidHeaderSpecific(G, MAT_CLASSID, 2); 1744 PetscCheckSameComm(pc, 1, G, 2); 1745 PetscTryMethod(pc, "PCHYPRESetDiscreteGradient_C", (PC, Mat), (pc, G)); 1746 PetscFunctionReturn(PETSC_SUCCESS); 1747 } 1748 1749 static PetscErrorCode PCHYPRESetDiscreteCurl_HYPRE(PC pc, Mat C) 1750 { 1751 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1752 PetscBool ishypre; 1753 1754 PetscFunctionBegin; 1755 PetscCall(PetscObjectTypeCompare((PetscObject)C, MATHYPRE, &ishypre)); 1756 if (ishypre) { 1757 PetscCall(PetscObjectReference((PetscObject)C)); 1758 PetscCall(MatDestroy(&jac->C)); 1759 jac->C = C; 1760 } else { 1761 PetscCall(MatDestroy(&jac->C)); 1762 PetscCall(MatConvert(C, MATHYPRE, MAT_INITIAL_MATRIX, &jac->C)); 1763 } 1764 PetscFunctionReturn(PETSC_SUCCESS); 1765 } 1766 1767 /*@ 1768 PCHYPRESetDiscreteCurl - Set discrete curl matrix for `PCHYPRE` type of ads 1769 1770 Collective 1771 1772 Input Parameters: 1773 + pc - the preconditioning context 1774 - C - the discrete curl 1775 1776 Level: intermediate 1777 1778 Notes: 1779 C should have as many rows as the number of faces and as many columns as the number of edges in the mesh 1780 1781 Each row of C has as many nonzeros as the number of edges of a face, with column indexes being the global indexes of the corresponding edge: matrix entries are +1 and -1 depending on edge orientation with respect to the face orientation 1782 1783 Developer Notes: 1784 This automatically converts the matrix to `MATHYPRE` if it is not already of that type 1785 1786 If this is only for `PCHYPRE` type of ads it should be called `PCHYPREADSSetDiscreteCurl()` 1787 1788 .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteGradient()` 1789 @*/ 1790 PetscErrorCode PCHYPRESetDiscreteCurl(PC pc, Mat C) 1791 { 1792 PetscFunctionBegin; 1793 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1794 PetscValidHeaderSpecific(C, MAT_CLASSID, 2); 1795 PetscCheckSameComm(pc, 1, C, 2); 1796 PetscTryMethod(pc, "PCHYPRESetDiscreteCurl_C", (PC, Mat), (pc, C)); 1797 PetscFunctionReturn(PETSC_SUCCESS); 1798 } 1799 1800 static PetscErrorCode PCHYPRESetInterpolations_HYPRE(PC pc, PetscInt dim, Mat RT_PiFull, Mat RT_Pi[], Mat ND_PiFull, Mat ND_Pi[]) 1801 { 1802 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1803 PetscBool ishypre; 1804 PetscInt i; 1805 1806 PetscFunctionBegin; 1807 PetscCall(MatDestroy(&jac->RT_PiFull)); 1808 PetscCall(MatDestroy(&jac->ND_PiFull)); 1809 for (i = 0; i < 3; ++i) { 1810 PetscCall(MatDestroy(&jac->RT_Pi[i])); 1811 PetscCall(MatDestroy(&jac->ND_Pi[i])); 1812 } 1813 1814 jac->dim = dim; 1815 if (RT_PiFull) { 1816 PetscCall(PetscObjectTypeCompare((PetscObject)RT_PiFull, MATHYPRE, &ishypre)); 1817 if (ishypre) { 1818 PetscCall(PetscObjectReference((PetscObject)RT_PiFull)); 1819 jac->RT_PiFull = RT_PiFull; 1820 } else { 1821 PetscCall(MatConvert(RT_PiFull, MATHYPRE, MAT_INITIAL_MATRIX, &jac->RT_PiFull)); 1822 } 1823 } 1824 if (RT_Pi) { 1825 for (i = 0; i < dim; ++i) { 1826 if (RT_Pi[i]) { 1827 PetscCall(PetscObjectTypeCompare((PetscObject)RT_Pi[i], MATHYPRE, &ishypre)); 1828 if (ishypre) { 1829 PetscCall(PetscObjectReference((PetscObject)RT_Pi[i])); 1830 jac->RT_Pi[i] = RT_Pi[i]; 1831 } else { 1832 PetscCall(MatConvert(RT_Pi[i], MATHYPRE, MAT_INITIAL_MATRIX, &jac->RT_Pi[i])); 1833 } 1834 } 1835 } 1836 } 1837 if (ND_PiFull) { 1838 PetscCall(PetscObjectTypeCompare((PetscObject)ND_PiFull, MATHYPRE, &ishypre)); 1839 if (ishypre) { 1840 PetscCall(PetscObjectReference((PetscObject)ND_PiFull)); 1841 jac->ND_PiFull = ND_PiFull; 1842 } else { 1843 PetscCall(MatConvert(ND_PiFull, MATHYPRE, MAT_INITIAL_MATRIX, &jac->ND_PiFull)); 1844 } 1845 } 1846 if (ND_Pi) { 1847 for (i = 0; i < dim; ++i) { 1848 if (ND_Pi[i]) { 1849 PetscCall(PetscObjectTypeCompare((PetscObject)ND_Pi[i], MATHYPRE, &ishypre)); 1850 if (ishypre) { 1851 PetscCall(PetscObjectReference((PetscObject)ND_Pi[i])); 1852 jac->ND_Pi[i] = ND_Pi[i]; 1853 } else { 1854 PetscCall(MatConvert(ND_Pi[i], MATHYPRE, MAT_INITIAL_MATRIX, &jac->ND_Pi[i])); 1855 } 1856 } 1857 } 1858 } 1859 PetscFunctionReturn(PETSC_SUCCESS); 1860 } 1861 1862 /*@ 1863 PCHYPRESetInterpolations - Set interpolation matrices for `PCHYPRE` type of ams or ads 1864 1865 Collective 1866 1867 Input Parameters: 1868 + pc - the preconditioning context 1869 . dim - the dimension of the problem, only used in AMS 1870 . RT_PiFull - Raviart-Thomas interpolation matrix 1871 . RT_Pi - x/y/z component of Raviart-Thomas interpolation matrix 1872 . ND_PiFull - Nedelec interpolation matrix 1873 - ND_Pi - x/y/z component of Nedelec interpolation matrix 1874 1875 Level: intermediate 1876 1877 Notes: 1878 For AMS, only Nedelec interpolation matrices are needed, the Raviart-Thomas interpolation matrices can be set to NULL. 1879 1880 For ADS, both type of interpolation matrices are needed. 1881 1882 Developer Notes: 1883 This automatically converts the matrix to `MATHYPRE` if it is not already of that type 1884 1885 .seealso: [](ch_ksp), `PCHYPRE` 1886 @*/ 1887 PetscErrorCode PCHYPRESetInterpolations(PC pc, PetscInt dim, Mat RT_PiFull, Mat RT_Pi[], Mat ND_PiFull, Mat ND_Pi[]) 1888 { 1889 PetscInt i; 1890 1891 PetscFunctionBegin; 1892 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1893 if (RT_PiFull) { 1894 PetscValidHeaderSpecific(RT_PiFull, MAT_CLASSID, 3); 1895 PetscCheckSameComm(pc, 1, RT_PiFull, 3); 1896 } 1897 if (RT_Pi) { 1898 PetscAssertPointer(RT_Pi, 4); 1899 for (i = 0; i < dim; ++i) { 1900 if (RT_Pi[i]) { 1901 PetscValidHeaderSpecific(RT_Pi[i], MAT_CLASSID, 4); 1902 PetscCheckSameComm(pc, 1, RT_Pi[i], 4); 1903 } 1904 } 1905 } 1906 if (ND_PiFull) { 1907 PetscValidHeaderSpecific(ND_PiFull, MAT_CLASSID, 5); 1908 PetscCheckSameComm(pc, 1, ND_PiFull, 5); 1909 } 1910 if (ND_Pi) { 1911 PetscAssertPointer(ND_Pi, 6); 1912 for (i = 0; i < dim; ++i) { 1913 if (ND_Pi[i]) { 1914 PetscValidHeaderSpecific(ND_Pi[i], MAT_CLASSID, 6); 1915 PetscCheckSameComm(pc, 1, ND_Pi[i], 6); 1916 } 1917 } 1918 } 1919 PetscTryMethod(pc, "PCHYPRESetInterpolations_C", (PC, PetscInt, Mat, Mat[], Mat, Mat[]), (pc, dim, RT_PiFull, RT_Pi, ND_PiFull, ND_Pi)); 1920 PetscFunctionReturn(PETSC_SUCCESS); 1921 } 1922 1923 static PetscErrorCode PCHYPRESetPoissonMatrix_HYPRE(PC pc, Mat A, PetscBool isalpha) 1924 { 1925 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1926 PetscBool ishypre; 1927 1928 PetscFunctionBegin; 1929 PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre)); 1930 if (ishypre) { 1931 if (isalpha) { 1932 PetscCall(PetscObjectReference((PetscObject)A)); 1933 PetscCall(MatDestroy(&jac->alpha_Poisson)); 1934 jac->alpha_Poisson = A; 1935 } else { 1936 if (A) { 1937 PetscCall(PetscObjectReference((PetscObject)A)); 1938 } else { 1939 jac->ams_beta_is_zero = PETSC_TRUE; 1940 } 1941 PetscCall(MatDestroy(&jac->beta_Poisson)); 1942 jac->beta_Poisson = A; 1943 } 1944 } else { 1945 if (isalpha) { 1946 PetscCall(MatDestroy(&jac->alpha_Poisson)); 1947 PetscCall(MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &jac->alpha_Poisson)); 1948 } else { 1949 if (A) { 1950 PetscCall(MatDestroy(&jac->beta_Poisson)); 1951 PetscCall(MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &jac->beta_Poisson)); 1952 } else { 1953 PetscCall(MatDestroy(&jac->beta_Poisson)); 1954 jac->ams_beta_is_zero = PETSC_TRUE; 1955 } 1956 } 1957 } 1958 PetscFunctionReturn(PETSC_SUCCESS); 1959 } 1960 1961 /*@ 1962 PCHYPRESetAlphaPoissonMatrix - Set vector Poisson matrix for `PCHYPRE` of type ams 1963 1964 Collective 1965 1966 Input Parameters: 1967 + pc - the preconditioning context 1968 - A - the matrix 1969 1970 Level: intermediate 1971 1972 Note: 1973 A should be obtained by discretizing the vector valued Poisson problem with linear finite elements 1974 1975 Developer Notes: 1976 This automatically converts the matrix to `MATHYPRE` if it is not already of that type 1977 1978 If this is only for `PCHYPRE` type of ams it should be called `PCHYPREAMSSetAlphaPoissonMatrix()` 1979 1980 .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetBetaPoissonMatrix()` 1981 @*/ 1982 PetscErrorCode PCHYPRESetAlphaPoissonMatrix(PC pc, Mat A) 1983 { 1984 PetscFunctionBegin; 1985 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1986 PetscValidHeaderSpecific(A, MAT_CLASSID, 2); 1987 PetscCheckSameComm(pc, 1, A, 2); 1988 PetscTryMethod(pc, "PCHYPRESetPoissonMatrix_C", (PC, Mat, PetscBool), (pc, A, PETSC_TRUE)); 1989 PetscFunctionReturn(PETSC_SUCCESS); 1990 } 1991 1992 /*@ 1993 PCHYPRESetBetaPoissonMatrix - Set Poisson matrix for `PCHYPRE` of type ams 1994 1995 Collective 1996 1997 Input Parameters: 1998 + pc - the preconditioning context 1999 - A - the matrix, or NULL to turn it off 2000 2001 Level: intermediate 2002 2003 Note: 2004 A should be obtained by discretizing the Poisson problem with linear finite elements. 2005 2006 Developer Notes: 2007 This automatically converts the matrix to `MATHYPRE` if it is not already of that type 2008 2009 If this is only for `PCHYPRE` type of ams it should be called `PCHYPREAMSPCHYPRESetBetaPoissonMatrix()` 2010 2011 .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()` 2012 @*/ 2013 PetscErrorCode PCHYPRESetBetaPoissonMatrix(PC pc, Mat A) 2014 { 2015 PetscFunctionBegin; 2016 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2017 if (A) { 2018 PetscValidHeaderSpecific(A, MAT_CLASSID, 2); 2019 PetscCheckSameComm(pc, 1, A, 2); 2020 } 2021 PetscTryMethod(pc, "PCHYPRESetPoissonMatrix_C", (PC, Mat, PetscBool), (pc, A, PETSC_FALSE)); 2022 PetscFunctionReturn(PETSC_SUCCESS); 2023 } 2024 2025 static PetscErrorCode PCHYPRESetEdgeConstantVectors_HYPRE(PC pc, Vec ozz, Vec zoz, Vec zzo) 2026 { 2027 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 2028 2029 PetscFunctionBegin; 2030 /* throw away any vector if already set */ 2031 PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[0])); 2032 PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[1])); 2033 PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[2])); 2034 PetscCall(VecHYPRE_IJVectorCreate(ozz->map, &jac->constants[0])); 2035 PetscCall(VecHYPRE_IJVectorCopy(ozz, jac->constants[0])); 2036 PetscCall(VecHYPRE_IJVectorCreate(zoz->map, &jac->constants[1])); 2037 PetscCall(VecHYPRE_IJVectorCopy(zoz, jac->constants[1])); 2038 jac->dim = 2; 2039 if (zzo) { 2040 PetscCall(VecHYPRE_IJVectorCreate(zzo->map, &jac->constants[2])); 2041 PetscCall(VecHYPRE_IJVectorCopy(zzo, jac->constants[2])); 2042 jac->dim++; 2043 } 2044 PetscFunctionReturn(PETSC_SUCCESS); 2045 } 2046 2047 /*@ 2048 PCHYPRESetEdgeConstantVectors - Set the representation of the constant vector fields in the edge element basis for `PCHYPRE` of type ams 2049 2050 Collective 2051 2052 Input Parameters: 2053 + pc - the preconditioning context 2054 . ozz - vector representing (1,0,0) (or (1,0) in 2D) 2055 . zoz - vector representing (0,1,0) (or (0,1) in 2D) 2056 - zzo - vector representing (0,0,1) (use NULL in 2D) 2057 2058 Level: intermediate 2059 2060 Developer Notes: 2061 If this is only for `PCHYPRE` type of ams it should be called `PCHYPREAMSSetEdgeConstantVectors()` 2062 2063 .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()` 2064 @*/ 2065 PetscErrorCode PCHYPRESetEdgeConstantVectors(PC pc, Vec ozz, Vec zoz, Vec zzo) 2066 { 2067 PetscFunctionBegin; 2068 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2069 PetscValidHeaderSpecific(ozz, VEC_CLASSID, 2); 2070 PetscValidHeaderSpecific(zoz, VEC_CLASSID, 3); 2071 if (zzo) PetscValidHeaderSpecific(zzo, VEC_CLASSID, 4); 2072 PetscCheckSameComm(pc, 1, ozz, 2); 2073 PetscCheckSameComm(pc, 1, zoz, 3); 2074 if (zzo) PetscCheckSameComm(pc, 1, zzo, 4); 2075 PetscTryMethod(pc, "PCHYPRESetEdgeConstantVectors_C", (PC, Vec, Vec, Vec), (pc, ozz, zoz, zzo)); 2076 PetscFunctionReturn(PETSC_SUCCESS); 2077 } 2078 2079 static PetscErrorCode PCHYPREAMSSetInteriorNodes_HYPRE(PC pc, Vec interior) 2080 { 2081 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 2082 2083 PetscFunctionBegin; 2084 PetscCall(VecHYPRE_IJVectorDestroy(&jac->interior)); 2085 PetscCall(VecHYPRE_IJVectorCreate(interior->map, &jac->interior)); 2086 PetscCall(VecHYPRE_IJVectorCopy(interior, jac->interior)); 2087 jac->ams_beta_is_zero_part = PETSC_TRUE; 2088 PetscFunctionReturn(PETSC_SUCCESS); 2089 } 2090 2091 /*@ 2092 PCHYPREAMSSetInteriorNodes - Set the list of interior nodes to a zero-conductivity region for `PCHYPRE` of type ams 2093 2094 Collective 2095 2096 Input Parameters: 2097 + pc - the preconditioning context 2098 - interior - vector. node is interior if its entry in the array is 1.0. 2099 2100 Level: intermediate 2101 2102 Note: 2103 This calls `HYPRE_AMSSetInteriorNodes()` 2104 2105 .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()` 2106 @*/ 2107 PetscErrorCode PCHYPREAMSSetInteriorNodes(PC pc, Vec interior) 2108 { 2109 PetscFunctionBegin; 2110 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2111 PetscValidHeaderSpecific(interior, VEC_CLASSID, 2); 2112 PetscCheckSameComm(pc, 1, interior, 2); 2113 PetscTryMethod(pc, "PCHYPREAMSSetInteriorNodes_C", (PC, Vec), (pc, interior)); 2114 PetscFunctionReturn(PETSC_SUCCESS); 2115 } 2116 2117 static PetscErrorCode PCSetCoordinates_HYPRE(PC pc, PetscInt dim, PetscInt nloc, PetscReal *coords) 2118 { 2119 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 2120 Vec tv; 2121 PetscInt i; 2122 2123 PetscFunctionBegin; 2124 /* throw away any coordinate vector if already set */ 2125 PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[0])); 2126 PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[1])); 2127 PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[2])); 2128 jac->dim = dim; 2129 2130 /* compute IJ vector for coordinates */ 2131 PetscCall(VecCreate(PetscObjectComm((PetscObject)pc), &tv)); 2132 PetscCall(VecSetType(tv, VECSTANDARD)); 2133 PetscCall(VecSetSizes(tv, nloc, PETSC_DECIDE)); 2134 for (i = 0; i < dim; i++) { 2135 PetscScalar *array; 2136 PetscInt j; 2137 2138 PetscCall(VecHYPRE_IJVectorCreate(tv->map, &jac->coords[i])); 2139 PetscCall(VecGetArrayWrite(tv, &array)); 2140 for (j = 0; j < nloc; j++) array[j] = coords[j * dim + i]; 2141 PetscCall(VecRestoreArrayWrite(tv, &array)); 2142 PetscCall(VecHYPRE_IJVectorCopy(tv, jac->coords[i])); 2143 } 2144 PetscCall(VecDestroy(&tv)); 2145 PetscFunctionReturn(PETSC_SUCCESS); 2146 } 2147 2148 static PetscErrorCode PCHYPREGetType_HYPRE(PC pc, const char *name[]) 2149 { 2150 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 2151 2152 PetscFunctionBegin; 2153 *name = jac->hypre_type; 2154 PetscFunctionReturn(PETSC_SUCCESS); 2155 } 2156 2157 static PetscErrorCode PCHYPRESetType_HYPRE(PC pc, const char name[]) 2158 { 2159 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 2160 PetscBool flag; 2161 2162 PetscFunctionBegin; 2163 if (jac->hypre_type) { 2164 PetscCall(PetscStrcmp(jac->hypre_type, name, &flag)); 2165 PetscCheck(flag, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Cannot reset the HYPRE preconditioner type once it has been set"); 2166 PetscFunctionReturn(PETSC_SUCCESS); 2167 } else { 2168 PetscCall(PetscStrallocpy(name, &jac->hypre_type)); 2169 } 2170 2171 jac->maxiter = PETSC_DEFAULT; 2172 jac->tol = PETSC_DEFAULT; 2173 jac->printstatistics = PetscLogPrintInfo; 2174 2175 PetscCall(PetscStrcmp("ilu", jac->hypre_type, &flag)); 2176 if (flag) { 2177 PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre)); 2178 PetscCallExternal(HYPRE_ILUCreate, &jac->hsolver); 2179 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ILU; 2180 pc->ops->view = PCView_HYPRE_ILU; 2181 jac->destroy = HYPRE_ILUDestroy; 2182 jac->setup = HYPRE_ILUSetup; 2183 jac->solve = HYPRE_ILUSolve; 2184 jac->factorrowsize = PETSC_DEFAULT; 2185 PetscFunctionReturn(PETSC_SUCCESS); 2186 } 2187 2188 PetscCall(PetscStrcmp("pilut", jac->hypre_type, &flag)); 2189 if (flag) { 2190 PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre)); 2191 PetscCallExternal(HYPRE_ParCSRPilutCreate, jac->comm_hypre, &jac->hsolver); 2192 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_Pilut; 2193 pc->ops->view = PCView_HYPRE_Pilut; 2194 jac->destroy = HYPRE_ParCSRPilutDestroy; 2195 jac->setup = HYPRE_ParCSRPilutSetup; 2196 jac->solve = HYPRE_ParCSRPilutSolve; 2197 jac->factorrowsize = PETSC_DEFAULT; 2198 PetscFunctionReturn(PETSC_SUCCESS); 2199 } 2200 PetscCall(PetscStrcmp("euclid", jac->hypre_type, &flag)); 2201 if (flag) { 2202 #if defined(PETSC_USE_64BIT_INDICES) 2203 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Hypre Euclid does not support 64-bit indices"); 2204 #endif 2205 PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre)); 2206 PetscCallExternal(HYPRE_EuclidCreate, jac->comm_hypre, &jac->hsolver); 2207 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_Euclid; 2208 pc->ops->view = PCView_HYPRE_Euclid; 2209 jac->destroy = HYPRE_EuclidDestroy; 2210 jac->setup = HYPRE_EuclidSetup; 2211 jac->solve = HYPRE_EuclidSolve; 2212 jac->factorrowsize = PETSC_DEFAULT; 2213 jac->eu_level = PETSC_DEFAULT; /* default */ 2214 PetscFunctionReturn(PETSC_SUCCESS); 2215 } 2216 PetscCall(PetscStrcmp("parasails", jac->hypre_type, &flag)); 2217 if (flag) { 2218 PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre)); 2219 PetscCallExternal(HYPRE_ParaSailsCreate, jac->comm_hypre, &jac->hsolver); 2220 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ParaSails; 2221 pc->ops->view = PCView_HYPRE_ParaSails; 2222 jac->destroy = HYPRE_ParaSailsDestroy; 2223 jac->setup = HYPRE_ParaSailsSetup; 2224 jac->solve = HYPRE_ParaSailsSolve; 2225 /* initialize */ 2226 jac->nlevels = 1; 2227 jac->threshold = .1; 2228 jac->filter = .1; 2229 jac->loadbal = 0; 2230 if (PetscLogPrintInfo) jac->logging = (int)PETSC_TRUE; 2231 else jac->logging = (int)PETSC_FALSE; 2232 2233 jac->ruse = (int)PETSC_FALSE; 2234 jac->symt = 0; 2235 PetscCallExternal(HYPRE_ParaSailsSetParams, jac->hsolver, jac->threshold, jac->nlevels); 2236 PetscCallExternal(HYPRE_ParaSailsSetFilter, jac->hsolver, jac->filter); 2237 PetscCallExternal(HYPRE_ParaSailsSetLoadbal, jac->hsolver, jac->loadbal); 2238 PetscCallExternal(HYPRE_ParaSailsSetLogging, jac->hsolver, jac->logging); 2239 PetscCallExternal(HYPRE_ParaSailsSetReuse, jac->hsolver, jac->ruse); 2240 PetscCallExternal(HYPRE_ParaSailsSetSym, jac->hsolver, jac->symt); 2241 PetscFunctionReturn(PETSC_SUCCESS); 2242 } 2243 PetscCall(PetscStrcmp("boomeramg", jac->hypre_type, &flag)); 2244 if (flag) { 2245 PetscCallExternal(HYPRE_BoomerAMGCreate, &jac->hsolver); 2246 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_BoomerAMG; 2247 pc->ops->view = PCView_HYPRE_BoomerAMG; 2248 pc->ops->applytranspose = PCApplyTranspose_HYPRE_BoomerAMG; 2249 pc->ops->applyrichardson = PCApplyRichardson_HYPRE_BoomerAMG; 2250 pc->ops->matapply = PCMatApply_HYPRE_BoomerAMG; 2251 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", PCGetInterpolations_BoomerAMG)); 2252 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", PCGetCoarseOperators_BoomerAMG)); 2253 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetCFMarkers_C", PCHYPREGetCFMarkers_BoomerAMG)); 2254 jac->destroy = HYPRE_BoomerAMGDestroy; 2255 jac->setup = HYPRE_BoomerAMGSetup; 2256 jac->solve = HYPRE_BoomerAMGSolve; 2257 jac->applyrichardson = PETSC_FALSE; 2258 /* these defaults match the hypre defaults */ 2259 jac->cycletype = 1; 2260 jac->maxlevels = 25; 2261 jac->maxiter = 1; 2262 jac->tol = 0.0; /* tolerance of zero indicates use as preconditioner (suppresses convergence errors) */ 2263 jac->truncfactor = 0.0; 2264 jac->strongthreshold = .25; 2265 jac->maxrowsum = .9; 2266 jac->coarsentype = 6; 2267 jac->measuretype = 0; 2268 jac->gridsweeps[0] = jac->gridsweeps[1] = jac->gridsweeps[2] = 1; 2269 jac->smoothtype = -1; /* Not set by default */ 2270 jac->smoothnumlevels = 25; 2271 jac->eu_level = 0; 2272 jac->eu_droptolerance = 0; 2273 jac->eu_bj = 0; 2274 jac->relaxtype[0] = jac->relaxtype[1] = 6; /* Defaults to SYMMETRIC since in PETSc we are using a PC - most likely with CG */ 2275 jac->relaxtype[2] = 9; /*G.E. */ 2276 jac->relaxweight = 1.0; 2277 jac->outerrelaxweight = 1.0; 2278 jac->relaxorder = 1; 2279 jac->interptype = 0; 2280 jac->Rtype = 0; 2281 jac->Rstrongthreshold = 0.25; 2282 jac->Rfilterthreshold = 0.0; 2283 jac->Adroptype = -1; 2284 jac->Adroptol = 0.0; 2285 jac->agg_nl = 0; 2286 jac->agg_interptype = 4; 2287 jac->pmax = 0; 2288 jac->truncfactor = 0.0; 2289 jac->agg_num_paths = 1; 2290 jac->maxc = 9; 2291 jac->minc = 1; 2292 jac->nodal_coarsening = 0; 2293 jac->nodal_coarsening_diag = 0; 2294 jac->vec_interp_variant = 0; 2295 jac->vec_interp_qmax = 0; 2296 jac->vec_interp_smooth = PETSC_FALSE; 2297 jac->interp_refine = 0; 2298 jac->nodal_relax = PETSC_FALSE; 2299 jac->nodal_relax_levels = 1; 2300 jac->rap2 = 0; 2301 2302 /* GPU defaults 2303 from https://hypre.readthedocs.io/en/latest/solvers-boomeramg.html#gpu-supported-options 2304 and /src/parcsr_ls/par_amg.c */ 2305 #if defined(PETSC_HAVE_HYPRE_DEVICE) 2306 jac->keeptranspose = PETSC_TRUE; 2307 jac->mod_rap2 = 1; 2308 jac->coarsentype = 8; 2309 jac->relaxorder = 0; 2310 jac->interptype = 6; 2311 jac->relaxtype[0] = 18; 2312 jac->relaxtype[1] = 18; 2313 jac->agg_interptype = 7; 2314 #else 2315 jac->keeptranspose = PETSC_FALSE; 2316 jac->mod_rap2 = 0; 2317 #endif 2318 PetscCallExternal(HYPRE_BoomerAMGSetCycleType, jac->hsolver, jac->cycletype); 2319 PetscCallExternal(HYPRE_BoomerAMGSetMaxLevels, jac->hsolver, jac->maxlevels); 2320 PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter); 2321 PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol); 2322 PetscCallExternal(HYPRE_BoomerAMGSetTruncFactor, jac->hsolver, jac->truncfactor); 2323 PetscCallExternal(HYPRE_BoomerAMGSetStrongThreshold, jac->hsolver, jac->strongthreshold); 2324 PetscCallExternal(HYPRE_BoomerAMGSetMaxRowSum, jac->hsolver, jac->maxrowsum); 2325 PetscCallExternal(HYPRE_BoomerAMGSetCoarsenType, jac->hsolver, jac->coarsentype); 2326 PetscCallExternal(HYPRE_BoomerAMGSetMeasureType, jac->hsolver, jac->measuretype); 2327 PetscCallExternal(HYPRE_BoomerAMGSetRelaxOrder, jac->hsolver, jac->relaxorder); 2328 PetscCallExternal(HYPRE_BoomerAMGSetInterpType, jac->hsolver, jac->interptype); 2329 PetscCallExternal(HYPRE_BoomerAMGSetAggNumLevels, jac->hsolver, jac->agg_nl); 2330 PetscCallExternal(HYPRE_BoomerAMGSetAggInterpType, jac->hsolver, jac->agg_interptype); 2331 PetscCallExternal(HYPRE_BoomerAMGSetPMaxElmts, jac->hsolver, jac->pmax); 2332 PetscCallExternal(HYPRE_BoomerAMGSetNumPaths, jac->hsolver, jac->agg_num_paths); 2333 PetscCallExternal(HYPRE_BoomerAMGSetRelaxType, jac->hsolver, jac->relaxtype[0]); /* defaults coarse to 9 */ 2334 PetscCallExternal(HYPRE_BoomerAMGSetNumSweeps, jac->hsolver, jac->gridsweeps[0]); /* defaults coarse to 1 */ 2335 PetscCallExternal(HYPRE_BoomerAMGSetMaxCoarseSize, jac->hsolver, jac->maxc); 2336 PetscCallExternal(HYPRE_BoomerAMGSetMinCoarseSize, jac->hsolver, jac->minc); 2337 /* GPU */ 2338 #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0) 2339 PetscCallExternal(HYPRE_BoomerAMGSetKeepTranspose, jac->hsolver, jac->keeptranspose ? 1 : 0); 2340 PetscCallExternal(HYPRE_BoomerAMGSetRAP2, jac->hsolver, jac->rap2); 2341 PetscCallExternal(HYPRE_BoomerAMGSetModuleRAP2, jac->hsolver, jac->mod_rap2); 2342 #endif 2343 2344 /* AIR */ 2345 #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0) 2346 PetscCallExternal(HYPRE_BoomerAMGSetRestriction, jac->hsolver, jac->Rtype); 2347 PetscCallExternal(HYPRE_BoomerAMGSetStrongThresholdR, jac->hsolver, jac->Rstrongthreshold); 2348 PetscCallExternal(HYPRE_BoomerAMGSetFilterThresholdR, jac->hsolver, jac->Rfilterthreshold); 2349 PetscCallExternal(HYPRE_BoomerAMGSetADropTol, jac->hsolver, jac->Adroptol); 2350 PetscCallExternal(HYPRE_BoomerAMGSetADropType, jac->hsolver, jac->Adroptype); 2351 #endif 2352 PetscFunctionReturn(PETSC_SUCCESS); 2353 } 2354 PetscCall(PetscStrcmp("ams", jac->hypre_type, &flag)); 2355 if (flag) { 2356 PetscCallExternal(HYPRE_AMSCreate, &jac->hsolver); 2357 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_AMS; 2358 pc->ops->view = PCView_HYPRE_AMS; 2359 jac->destroy = HYPRE_AMSDestroy; 2360 jac->setup = HYPRE_AMSSetup; 2361 jac->solve = HYPRE_AMSSolve; 2362 jac->coords[0] = NULL; 2363 jac->coords[1] = NULL; 2364 jac->coords[2] = NULL; 2365 jac->interior = NULL; 2366 /* solver parameters: these are borrowed from mfem package, and they are not the default values from HYPRE AMS */ 2367 jac->as_print = 0; 2368 jac->as_max_iter = 1; /* used as a preconditioner */ 2369 jac->as_tol = 0.; /* used as a preconditioner */ 2370 jac->ams_cycle_type = 13; 2371 /* Smoothing options */ 2372 jac->as_relax_type = 2; 2373 jac->as_relax_times = 1; 2374 jac->as_relax_weight = 1.0; 2375 jac->as_omega = 1.0; 2376 /* Vector valued Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */ 2377 jac->as_amg_alpha_opts[0] = 10; 2378 jac->as_amg_alpha_opts[1] = 1; 2379 jac->as_amg_alpha_opts[2] = 6; 2380 jac->as_amg_alpha_opts[3] = 6; 2381 jac->as_amg_alpha_opts[4] = 4; 2382 jac->as_amg_alpha_theta = 0.25; 2383 /* Scalar Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */ 2384 jac->as_amg_beta_opts[0] = 10; 2385 jac->as_amg_beta_opts[1] = 1; 2386 jac->as_amg_beta_opts[2] = 6; 2387 jac->as_amg_beta_opts[3] = 6; 2388 jac->as_amg_beta_opts[4] = 4; 2389 jac->as_amg_beta_theta = 0.25; 2390 PetscCallExternal(HYPRE_AMSSetPrintLevel, jac->hsolver, jac->as_print); 2391 PetscCallExternal(HYPRE_AMSSetMaxIter, jac->hsolver, jac->as_max_iter); 2392 PetscCallExternal(HYPRE_AMSSetCycleType, jac->hsolver, jac->ams_cycle_type); 2393 PetscCallExternal(HYPRE_AMSSetTol, jac->hsolver, jac->as_tol); 2394 PetscCallExternal(HYPRE_AMSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega); 2395 PetscCallExternal(HYPRE_AMSSetAlphaAMGOptions, jac->hsolver, jac->as_amg_alpha_opts[0], /* AMG coarsen type */ 2396 jac->as_amg_alpha_opts[1], /* AMG agg_levels */ 2397 jac->as_amg_alpha_opts[2], /* AMG relax_type */ 2398 jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3], /* AMG interp_type */ 2399 jac->as_amg_alpha_opts[4]); /* AMG Pmax */ 2400 PetscCallExternal(HYPRE_AMSSetBetaAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */ 2401 jac->as_amg_beta_opts[1], /* AMG agg_levels */ 2402 jac->as_amg_beta_opts[2], /* AMG relax_type */ 2403 jac->as_amg_beta_theta, jac->as_amg_beta_opts[3], /* AMG interp_type */ 2404 jac->as_amg_beta_opts[4]); /* AMG Pmax */ 2405 /* Zero conductivity */ 2406 jac->ams_beta_is_zero = PETSC_FALSE; 2407 jac->ams_beta_is_zero_part = PETSC_FALSE; 2408 PetscFunctionReturn(PETSC_SUCCESS); 2409 } 2410 PetscCall(PetscStrcmp("ads", jac->hypre_type, &flag)); 2411 if (flag) { 2412 PetscCallExternal(HYPRE_ADSCreate, &jac->hsolver); 2413 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ADS; 2414 pc->ops->view = PCView_HYPRE_ADS; 2415 jac->destroy = HYPRE_ADSDestroy; 2416 jac->setup = HYPRE_ADSSetup; 2417 jac->solve = HYPRE_ADSSolve; 2418 jac->coords[0] = NULL; 2419 jac->coords[1] = NULL; 2420 jac->coords[2] = NULL; 2421 /* solver parameters: these are borrowed from mfem package, and they are not the default values from HYPRE ADS */ 2422 jac->as_print = 0; 2423 jac->as_max_iter = 1; /* used as a preconditioner */ 2424 jac->as_tol = 0.; /* used as a preconditioner */ 2425 jac->ads_cycle_type = 13; 2426 /* Smoothing options */ 2427 jac->as_relax_type = 2; 2428 jac->as_relax_times = 1; 2429 jac->as_relax_weight = 1.0; 2430 jac->as_omega = 1.0; 2431 /* AMS solver parameters: cycle_type, coarsen type, agg_levels, relax_type, interp_type, Pmax */ 2432 jac->ams_cycle_type = 14; 2433 jac->as_amg_alpha_opts[0] = 10; 2434 jac->as_amg_alpha_opts[1] = 1; 2435 jac->as_amg_alpha_opts[2] = 6; 2436 jac->as_amg_alpha_opts[3] = 6; 2437 jac->as_amg_alpha_opts[4] = 4; 2438 jac->as_amg_alpha_theta = 0.25; 2439 /* Vector Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */ 2440 jac->as_amg_beta_opts[0] = 10; 2441 jac->as_amg_beta_opts[1] = 1; 2442 jac->as_amg_beta_opts[2] = 6; 2443 jac->as_amg_beta_opts[3] = 6; 2444 jac->as_amg_beta_opts[4] = 4; 2445 jac->as_amg_beta_theta = 0.25; 2446 PetscCallExternal(HYPRE_ADSSetPrintLevel, jac->hsolver, jac->as_print); 2447 PetscCallExternal(HYPRE_ADSSetMaxIter, jac->hsolver, jac->as_max_iter); 2448 PetscCallExternal(HYPRE_ADSSetCycleType, jac->hsolver, jac->ams_cycle_type); 2449 PetscCallExternal(HYPRE_ADSSetTol, jac->hsolver, jac->as_tol); 2450 PetscCallExternal(HYPRE_ADSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega); 2451 PetscCallExternal(HYPRE_ADSSetAMSOptions, jac->hsolver, jac->ams_cycle_type, /* AMG coarsen type */ 2452 jac->as_amg_alpha_opts[0], /* AMG coarsen type */ 2453 jac->as_amg_alpha_opts[1], /* AMG agg_levels */ 2454 jac->as_amg_alpha_opts[2], /* AMG relax_type */ 2455 jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3], /* AMG interp_type */ 2456 jac->as_amg_alpha_opts[4]); /* AMG Pmax */ 2457 PetscCallExternal(HYPRE_ADSSetAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */ 2458 jac->as_amg_beta_opts[1], /* AMG agg_levels */ 2459 jac->as_amg_beta_opts[2], /* AMG relax_type */ 2460 jac->as_amg_beta_theta, jac->as_amg_beta_opts[3], /* AMG interp_type */ 2461 jac->as_amg_beta_opts[4]); /* AMG Pmax */ 2462 PetscFunctionReturn(PETSC_SUCCESS); 2463 } 2464 PetscCall(PetscFree(jac->hypre_type)); 2465 2466 jac->hypre_type = NULL; 2467 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown HYPRE preconditioner %s; Choices are euclid, ilu, pilut, parasails, boomeramg, ams, ads", name); 2468 } 2469 2470 /* 2471 It only gets here if the HYPRE type has not been set before the call to 2472 ...SetFromOptions() which actually is most of the time 2473 */ 2474 static PetscErrorCode PCSetFromOptions_HYPRE(PC pc, PetscOptionItems PetscOptionsObject) 2475 { 2476 PetscInt indx; 2477 const char *type[] = {"ilu", "euclid", "pilut", "parasails", "boomeramg", "ams", "ads"}; 2478 PetscBool flg; 2479 2480 PetscFunctionBegin; 2481 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE preconditioner options"); 2482 PetscCall(PetscOptionsEList("-pc_hypre_type", "HYPRE preconditioner type", "PCHYPRESetType", type, PETSC_STATIC_ARRAY_LENGTH(type), "boomeramg", &indx, &flg)); 2483 if (flg) { 2484 PetscCall(PCHYPRESetType_HYPRE(pc, type[indx])); 2485 } else { 2486 PetscCall(PCHYPRESetType_HYPRE(pc, "boomeramg")); 2487 } 2488 PetscTryTypeMethod(pc, setfromoptions, PetscOptionsObject); 2489 PetscOptionsHeadEnd(); 2490 PetscFunctionReturn(PETSC_SUCCESS); 2491 } 2492 2493 /*@ 2494 PCHYPRESetType - Sets which hypre preconditioner you wish to use 2495 2496 Input Parameters: 2497 + pc - the preconditioner context 2498 - name - either euclid, ilu, pilut, parasails, boomeramg, ams, ads 2499 2500 Options Database Key: 2501 . pc_hypre_type - One of euclid, ilu, pilut, parasails, boomeramg, ams, ads 2502 2503 Level: intermediate 2504 2505 .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHYPRE` 2506 @*/ 2507 PetscErrorCode PCHYPRESetType(PC pc, const char name[]) 2508 { 2509 PetscFunctionBegin; 2510 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2511 PetscAssertPointer(name, 2); 2512 PetscTryMethod(pc, "PCHYPRESetType_C", (PC, const char[]), (pc, name)); 2513 PetscFunctionReturn(PETSC_SUCCESS); 2514 } 2515 2516 /*@C 2517 PCHYPREGetCFMarkers - Gets CF marker arrays for all levels (except the finest level) 2518 2519 Logically Collective 2520 2521 Input Parameter: 2522 . pc - the preconditioner context 2523 2524 Output Parameters: 2525 + n_per_level - the number of nodes per level (size of `num_levels`) 2526 - CFMarkers - the Coarse/Fine Boolean arrays (size of `num_levels` - 1) 2527 2528 Note: 2529 Caller is responsible for memory management of `n_per_level` and `CFMarkers` pointers. That is they should free them with `PetscFree()` when no longer needed. 2530 2531 Level: advanced 2532 2533 .seealso: [](ch_ksp), `PC`, `PCMG`, `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetRScale()`, `PCMGGetInterpolation()`, `PCGetInterpolations()` 2534 @*/ 2535 PetscErrorCode PCHYPREGetCFMarkers(PC pc, PetscInt *n_per_level[], PetscBT *CFMarkers[]) 2536 { 2537 PetscFunctionBegin; 2538 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2539 PetscAssertPointer(n_per_level, 2); 2540 PetscAssertPointer(CFMarkers, 3); 2541 PetscUseMethod(pc, "PCHYPREGetCFMarkers_C", (PC, PetscInt *[], PetscBT *[]), (pc, n_per_level, CFMarkers)); 2542 PetscFunctionReturn(PETSC_SUCCESS); 2543 } 2544 2545 /*@ 2546 PCHYPREGetType - Gets which hypre preconditioner you are using 2547 2548 Input Parameter: 2549 . pc - the preconditioner context 2550 2551 Output Parameter: 2552 . name - either euclid, ilu, pilut, parasails, boomeramg, ams, ads 2553 2554 Level: intermediate 2555 2556 .seealso: [](ch_ksp), `PCCreate()`, `PCHYPRESetType()`, `PCType`, `PC`, `PCHYPRE` 2557 @*/ 2558 PetscErrorCode PCHYPREGetType(PC pc, const char *name[]) 2559 { 2560 PetscFunctionBegin; 2561 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2562 PetscAssertPointer(name, 2); 2563 PetscTryMethod(pc, "PCHYPREGetType_C", (PC, const char *[]), (pc, name)); 2564 PetscFunctionReturn(PETSC_SUCCESS); 2565 } 2566 2567 /*@ 2568 PCMGGalerkinSetMatProductAlgorithm - Set type of SpGEMM for hypre to use on GPUs 2569 2570 Logically Collective 2571 2572 Input Parameters: 2573 + pc - the hypre context 2574 - name - one of 'cusparse', 'hypre' 2575 2576 Options Database Key: 2577 . -pc_mg_galerkin_mat_product_algorithm <cusparse,hypre> - Type of SpGEMM to use in hypre 2578 2579 Level: intermediate 2580 2581 Developer Notes: 2582 How the name starts with `PCMG`, should it not be `PCHYPREBoomerAMG`? 2583 2584 .seealso: [](ch_ksp), `PCHYPRE`, `PCMGGalerkinGetMatProductAlgorithm()` 2585 @*/ 2586 PetscErrorCode PCMGGalerkinSetMatProductAlgorithm(PC pc, const char name[]) 2587 { 2588 PetscFunctionBegin; 2589 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2590 PetscTryMethod(pc, "PCMGGalerkinSetMatProductAlgorithm_C", (PC, const char[]), (pc, name)); 2591 PetscFunctionReturn(PETSC_SUCCESS); 2592 } 2593 2594 /*@ 2595 PCMGGalerkinGetMatProductAlgorithm - Get type of SpGEMM for hypre to use on GPUs 2596 2597 Not Collective 2598 2599 Input Parameter: 2600 . pc - the multigrid context 2601 2602 Output Parameter: 2603 . name - one of 'cusparse', 'hypre' 2604 2605 Level: intermediate 2606 2607 .seealso: [](ch_ksp), `PCHYPRE`, `PCMGGalerkinSetMatProductAlgorithm()` 2608 @*/ 2609 PetscErrorCode PCMGGalerkinGetMatProductAlgorithm(PC pc, const char *name[]) 2610 { 2611 PetscFunctionBegin; 2612 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2613 PetscTryMethod(pc, "PCMGGalerkinGetMatProductAlgorithm_C", (PC, const char *[]), (pc, name)); 2614 PetscFunctionReturn(PETSC_SUCCESS); 2615 } 2616 2617 /*MC 2618 PCHYPRE - Allows you to use the matrix element based preconditioners in the LLNL package hypre as PETSc `PC` 2619 2620 Options Database Keys: 2621 + -pc_hypre_type - One of `euclid`, `pilut`, `parasails`, `boomeramg`, `ams`, or `ads` 2622 . -pc_hypre_boomeramg_nodal_coarsen <n> - where n is from 1 to 6 (see `HYPRE_BoomerAMGSetNodal()`) 2623 . -pc_hypre_boomeramg_vec_interp_variant <v> - where v is from 1 to 3 (see `HYPRE_BoomerAMGSetInterpVecVariant()`) 2624 - Many others - run with `-pc_type hypre` `-pc_hypre_type XXX` `-help` to see options for the XXX preconditioner 2625 2626 Level: intermediate 2627 2628 Notes: 2629 Apart from `-pc_hypre_type` (for which there is `PCHYPRESetType()`), 2630 the many hypre options can ONLY be set via the options database (e.g. the command line 2631 or with `PetscOptionsSetValue()`, there are no functions to set them) 2632 2633 The options `-pc_hypre_boomeramg_max_iter` and `-pc_hypre_boomeramg_tol` refer to the number of iterations 2634 (V-cycles) and tolerance that boomerAMG does EACH time it is called. So for example, if 2635 `-pc_hypre_boomeramg_max_iter` is set to 2 then 2-V-cycles are being used to define the preconditioner 2636 (`-pc_hypre_boomeramg_tol` should be set to 0.0 - the default - to strictly use a fixed number of 2637 iterations per hypre call). `-ksp_max_it` and `-ksp_rtol` STILL determine the total number of iterations 2638 and tolerance for the Krylov solver. For example, if `-pc_hypre_boomeramg_max_iter` is 2 and `-ksp_max_it` is 10 2639 then AT MOST twenty V-cycles of boomeramg will be used. 2640 2641 Note that the option `-pc_hypre_boomeramg_relax_type_all` defaults to symmetric relaxation 2642 (symmetric-SOR/Jacobi), which is required for Krylov solvers like CG that expect symmetry. 2643 Otherwise, you may want to use `-pc_hypre_boomeramg_relax_type_all SOR/Jacobi`. 2644 2645 `MatSetNearNullSpace()` - if you provide a near null space to your matrix it is ignored by hypre UNLESS you also use 2646 the following two options: `-pc_hypre_boomeramg_nodal_coarsen <n> -pc_hypre_boomeramg_vec_interp_variant <v>` 2647 2648 See `PCPFMG`, `PCSMG`, and `PCSYSPFMG` for access to hypre's other (nonalgebraic) multigrid solvers 2649 2650 For `PCHYPRE` type of `ams` or `ads` auxiliary data must be provided to the preconditioner with `PCHYPRESetDiscreteGradient()`, 2651 `PCHYPRESetDiscreteCurl()`, `PCHYPRESetInterpolations()`, `PCHYPRESetAlphaPoissonMatrix()`, `PCHYPRESetBetaPoissonMatrix()`, `PCHYPRESetEdgeConstantVectors()`, 2652 `PCHYPREAMSSetInteriorNodes()` 2653 2654 Sometimes people want to try algebraic multigrid as a "standalone" solver, that is not accelerating it with a Krylov method. Though we generally do not recommend this 2655 since it is usually slower, one should use a `KSPType` of `KSPRICHARDSON` 2656 (or equivalently `-ksp_type richardson`) to achieve this. Using `KSPPREONLY` will not work since it only applies a single cycle of multigrid. 2657 2658 PETSc provides its own geometric and algebraic multigrid solvers `PCMG` and `PCGAMG`, also see `PCHMG` which is useful for certain multicomponent problems 2659 2660 GPU Notes: 2661 To configure hypre BoomerAMG so that it can utilize NVIDIA GPUs run ./configure --download-hypre --with-cuda 2662 Then pass `VECCUDA` vectors and `MATAIJCUSPARSE` matrices to the solvers and PETSc will automatically utilize hypre's GPU solvers. 2663 2664 To configure hypre BoomerAMG so that it can utilize AMD GPUs run ./configure --download-hypre --with-hip 2665 Then pass `VECHIP` vectors to the solvers and PETSc will automatically utilize hypre's GPU solvers. 2666 2667 .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHYPRESetType()`, `PCPFMG`, `PCGAMG`, `PCSYSPFMG`, `PCSMG`, `PCHYPRESetDiscreteGradient()`, 2668 `PCHYPRESetDiscreteCurl()`, `PCHYPRESetInterpolations()`, `PCHYPRESetAlphaPoissonMatrix()`, `PCHYPRESetBetaPoissonMatrix()`, `PCHYPRESetEdgeConstantVectors()`, 2669 PCHYPREAMSSetInteriorNodes() 2670 M*/ 2671 2672 PETSC_EXTERN PetscErrorCode PCCreate_HYPRE(PC pc) 2673 { 2674 PC_HYPRE *jac; 2675 2676 PetscFunctionBegin; 2677 PetscCall(PetscNew(&jac)); 2678 2679 pc->data = jac; 2680 pc->ops->reset = PCReset_HYPRE; 2681 pc->ops->destroy = PCDestroy_HYPRE; 2682 pc->ops->setfromoptions = PCSetFromOptions_HYPRE; 2683 pc->ops->setup = PCSetUp_HYPRE; 2684 pc->ops->apply = PCApply_HYPRE; 2685 jac->comm_hypre = MPI_COMM_NULL; 2686 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetType_C", PCHYPRESetType_HYPRE)); 2687 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetType_C", PCHYPREGetType_HYPRE)); 2688 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_HYPRE)); 2689 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteGradient_C", PCHYPRESetDiscreteGradient_HYPRE)); 2690 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteCurl_C", PCHYPRESetDiscreteCurl_HYPRE)); 2691 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetInterpolations_C", PCHYPRESetInterpolations_HYPRE)); 2692 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetEdgeConstantVectors_C", PCHYPRESetEdgeConstantVectors_HYPRE)); 2693 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREAMSSetInteriorNodes_C", PCHYPREAMSSetInteriorNodes_HYPRE)); 2694 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetPoissonMatrix_C", PCHYPRESetPoissonMatrix_HYPRE)); 2695 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinSetMatProductAlgorithm_C", PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG)); 2696 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinGetMatProductAlgorithm_C", PCMGGalerkinGetMatProductAlgorithm_HYPRE_BoomerAMG)); 2697 #if defined(PETSC_HAVE_HYPRE_DEVICE) 2698 #if defined(HYPRE_USING_HIP) 2699 PetscCall(PetscDeviceInitialize(PETSC_DEVICE_HIP)); 2700 #endif 2701 #if defined(HYPRE_USING_CUDA) 2702 PetscCall(PetscDeviceInitialize(PETSC_DEVICE_CUDA)); 2703 #endif 2704 #endif 2705 PetscHYPREInitialize(); 2706 PetscFunctionReturn(PETSC_SUCCESS); 2707 } 2708 2709 typedef struct { 2710 MPI_Comm hcomm; /* does not share comm with HYPRE_StructMatrix because need to create solver before getting matrix */ 2711 HYPRE_StructSolver hsolver; 2712 2713 /* keep copy of PFMG options used so may view them */ 2714 PetscInt its; 2715 PetscReal tol; 2716 PetscInt relax_type; 2717 PetscInt rap_type; 2718 PetscInt num_pre_relax, num_post_relax; 2719 PetscInt max_levels; 2720 PetscInt skip_relax; 2721 PetscBool print_statistics; 2722 } PC_PFMG; 2723 2724 static PetscErrorCode PCDestroy_PFMG(PC pc) 2725 { 2726 PC_PFMG *ex = (PC_PFMG *)pc->data; 2727 2728 PetscFunctionBegin; 2729 if (ex->hsolver) PetscCallExternal(HYPRE_StructPFMGDestroy, ex->hsolver); 2730 PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm)); 2731 PetscCall(PetscFree(pc->data)); 2732 PetscFunctionReturn(PETSC_SUCCESS); 2733 } 2734 2735 static const char *PFMGRelaxType[] = {"Jacobi", "Weighted-Jacobi", "symmetric-Red/Black-Gauss-Seidel", "Red/Black-Gauss-Seidel"}; 2736 static const char *PFMGRAPType[] = {"Galerkin", "non-Galerkin"}; 2737 2738 static PetscErrorCode PCView_PFMG(PC pc, PetscViewer viewer) 2739 { 2740 PetscBool iascii; 2741 PC_PFMG *ex = (PC_PFMG *)pc->data; 2742 2743 PetscFunctionBegin; 2744 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 2745 if (iascii) { 2746 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE PFMG preconditioning\n")); 2747 PetscCall(PetscViewerASCIIPrintf(viewer, " max iterations %" PetscInt_FMT "\n", ex->its)); 2748 PetscCall(PetscViewerASCIIPrintf(viewer, " tolerance %g\n", ex->tol)); 2749 PetscCall(PetscViewerASCIIPrintf(viewer, " relax type %s\n", PFMGRelaxType[ex->relax_type])); 2750 PetscCall(PetscViewerASCIIPrintf(viewer, " RAP type %s\n", PFMGRAPType[ex->rap_type])); 2751 PetscCall(PetscViewerASCIIPrintf(viewer, " number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax)); 2752 PetscCall(PetscViewerASCIIPrintf(viewer, " max levels %" PetscInt_FMT "\n", ex->max_levels)); 2753 PetscCall(PetscViewerASCIIPrintf(viewer, " skip relax %" PetscInt_FMT "\n", ex->skip_relax)); 2754 } 2755 PetscFunctionReturn(PETSC_SUCCESS); 2756 } 2757 2758 static PetscErrorCode PCSetFromOptions_PFMG(PC pc, PetscOptionItems PetscOptionsObject) 2759 { 2760 PC_PFMG *ex = (PC_PFMG *)pc->data; 2761 2762 PetscFunctionBegin; 2763 PetscOptionsHeadBegin(PetscOptionsObject, "PFMG options"); 2764 PetscCall(PetscOptionsBool("-pc_pfmg_print_statistics", "Print statistics", "HYPRE_StructPFMGSetPrintLevel", ex->print_statistics, &ex->print_statistics, NULL)); 2765 PetscCall(PetscOptionsInt("-pc_pfmg_its", "Number of iterations of PFMG to use as preconditioner", "HYPRE_StructPFMGSetMaxIter", ex->its, &ex->its, NULL)); 2766 PetscCallExternal(HYPRE_StructPFMGSetMaxIter, ex->hsolver, ex->its); 2767 PetscCall(PetscOptionsInt("-pc_pfmg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_StructPFMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL)); 2768 PetscCallExternal(HYPRE_StructPFMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax); 2769 PetscCall(PetscOptionsInt("-pc_pfmg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_StructPFMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL)); 2770 PetscCallExternal(HYPRE_StructPFMGSetNumPostRelax, ex->hsolver, ex->num_post_relax); 2771 2772 PetscCall(PetscOptionsInt("-pc_pfmg_max_levels", "Max Levels for MG hierarchy", "HYPRE_StructPFMGSetMaxLevels", ex->max_levels, &ex->max_levels, NULL)); 2773 PetscCallExternal(HYPRE_StructPFMGSetMaxLevels, ex->hsolver, ex->max_levels); 2774 2775 PetscCall(PetscOptionsReal("-pc_pfmg_tol", "Tolerance of PFMG", "HYPRE_StructPFMGSetTol", ex->tol, &ex->tol, NULL)); 2776 PetscCallExternal(HYPRE_StructPFMGSetTol, ex->hsolver, ex->tol); 2777 PetscCall(PetscOptionsEList("-pc_pfmg_relax_type", "Relax type for the up and down cycles", "HYPRE_StructPFMGSetRelaxType", PFMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(PFMGRelaxType), PFMGRelaxType[ex->relax_type], &ex->relax_type, NULL)); 2778 PetscCallExternal(HYPRE_StructPFMGSetRelaxType, ex->hsolver, ex->relax_type); 2779 PetscCall(PetscOptionsEList("-pc_pfmg_rap_type", "RAP type", "HYPRE_StructPFMGSetRAPType", PFMGRAPType, PETSC_STATIC_ARRAY_LENGTH(PFMGRAPType), PFMGRAPType[ex->rap_type], &ex->rap_type, NULL)); 2780 PetscCallExternal(HYPRE_StructPFMGSetRAPType, ex->hsolver, ex->rap_type); 2781 PetscCall(PetscOptionsInt("-pc_pfmg_skip_relax", "Skip relaxation on certain grids for isotropic problems. This can greatly improve efficiency by eliminating unnecessary relaxations when the underlying problem is isotropic", "HYPRE_StructPFMGSetSkipRelax", ex->skip_relax, &ex->skip_relax, NULL)); 2782 PetscCallExternal(HYPRE_StructPFMGSetSkipRelax, ex->hsolver, ex->skip_relax); 2783 PetscOptionsHeadEnd(); 2784 PetscFunctionReturn(PETSC_SUCCESS); 2785 } 2786 2787 static PetscErrorCode PCApply_PFMG(PC pc, Vec x, Vec y) 2788 { 2789 PC_PFMG *ex = (PC_PFMG *)pc->data; 2790 PetscScalar *yy; 2791 const PetscScalar *xx; 2792 PetscInt ilower[3], iupper[3]; 2793 HYPRE_Int hlower[3], hupper[3]; 2794 Mat_HYPREStruct *mx = (Mat_HYPREStruct *)pc->pmat->data; 2795 2796 PetscFunctionBegin; 2797 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 2798 PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2])); 2799 /* when HYPRE_MIXEDINT is defined, sizeof(HYPRE_Int) == 32 */ 2800 iupper[0] += ilower[0] - 1; 2801 iupper[1] += ilower[1] - 1; 2802 iupper[2] += ilower[2] - 1; 2803 hlower[0] = (HYPRE_Int)ilower[0]; 2804 hlower[1] = (HYPRE_Int)ilower[1]; 2805 hlower[2] = (HYPRE_Int)ilower[2]; 2806 hupper[0] = (HYPRE_Int)iupper[0]; 2807 hupper[1] = (HYPRE_Int)iupper[1]; 2808 hupper[2] = (HYPRE_Int)iupper[2]; 2809 2810 /* copy x values over to hypre */ 2811 PetscCallExternal(HYPRE_StructVectorSetConstantValues, mx->hb, 0.0); 2812 PetscCall(VecGetArrayRead(x, &xx)); 2813 PetscCallExternal(HYPRE_StructVectorSetBoxValues, mx->hb, hlower, hupper, (HYPRE_Complex *)xx); 2814 PetscCall(VecRestoreArrayRead(x, &xx)); 2815 PetscCallExternal(HYPRE_StructVectorAssemble, mx->hb); 2816 PetscCallExternal(HYPRE_StructPFMGSolve, ex->hsolver, mx->hmat, mx->hb, mx->hx); 2817 2818 /* copy solution values back to PETSc */ 2819 PetscCall(VecGetArray(y, &yy)); 2820 PetscCallExternal(HYPRE_StructVectorGetBoxValues, mx->hx, hlower, hupper, (HYPRE_Complex *)yy); 2821 PetscCall(VecRestoreArray(y, &yy)); 2822 PetscFunctionReturn(PETSC_SUCCESS); 2823 } 2824 2825 static PetscErrorCode PCApplyRichardson_PFMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason) 2826 { 2827 PC_PFMG *jac = (PC_PFMG *)pc->data; 2828 HYPRE_Int oits; 2829 2830 PetscFunctionBegin; 2831 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 2832 PetscCallExternal(HYPRE_StructPFMGSetMaxIter, jac->hsolver, its * jac->its); 2833 PetscCallExternal(HYPRE_StructPFMGSetTol, jac->hsolver, rtol); 2834 2835 PetscCall(PCApply_PFMG(pc, b, y)); 2836 PetscCallExternal(HYPRE_StructPFMGGetNumIterations, jac->hsolver, &oits); 2837 *outits = oits; 2838 if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS; 2839 else *reason = PCRICHARDSON_CONVERGED_RTOL; 2840 PetscCallExternal(HYPRE_StructPFMGSetTol, jac->hsolver, jac->tol); 2841 PetscCallExternal(HYPRE_StructPFMGSetMaxIter, jac->hsolver, jac->its); 2842 PetscFunctionReturn(PETSC_SUCCESS); 2843 } 2844 2845 static PetscErrorCode PCSetUp_PFMG(PC pc) 2846 { 2847 PC_PFMG *ex = (PC_PFMG *)pc->data; 2848 Mat_HYPREStruct *mx = (Mat_HYPREStruct *)pc->pmat->data; 2849 PetscBool flg; 2850 2851 PetscFunctionBegin; 2852 PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESTRUCT, &flg)); 2853 PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESTRUCT with this preconditioner"); 2854 2855 /* create the hypre solver object and set its information */ 2856 if (ex->hsolver) PetscCallExternal(HYPRE_StructPFMGDestroy, ex->hsolver); 2857 PetscCallExternal(HYPRE_StructPFMGCreate, ex->hcomm, &ex->hsolver); 2858 2859 // Print Hypre statistics about the solve process 2860 if (ex->print_statistics) PetscCallExternal(HYPRE_StructPFMGSetPrintLevel, ex->hsolver, 3); 2861 2862 // The hypre options must be repeated here because the StructPFMG was destroyed and recreated 2863 PetscCallExternal(HYPRE_StructPFMGSetMaxIter, ex->hsolver, ex->its); 2864 PetscCallExternal(HYPRE_StructPFMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax); 2865 PetscCallExternal(HYPRE_StructPFMGSetNumPostRelax, ex->hsolver, ex->num_post_relax); 2866 PetscCallExternal(HYPRE_StructPFMGSetMaxLevels, ex->hsolver, ex->max_levels); 2867 PetscCallExternal(HYPRE_StructPFMGSetTol, ex->hsolver, ex->tol); 2868 PetscCallExternal(HYPRE_StructPFMGSetRelaxType, ex->hsolver, ex->relax_type); 2869 PetscCallExternal(HYPRE_StructPFMGSetRAPType, ex->hsolver, ex->rap_type); 2870 2871 PetscCallExternal(HYPRE_StructPFMGSetup, ex->hsolver, mx->hmat, mx->hb, mx->hx); 2872 PetscCallExternal(HYPRE_StructPFMGSetZeroGuess, ex->hsolver); 2873 PetscFunctionReturn(PETSC_SUCCESS); 2874 } 2875 2876 /*MC 2877 PCPFMG - the hypre PFMG multigrid solver 2878 2879 Options Database Keys: 2880 + -pc_pfmg_its <its> - number of iterations of PFMG to use as preconditioner 2881 . -pc_pfmg_num_pre_relax <steps> - number of smoothing steps before coarse grid solve 2882 . -pc_pfmg_num_post_relax <steps> - number of smoothing steps after coarse grid solve 2883 . -pc_pfmg_tol <tol> - tolerance of PFMG 2884 . -pc_pfmg_relax_type - relaxation type for the up and down cycles, one of Jacobi,Weighted-Jacobi,symmetric-Red/Black-Gauss-Seidel,Red/Black-Gauss-Seidel 2885 . -pc_pfmg_rap_type - type of coarse matrix generation, one of Galerkin,non-Galerkin 2886 - -pc_pfmg_skip_relax - skip relaxation on certain grids for isotropic problems. This can greatly improve efficiency by eliminating unnecessary relaxations 2887 when the underlying problem is isotropic, one of 0,1 2888 2889 Level: advanced 2890 2891 Notes: 2892 This is for CELL-centered descretizations 2893 2894 See `PCSYSPFMG` for a version suitable for systems of PDEs, and `PCSMG` 2895 2896 See `PCHYPRE` for hypre's BoomerAMG algebraic multigrid solver 2897 2898 This must be used with the `MATHYPRESTRUCT` matrix type. 2899 2900 This provides only some of the functionality of PFMG, it supports only one block per process defined by a PETSc `DMDA`. 2901 2902 .seealso: [](ch_ksp), `PCMG`, `MATHYPRESTRUCT`, `PCHYPRE`, `PCGAMG`, `PCSYSPFMG`, `PCSMG` 2903 M*/ 2904 2905 PETSC_EXTERN PetscErrorCode PCCreate_PFMG(PC pc) 2906 { 2907 PC_PFMG *ex; 2908 2909 PetscFunctionBegin; 2910 PetscCall(PetscNew(&ex)); 2911 pc->data = ex; 2912 2913 ex->its = 1; 2914 ex->tol = 1.e-8; 2915 ex->relax_type = 1; 2916 ex->rap_type = 0; 2917 ex->num_pre_relax = 1; 2918 ex->num_post_relax = 1; 2919 ex->max_levels = 0; 2920 ex->skip_relax = 0; 2921 ex->print_statistics = PETSC_FALSE; 2922 2923 pc->ops->setfromoptions = PCSetFromOptions_PFMG; 2924 pc->ops->view = PCView_PFMG; 2925 pc->ops->destroy = PCDestroy_PFMG; 2926 pc->ops->apply = PCApply_PFMG; 2927 pc->ops->applyrichardson = PCApplyRichardson_PFMG; 2928 pc->ops->setup = PCSetUp_PFMG; 2929 2930 PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm)); 2931 PetscHYPREInitialize(); 2932 PetscCallExternal(HYPRE_StructPFMGCreate, ex->hcomm, &ex->hsolver); 2933 PetscFunctionReturn(PETSC_SUCCESS); 2934 } 2935 2936 /* we know we are working with a HYPRE_SStructMatrix */ 2937 typedef struct { 2938 MPI_Comm hcomm; /* does not share comm with HYPRE_SStructMatrix because need to create solver before getting matrix */ 2939 HYPRE_SStructSolver ss_solver; 2940 2941 /* keep copy of SYSPFMG options used so may view them */ 2942 PetscInt its; 2943 PetscReal tol; 2944 PetscInt relax_type; 2945 PetscInt num_pre_relax, num_post_relax; 2946 } PC_SysPFMG; 2947 2948 static PetscErrorCode PCDestroy_SysPFMG(PC pc) 2949 { 2950 PC_SysPFMG *ex = (PC_SysPFMG *)pc->data; 2951 2952 PetscFunctionBegin; 2953 if (ex->ss_solver) PetscCallExternal(HYPRE_SStructSysPFMGDestroy, ex->ss_solver); 2954 PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm)); 2955 PetscCall(PetscFree(pc->data)); 2956 PetscFunctionReturn(PETSC_SUCCESS); 2957 } 2958 2959 static const char *SysPFMGRelaxType[] = {"Weighted-Jacobi", "Red/Black-Gauss-Seidel"}; 2960 2961 static PetscErrorCode PCView_SysPFMG(PC pc, PetscViewer viewer) 2962 { 2963 PetscBool iascii; 2964 PC_SysPFMG *ex = (PC_SysPFMG *)pc->data; 2965 2966 PetscFunctionBegin; 2967 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 2968 if (iascii) { 2969 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE SysPFMG preconditioning\n")); 2970 PetscCall(PetscViewerASCIIPrintf(viewer, " max iterations %" PetscInt_FMT "\n", ex->its)); 2971 PetscCall(PetscViewerASCIIPrintf(viewer, " tolerance %g\n", ex->tol)); 2972 PetscCall(PetscViewerASCIIPrintf(viewer, " relax type %s\n", PFMGRelaxType[ex->relax_type])); 2973 PetscCall(PetscViewerASCIIPrintf(viewer, " number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax)); 2974 } 2975 PetscFunctionReturn(PETSC_SUCCESS); 2976 } 2977 2978 static PetscErrorCode PCSetFromOptions_SysPFMG(PC pc, PetscOptionItems PetscOptionsObject) 2979 { 2980 PC_SysPFMG *ex = (PC_SysPFMG *)pc->data; 2981 PetscBool flg = PETSC_FALSE; 2982 2983 PetscFunctionBegin; 2984 PetscOptionsHeadBegin(PetscOptionsObject, "SysPFMG options"); 2985 PetscCall(PetscOptionsBool("-pc_syspfmg_print_statistics", "Print statistics", "HYPRE_SStructSysPFMGSetPrintLevel", flg, &flg, NULL)); 2986 if (flg) PetscCallExternal(HYPRE_SStructSysPFMGSetPrintLevel, ex->ss_solver, 3); 2987 PetscCall(PetscOptionsInt("-pc_syspfmg_its", "Number of iterations of SysPFMG to use as preconditioner", "HYPRE_SStructSysPFMGSetMaxIter", ex->its, &ex->its, NULL)); 2988 PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, ex->ss_solver, ex->its); 2989 PetscCall(PetscOptionsInt("-pc_syspfmg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_SStructSysPFMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL)); 2990 PetscCallExternal(HYPRE_SStructSysPFMGSetNumPreRelax, ex->ss_solver, ex->num_pre_relax); 2991 PetscCall(PetscOptionsInt("-pc_syspfmg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_SStructSysPFMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL)); 2992 PetscCallExternal(HYPRE_SStructSysPFMGSetNumPostRelax, ex->ss_solver, ex->num_post_relax); 2993 2994 PetscCall(PetscOptionsReal("-pc_syspfmg_tol", "Tolerance of SysPFMG", "HYPRE_SStructSysPFMGSetTol", ex->tol, &ex->tol, NULL)); 2995 PetscCallExternal(HYPRE_SStructSysPFMGSetTol, ex->ss_solver, ex->tol); 2996 PetscCall(PetscOptionsEList("-pc_syspfmg_relax_type", "Relax type for the up and down cycles", "HYPRE_SStructSysPFMGSetRelaxType", SysPFMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(SysPFMGRelaxType), SysPFMGRelaxType[ex->relax_type], &ex->relax_type, NULL)); 2997 PetscCallExternal(HYPRE_SStructSysPFMGSetRelaxType, ex->ss_solver, ex->relax_type); 2998 PetscOptionsHeadEnd(); 2999 PetscFunctionReturn(PETSC_SUCCESS); 3000 } 3001 3002 static PetscErrorCode PCApply_SysPFMG(PC pc, Vec x, Vec y) 3003 { 3004 PC_SysPFMG *ex = (PC_SysPFMG *)pc->data; 3005 PetscScalar *yy; 3006 const PetscScalar *xx; 3007 PetscInt ilower[3], iupper[3]; 3008 HYPRE_Int hlower[3], hupper[3]; 3009 Mat_HYPRESStruct *mx = (Mat_HYPRESStruct *)pc->pmat->data; 3010 PetscInt ordering = mx->dofs_order; 3011 PetscInt nvars = mx->nvars; 3012 PetscInt part = 0; 3013 PetscInt size; 3014 PetscInt i; 3015 3016 PetscFunctionBegin; 3017 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 3018 PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2])); 3019 /* when HYPRE_MIXEDINT is defined, sizeof(HYPRE_Int) == 32 */ 3020 iupper[0] += ilower[0] - 1; 3021 iupper[1] += ilower[1] - 1; 3022 iupper[2] += ilower[2] - 1; 3023 hlower[0] = (HYPRE_Int)ilower[0]; 3024 hlower[1] = (HYPRE_Int)ilower[1]; 3025 hlower[2] = (HYPRE_Int)ilower[2]; 3026 hupper[0] = (HYPRE_Int)iupper[0]; 3027 hupper[1] = (HYPRE_Int)iupper[1]; 3028 hupper[2] = (HYPRE_Int)iupper[2]; 3029 3030 size = 1; 3031 for (i = 0; i < 3; i++) size *= (iupper[i] - ilower[i] + 1); 3032 3033 /* copy x values over to hypre for variable ordering */ 3034 if (ordering) { 3035 PetscCallExternal(HYPRE_SStructVectorSetConstantValues, mx->ss_b, 0.0); 3036 PetscCall(VecGetArrayRead(x, &xx)); 3037 for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorSetBoxValues, mx->ss_b, part, hlower, hupper, i, (HYPRE_Complex *)(xx + (size * i))); 3038 PetscCall(VecRestoreArrayRead(x, &xx)); 3039 PetscCallExternal(HYPRE_SStructVectorAssemble, mx->ss_b); 3040 PetscCallExternal(HYPRE_SStructMatrixMatvec, 1.0, mx->ss_mat, mx->ss_b, 0.0, mx->ss_x); 3041 PetscCallExternal(HYPRE_SStructSysPFMGSolve, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x); 3042 3043 /* copy solution values back to PETSc */ 3044 PetscCall(VecGetArray(y, &yy)); 3045 for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorGetBoxValues, mx->ss_x, part, hlower, hupper, i, (HYPRE_Complex *)(yy + (size * i))); 3046 PetscCall(VecRestoreArray(y, &yy)); 3047 } else { /* nodal ordering must be mapped to variable ordering for sys_pfmg */ 3048 PetscScalar *z; 3049 PetscInt j, k; 3050 3051 PetscCall(PetscMalloc1(nvars * size, &z)); 3052 PetscCallExternal(HYPRE_SStructVectorSetConstantValues, mx->ss_b, 0.0); 3053 PetscCall(VecGetArrayRead(x, &xx)); 3054 3055 /* transform nodal to hypre's variable ordering for sys_pfmg */ 3056 for (i = 0; i < size; i++) { 3057 k = i * nvars; 3058 for (j = 0; j < nvars; j++) z[j * size + i] = xx[k + j]; 3059 } 3060 for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorSetBoxValues, mx->ss_b, part, hlower, hupper, i, (HYPRE_Complex *)(z + (size * i))); 3061 PetscCall(VecRestoreArrayRead(x, &xx)); 3062 PetscCallExternal(HYPRE_SStructVectorAssemble, mx->ss_b); 3063 PetscCallExternal(HYPRE_SStructSysPFMGSolve, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x); 3064 3065 /* copy solution values back to PETSc */ 3066 PetscCall(VecGetArray(y, &yy)); 3067 for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorGetBoxValues, mx->ss_x, part, hlower, hupper, i, (HYPRE_Complex *)(z + (size * i))); 3068 /* transform hypre's variable ordering for sys_pfmg to nodal ordering */ 3069 for (i = 0; i < size; i++) { 3070 k = i * nvars; 3071 for (j = 0; j < nvars; j++) yy[k + j] = z[j * size + i]; 3072 } 3073 PetscCall(VecRestoreArray(y, &yy)); 3074 PetscCall(PetscFree(z)); 3075 } 3076 PetscFunctionReturn(PETSC_SUCCESS); 3077 } 3078 3079 static PetscErrorCode PCApplyRichardson_SysPFMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason) 3080 { 3081 PC_SysPFMG *jac = (PC_SysPFMG *)pc->data; 3082 HYPRE_Int oits; 3083 3084 PetscFunctionBegin; 3085 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 3086 PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, jac->ss_solver, its * jac->its); 3087 PetscCallExternal(HYPRE_SStructSysPFMGSetTol, jac->ss_solver, rtol); 3088 PetscCall(PCApply_SysPFMG(pc, b, y)); 3089 PetscCallExternal(HYPRE_SStructSysPFMGGetNumIterations, jac->ss_solver, &oits); 3090 *outits = oits; 3091 if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS; 3092 else *reason = PCRICHARDSON_CONVERGED_RTOL; 3093 PetscCallExternal(HYPRE_SStructSysPFMGSetTol, jac->ss_solver, jac->tol); 3094 PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, jac->ss_solver, jac->its); 3095 PetscFunctionReturn(PETSC_SUCCESS); 3096 } 3097 3098 static PetscErrorCode PCSetUp_SysPFMG(PC pc) 3099 { 3100 PC_SysPFMG *ex = (PC_SysPFMG *)pc->data; 3101 Mat_HYPRESStruct *mx = (Mat_HYPRESStruct *)pc->pmat->data; 3102 PetscBool flg; 3103 3104 PetscFunctionBegin; 3105 PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESSTRUCT, &flg)); 3106 PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESSTRUCT with this preconditioner"); 3107 3108 /* create the hypre sstruct solver object and set its information */ 3109 if (ex->ss_solver) PetscCallExternal(HYPRE_SStructSysPFMGDestroy, ex->ss_solver); 3110 PetscCallExternal(HYPRE_SStructSysPFMGCreate, ex->hcomm, &ex->ss_solver); 3111 PetscCallExternal(HYPRE_SStructSysPFMGSetZeroGuess, ex->ss_solver); 3112 PetscCallExternal(HYPRE_SStructSysPFMGSetup, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x); 3113 PetscFunctionReturn(PETSC_SUCCESS); 3114 } 3115 3116 /*MC 3117 PCSYSPFMG - the hypre SysPFMG multigrid solver 3118 3119 Level: advanced 3120 3121 Options Database Keys: 3122 + -pc_syspfmg_its <its> - number of iterations of SysPFMG to use as preconditioner 3123 . -pc_syspfmg_num_pre_relax <steps> - number of smoothing steps before coarse grid 3124 . -pc_syspfmg_num_post_relax <steps> - number of smoothing steps after coarse grid 3125 . -pc_syspfmg_tol <tol> - tolerance of SysPFMG 3126 - -pc_syspfmg_relax_type <Weighted-Jacobi,Red/Black-Gauss-Seidel> - relaxation type for the up and down cycles 3127 3128 Notes: 3129 See `PCPFMG` for hypre's PFMG that works for a scalar PDE and `PCSMG` 3130 3131 See `PCHYPRE` for hypre's BoomerAMG algebraic multigrid solver 3132 3133 This is for CELL-centered descretizations 3134 3135 This must be used with the `MATHYPRESSTRUCT` matrix type. 3136 3137 This does not give access to all the functionality of hypres SysPFMG, it supports only one part, and one block per process defined by a PETSc `DMDA`. 3138 3139 .seealso: [](ch_ksp), `PCMG`, `MATHYPRESSTRUCT`, `PCPFMG`, `PCHYPRE`, `PCGAMG`, `PCSMG` 3140 M*/ 3141 3142 PETSC_EXTERN PetscErrorCode PCCreate_SysPFMG(PC pc) 3143 { 3144 PC_SysPFMG *ex; 3145 3146 PetscFunctionBegin; 3147 PetscCall(PetscNew(&ex)); 3148 pc->data = ex; 3149 3150 ex->its = 1; 3151 ex->tol = 1.e-8; 3152 ex->relax_type = 1; 3153 ex->num_pre_relax = 1; 3154 ex->num_post_relax = 1; 3155 3156 pc->ops->setfromoptions = PCSetFromOptions_SysPFMG; 3157 pc->ops->view = PCView_SysPFMG; 3158 pc->ops->destroy = PCDestroy_SysPFMG; 3159 pc->ops->apply = PCApply_SysPFMG; 3160 pc->ops->applyrichardson = PCApplyRichardson_SysPFMG; 3161 pc->ops->setup = PCSetUp_SysPFMG; 3162 3163 PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm)); 3164 PetscHYPREInitialize(); 3165 PetscCallExternal(HYPRE_SStructSysPFMGCreate, ex->hcomm, &ex->ss_solver); 3166 PetscFunctionReturn(PETSC_SUCCESS); 3167 } 3168 3169 /* PC SMG */ 3170 typedef struct { 3171 MPI_Comm hcomm; /* does not share comm with HYPRE_StructMatrix because need to create solver before getting matrix */ 3172 HYPRE_StructSolver hsolver; 3173 PetscInt its; /* keep copy of SMG options used so may view them */ 3174 PetscReal tol; 3175 PetscBool print_statistics; 3176 PetscInt num_pre_relax, num_post_relax; 3177 } PC_SMG; 3178 3179 static PetscErrorCode PCDestroy_SMG(PC pc) 3180 { 3181 PC_SMG *ex = (PC_SMG *)pc->data; 3182 3183 PetscFunctionBegin; 3184 if (ex->hsolver) PetscCallExternal(HYPRE_StructSMGDestroy, ex->hsolver); 3185 PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm)); 3186 PetscCall(PetscFree(pc->data)); 3187 PetscFunctionReturn(PETSC_SUCCESS); 3188 } 3189 3190 static PetscErrorCode PCView_SMG(PC pc, PetscViewer viewer) 3191 { 3192 PetscBool iascii; 3193 PC_SMG *ex = (PC_SMG *)pc->data; 3194 3195 PetscFunctionBegin; 3196 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 3197 if (iascii) { 3198 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE SMG preconditioning\n")); 3199 PetscCall(PetscViewerASCIIPrintf(viewer, " max iterations %" PetscInt_FMT "\n", ex->its)); 3200 PetscCall(PetscViewerASCIIPrintf(viewer, " tolerance %g\n", ex->tol)); 3201 PetscCall(PetscViewerASCIIPrintf(viewer, " number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax)); 3202 } 3203 PetscFunctionReturn(PETSC_SUCCESS); 3204 } 3205 3206 static PetscErrorCode PCSetFromOptions_SMG(PC pc, PetscOptionItems PetscOptionsObject) 3207 { 3208 PC_SMG *ex = (PC_SMG *)pc->data; 3209 3210 PetscFunctionBegin; 3211 PetscOptionsHeadBegin(PetscOptionsObject, "SMG options"); 3212 3213 PetscCall(PetscOptionsInt("-pc_smg_its", "Number of iterations of SMG to use as preconditioner", "HYPRE_StructSMGSetMaxIter", ex->its, &ex->its, NULL)); 3214 PetscCall(PetscOptionsInt("-pc_smg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_StructSMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL)); 3215 PetscCall(PetscOptionsInt("-pc_smg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_StructSMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL)); 3216 PetscCall(PetscOptionsReal("-pc_smg_tol", "Tolerance of SMG", "HYPRE_StructSMGSetTol", ex->tol, &ex->tol, NULL)); 3217 3218 PetscOptionsHeadEnd(); 3219 PetscFunctionReturn(PETSC_SUCCESS); 3220 } 3221 3222 static PetscErrorCode PCApply_SMG(PC pc, Vec x, Vec y) 3223 { 3224 PC_SMG *ex = (PC_SMG *)pc->data; 3225 PetscScalar *yy; 3226 const PetscScalar *xx; 3227 PetscInt ilower[3], iupper[3]; 3228 HYPRE_Int hlower[3], hupper[3]; 3229 Mat_HYPREStruct *mx = (Mat_HYPREStruct *)pc->pmat->data; 3230 3231 PetscFunctionBegin; 3232 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 3233 PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2])); 3234 /* when HYPRE_MIXEDINT is defined, sizeof(HYPRE_Int) == 32 */ 3235 iupper[0] += ilower[0] - 1; 3236 iupper[1] += ilower[1] - 1; 3237 iupper[2] += ilower[2] - 1; 3238 hlower[0] = (HYPRE_Int)ilower[0]; 3239 hlower[1] = (HYPRE_Int)ilower[1]; 3240 hlower[2] = (HYPRE_Int)ilower[2]; 3241 hupper[0] = (HYPRE_Int)iupper[0]; 3242 hupper[1] = (HYPRE_Int)iupper[1]; 3243 hupper[2] = (HYPRE_Int)iupper[2]; 3244 3245 /* copy x values over to hypre */ 3246 PetscCallExternal(HYPRE_StructVectorSetConstantValues, mx->hb, 0.0); 3247 PetscCall(VecGetArrayRead(x, &xx)); 3248 PetscCallExternal(HYPRE_StructVectorSetBoxValues, mx->hb, hlower, hupper, (HYPRE_Complex *)xx); 3249 PetscCall(VecRestoreArrayRead(x, &xx)); 3250 PetscCallExternal(HYPRE_StructVectorAssemble, mx->hb); 3251 PetscCallExternal(HYPRE_StructSMGSolve, ex->hsolver, mx->hmat, mx->hb, mx->hx); 3252 3253 /* copy solution values back to PETSc */ 3254 PetscCall(VecGetArray(y, &yy)); 3255 PetscCallExternal(HYPRE_StructVectorGetBoxValues, mx->hx, hlower, hupper, (HYPRE_Complex *)yy); 3256 PetscCall(VecRestoreArray(y, &yy)); 3257 PetscFunctionReturn(PETSC_SUCCESS); 3258 } 3259 3260 static PetscErrorCode PCApplyRichardson_SMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason) 3261 { 3262 PC_SMG *jac = (PC_SMG *)pc->data; 3263 HYPRE_Int oits; 3264 3265 PetscFunctionBegin; 3266 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 3267 PetscCallExternal(HYPRE_StructSMGSetMaxIter, jac->hsolver, its * jac->its); 3268 PetscCallExternal(HYPRE_StructSMGSetTol, jac->hsolver, rtol); 3269 3270 PetscCall(PCApply_SMG(pc, b, y)); 3271 PetscCallExternal(HYPRE_StructSMGGetNumIterations, jac->hsolver, &oits); 3272 *outits = oits; 3273 if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS; 3274 else *reason = PCRICHARDSON_CONVERGED_RTOL; 3275 PetscCallExternal(HYPRE_StructSMGSetTol, jac->hsolver, jac->tol); 3276 PetscCallExternal(HYPRE_StructSMGSetMaxIter, jac->hsolver, jac->its); 3277 PetscFunctionReturn(PETSC_SUCCESS); 3278 } 3279 3280 static PetscErrorCode PCSetUp_SMG(PC pc) 3281 { 3282 PetscInt i, dim; 3283 PC_SMG *ex = (PC_SMG *)pc->data; 3284 Mat_HYPREStruct *mx = (Mat_HYPREStruct *)pc->pmat->data; 3285 PetscBool flg; 3286 DMBoundaryType p[3]; 3287 PetscInt M[3]; 3288 3289 PetscFunctionBegin; 3290 PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESTRUCT, &flg)); 3291 PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESTRUCT with this preconditioner"); 3292 3293 PetscCall(DMDAGetInfo(mx->da, &dim, &M[0], &M[1], &M[2], 0, 0, 0, 0, 0, &p[0], &p[1], &p[2], 0)); 3294 // Check if power of 2 in periodic directions 3295 for (i = 0; i < dim; i++) { 3296 if (((M[i] & (M[i] - 1)) != 0) && (p[i] == DM_BOUNDARY_PERIODIC)) { 3297 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "With SMG, the number of points in a periodic direction must be a power of 2, but is here %" PetscInt_FMT ".", M[i]); 3298 } 3299 } 3300 3301 /* create the hypre solver object and set its information */ 3302 if (ex->hsolver) PetscCallExternal(HYPRE_StructSMGDestroy, ex->hsolver); 3303 PetscCallExternal(HYPRE_StructSMGCreate, ex->hcomm, &ex->hsolver); 3304 // The hypre options must be set here and not in SetFromOptions because it is created here! 3305 PetscCallExternal(HYPRE_StructSMGSetMaxIter, ex->hsolver, ex->its); 3306 PetscCallExternal(HYPRE_StructSMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax); 3307 PetscCallExternal(HYPRE_StructSMGSetNumPostRelax, ex->hsolver, ex->num_post_relax); 3308 PetscCallExternal(HYPRE_StructSMGSetTol, ex->hsolver, ex->tol); 3309 3310 PetscCallExternal(HYPRE_StructSMGSetup, ex->hsolver, mx->hmat, mx->hb, mx->hx); 3311 PetscCallExternal(HYPRE_StructSMGSetZeroGuess, ex->hsolver); 3312 PetscFunctionReturn(PETSC_SUCCESS); 3313 } 3314 3315 /*MC 3316 PCSMG - the hypre (structured grid) SMG multigrid solver 3317 3318 Level: advanced 3319 3320 Options Database Keys: 3321 + -pc_smg_its <its> - number of iterations of SMG to use as preconditioner 3322 . -pc_smg_num_pre_relax <steps> - number of smoothing steps before coarse grid 3323 . -pc_smg_num_post_relax <steps> - number of smoothing steps after coarse grid 3324 - -pc_smg_tol <tol> - tolerance of SMG 3325 3326 Notes: 3327 This is for CELL-centered descretizations 3328 3329 This must be used with the `MATHYPRESTRUCT` `MatType`. 3330 3331 This does not provide all the functionality of hypre's SMG solver, it supports only one block per process defined by a PETSc `DMDA`. 3332 3333 See `PCSYSPFMG`, `PCSMG`, `PCPFMG`, and `PCHYPRE` for access to hypre's other preconditioners 3334 3335 .seealso: `PCMG`, `MATHYPRESTRUCT`, `PCPFMG`, `PCSYSPFMG`, `PCHYPRE`, `PCGAMG` 3336 M*/ 3337 3338 PETSC_EXTERN PetscErrorCode PCCreate_SMG(PC pc) 3339 { 3340 PC_SMG *ex; 3341 3342 PetscFunctionBegin; 3343 PetscCall(PetscNew(&ex)); 3344 pc->data = ex; 3345 3346 ex->its = 1; 3347 ex->tol = 1.e-8; 3348 ex->num_pre_relax = 1; 3349 ex->num_post_relax = 1; 3350 3351 pc->ops->setfromoptions = PCSetFromOptions_SMG; 3352 pc->ops->view = PCView_SMG; 3353 pc->ops->destroy = PCDestroy_SMG; 3354 pc->ops->apply = PCApply_SMG; 3355 pc->ops->applyrichardson = PCApplyRichardson_SMG; 3356 pc->ops->setup = PCSetUp_SMG; 3357 3358 PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm)); 3359 PetscHYPREInitialize(); 3360 PetscCallExternal(HYPRE_StructSMGCreate, ex->hcomm, &ex->hsolver); 3361 PetscFunctionReturn(PETSC_SUCCESS); 3362 } 3363