1 /* 2 Provides an interface to the LLNL package hypre 3 */ 4 5 #include <petscpkg_version.h> 6 #include <petsc/private/pcimpl.h> /*I "petscpc.h" I*/ 7 /* this include is needed ONLY to allow access to the private data inside the Mat object specific to hypre */ 8 #include <petsc/private/matimpl.h> 9 #include <petsc/private/vecimpl.h> 10 #include <../src/vec/vec/impls/hypre/vhyp.h> 11 #include <../src/mat/impls/hypre/mhypre.h> 12 #include <../src/dm/impls/da/hypre/mhyp.h> 13 #include <_hypre_parcsr_ls.h> 14 #include <petscmathypre.h> 15 16 #if defined(PETSC_HAVE_HYPRE_DEVICE) 17 #include <petsc/private/deviceimpl.h> 18 #endif 19 20 static PetscBool cite = PETSC_FALSE; 21 static const char hypreCitation[] = "@manual{hypre-web-page,\n title = {{\\sl hypre}: High Performance Preconditioners},\n organization = {Lawrence Livermore National Laboratory},\n note = " 22 "{\\url{https://www.llnl.gov/casc/hypre}}\n}\n"; 23 24 /* 25 Private context (data structure) for the preconditioner. 26 */ 27 typedef struct { 28 HYPRE_Solver hsolver; 29 Mat hpmat; /* MatHYPRE */ 30 31 HYPRE_Int (*destroy)(HYPRE_Solver); 32 HYPRE_Int (*solve)(HYPRE_Solver, HYPRE_ParCSRMatrix, HYPRE_ParVector, HYPRE_ParVector); 33 HYPRE_Int (*setup)(HYPRE_Solver, HYPRE_ParCSRMatrix, HYPRE_ParVector, HYPRE_ParVector); 34 35 MPI_Comm comm_hypre; 36 char *hypre_type; 37 38 /* options for Pilut and BoomerAMG*/ 39 PetscInt maxiter; 40 PetscReal tol; 41 42 /* options for Pilut */ 43 PetscInt factorrowsize; 44 45 /* options for ParaSails */ 46 PetscInt nlevels; 47 PetscReal threshold; 48 PetscReal filter; 49 PetscReal loadbal; 50 PetscInt logging; 51 PetscInt ruse; 52 PetscInt symt; 53 54 /* options for BoomerAMG */ 55 PetscBool printstatistics; 56 57 /* options for BoomerAMG */ 58 PetscInt cycletype; 59 PetscInt maxlevels; 60 PetscReal strongthreshold; 61 PetscReal maxrowsum; 62 PetscInt gridsweeps[3]; 63 PetscInt coarsentype; 64 PetscInt measuretype; 65 PetscInt smoothtype; 66 PetscInt smoothnumlevels; 67 PetscInt eu_level; /* Number of levels for ILU(k) in Euclid */ 68 PetscReal eu_droptolerance; /* Drop tolerance for ILU(k) in Euclid */ 69 PetscInt eu_bj; /* Defines use of Block Jacobi ILU in Euclid */ 70 PetscInt relaxtype[3]; 71 PetscReal relaxweight; 72 PetscReal outerrelaxweight; 73 PetscInt relaxorder; 74 PetscReal truncfactor; 75 PetscBool applyrichardson; 76 PetscInt pmax; 77 PetscInt interptype; 78 PetscInt maxc; 79 PetscInt minc; 80 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0) 81 char *spgemm_type; // this is a global hypre parameter but is closely associated with BoomerAMG 82 #endif 83 /* GPU */ 84 PetscBool keeptranspose; 85 PetscInt rap2; 86 PetscInt mod_rap2; 87 88 /* AIR */ 89 PetscInt Rtype; 90 PetscReal Rstrongthreshold; 91 PetscReal Rfilterthreshold; 92 PetscInt Adroptype; 93 PetscReal Adroptol; 94 95 PetscInt agg_nl; 96 PetscInt agg_interptype; 97 PetscInt agg_num_paths; 98 PetscBool nodal_relax; 99 PetscInt nodal_relax_levels; 100 101 PetscInt nodal_coarsening; 102 PetscInt nodal_coarsening_diag; 103 PetscInt vec_interp_variant; 104 PetscInt vec_interp_qmax; 105 PetscBool vec_interp_smooth; 106 PetscInt interp_refine; 107 108 /* NearNullSpace support */ 109 VecHYPRE_IJVector *hmnull; 110 HYPRE_ParVector *phmnull; 111 PetscInt n_hmnull; 112 Vec hmnull_constant; 113 114 /* options for AS (Auxiliary Space preconditioners) */ 115 PetscInt as_print; 116 PetscInt as_max_iter; 117 PetscReal as_tol; 118 PetscInt as_relax_type; 119 PetscInt as_relax_times; 120 PetscReal as_relax_weight; 121 PetscReal as_omega; 122 PetscInt as_amg_alpha_opts[5]; /* AMG coarsen type, agg_levels, relax_type, interp_type, Pmax for vector Poisson (AMS) or Curl problem (ADS) */ 123 PetscReal as_amg_alpha_theta; /* AMG strength for vector Poisson (AMS) or Curl problem (ADS) */ 124 PetscInt as_amg_beta_opts[5]; /* AMG coarsen type, agg_levels, relax_type, interp_type, Pmax for scalar Poisson (AMS) or vector Poisson (ADS) */ 125 PetscReal as_amg_beta_theta; /* AMG strength for scalar Poisson (AMS) or vector Poisson (ADS) */ 126 PetscInt ams_cycle_type; 127 PetscInt ads_cycle_type; 128 129 /* additional data */ 130 Mat G; /* MatHYPRE */ 131 Mat C; /* MatHYPRE */ 132 Mat alpha_Poisson; /* MatHYPRE */ 133 Mat beta_Poisson; /* MatHYPRE */ 134 135 /* extra information for AMS */ 136 PetscInt dim; /* geometrical dimension */ 137 VecHYPRE_IJVector coords[3]; 138 VecHYPRE_IJVector constants[3]; 139 VecHYPRE_IJVector interior; 140 Mat RT_PiFull, RT_Pi[3]; 141 Mat ND_PiFull, ND_Pi[3]; 142 PetscBool ams_beta_is_zero; 143 PetscBool ams_beta_is_zero_part; 144 PetscInt ams_proj_freq; 145 } PC_HYPRE; 146 147 /* 148 Matrices with AIJ format are created IN PLACE with using (I,J,data) from BoomerAMG. Since the data format in hypre_ParCSRMatrix 149 is different from that used in PETSc, the original hypre_ParCSRMatrix can not be used any more after call this routine. 150 It is used in PCHMG. Other users should avoid using this function. 151 */ 152 static PetscErrorCode PCGetCoarseOperators_BoomerAMG(PC pc, PetscInt *nlevels, Mat *operators[]) 153 { 154 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 155 PetscBool same = PETSC_FALSE; 156 PetscInt num_levels, l; 157 Mat *mattmp; 158 hypre_ParCSRMatrix **A_array; 159 160 PetscFunctionBegin; 161 PetscCall(PetscStrcmp(jac->hypre_type, "boomeramg", &same)); 162 PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_NOTSAMETYPE, "Hypre type is not BoomerAMG "); 163 num_levels = hypre_ParAMGDataNumLevels((hypre_ParAMGData *)(jac->hsolver)); 164 PetscCall(PetscMalloc1(num_levels, &mattmp)); 165 A_array = hypre_ParAMGDataAArray((hypre_ParAMGData *)(jac->hsolver)); 166 for (l = 1; l < num_levels; l++) { 167 PetscCall(MatCreateFromParCSR(A_array[l], MATAIJ, PETSC_OWN_POINTER, &(mattmp[num_levels - 1 - l]))); 168 /* We want to own the data, and HYPRE can not touch this matrix any more */ 169 A_array[l] = NULL; 170 } 171 *nlevels = num_levels; 172 *operators = mattmp; 173 PetscFunctionReturn(PETSC_SUCCESS); 174 } 175 176 /* 177 Matrices with AIJ format are created IN PLACE with using (I,J,data) from BoomerAMG. Since the data format in hypre_ParCSRMatrix 178 is different from that used in PETSc, the original hypre_ParCSRMatrix can not be used any more after call this routine. 179 It is used in PCHMG. Other users should avoid using this function. 180 */ 181 static PetscErrorCode PCGetInterpolations_BoomerAMG(PC pc, PetscInt *nlevels, Mat *interpolations[]) 182 { 183 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 184 PetscBool same = PETSC_FALSE; 185 PetscInt num_levels, l; 186 Mat *mattmp; 187 hypre_ParCSRMatrix **P_array; 188 189 PetscFunctionBegin; 190 PetscCall(PetscStrcmp(jac->hypre_type, "boomeramg", &same)); 191 PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_NOTSAMETYPE, "Hypre type is not BoomerAMG "); 192 num_levels = hypre_ParAMGDataNumLevels((hypre_ParAMGData *)(jac->hsolver)); 193 PetscCall(PetscMalloc1(num_levels, &mattmp)); 194 P_array = hypre_ParAMGDataPArray((hypre_ParAMGData *)(jac->hsolver)); 195 for (l = 1; l < num_levels; l++) { 196 PetscCall(MatCreateFromParCSR(P_array[num_levels - 1 - l], MATAIJ, PETSC_OWN_POINTER, &(mattmp[l - 1]))); 197 /* We want to own the data, and HYPRE can not touch this matrix any more */ 198 P_array[num_levels - 1 - l] = NULL; 199 } 200 *nlevels = num_levels; 201 *interpolations = mattmp; 202 PetscFunctionReturn(PETSC_SUCCESS); 203 } 204 205 /* Resets (frees) Hypre's representation of the near null space */ 206 static PetscErrorCode PCHYPREResetNearNullSpace_Private(PC pc) 207 { 208 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 209 PetscInt i; 210 211 PetscFunctionBegin; 212 for (i = 0; i < jac->n_hmnull; i++) PetscCall(VecHYPRE_IJVectorDestroy(&jac->hmnull[i])); 213 PetscCall(PetscFree(jac->hmnull)); 214 PetscCall(PetscFree(jac->phmnull)); 215 PetscCall(VecDestroy(&jac->hmnull_constant)); 216 jac->n_hmnull = 0; 217 PetscFunctionReturn(PETSC_SUCCESS); 218 } 219 220 static PetscErrorCode PCSetUp_HYPRE(PC pc) 221 { 222 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 223 Mat_HYPRE *hjac; 224 HYPRE_ParCSRMatrix hmat; 225 HYPRE_ParVector bv, xv; 226 PetscBool ishypre; 227 228 PetscFunctionBegin; 229 /* default type is boomerAMG */ 230 if (!jac->hypre_type) PetscCall(PCHYPRESetType(pc, "boomeramg")); 231 232 /* get hypre matrix */ 233 if (pc->flag == DIFFERENT_NONZERO_PATTERN) PetscCall(MatDestroy(&jac->hpmat)); 234 PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRE, &ishypre)); 235 if (!ishypre) { 236 /* Temporary fix since we do not support MAT_REUSE_MATRIX with HYPRE device */ 237 #if defined(PETSC_HAVE_HYPRE_DEVICE) 238 PetscBool iscuda, iship, iskokkos; 239 240 PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &iscuda, MATSEQAIJCUSPARSE, MATMPIAIJCUSPARSE, "")); 241 PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &iship, MATSEQAIJHIPSPARSE, MATMPIAIJHIPSPARSE, "")); 242 PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &iskokkos, MATSEQAIJKOKKOS, MATMPIAIJKOKKOS, "")); 243 if (iscuda || iship || iskokkos) PetscCall(MatDestroy(&jac->hpmat)); 244 #endif 245 PetscCall(MatConvert(pc->pmat, MATHYPRE, jac->hpmat ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX, &jac->hpmat)); 246 } else { 247 PetscCall(PetscObjectReference((PetscObject)pc->pmat)); 248 PetscCall(MatDestroy(&jac->hpmat)); 249 jac->hpmat = pc->pmat; 250 } 251 252 /* allow debug */ 253 PetscCall(MatViewFromOptions(jac->hpmat, NULL, "-pc_hypre_mat_view")); 254 hjac = (Mat_HYPRE *)(jac->hpmat->data); 255 256 /* special case for BoomerAMG */ 257 if (jac->setup == HYPRE_BoomerAMGSetup) { 258 MatNullSpace mnull; 259 PetscBool has_const; 260 PetscInt bs, nvec, i; 261 const Vec *vecs; 262 263 PetscCall(MatGetBlockSize(pc->pmat, &bs)); 264 if (bs > 1) PetscCallExternal(HYPRE_BoomerAMGSetNumFunctions, jac->hsolver, bs); 265 PetscCall(MatGetNearNullSpace(pc->mat, &mnull)); 266 if (mnull) { 267 PetscCall(PCHYPREResetNearNullSpace_Private(pc)); 268 PetscCall(MatNullSpaceGetVecs(mnull, &has_const, &nvec, &vecs)); 269 PetscCall(PetscMalloc1(nvec + 1, &jac->hmnull)); 270 PetscCall(PetscMalloc1(nvec + 1, &jac->phmnull)); 271 for (i = 0; i < nvec; i++) { 272 PetscCall(VecHYPRE_IJVectorCreate(vecs[i]->map, &jac->hmnull[i])); 273 PetscCall(VecHYPRE_IJVectorCopy(vecs[i], jac->hmnull[i])); 274 PetscCallExternal(HYPRE_IJVectorGetObject, jac->hmnull[i]->ij, (void **)&jac->phmnull[i]); 275 } 276 if (has_const) { 277 PetscCall(MatCreateVecs(pc->pmat, &jac->hmnull_constant, NULL)); 278 PetscCall(VecSet(jac->hmnull_constant, 1)); 279 PetscCall(VecNormalize(jac->hmnull_constant, NULL)); 280 PetscCall(VecHYPRE_IJVectorCreate(jac->hmnull_constant->map, &jac->hmnull[nvec])); 281 PetscCall(VecHYPRE_IJVectorCopy(jac->hmnull_constant, jac->hmnull[nvec])); 282 PetscCallExternal(HYPRE_IJVectorGetObject, jac->hmnull[nvec]->ij, (void **)&jac->phmnull[nvec]); 283 nvec++; 284 } 285 PetscCallExternal(HYPRE_BoomerAMGSetInterpVectors, jac->hsolver, nvec, jac->phmnull); 286 jac->n_hmnull = nvec; 287 } 288 } 289 290 /* special case for AMS */ 291 if (jac->setup == HYPRE_AMSSetup) { 292 Mat_HYPRE *hm; 293 HYPRE_ParCSRMatrix parcsr; 294 if (!jac->coords[0] && !jac->constants[0] && !(jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1]))) { 295 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE AMS preconditioner needs either the coordinate vectors via PCSetCoordinates() or the edge constant vectors via PCHYPRESetEdgeConstantVectors() or the interpolation matrix via PCHYPRESetInterpolations()"); 296 } 297 if (jac->dim) PetscCallExternal(HYPRE_AMSSetDimension, jac->hsolver, jac->dim); 298 if (jac->constants[0]) { 299 HYPRE_ParVector ozz, zoz, zzo = NULL; 300 PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[0]->ij, (void **)(&ozz)); 301 PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[1]->ij, (void **)(&zoz)); 302 if (jac->constants[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[2]->ij, (void **)(&zzo)); 303 PetscCallExternal(HYPRE_AMSSetEdgeConstantVectors, jac->hsolver, ozz, zoz, zzo); 304 } 305 if (jac->coords[0]) { 306 HYPRE_ParVector coords[3]; 307 coords[0] = NULL; 308 coords[1] = NULL; 309 coords[2] = NULL; 310 if (jac->coords[0]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[0]->ij, (void **)(&coords[0])); 311 if (jac->coords[1]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[1]->ij, (void **)(&coords[1])); 312 if (jac->coords[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[2]->ij, (void **)(&coords[2])); 313 PetscCallExternal(HYPRE_AMSSetCoordinateVectors, jac->hsolver, coords[0], coords[1], coords[2]); 314 } 315 PetscCheck(jac->G, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE AMS preconditioner needs the discrete gradient operator via PCHYPRESetDiscreteGradient"); 316 hm = (Mat_HYPRE *)(jac->G->data); 317 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr)); 318 PetscCallExternal(HYPRE_AMSSetDiscreteGradient, jac->hsolver, parcsr); 319 if (jac->alpha_Poisson) { 320 hm = (Mat_HYPRE *)(jac->alpha_Poisson->data); 321 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr)); 322 PetscCallExternal(HYPRE_AMSSetAlphaPoissonMatrix, jac->hsolver, parcsr); 323 } 324 if (jac->ams_beta_is_zero) { 325 PetscCallExternal(HYPRE_AMSSetBetaPoissonMatrix, jac->hsolver, NULL); 326 } else if (jac->beta_Poisson) { 327 hm = (Mat_HYPRE *)(jac->beta_Poisson->data); 328 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr)); 329 PetscCallExternal(HYPRE_AMSSetBetaPoissonMatrix, jac->hsolver, parcsr); 330 } else if (jac->ams_beta_is_zero_part) { 331 if (jac->interior) { 332 HYPRE_ParVector interior = NULL; 333 PetscCallExternal(HYPRE_IJVectorGetObject, jac->interior->ij, (void **)(&interior)); 334 PetscCallExternal(HYPRE_AMSSetInteriorNodes, jac->hsolver, interior); 335 } else { 336 jac->ams_beta_is_zero_part = PETSC_FALSE; 337 } 338 } 339 if (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1])) { 340 PetscInt i; 341 HYPRE_ParCSRMatrix nd_parcsrfull, nd_parcsr[3]; 342 if (jac->ND_PiFull) { 343 hm = (Mat_HYPRE *)(jac->ND_PiFull->data); 344 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsrfull)); 345 } else { 346 nd_parcsrfull = NULL; 347 } 348 for (i = 0; i < 3; ++i) { 349 if (jac->ND_Pi[i]) { 350 hm = (Mat_HYPRE *)(jac->ND_Pi[i]->data); 351 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsr[i])); 352 } else { 353 nd_parcsr[i] = NULL; 354 } 355 } 356 PetscCallExternal(HYPRE_AMSSetInterpolations, jac->hsolver, nd_parcsrfull, nd_parcsr[0], nd_parcsr[1], nd_parcsr[2]); 357 } 358 } 359 /* special case for ADS */ 360 if (jac->setup == HYPRE_ADSSetup) { 361 Mat_HYPRE *hm; 362 HYPRE_ParCSRMatrix parcsr; 363 if (!jac->coords[0] && !((jac->RT_PiFull || (jac->RT_Pi[0] && jac->RT_Pi[1])) && (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1])))) { 364 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs either the coordinate vectors via PCSetCoordinates() or the interpolation matrices via PCHYPRESetInterpolations"); 365 } else PetscCheck(jac->coords[1] && jac->coords[2], PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner has been designed for three dimensional problems! For two dimensional problems, use HYPRE AMS instead"); 366 PetscCheck(jac->G, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs the discrete gradient operator via PCHYPRESetDiscreteGradient"); 367 PetscCheck(jac->C, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs the discrete curl operator via PCHYPRESetDiscreteGradient"); 368 if (jac->coords[0]) { 369 HYPRE_ParVector coords[3]; 370 coords[0] = NULL; 371 coords[1] = NULL; 372 coords[2] = NULL; 373 if (jac->coords[0]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[0]->ij, (void **)(&coords[0])); 374 if (jac->coords[1]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[1]->ij, (void **)(&coords[1])); 375 if (jac->coords[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[2]->ij, (void **)(&coords[2])); 376 PetscCallExternal(HYPRE_ADSSetCoordinateVectors, jac->hsolver, coords[0], coords[1], coords[2]); 377 } 378 hm = (Mat_HYPRE *)(jac->G->data); 379 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr)); 380 PetscCallExternal(HYPRE_ADSSetDiscreteGradient, jac->hsolver, parcsr); 381 hm = (Mat_HYPRE *)(jac->C->data); 382 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr)); 383 PetscCallExternal(HYPRE_ADSSetDiscreteCurl, jac->hsolver, parcsr); 384 if ((jac->RT_PiFull || (jac->RT_Pi[0] && jac->RT_Pi[1])) && (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1]))) { 385 PetscInt i; 386 HYPRE_ParCSRMatrix rt_parcsrfull, rt_parcsr[3]; 387 HYPRE_ParCSRMatrix nd_parcsrfull, nd_parcsr[3]; 388 if (jac->RT_PiFull) { 389 hm = (Mat_HYPRE *)(jac->RT_PiFull->data); 390 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&rt_parcsrfull)); 391 } else { 392 rt_parcsrfull = NULL; 393 } 394 for (i = 0; i < 3; ++i) { 395 if (jac->RT_Pi[i]) { 396 hm = (Mat_HYPRE *)(jac->RT_Pi[i]->data); 397 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&rt_parcsr[i])); 398 } else { 399 rt_parcsr[i] = NULL; 400 } 401 } 402 if (jac->ND_PiFull) { 403 hm = (Mat_HYPRE *)(jac->ND_PiFull->data); 404 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsrfull)); 405 } else { 406 nd_parcsrfull = NULL; 407 } 408 for (i = 0; i < 3; ++i) { 409 if (jac->ND_Pi[i]) { 410 hm = (Mat_HYPRE *)(jac->ND_Pi[i]->data); 411 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsr[i])); 412 } else { 413 nd_parcsr[i] = NULL; 414 } 415 } 416 PetscCallExternal(HYPRE_ADSSetInterpolations, jac->hsolver, rt_parcsrfull, rt_parcsr[0], rt_parcsr[1], rt_parcsr[2], nd_parcsrfull, nd_parcsr[0], nd_parcsr[1], nd_parcsr[2]); 417 } 418 } 419 PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat); 420 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&bv); 421 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&xv); 422 PetscCall(PetscFPTrapPush(PETSC_FP_TRAP_OFF)); 423 PetscCallExternal(jac->setup, jac->hsolver, hmat, bv, xv); 424 PetscCall(PetscFPTrapPop()); 425 PetscFunctionReturn(PETSC_SUCCESS); 426 } 427 428 static PetscErrorCode PCApply_HYPRE(PC pc, Vec b, Vec x) 429 { 430 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 431 Mat_HYPRE *hjac = (Mat_HYPRE *)(jac->hpmat->data); 432 HYPRE_ParCSRMatrix hmat; 433 HYPRE_ParVector jbv, jxv; 434 435 PetscFunctionBegin; 436 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 437 if (!jac->applyrichardson) PetscCall(VecSet(x, 0.0)); 438 PetscCall(VecHYPRE_IJVectorPushVecRead(hjac->b, b)); 439 if (jac->applyrichardson) PetscCall(VecHYPRE_IJVectorPushVec(hjac->x, x)); 440 else PetscCall(VecHYPRE_IJVectorPushVecWrite(hjac->x, x)); 441 PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat); 442 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&jbv); 443 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&jxv); 444 PetscStackCallExternalVoid( 445 "Hypre solve", do { 446 HYPRE_Int hierr = (*jac->solve)(jac->hsolver, hmat, jbv, jxv); 447 if (hierr) { 448 PetscCheck(hierr == HYPRE_ERROR_CONV, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr); 449 HYPRE_ClearAllErrors(); 450 } 451 } while (0)); 452 453 if (jac->setup == HYPRE_AMSSetup && jac->ams_beta_is_zero_part) PetscCallExternal(HYPRE_AMSProjectOutGradients, jac->hsolver, jxv); 454 PetscCall(VecHYPRE_IJVectorPopVec(hjac->x)); 455 PetscCall(VecHYPRE_IJVectorPopVec(hjac->b)); 456 PetscFunctionReturn(PETSC_SUCCESS); 457 } 458 459 static PetscErrorCode PCMatApply_HYPRE_BoomerAMG(PC pc, Mat B, Mat X) 460 { 461 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 462 Mat_HYPRE *hjac = (Mat_HYPRE *)(jac->hpmat->data); 463 hypre_ParCSRMatrix *par_matrix; 464 HYPRE_ParVector hb, hx; 465 const PetscScalar *b; 466 PetscScalar *x; 467 PetscInt m, N, lda; 468 hypre_Vector *x_local; 469 PetscMemType type; 470 471 PetscFunctionBegin; 472 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 473 PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&par_matrix); 474 PetscCall(MatGetLocalSize(B, &m, NULL)); 475 PetscCall(MatGetSize(B, NULL, &N)); 476 PetscCallExternal(HYPRE_ParMultiVectorCreate, hypre_ParCSRMatrixComm(par_matrix), hypre_ParCSRMatrixGlobalNumRows(par_matrix), hypre_ParCSRMatrixRowStarts(par_matrix), N, &hb); 477 PetscCallExternal(HYPRE_ParMultiVectorCreate, hypre_ParCSRMatrixComm(par_matrix), hypre_ParCSRMatrixGlobalNumRows(par_matrix), hypre_ParCSRMatrixRowStarts(par_matrix), N, &hx); 478 PetscCall(MatZeroEntries(X)); 479 PetscCall(MatDenseGetArrayReadAndMemType(B, &b, &type)); 480 PetscCall(MatDenseGetLDA(B, &lda)); 481 PetscCheck(lda == m, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use a LDA different than the number of local rows: % " PetscInt_FMT " != % " PetscInt_FMT, lda, m); 482 PetscCall(MatDenseGetLDA(X, &lda)); 483 PetscCheck(lda == m, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use a LDA different than the number of local rows: % " PetscInt_FMT " != % " PetscInt_FMT, lda, m); 484 x_local = hypre_ParVectorLocalVector(hb); 485 PetscCallExternal(hypre_SeqVectorSetDataOwner, x_local, 0); 486 hypre_VectorData(x_local) = (HYPRE_Complex *)b; 487 PetscCall(MatDenseGetArrayWriteAndMemType(X, &x, NULL)); 488 x_local = hypre_ParVectorLocalVector(hx); 489 PetscCallExternal(hypre_SeqVectorSetDataOwner, x_local, 0); 490 hypre_VectorData(x_local) = (HYPRE_Complex *)x; 491 PetscCallExternal(hypre_ParVectorInitialize_v2, hb, type == PETSC_MEMTYPE_HOST ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE); 492 PetscCallExternal(hypre_ParVectorInitialize_v2, hx, type == PETSC_MEMTYPE_HOST ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE); 493 PetscStackCallExternalVoid( 494 "Hypre solve", do { 495 HYPRE_Int hierr = (*jac->solve)(jac->hsolver, par_matrix, hb, hx); 496 if (hierr) { 497 PetscCheck(hierr == HYPRE_ERROR_CONV, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr); 498 HYPRE_ClearAllErrors(); 499 } 500 } while (0)); 501 PetscCallExternal(HYPRE_ParVectorDestroy, hb); 502 PetscCallExternal(HYPRE_ParVectorDestroy, hx); 503 PetscCall(MatDenseRestoreArrayReadAndMemType(B, &b)); 504 PetscCall(MatDenseRestoreArrayWriteAndMemType(X, &x)); 505 PetscFunctionReturn(PETSC_SUCCESS); 506 } 507 508 static PetscErrorCode PCReset_HYPRE(PC pc) 509 { 510 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 511 512 PetscFunctionBegin; 513 PetscCall(MatDestroy(&jac->hpmat)); 514 PetscCall(MatDestroy(&jac->G)); 515 PetscCall(MatDestroy(&jac->C)); 516 PetscCall(MatDestroy(&jac->alpha_Poisson)); 517 PetscCall(MatDestroy(&jac->beta_Poisson)); 518 PetscCall(MatDestroy(&jac->RT_PiFull)); 519 PetscCall(MatDestroy(&jac->RT_Pi[0])); 520 PetscCall(MatDestroy(&jac->RT_Pi[1])); 521 PetscCall(MatDestroy(&jac->RT_Pi[2])); 522 PetscCall(MatDestroy(&jac->ND_PiFull)); 523 PetscCall(MatDestroy(&jac->ND_Pi[0])); 524 PetscCall(MatDestroy(&jac->ND_Pi[1])); 525 PetscCall(MatDestroy(&jac->ND_Pi[2])); 526 PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[0])); 527 PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[1])); 528 PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[2])); 529 PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[0])); 530 PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[1])); 531 PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[2])); 532 PetscCall(VecHYPRE_IJVectorDestroy(&jac->interior)); 533 PetscCall(PCHYPREResetNearNullSpace_Private(pc)); 534 jac->ams_beta_is_zero = PETSC_FALSE; 535 jac->ams_beta_is_zero_part = PETSC_FALSE; 536 jac->dim = 0; 537 PetscFunctionReturn(PETSC_SUCCESS); 538 } 539 540 static PetscErrorCode PCDestroy_HYPRE(PC pc) 541 { 542 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 543 544 PetscFunctionBegin; 545 PetscCall(PCReset_HYPRE(pc)); 546 if (jac->destroy) PetscCallExternal(jac->destroy, jac->hsolver); 547 PetscCall(PetscFree(jac->hypre_type)); 548 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0) 549 PetscCall(PetscFree(jac->spgemm_type)); 550 #endif 551 if (jac->comm_hypre != MPI_COMM_NULL) PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre)); 552 PetscCall(PetscFree(pc->data)); 553 554 PetscCall(PetscObjectChangeTypeName((PetscObject)pc, 0)); 555 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetType_C", NULL)); 556 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetType_C", NULL)); 557 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteGradient_C", NULL)); 558 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteCurl_C", NULL)); 559 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetInterpolations_C", NULL)); 560 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetConstantEdgeVectors_C", NULL)); 561 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetPoissonMatrix_C", NULL)); 562 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetEdgeConstantVectors_C", NULL)); 563 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREAMSSetInteriorNodes_C", NULL)); 564 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL)); 565 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL)); 566 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinSetMatProductAlgorithm_C", NULL)); 567 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinGetMatProductAlgorithm_C", NULL)); 568 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL)); 569 PetscFunctionReturn(PETSC_SUCCESS); 570 } 571 572 static PetscErrorCode PCSetFromOptions_HYPRE_Pilut(PC pc, PetscOptionItems *PetscOptionsObject) 573 { 574 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 575 PetscBool flag; 576 577 PetscFunctionBegin; 578 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE Pilut Options"); 579 PetscCall(PetscOptionsInt("-pc_hypre_pilut_maxiter", "Number of iterations", "None", jac->maxiter, &jac->maxiter, &flag)); 580 if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetMaxIter, jac->hsolver, jac->maxiter); 581 PetscCall(PetscOptionsReal("-pc_hypre_pilut_tol", "Drop tolerance", "None", jac->tol, &jac->tol, &flag)); 582 if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetDropTolerance, jac->hsolver, jac->tol); 583 PetscCall(PetscOptionsInt("-pc_hypre_pilut_factorrowsize", "FactorRowSize", "None", jac->factorrowsize, &jac->factorrowsize, &flag)); 584 if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetFactorRowSize, jac->hsolver, jac->factorrowsize); 585 PetscOptionsHeadEnd(); 586 PetscFunctionReturn(PETSC_SUCCESS); 587 } 588 589 static PetscErrorCode PCView_HYPRE_Pilut(PC pc, PetscViewer viewer) 590 { 591 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 592 PetscBool iascii; 593 594 PetscFunctionBegin; 595 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 596 if (iascii) { 597 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE Pilut preconditioning\n")); 598 if (jac->maxiter != PETSC_DEFAULT) { 599 PetscCall(PetscViewerASCIIPrintf(viewer, " maximum number of iterations %" PetscInt_FMT "\n", jac->maxiter)); 600 } else { 601 PetscCall(PetscViewerASCIIPrintf(viewer, " default maximum number of iterations \n")); 602 } 603 if (jac->tol != PETSC_DEFAULT) { 604 PetscCall(PetscViewerASCIIPrintf(viewer, " drop tolerance %g\n", (double)jac->tol)); 605 } else { 606 PetscCall(PetscViewerASCIIPrintf(viewer, " default drop tolerance \n")); 607 } 608 if (jac->factorrowsize != PETSC_DEFAULT) { 609 PetscCall(PetscViewerASCIIPrintf(viewer, " factor row size %" PetscInt_FMT "\n", jac->factorrowsize)); 610 } else { 611 PetscCall(PetscViewerASCIIPrintf(viewer, " default factor row size \n")); 612 } 613 } 614 PetscFunctionReturn(PETSC_SUCCESS); 615 } 616 617 static PetscErrorCode PCSetFromOptions_HYPRE_Euclid(PC pc, PetscOptionItems *PetscOptionsObject) 618 { 619 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 620 PetscBool flag, eu_bj = jac->eu_bj ? PETSC_TRUE : PETSC_FALSE; 621 622 PetscFunctionBegin; 623 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE Euclid Options"); 624 PetscCall(PetscOptionsInt("-pc_hypre_euclid_level", "Factorization levels", "None", jac->eu_level, &jac->eu_level, &flag)); 625 if (flag) PetscCallExternal(HYPRE_EuclidSetLevel, jac->hsolver, jac->eu_level); 626 627 PetscCall(PetscOptionsReal("-pc_hypre_euclid_droptolerance", "Drop tolerance for ILU(k) in Euclid", "None", jac->eu_droptolerance, &jac->eu_droptolerance, &flag)); 628 if (flag) { 629 PetscMPIInt size; 630 631 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size)); 632 PetscCheck(size == 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "hypre's Euclid does not support a parallel drop tolerance"); 633 PetscCallExternal(HYPRE_EuclidSetILUT, jac->hsolver, jac->eu_droptolerance); 634 } 635 636 PetscCall(PetscOptionsBool("-pc_hypre_euclid_bj", "Use Block Jacobi for ILU in Euclid", "None", eu_bj, &eu_bj, &flag)); 637 if (flag) { 638 jac->eu_bj = eu_bj ? 1 : 0; 639 PetscCallExternal(HYPRE_EuclidSetBJ, jac->hsolver, jac->eu_bj); 640 } 641 PetscOptionsHeadEnd(); 642 PetscFunctionReturn(PETSC_SUCCESS); 643 } 644 645 static PetscErrorCode PCView_HYPRE_Euclid(PC pc, PetscViewer viewer) 646 { 647 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 648 PetscBool iascii; 649 650 PetscFunctionBegin; 651 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 652 if (iascii) { 653 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE Euclid preconditioning\n")); 654 if (jac->eu_level != PETSC_DEFAULT) { 655 PetscCall(PetscViewerASCIIPrintf(viewer, " factorization levels %" PetscInt_FMT "\n", jac->eu_level)); 656 } else { 657 PetscCall(PetscViewerASCIIPrintf(viewer, " default factorization levels \n")); 658 } 659 PetscCall(PetscViewerASCIIPrintf(viewer, " drop tolerance %g\n", (double)jac->eu_droptolerance)); 660 PetscCall(PetscViewerASCIIPrintf(viewer, " use Block-Jacobi? %" PetscInt_FMT "\n", jac->eu_bj)); 661 } 662 PetscFunctionReturn(PETSC_SUCCESS); 663 } 664 665 static PetscErrorCode PCApplyTranspose_HYPRE_BoomerAMG(PC pc, Vec b, Vec x) 666 { 667 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 668 Mat_HYPRE *hjac = (Mat_HYPRE *)(jac->hpmat->data); 669 HYPRE_ParCSRMatrix hmat; 670 HYPRE_ParVector jbv, jxv; 671 672 PetscFunctionBegin; 673 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 674 PetscCall(VecSet(x, 0.0)); 675 PetscCall(VecHYPRE_IJVectorPushVecRead(hjac->x, b)); 676 PetscCall(VecHYPRE_IJVectorPushVecWrite(hjac->b, x)); 677 678 PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat); 679 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&jbv); 680 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&jxv); 681 682 PetscStackCallExternalVoid( 683 "Hypre Transpose solve", do { 684 HYPRE_Int hierr = HYPRE_BoomerAMGSolveT(jac->hsolver, hmat, jbv, jxv); 685 if (hierr) { 686 /* error code of 1 in BoomerAMG merely means convergence not achieved */ 687 PetscCheck(hierr == 1, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr); 688 HYPRE_ClearAllErrors(); 689 } 690 } while (0)); 691 692 PetscCall(VecHYPRE_IJVectorPopVec(hjac->x)); 693 PetscCall(VecHYPRE_IJVectorPopVec(hjac->b)); 694 PetscFunctionReturn(PETSC_SUCCESS); 695 } 696 697 static PetscErrorCode PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(PC pc, const char name[]) 698 { 699 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 700 PetscBool flag; 701 702 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0) 703 PetscFunctionBegin; 704 if (jac->spgemm_type) { 705 PetscCall(PetscStrcmp(jac->spgemm_type, name, &flag)); 706 PetscCheck(flag, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Cannot reset the HYPRE SpGEMM (really we can)"); 707 PetscFunctionReturn(PETSC_SUCCESS); 708 } else { 709 PetscCall(PetscStrallocpy(name, &jac->spgemm_type)); 710 } 711 PetscCall(PetscStrcmp("cusparse", jac->spgemm_type, &flag)); 712 if (flag) { 713 PetscCallExternal(HYPRE_SetSpGemmUseCusparse, 1); 714 PetscFunctionReturn(PETSC_SUCCESS); 715 } 716 PetscCall(PetscStrcmp("hypre", jac->spgemm_type, &flag)); 717 if (flag) { 718 PetscCallExternal(HYPRE_SetSpGemmUseCusparse, 0); 719 PetscFunctionReturn(PETSC_SUCCESS); 720 } 721 jac->spgemm_type = NULL; 722 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown HYPRE SpGEMM type %s; Choices are cusparse, hypre", name); 723 #endif 724 } 725 726 static PetscErrorCode PCMGGalerkinGetMatProductAlgorithm_HYPRE_BoomerAMG(PC pc, const char *spgemm[]) 727 { 728 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 729 730 PetscFunctionBegin; 731 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 732 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0) 733 *spgemm = jac->spgemm_type; 734 #endif 735 PetscFunctionReturn(PETSC_SUCCESS); 736 } 737 738 static const char *HYPREBoomerAMGCycleType[] = {"", "V", "W"}; 739 static const char *HYPREBoomerAMGCoarsenType[] = {"CLJP", "Ruge-Stueben", "", "modifiedRuge-Stueben", "", "", "Falgout", "", "PMIS", "", "HMIS"}; 740 static const char *HYPREBoomerAMGMeasureType[] = {"local", "global"}; 741 /* The following corresponds to HYPRE_BoomerAMGSetRelaxType which has many missing numbers in the enum */ 742 static const char *HYPREBoomerAMGSmoothType[] = {"Schwarz-smoothers", "Pilut", "ParaSails", "Euclid"}; 743 static const char *HYPREBoomerAMGRelaxType[] = {"Jacobi", "sequential-Gauss-Seidel", "seqboundary-Gauss-Seidel", "SOR/Jacobi", "backward-SOR/Jacobi", "" /* [5] hybrid chaotic Gauss-Seidel (works only with OpenMP) */, "symmetric-SOR/Jacobi", "" /* 7 */, "l1scaled-SOR/Jacobi", "Gaussian-elimination", "" /* 10 */, "" /* 11 */, "" /* 12 */, "l1-Gauss-Seidel" /* nonsymmetric */, "backward-l1-Gauss-Seidel" /* nonsymmetric */, "CG" /* non-stationary */, "Chebyshev", "FCF-Jacobi", "l1scaled-Jacobi"}; 744 static const char *HYPREBoomerAMGInterpType[] = {"classical", "", "", "direct", "multipass", "multipass-wts", "ext+i", "ext+i-cc", "standard", "standard-wts", "block", "block-wtd", "FF", "FF1", "ext", "ad-wts", "ext-mm", "ext+i-mm", "ext+e-mm"}; 745 static PetscErrorCode PCSetFromOptions_HYPRE_BoomerAMG(PC pc, PetscOptionItems *PetscOptionsObject) 746 { 747 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 748 PetscInt bs, n, indx, level; 749 PetscBool flg, tmp_truth; 750 double tmpdbl, twodbl[2]; 751 const char *symtlist[] = {"nonsymmetric", "SPD", "nonsymmetric,SPD"}; 752 const char *PCHYPRESpgemmTypes[] = {"cusparse", "hypre"}; 753 754 PetscFunctionBegin; 755 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE BoomerAMG Options"); 756 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_cycle_type", "Cycle type", "None", HYPREBoomerAMGCycleType + 1, 2, HYPREBoomerAMGCycleType[jac->cycletype], &indx, &flg)); 757 if (flg) { 758 jac->cycletype = indx + 1; 759 PetscCallExternal(HYPRE_BoomerAMGSetCycleType, jac->hsolver, jac->cycletype); 760 } 761 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_max_levels", "Number of levels (of grids) allowed", "None", jac->maxlevels, &jac->maxlevels, &flg)); 762 if (flg) { 763 PetscCheck(jac->maxlevels >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of levels %" PetscInt_FMT " must be at least two", jac->maxlevels); 764 PetscCallExternal(HYPRE_BoomerAMGSetMaxLevels, jac->hsolver, jac->maxlevels); 765 } 766 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_max_iter", "Maximum iterations used PER hypre call", "None", jac->maxiter, &jac->maxiter, &flg)); 767 if (flg) { 768 PetscCheck(jac->maxiter >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of iterations %" PetscInt_FMT " must be at least one", jac->maxiter); 769 PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter); 770 } 771 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_tol", "Convergence tolerance PER hypre call (0.0 = use a fixed number of iterations)", "None", jac->tol, &jac->tol, &flg)); 772 if (flg) { 773 PetscCheck(jac->tol >= 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Tolerance %g must be greater than or equal to zero", (double)jac->tol); 774 PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol); 775 } 776 bs = 1; 777 if (pc->pmat) PetscCall(MatGetBlockSize(pc->pmat, &bs)); 778 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_numfunctions", "Number of functions", "HYPRE_BoomerAMGSetNumFunctions", bs, &bs, &flg)); 779 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNumFunctions, jac->hsolver, bs); 780 781 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_truncfactor", "Truncation factor for interpolation (0=no truncation)", "None", jac->truncfactor, &jac->truncfactor, &flg)); 782 if (flg) { 783 PetscCheck(jac->truncfactor >= 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Truncation factor %g must be great than or equal zero", (double)jac->truncfactor); 784 PetscCallExternal(HYPRE_BoomerAMGSetTruncFactor, jac->hsolver, jac->truncfactor); 785 } 786 787 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_P_max", "Max elements per row for interpolation operator (0=unlimited)", "None", jac->pmax, &jac->pmax, &flg)); 788 if (flg) { 789 PetscCheck(jac->pmax >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "P_max %" PetscInt_FMT " must be greater than or equal to zero", jac->pmax); 790 PetscCallExternal(HYPRE_BoomerAMGSetPMaxElmts, jac->hsolver, jac->pmax); 791 } 792 793 PetscCall(PetscOptionsRangeInt("-pc_hypre_boomeramg_agg_nl", "Number of levels of aggressive coarsening", "None", jac->agg_nl, &jac->agg_nl, &flg, 0, jac->maxlevels)); 794 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetAggNumLevels, jac->hsolver, jac->agg_nl); 795 796 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_agg_num_paths", "Number of paths for aggressive coarsening", "None", jac->agg_num_paths, &jac->agg_num_paths, &flg)); 797 if (flg) { 798 PetscCheck(jac->agg_num_paths >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of paths %" PetscInt_FMT " must be greater than or equal to 1", jac->agg_num_paths); 799 PetscCallExternal(HYPRE_BoomerAMGSetNumPaths, jac->hsolver, jac->agg_num_paths); 800 } 801 802 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_strong_threshold", "Threshold for being strongly connected", "None", jac->strongthreshold, &jac->strongthreshold, &flg)); 803 if (flg) { 804 PetscCheck(jac->strongthreshold >= 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Strong threshold %g must be great than or equal zero", (double)jac->strongthreshold); 805 PetscCallExternal(HYPRE_BoomerAMGSetStrongThreshold, jac->hsolver, jac->strongthreshold); 806 } 807 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_max_row_sum", "Maximum row sum", "None", jac->maxrowsum, &jac->maxrowsum, &flg)); 808 if (flg) { 809 PetscCheck(jac->maxrowsum >= 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Maximum row sum %g must be greater than zero", (double)jac->maxrowsum); 810 PetscCheck(jac->maxrowsum <= 1.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Maximum row sum %g must be less than or equal one", (double)jac->maxrowsum); 811 PetscCallExternal(HYPRE_BoomerAMGSetMaxRowSum, jac->hsolver, jac->maxrowsum); 812 } 813 814 /* Grid sweeps */ 815 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_all", "Number of sweeps for the up and down grid levels", "None", jac->gridsweeps[0], &indx, &flg)); 816 if (flg) { 817 PetscCallExternal(HYPRE_BoomerAMGSetNumSweeps, jac->hsolver, indx); 818 /* modify the jac structure so we can view the updated options with PC_View */ 819 jac->gridsweeps[0] = indx; 820 jac->gridsweeps[1] = indx; 821 /*defaults coarse to 1 */ 822 jac->gridsweeps[2] = 1; 823 } 824 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_coarsen", "Use a nodal based coarsening 1-6", "HYPRE_BoomerAMGSetNodal", jac->nodal_coarsening, &jac->nodal_coarsening, &flg)); 825 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNodal, jac->hsolver, jac->nodal_coarsening); 826 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_coarsen_diag", "Diagonal in strength matrix for nodal based coarsening 0-2", "HYPRE_BoomerAMGSetNodalDiag", jac->nodal_coarsening_diag, &jac->nodal_coarsening_diag, &flg)); 827 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNodalDiag, jac->hsolver, jac->nodal_coarsening_diag); 828 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_vec_interp_variant", "Variant of algorithm 1-3", "HYPRE_BoomerAMGSetInterpVecVariant", jac->vec_interp_variant, &jac->vec_interp_variant, &flg)); 829 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpVecVariant, jac->hsolver, jac->vec_interp_variant); 830 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_vec_interp_qmax", "Max elements per row for each Q", "HYPRE_BoomerAMGSetInterpVecQMax", jac->vec_interp_qmax, &jac->vec_interp_qmax, &flg)); 831 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpVecQMax, jac->hsolver, jac->vec_interp_qmax); 832 PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_vec_interp_smooth", "Whether to smooth the interpolation vectors", "HYPRE_BoomerAMGSetSmoothInterpVectors", jac->vec_interp_smooth, &jac->vec_interp_smooth, &flg)); 833 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetSmoothInterpVectors, jac->hsolver, jac->vec_interp_smooth); 834 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_interp_refine", "Preprocess the interpolation matrix through iterative weight refinement", "HYPRE_BoomerAMGSetInterpRefine", jac->interp_refine, &jac->interp_refine, &flg)); 835 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpRefine, jac->hsolver, jac->interp_refine); 836 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_down", "Number of sweeps for the down cycles", "None", jac->gridsweeps[0], &indx, &flg)); 837 if (flg) { 838 PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 1); 839 jac->gridsweeps[0] = indx; 840 } 841 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_up", "Number of sweeps for the up cycles", "None", jac->gridsweeps[1], &indx, &flg)); 842 if (flg) { 843 PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 2); 844 jac->gridsweeps[1] = indx; 845 } 846 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_coarse", "Number of sweeps for the coarse level", "None", jac->gridsweeps[2], &indx, &flg)); 847 if (flg) { 848 PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 3); 849 jac->gridsweeps[2] = indx; 850 } 851 852 /* Smooth type */ 853 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_smooth_type", "Enable more complex smoothers", "None", HYPREBoomerAMGSmoothType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGSmoothType), HYPREBoomerAMGSmoothType[0], &indx, &flg)); 854 if (flg) { 855 jac->smoothtype = indx; 856 PetscCallExternal(HYPRE_BoomerAMGSetSmoothType, jac->hsolver, indx + 6); 857 jac->smoothnumlevels = 25; 858 PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, 25); 859 } 860 861 /* Number of smoothing levels */ 862 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_smooth_num_levels", "Number of levels on which more complex smoothers are used", "None", 25, &indx, &flg)); 863 if (flg && (jac->smoothtype != -1)) { 864 jac->smoothnumlevels = indx; 865 PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, indx); 866 } 867 868 /* Number of levels for ILU(k) for Euclid */ 869 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_eu_level", "Number of levels for ILU(k) in Euclid smoother", "None", 0, &indx, &flg)); 870 if (flg && (jac->smoothtype == 3)) { 871 jac->eu_level = indx; 872 PetscCallExternal(HYPRE_BoomerAMGSetEuLevel, jac->hsolver, indx); 873 } 874 875 /* Filter for ILU(k) for Euclid */ 876 double droptolerance; 877 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_eu_droptolerance", "Drop tolerance for ILU(k) in Euclid smoother", "None", 0, &droptolerance, &flg)); 878 if (flg && (jac->smoothtype == 3)) { 879 jac->eu_droptolerance = droptolerance; 880 PetscCallExternal(HYPRE_BoomerAMGSetEuLevel, jac->hsolver, droptolerance); 881 } 882 883 /* Use Block Jacobi ILUT for Euclid */ 884 PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_eu_bj", "Use Block Jacobi for ILU in Euclid smoother?", "None", PETSC_FALSE, &tmp_truth, &flg)); 885 if (flg && (jac->smoothtype == 3)) { 886 jac->eu_bj = tmp_truth; 887 PetscCallExternal(HYPRE_BoomerAMGSetEuBJ, jac->hsolver, jac->eu_bj); 888 } 889 890 /* Relax type */ 891 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_all", "Relax type for the up and down cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg)); 892 if (flg) { 893 jac->relaxtype[0] = jac->relaxtype[1] = indx; 894 PetscCallExternal(HYPRE_BoomerAMGSetRelaxType, jac->hsolver, indx); 895 /* by default, coarse type set to 9 */ 896 jac->relaxtype[2] = 9; 897 PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, 9, 3); 898 } 899 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_down", "Relax type for the down cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg)); 900 if (flg) { 901 jac->relaxtype[0] = indx; 902 PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 1); 903 } 904 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_up", "Relax type for the up cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg)); 905 if (flg) { 906 jac->relaxtype[1] = indx; 907 PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 2); 908 } 909 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_coarse", "Relax type on coarse grid", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[9], &indx, &flg)); 910 if (flg) { 911 jac->relaxtype[2] = indx; 912 PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 3); 913 } 914 915 /* Relaxation Weight */ 916 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_relax_weight_all", "Relaxation weight for all levels (0 = hypre estimates, -k = determined with k CG steps)", "None", jac->relaxweight, &tmpdbl, &flg)); 917 if (flg) { 918 PetscCallExternal(HYPRE_BoomerAMGSetRelaxWt, jac->hsolver, tmpdbl); 919 jac->relaxweight = tmpdbl; 920 } 921 922 n = 2; 923 twodbl[0] = twodbl[1] = 1.0; 924 PetscCall(PetscOptionsRealArray("-pc_hypre_boomeramg_relax_weight_level", "Set the relaxation weight for a particular level (weight,level)", "None", twodbl, &n, &flg)); 925 if (flg) { 926 PetscCheck(n == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Relax weight level: you must provide 2 values separated by a comma (and no space), you provided %" PetscInt_FMT, n); 927 indx = (int)PetscAbsReal(twodbl[1]); 928 PetscCallExternal(HYPRE_BoomerAMGSetLevelRelaxWt, jac->hsolver, twodbl[0], indx); 929 } 930 931 /* Outer relaxation Weight */ 932 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_outer_relax_weight_all", "Outer relaxation weight for all levels (-k = determined with k CG steps)", "None", jac->outerrelaxweight, &tmpdbl, &flg)); 933 if (flg) { 934 PetscCallExternal(HYPRE_BoomerAMGSetOuterWt, jac->hsolver, tmpdbl); 935 jac->outerrelaxweight = tmpdbl; 936 } 937 938 n = 2; 939 twodbl[0] = twodbl[1] = 1.0; 940 PetscCall(PetscOptionsRealArray("-pc_hypre_boomeramg_outer_relax_weight_level", "Set the outer relaxation weight for a particular level (weight,level)", "None", twodbl, &n, &flg)); 941 if (flg) { 942 PetscCheck(n == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Relax weight outer level: You must provide 2 values separated by a comma (and no space), you provided %" PetscInt_FMT, n); 943 indx = (int)PetscAbsReal(twodbl[1]); 944 PetscCallExternal(HYPRE_BoomerAMGSetLevelOuterWt, jac->hsolver, twodbl[0], indx); 945 } 946 947 /* the Relax Order */ 948 PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_no_CF", "Do not use CF-relaxation", "None", PETSC_FALSE, &tmp_truth, &flg)); 949 950 if (flg && tmp_truth) { 951 jac->relaxorder = 0; 952 PetscCallExternal(HYPRE_BoomerAMGSetRelaxOrder, jac->hsolver, jac->relaxorder); 953 } 954 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_measure_type", "Measure type", "None", HYPREBoomerAMGMeasureType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGMeasureType), HYPREBoomerAMGMeasureType[0], &indx, &flg)); 955 if (flg) { 956 jac->measuretype = indx; 957 PetscCallExternal(HYPRE_BoomerAMGSetMeasureType, jac->hsolver, jac->measuretype); 958 } 959 /* update list length 3/07 */ 960 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_coarsen_type", "Coarsen type", "None", HYPREBoomerAMGCoarsenType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGCoarsenType), HYPREBoomerAMGCoarsenType[6], &indx, &flg)); 961 if (flg) { 962 jac->coarsentype = indx; 963 PetscCallExternal(HYPRE_BoomerAMGSetCoarsenType, jac->hsolver, jac->coarsentype); 964 } 965 966 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_max_coarse_size", "Maximum size of coarsest grid", "None", jac->maxc, &jac->maxc, &flg)); 967 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMaxCoarseSize, jac->hsolver, jac->maxc); 968 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_min_coarse_size", "Minimum size of coarsest grid", "None", jac->minc, &jac->minc, &flg)); 969 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMinCoarseSize, jac->hsolver, jac->minc); 970 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0) 971 // global parameter but is closely associated with BoomerAMG 972 PetscCall(PetscOptionsEList("-pc_mg_galerkin_mat_product_algorithm", "Type of SpGEMM to use in hypre (only for now)", "PCMGGalerkinSetMatProductAlgorithm", PCHYPRESpgemmTypes, PETSC_STATIC_ARRAY_LENGTH(PCHYPRESpgemmTypes), PCHYPRESpgemmTypes[0], &indx, &flg)); 973 #if defined(PETSC_HAVE_HYPRE_DEVICE) 974 if (!flg) indx = 0; 975 PetscCall(PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(pc, PCHYPRESpgemmTypes[indx])); 976 #else 977 PetscCall(PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(pc, "hypre")); 978 #endif 979 #endif 980 /* AIR */ 981 #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0) 982 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_restriction_type", "Type of AIR method (distance 1 or 2, 0 means no AIR)", "None", jac->Rtype, &jac->Rtype, NULL)); 983 PetscCallExternal(HYPRE_BoomerAMGSetRestriction, jac->hsolver, jac->Rtype); 984 if (jac->Rtype) { 985 HYPRE_Int **grid_relax_points = hypre_TAlloc(HYPRE_Int *, 4, HYPRE_MEMORY_HOST); 986 char *prerelax[256]; 987 char *postrelax[256]; 988 char stringF[2] = "F", stringC[2] = "C", stringA[2] = "A"; 989 PetscInt ns_down = 256, ns_up = 256; 990 PetscBool matchF, matchC, matchA; 991 992 jac->interptype = 100; /* no way we can pass this with strings... Set it as default as in MFEM, then users can still customize it back to a different one */ 993 994 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_strongthresholdR", "Threshold for R", "None", jac->Rstrongthreshold, &jac->Rstrongthreshold, NULL)); 995 PetscCallExternal(HYPRE_BoomerAMGSetStrongThresholdR, jac->hsolver, jac->Rstrongthreshold); 996 997 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_filterthresholdR", "Filter threshold for R", "None", jac->Rfilterthreshold, &jac->Rfilterthreshold, NULL)); 998 PetscCallExternal(HYPRE_BoomerAMGSetFilterThresholdR, jac->hsolver, jac->Rfilterthreshold); 999 1000 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_Adroptol", "Defines the drop tolerance for the A-matrices from the 2nd level of AMG", "None", jac->Adroptol, &jac->Adroptol, NULL)); 1001 PetscCallExternal(HYPRE_BoomerAMGSetADropTol, jac->hsolver, jac->Adroptol); 1002 1003 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_Adroptype", "Drops the entries that are not on the diagonal and smaller than its row norm: type 1: 1-norm, 2: 2-norm, -1: infinity norm", "None", jac->Adroptype, &jac->Adroptype, NULL)); 1004 PetscCallExternal(HYPRE_BoomerAMGSetADropType, jac->hsolver, jac->Adroptype); 1005 PetscCall(PetscOptionsStringArray("-pc_hypre_boomeramg_prerelax", "Defines prerelax scheme", "None", prerelax, &ns_down, NULL)); 1006 PetscCall(PetscOptionsStringArray("-pc_hypre_boomeramg_postrelax", "Defines postrelax scheme", "None", postrelax, &ns_up, NULL)); 1007 PetscCheck(ns_down == jac->gridsweeps[0], PetscObjectComm((PetscObject)jac), PETSC_ERR_ARG_SIZ, "The number of arguments passed to -pc_hypre_boomeramg_prerelax must match the number passed to -pc_hypre_bomeramg_grid_sweeps_down"); 1008 PetscCheck(ns_up == jac->gridsweeps[1], PetscObjectComm((PetscObject)jac), PETSC_ERR_ARG_SIZ, "The number of arguments passed to -pc_hypre_boomeramg_postrelax must match the number passed to -pc_hypre_bomeramg_grid_sweeps_up"); 1009 1010 grid_relax_points[0] = NULL; 1011 grid_relax_points[1] = hypre_TAlloc(HYPRE_Int, ns_down, HYPRE_MEMORY_HOST); 1012 grid_relax_points[2] = hypre_TAlloc(HYPRE_Int, ns_up, HYPRE_MEMORY_HOST); 1013 grid_relax_points[3] = hypre_TAlloc(HYPRE_Int, jac->gridsweeps[2], HYPRE_MEMORY_HOST); 1014 grid_relax_points[3][0] = 0; 1015 1016 // set down relax scheme 1017 for (PetscInt i = 0; i < ns_down; i++) { 1018 PetscCall(PetscStrcasecmp(prerelax[i], stringF, &matchF)); 1019 PetscCall(PetscStrcasecmp(prerelax[i], stringC, &matchC)); 1020 PetscCall(PetscStrcasecmp(prerelax[i], stringA, &matchA)); 1021 PetscCheck(matchF || matchC || matchA, PetscObjectComm((PetscObject)jac), PETSC_ERR_ARG_WRONG, "Valid argument options for -pc_hypre_boomeramg_prerelax are C, F, and A"); 1022 if (matchF) grid_relax_points[1][i] = -1; 1023 else if (matchC) grid_relax_points[1][i] = 1; 1024 else if (matchA) grid_relax_points[1][i] = 0; 1025 } 1026 1027 // set up relax scheme 1028 for (PetscInt i = 0; i < ns_up; i++) { 1029 PetscCall(PetscStrcasecmp(postrelax[i], stringF, &matchF)); 1030 PetscCall(PetscStrcasecmp(postrelax[i], stringC, &matchC)); 1031 PetscCall(PetscStrcasecmp(postrelax[i], stringA, &matchA)); 1032 PetscCheck(matchF || matchC || matchA, PetscObjectComm((PetscObject)jac), PETSC_ERR_ARG_WRONG, "Valid argument options for -pc_hypre_boomeramg_postrelax are C, F, and A"); 1033 if (matchF) grid_relax_points[2][i] = -1; 1034 else if (matchC) grid_relax_points[2][i] = 1; 1035 else if (matchA) grid_relax_points[2][i] = 0; 1036 } 1037 1038 // set coarse relax scheme 1039 for (PetscInt i = 0; i < jac->gridsweeps[2]; i++) grid_relax_points[3][i] = 0; 1040 1041 // Pass relax schemes to hypre 1042 PetscCallExternal(HYPRE_BoomerAMGSetGridRelaxPoints, jac->hsolver, grid_relax_points); 1043 1044 // cleanup memory 1045 for (PetscInt i = 0; i < ns_down; i++) PetscCall(PetscFree(prerelax[i])); 1046 for (PetscInt i = 0; i < ns_up; i++) PetscCall(PetscFree(postrelax[i])); 1047 } 1048 #endif 1049 1050 #if PETSC_PKG_HYPRE_VERSION_LE(9, 9, 9) 1051 PetscCheck(!jac->Rtype || !jac->agg_nl, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "-pc_hypre_boomeramg_restriction_type (%" PetscInt_FMT ") and -pc_hypre_boomeramg_agg_nl (%" PetscInt_FMT ")", jac->Rtype, jac->agg_nl); 1052 #endif 1053 1054 /* new 3/07 */ 1055 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_interp_type", "Interpolation type", "None", HYPREBoomerAMGInterpType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGInterpType), HYPREBoomerAMGInterpType[0], &indx, &flg)); 1056 if (flg || jac->Rtype) { 1057 if (flg) jac->interptype = indx; 1058 PetscCallExternal(HYPRE_BoomerAMGSetInterpType, jac->hsolver, jac->interptype); 1059 } 1060 1061 PetscCall(PetscOptionsName("-pc_hypre_boomeramg_print_statistics", "Print statistics", "None", &flg)); 1062 if (flg) { 1063 level = 3; 1064 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_print_statistics", "Print statistics", "None", level, &level, NULL)); 1065 1066 jac->printstatistics = PETSC_TRUE; 1067 PetscCallExternal(HYPRE_BoomerAMGSetPrintLevel, jac->hsolver, level); 1068 } 1069 1070 PetscCall(PetscOptionsName("-pc_hypre_boomeramg_print_debug", "Print debug information", "None", &flg)); 1071 if (flg) { 1072 level = 3; 1073 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_print_debug", "Print debug information", "None", level, &level, NULL)); 1074 1075 jac->printstatistics = PETSC_TRUE; 1076 PetscCallExternal(HYPRE_BoomerAMGSetDebugFlag, jac->hsolver, level); 1077 } 1078 1079 PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_nodal_relaxation", "Nodal relaxation via Schwarz", "None", PETSC_FALSE, &tmp_truth, &flg)); 1080 if (flg && tmp_truth) { 1081 PetscInt tmp_int; 1082 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_relaxation", "Nodal relaxation via Schwarz", "None", jac->nodal_relax_levels, &tmp_int, &flg)); 1083 if (flg) jac->nodal_relax_levels = tmp_int; 1084 PetscCallExternal(HYPRE_BoomerAMGSetSmoothType, jac->hsolver, 6); 1085 PetscCallExternal(HYPRE_BoomerAMGSetDomainType, jac->hsolver, 1); 1086 PetscCallExternal(HYPRE_BoomerAMGSetOverlap, jac->hsolver, 0); 1087 PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, jac->nodal_relax_levels); 1088 } 1089 1090 PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_keeptranspose", "Avoid transpose matvecs in preconditioner application", "None", jac->keeptranspose, &jac->keeptranspose, NULL)); 1091 PetscCallExternal(HYPRE_BoomerAMGSetKeepTranspose, jac->hsolver, jac->keeptranspose ? 1 : 0); 1092 1093 /* options for ParaSails solvers */ 1094 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_parasails_sym", "Symmetry of matrix and preconditioner", "None", symtlist, PETSC_STATIC_ARRAY_LENGTH(symtlist), symtlist[0], &indx, &flg)); 1095 if (flg) { 1096 jac->symt = indx; 1097 PetscCallExternal(HYPRE_BoomerAMGSetSym, jac->hsolver, jac->symt); 1098 } 1099 1100 PetscOptionsHeadEnd(); 1101 PetscFunctionReturn(PETSC_SUCCESS); 1102 } 1103 1104 static PetscErrorCode PCApplyRichardson_HYPRE_BoomerAMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason) 1105 { 1106 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1107 HYPRE_Int oits; 1108 1109 PetscFunctionBegin; 1110 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 1111 PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, its * jac->maxiter); 1112 PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, rtol); 1113 jac->applyrichardson = PETSC_TRUE; 1114 PetscCall(PCApply_HYPRE(pc, b, y)); 1115 jac->applyrichardson = PETSC_FALSE; 1116 PetscCallExternal(HYPRE_BoomerAMGGetNumIterations, jac->hsolver, &oits); 1117 *outits = oits; 1118 if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS; 1119 else *reason = PCRICHARDSON_CONVERGED_RTOL; 1120 PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol); 1121 PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter); 1122 PetscFunctionReturn(PETSC_SUCCESS); 1123 } 1124 1125 static PetscErrorCode PCView_HYPRE_BoomerAMG(PC pc, PetscViewer viewer) 1126 { 1127 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1128 PetscBool iascii; 1129 1130 PetscFunctionBegin; 1131 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 1132 if (iascii) { 1133 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE BoomerAMG preconditioning\n")); 1134 PetscCall(PetscViewerASCIIPrintf(viewer, " Cycle type %s\n", HYPREBoomerAMGCycleType[jac->cycletype])); 1135 PetscCall(PetscViewerASCIIPrintf(viewer, " Maximum number of levels %" PetscInt_FMT "\n", jac->maxlevels)); 1136 PetscCall(PetscViewerASCIIPrintf(viewer, " Maximum number of iterations PER hypre call %" PetscInt_FMT "\n", jac->maxiter)); 1137 PetscCall(PetscViewerASCIIPrintf(viewer, " Convergence tolerance PER hypre call %g\n", (double)jac->tol)); 1138 PetscCall(PetscViewerASCIIPrintf(viewer, " Threshold for strong coupling %g\n", (double)jac->strongthreshold)); 1139 PetscCall(PetscViewerASCIIPrintf(viewer, " Interpolation truncation factor %g\n", (double)jac->truncfactor)); 1140 PetscCall(PetscViewerASCIIPrintf(viewer, " Interpolation: max elements per row %" PetscInt_FMT "\n", jac->pmax)); 1141 if (jac->interp_refine) PetscCall(PetscViewerASCIIPrintf(viewer, " Interpolation: number of steps of weighted refinement %" PetscInt_FMT "\n", jac->interp_refine)); 1142 PetscCall(PetscViewerASCIIPrintf(viewer, " Number of levels of aggressive coarsening %" PetscInt_FMT "\n", jac->agg_nl)); 1143 PetscCall(PetscViewerASCIIPrintf(viewer, " Number of paths for aggressive coarsening %" PetscInt_FMT "\n", jac->agg_num_paths)); 1144 1145 PetscCall(PetscViewerASCIIPrintf(viewer, " Maximum row sums %g\n", (double)jac->maxrowsum)); 1146 1147 PetscCall(PetscViewerASCIIPrintf(viewer, " Sweeps down %" PetscInt_FMT "\n", jac->gridsweeps[0])); 1148 PetscCall(PetscViewerASCIIPrintf(viewer, " Sweeps up %" PetscInt_FMT "\n", jac->gridsweeps[1])); 1149 PetscCall(PetscViewerASCIIPrintf(viewer, " Sweeps on coarse %" PetscInt_FMT "\n", jac->gridsweeps[2])); 1150 1151 PetscCall(PetscViewerASCIIPrintf(viewer, " Relax down %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[0]])); 1152 PetscCall(PetscViewerASCIIPrintf(viewer, " Relax up %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[1]])); 1153 PetscCall(PetscViewerASCIIPrintf(viewer, " Relax on coarse %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[2]])); 1154 1155 PetscCall(PetscViewerASCIIPrintf(viewer, " Relax weight (all) %g\n", (double)jac->relaxweight)); 1156 PetscCall(PetscViewerASCIIPrintf(viewer, " Outer relax weight (all) %g\n", (double)jac->outerrelaxweight)); 1157 1158 PetscCall(PetscViewerASCIIPrintf(viewer, " Maximum size of coarsest grid %" PetscInt_FMT "\n", jac->maxc)); 1159 PetscCall(PetscViewerASCIIPrintf(viewer, " Minimum size of coarsest grid %" PetscInt_FMT "\n", jac->minc)); 1160 1161 if (jac->relaxorder) { 1162 PetscCall(PetscViewerASCIIPrintf(viewer, " Using CF-relaxation\n")); 1163 } else { 1164 PetscCall(PetscViewerASCIIPrintf(viewer, " Not using CF-relaxation\n")); 1165 } 1166 if (jac->smoothtype != -1) { 1167 PetscCall(PetscViewerASCIIPrintf(viewer, " Smooth type %s\n", HYPREBoomerAMGSmoothType[jac->smoothtype])); 1168 PetscCall(PetscViewerASCIIPrintf(viewer, " Smooth num levels %" PetscInt_FMT "\n", jac->smoothnumlevels)); 1169 } else { 1170 PetscCall(PetscViewerASCIIPrintf(viewer, " Not using more complex smoothers.\n")); 1171 } 1172 if (jac->smoothtype == 3) { 1173 PetscCall(PetscViewerASCIIPrintf(viewer, " Euclid ILU(k) levels %" PetscInt_FMT "\n", jac->eu_level)); 1174 PetscCall(PetscViewerASCIIPrintf(viewer, " Euclid ILU(k) drop tolerance %g\n", (double)jac->eu_droptolerance)); 1175 PetscCall(PetscViewerASCIIPrintf(viewer, " Euclid ILU use Block-Jacobi? %" PetscInt_FMT "\n", jac->eu_bj)); 1176 } 1177 PetscCall(PetscViewerASCIIPrintf(viewer, " Measure type %s\n", HYPREBoomerAMGMeasureType[jac->measuretype])); 1178 PetscCall(PetscViewerASCIIPrintf(viewer, " Coarsen type %s\n", HYPREBoomerAMGCoarsenType[jac->coarsentype])); 1179 PetscCall(PetscViewerASCIIPrintf(viewer, " Interpolation type %s\n", jac->interptype != 100 ? HYPREBoomerAMGInterpType[jac->interptype] : "1pt")); 1180 if (jac->nodal_coarsening) PetscCall(PetscViewerASCIIPrintf(viewer, " Using nodal coarsening with HYPRE_BOOMERAMGSetNodal() %" PetscInt_FMT "\n", jac->nodal_coarsening)); 1181 if (jac->vec_interp_variant) { 1182 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE_BoomerAMGSetInterpVecVariant() %" PetscInt_FMT "\n", jac->vec_interp_variant)); 1183 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE_BoomerAMGSetInterpVecQMax() %" PetscInt_FMT "\n", jac->vec_interp_qmax)); 1184 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE_BoomerAMGSetSmoothInterpVectors() %d\n", jac->vec_interp_smooth)); 1185 } 1186 if (jac->nodal_relax) PetscCall(PetscViewerASCIIPrintf(viewer, " Using nodal relaxation via Schwarz smoothing on levels %" PetscInt_FMT "\n", jac->nodal_relax_levels)); 1187 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0) 1188 PetscCall(PetscViewerASCIIPrintf(viewer, " SpGEMM type %s\n", jac->spgemm_type)); 1189 #else 1190 PetscCall(PetscViewerASCIIPrintf(viewer, " SpGEMM type %s\n", "hypre")); 1191 #endif 1192 /* AIR */ 1193 if (jac->Rtype) { 1194 PetscCall(PetscViewerASCIIPrintf(viewer, " Using approximate ideal restriction type %" PetscInt_FMT "\n", jac->Rtype)); 1195 PetscCall(PetscViewerASCIIPrintf(viewer, " Threshold for R %g\n", (double)jac->Rstrongthreshold)); 1196 PetscCall(PetscViewerASCIIPrintf(viewer, " Filter for R %g\n", (double)jac->Rfilterthreshold)); 1197 PetscCall(PetscViewerASCIIPrintf(viewer, " A drop tolerance %g\n", (double)jac->Adroptol)); 1198 PetscCall(PetscViewerASCIIPrintf(viewer, " A drop type %" PetscInt_FMT "\n", jac->Adroptype)); 1199 } 1200 } 1201 PetscFunctionReturn(PETSC_SUCCESS); 1202 } 1203 1204 static PetscErrorCode PCSetFromOptions_HYPRE_ParaSails(PC pc, PetscOptionItems *PetscOptionsObject) 1205 { 1206 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1207 PetscInt indx; 1208 PetscBool flag; 1209 const char *symtlist[] = {"nonsymmetric", "SPD", "nonsymmetric,SPD"}; 1210 1211 PetscFunctionBegin; 1212 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ParaSails Options"); 1213 PetscCall(PetscOptionsInt("-pc_hypre_parasails_nlevels", "Number of number of levels", "None", jac->nlevels, &jac->nlevels, 0)); 1214 PetscCall(PetscOptionsReal("-pc_hypre_parasails_thresh", "Threshold", "None", jac->threshold, &jac->threshold, &flag)); 1215 if (flag) PetscCallExternal(HYPRE_ParaSailsSetParams, jac->hsolver, jac->threshold, jac->nlevels); 1216 1217 PetscCall(PetscOptionsReal("-pc_hypre_parasails_filter", "filter", "None", jac->filter, &jac->filter, &flag)); 1218 if (flag) PetscCallExternal(HYPRE_ParaSailsSetFilter, jac->hsolver, jac->filter); 1219 1220 PetscCall(PetscOptionsReal("-pc_hypre_parasails_loadbal", "Load balance", "None", jac->loadbal, &jac->loadbal, &flag)); 1221 if (flag) PetscCallExternal(HYPRE_ParaSailsSetLoadbal, jac->hsolver, jac->loadbal); 1222 1223 PetscCall(PetscOptionsBool("-pc_hypre_parasails_logging", "Print info to screen", "None", (PetscBool)jac->logging, (PetscBool *)&jac->logging, &flag)); 1224 if (flag) PetscCallExternal(HYPRE_ParaSailsSetLogging, jac->hsolver, jac->logging); 1225 1226 PetscCall(PetscOptionsBool("-pc_hypre_parasails_reuse", "Reuse nonzero pattern in preconditioner", "None", (PetscBool)jac->ruse, (PetscBool *)&jac->ruse, &flag)); 1227 if (flag) PetscCallExternal(HYPRE_ParaSailsSetReuse, jac->hsolver, jac->ruse); 1228 1229 PetscCall(PetscOptionsEList("-pc_hypre_parasails_sym", "Symmetry of matrix and preconditioner", "None", symtlist, PETSC_STATIC_ARRAY_LENGTH(symtlist), symtlist[0], &indx, &flag)); 1230 if (flag) { 1231 jac->symt = indx; 1232 PetscCallExternal(HYPRE_ParaSailsSetSym, jac->hsolver, jac->symt); 1233 } 1234 1235 PetscOptionsHeadEnd(); 1236 PetscFunctionReturn(PETSC_SUCCESS); 1237 } 1238 1239 static PetscErrorCode PCView_HYPRE_ParaSails(PC pc, PetscViewer viewer) 1240 { 1241 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1242 PetscBool iascii; 1243 const char *symt = 0; 1244 1245 PetscFunctionBegin; 1246 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 1247 if (iascii) { 1248 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE ParaSails preconditioning\n")); 1249 PetscCall(PetscViewerASCIIPrintf(viewer, " nlevels %" PetscInt_FMT "\n", jac->nlevels)); 1250 PetscCall(PetscViewerASCIIPrintf(viewer, " threshold %g\n", (double)jac->threshold)); 1251 PetscCall(PetscViewerASCIIPrintf(viewer, " filter %g\n", (double)jac->filter)); 1252 PetscCall(PetscViewerASCIIPrintf(viewer, " load balance %g\n", (double)jac->loadbal)); 1253 PetscCall(PetscViewerASCIIPrintf(viewer, " reuse nonzero structure %s\n", PetscBools[jac->ruse])); 1254 PetscCall(PetscViewerASCIIPrintf(viewer, " print info to screen %s\n", PetscBools[jac->logging])); 1255 if (!jac->symt) symt = "nonsymmetric matrix and preconditioner"; 1256 else if (jac->symt == 1) symt = "SPD matrix and preconditioner"; 1257 else if (jac->symt == 2) symt = "nonsymmetric matrix but SPD preconditioner"; 1258 else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Unknown HYPRE ParaSails symmetric option %" PetscInt_FMT, jac->symt); 1259 PetscCall(PetscViewerASCIIPrintf(viewer, " %s\n", symt)); 1260 } 1261 PetscFunctionReturn(PETSC_SUCCESS); 1262 } 1263 1264 static PetscErrorCode PCSetFromOptions_HYPRE_AMS(PC pc, PetscOptionItems *PetscOptionsObject) 1265 { 1266 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1267 PetscInt n; 1268 PetscBool flag, flag2, flag3, flag4; 1269 1270 PetscFunctionBegin; 1271 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE AMS Options"); 1272 PetscCall(PetscOptionsInt("-pc_hypre_ams_print_level", "Debugging output level for AMS", "None", jac->as_print, &jac->as_print, &flag)); 1273 if (flag) PetscCallExternal(HYPRE_AMSSetPrintLevel, jac->hsolver, jac->as_print); 1274 PetscCall(PetscOptionsInt("-pc_hypre_ams_max_iter", "Maximum number of AMS multigrid iterations within PCApply", "None", jac->as_max_iter, &jac->as_max_iter, &flag)); 1275 if (flag) PetscCallExternal(HYPRE_AMSSetMaxIter, jac->hsolver, jac->as_max_iter); 1276 PetscCall(PetscOptionsInt("-pc_hypre_ams_cycle_type", "Cycle type for AMS multigrid", "None", jac->ams_cycle_type, &jac->ams_cycle_type, &flag)); 1277 if (flag) PetscCallExternal(HYPRE_AMSSetCycleType, jac->hsolver, jac->ams_cycle_type); 1278 PetscCall(PetscOptionsReal("-pc_hypre_ams_tol", "Error tolerance for AMS multigrid", "None", jac->as_tol, &jac->as_tol, &flag)); 1279 if (flag) PetscCallExternal(HYPRE_AMSSetTol, jac->hsolver, jac->as_tol); 1280 PetscCall(PetscOptionsInt("-pc_hypre_ams_relax_type", "Relaxation type for AMS smoother", "None", jac->as_relax_type, &jac->as_relax_type, &flag)); 1281 PetscCall(PetscOptionsInt("-pc_hypre_ams_relax_times", "Number of relaxation steps for AMS smoother", "None", jac->as_relax_times, &jac->as_relax_times, &flag2)); 1282 PetscCall(PetscOptionsReal("-pc_hypre_ams_relax_weight", "Relaxation weight for AMS smoother", "None", jac->as_relax_weight, &jac->as_relax_weight, &flag3)); 1283 PetscCall(PetscOptionsReal("-pc_hypre_ams_omega", "SSOR coefficient for AMS smoother", "None", jac->as_omega, &jac->as_omega, &flag4)); 1284 if (flag || flag2 || flag3 || flag4) PetscCallExternal(HYPRE_AMSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega); 1285 PetscCall(PetscOptionsReal("-pc_hypre_ams_amg_alpha_theta", "Threshold for strong coupling of vector Poisson AMG solver", "None", jac->as_amg_alpha_theta, &jac->as_amg_alpha_theta, &flag)); 1286 n = 5; 1287 PetscCall(PetscOptionsIntArray("-pc_hypre_ams_amg_alpha_options", "AMG options for vector Poisson", "None", jac->as_amg_alpha_opts, &n, &flag2)); 1288 if (flag || flag2) { 1289 PetscCallExternal(HYPRE_AMSSetAlphaAMGOptions, jac->hsolver, jac->as_amg_alpha_opts[0], /* AMG coarsen type */ 1290 jac->as_amg_alpha_opts[1], /* AMG agg_levels */ 1291 jac->as_amg_alpha_opts[2], /* AMG relax_type */ 1292 jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3], /* AMG interp_type */ 1293 jac->as_amg_alpha_opts[4]); /* AMG Pmax */ 1294 } 1295 PetscCall(PetscOptionsReal("-pc_hypre_ams_amg_beta_theta", "Threshold for strong coupling of scalar Poisson AMG solver", "None", jac->as_amg_beta_theta, &jac->as_amg_beta_theta, &flag)); 1296 n = 5; 1297 PetscCall(PetscOptionsIntArray("-pc_hypre_ams_amg_beta_options", "AMG options for scalar Poisson solver", "None", jac->as_amg_beta_opts, &n, &flag2)); 1298 if (flag || flag2) { 1299 PetscCallExternal(HYPRE_AMSSetBetaAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */ 1300 jac->as_amg_beta_opts[1], /* AMG agg_levels */ 1301 jac->as_amg_beta_opts[2], /* AMG relax_type */ 1302 jac->as_amg_beta_theta, jac->as_amg_beta_opts[3], /* AMG interp_type */ 1303 jac->as_amg_beta_opts[4]); /* AMG Pmax */ 1304 } 1305 PetscCall(PetscOptionsInt("-pc_hypre_ams_projection_frequency", "Frequency at which a projection onto the compatible subspace for problems with zero conductivity regions is performed", "None", jac->ams_proj_freq, &jac->ams_proj_freq, &flag)); 1306 if (flag) { /* override HYPRE's default only if the options is used */ 1307 PetscCallExternal(HYPRE_AMSSetProjectionFrequency, jac->hsolver, jac->ams_proj_freq); 1308 } 1309 PetscOptionsHeadEnd(); 1310 PetscFunctionReturn(PETSC_SUCCESS); 1311 } 1312 1313 static PetscErrorCode PCView_HYPRE_AMS(PC pc, PetscViewer viewer) 1314 { 1315 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1316 PetscBool iascii; 1317 1318 PetscFunctionBegin; 1319 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 1320 if (iascii) { 1321 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE AMS preconditioning\n")); 1322 PetscCall(PetscViewerASCIIPrintf(viewer, " subspace iterations per application %" PetscInt_FMT "\n", jac->as_max_iter)); 1323 PetscCall(PetscViewerASCIIPrintf(viewer, " subspace cycle type %" PetscInt_FMT "\n", jac->ams_cycle_type)); 1324 PetscCall(PetscViewerASCIIPrintf(viewer, " subspace iteration tolerance %g\n", (double)jac->as_tol)); 1325 PetscCall(PetscViewerASCIIPrintf(viewer, " smoother type %" PetscInt_FMT "\n", jac->as_relax_type)); 1326 PetscCall(PetscViewerASCIIPrintf(viewer, " number of smoothing steps %" PetscInt_FMT "\n", jac->as_relax_times)); 1327 PetscCall(PetscViewerASCIIPrintf(viewer, " smoother weight %g\n", (double)jac->as_relax_weight)); 1328 PetscCall(PetscViewerASCIIPrintf(viewer, " smoother omega %g\n", (double)jac->as_omega)); 1329 if (jac->alpha_Poisson) { 1330 PetscCall(PetscViewerASCIIPrintf(viewer, " vector Poisson solver (passed in by user)\n")); 1331 } else { 1332 PetscCall(PetscViewerASCIIPrintf(viewer, " vector Poisson solver (computed) \n")); 1333 } 1334 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG coarsening type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[0])); 1335 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[1])); 1336 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG relaxation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[2])); 1337 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG interpolation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[3])); 1338 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[4])); 1339 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG strength threshold %g\n", (double)jac->as_amg_alpha_theta)); 1340 if (!jac->ams_beta_is_zero) { 1341 if (jac->beta_Poisson) { 1342 PetscCall(PetscViewerASCIIPrintf(viewer, " scalar Poisson solver (passed in by user)\n")); 1343 } else { 1344 PetscCall(PetscViewerASCIIPrintf(viewer, " scalar Poisson solver (computed) \n")); 1345 } 1346 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG coarsening type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[0])); 1347 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_beta_opts[1])); 1348 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG relaxation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[2])); 1349 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG interpolation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[3])); 1350 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_beta_opts[4])); 1351 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG strength threshold %g\n", (double)jac->as_amg_beta_theta)); 1352 if (jac->ams_beta_is_zero_part) PetscCall(PetscViewerASCIIPrintf(viewer, " compatible subspace projection frequency %" PetscInt_FMT " (-1 HYPRE uses default)\n", jac->ams_proj_freq)); 1353 } else { 1354 PetscCall(PetscViewerASCIIPrintf(viewer, " scalar Poisson solver not used (zero-conductivity everywhere) \n")); 1355 } 1356 } 1357 PetscFunctionReturn(PETSC_SUCCESS); 1358 } 1359 1360 static PetscErrorCode PCSetFromOptions_HYPRE_ADS(PC pc, PetscOptionItems *PetscOptionsObject) 1361 { 1362 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1363 PetscInt n; 1364 PetscBool flag, flag2, flag3, flag4; 1365 1366 PetscFunctionBegin; 1367 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ADS Options"); 1368 PetscCall(PetscOptionsInt("-pc_hypre_ads_print_level", "Debugging output level for ADS", "None", jac->as_print, &jac->as_print, &flag)); 1369 if (flag) PetscCallExternal(HYPRE_ADSSetPrintLevel, jac->hsolver, jac->as_print); 1370 PetscCall(PetscOptionsInt("-pc_hypre_ads_max_iter", "Maximum number of ADS multigrid iterations within PCApply", "None", jac->as_max_iter, &jac->as_max_iter, &flag)); 1371 if (flag) PetscCallExternal(HYPRE_ADSSetMaxIter, jac->hsolver, jac->as_max_iter); 1372 PetscCall(PetscOptionsInt("-pc_hypre_ads_cycle_type", "Cycle type for ADS multigrid", "None", jac->ads_cycle_type, &jac->ads_cycle_type, &flag)); 1373 if (flag) PetscCallExternal(HYPRE_ADSSetCycleType, jac->hsolver, jac->ads_cycle_type); 1374 PetscCall(PetscOptionsReal("-pc_hypre_ads_tol", "Error tolerance for ADS multigrid", "None", jac->as_tol, &jac->as_tol, &flag)); 1375 if (flag) PetscCallExternal(HYPRE_ADSSetTol, jac->hsolver, jac->as_tol); 1376 PetscCall(PetscOptionsInt("-pc_hypre_ads_relax_type", "Relaxation type for ADS smoother", "None", jac->as_relax_type, &jac->as_relax_type, &flag)); 1377 PetscCall(PetscOptionsInt("-pc_hypre_ads_relax_times", "Number of relaxation steps for ADS smoother", "None", jac->as_relax_times, &jac->as_relax_times, &flag2)); 1378 PetscCall(PetscOptionsReal("-pc_hypre_ads_relax_weight", "Relaxation weight for ADS smoother", "None", jac->as_relax_weight, &jac->as_relax_weight, &flag3)); 1379 PetscCall(PetscOptionsReal("-pc_hypre_ads_omega", "SSOR coefficient for ADS smoother", "None", jac->as_omega, &jac->as_omega, &flag4)); 1380 if (flag || flag2 || flag3 || flag4) PetscCallExternal(HYPRE_ADSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega); 1381 PetscCall(PetscOptionsReal("-pc_hypre_ads_ams_theta", "Threshold for strong coupling of AMS solver inside ADS", "None", jac->as_amg_alpha_theta, &jac->as_amg_alpha_theta, &flag)); 1382 n = 5; 1383 PetscCall(PetscOptionsIntArray("-pc_hypre_ads_ams_options", "AMG options for AMS solver inside ADS", "None", jac->as_amg_alpha_opts, &n, &flag2)); 1384 PetscCall(PetscOptionsInt("-pc_hypre_ads_ams_cycle_type", "Cycle type for AMS solver inside ADS", "None", jac->ams_cycle_type, &jac->ams_cycle_type, &flag3)); 1385 if (flag || flag2 || flag3) { 1386 PetscCallExternal(HYPRE_ADSSetAMSOptions, jac->hsolver, jac->ams_cycle_type, /* AMS cycle type */ 1387 jac->as_amg_alpha_opts[0], /* AMG coarsen type */ 1388 jac->as_amg_alpha_opts[1], /* AMG agg_levels */ 1389 jac->as_amg_alpha_opts[2], /* AMG relax_type */ 1390 jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3], /* AMG interp_type */ 1391 jac->as_amg_alpha_opts[4]); /* AMG Pmax */ 1392 } 1393 PetscCall(PetscOptionsReal("-pc_hypre_ads_amg_theta", "Threshold for strong coupling of vector AMG solver inside ADS", "None", jac->as_amg_beta_theta, &jac->as_amg_beta_theta, &flag)); 1394 n = 5; 1395 PetscCall(PetscOptionsIntArray("-pc_hypre_ads_amg_options", "AMG options for vector AMG solver inside ADS", "None", jac->as_amg_beta_opts, &n, &flag2)); 1396 if (flag || flag2) { 1397 PetscCallExternal(HYPRE_ADSSetAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */ 1398 jac->as_amg_beta_opts[1], /* AMG agg_levels */ 1399 jac->as_amg_beta_opts[2], /* AMG relax_type */ 1400 jac->as_amg_beta_theta, jac->as_amg_beta_opts[3], /* AMG interp_type */ 1401 jac->as_amg_beta_opts[4]); /* AMG Pmax */ 1402 } 1403 PetscOptionsHeadEnd(); 1404 PetscFunctionReturn(PETSC_SUCCESS); 1405 } 1406 1407 static PetscErrorCode PCView_HYPRE_ADS(PC pc, PetscViewer viewer) 1408 { 1409 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1410 PetscBool iascii; 1411 1412 PetscFunctionBegin; 1413 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 1414 if (iascii) { 1415 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE ADS preconditioning\n")); 1416 PetscCall(PetscViewerASCIIPrintf(viewer, " subspace iterations per application %" PetscInt_FMT "\n", jac->as_max_iter)); 1417 PetscCall(PetscViewerASCIIPrintf(viewer, " subspace cycle type %" PetscInt_FMT "\n", jac->ads_cycle_type)); 1418 PetscCall(PetscViewerASCIIPrintf(viewer, " subspace iteration tolerance %g\n", (double)jac->as_tol)); 1419 PetscCall(PetscViewerASCIIPrintf(viewer, " smoother type %" PetscInt_FMT "\n", jac->as_relax_type)); 1420 PetscCall(PetscViewerASCIIPrintf(viewer, " number of smoothing steps %" PetscInt_FMT "\n", jac->as_relax_times)); 1421 PetscCall(PetscViewerASCIIPrintf(viewer, " smoother weight %g\n", (double)jac->as_relax_weight)); 1422 PetscCall(PetscViewerASCIIPrintf(viewer, " smoother omega %g\n", (double)jac->as_omega)); 1423 PetscCall(PetscViewerASCIIPrintf(viewer, " AMS solver using boomerAMG\n")); 1424 PetscCall(PetscViewerASCIIPrintf(viewer, " subspace cycle type %" PetscInt_FMT "\n", jac->ams_cycle_type)); 1425 PetscCall(PetscViewerASCIIPrintf(viewer, " coarsening type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[0])); 1426 PetscCall(PetscViewerASCIIPrintf(viewer, " levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[1])); 1427 PetscCall(PetscViewerASCIIPrintf(viewer, " relaxation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[2])); 1428 PetscCall(PetscViewerASCIIPrintf(viewer, " interpolation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[3])); 1429 PetscCall(PetscViewerASCIIPrintf(viewer, " max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[4])); 1430 PetscCall(PetscViewerASCIIPrintf(viewer, " strength threshold %g\n", (double)jac->as_amg_alpha_theta)); 1431 PetscCall(PetscViewerASCIIPrintf(viewer, " vector Poisson solver using boomerAMG\n")); 1432 PetscCall(PetscViewerASCIIPrintf(viewer, " coarsening type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[0])); 1433 PetscCall(PetscViewerASCIIPrintf(viewer, " levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_beta_opts[1])); 1434 PetscCall(PetscViewerASCIIPrintf(viewer, " relaxation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[2])); 1435 PetscCall(PetscViewerASCIIPrintf(viewer, " interpolation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[3])); 1436 PetscCall(PetscViewerASCIIPrintf(viewer, " max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_beta_opts[4])); 1437 PetscCall(PetscViewerASCIIPrintf(viewer, " strength threshold %g\n", (double)jac->as_amg_beta_theta)); 1438 } 1439 PetscFunctionReturn(PETSC_SUCCESS); 1440 } 1441 1442 static PetscErrorCode PCHYPRESetDiscreteGradient_HYPRE(PC pc, Mat G) 1443 { 1444 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1445 PetscBool ishypre; 1446 1447 PetscFunctionBegin; 1448 PetscCall(PetscObjectTypeCompare((PetscObject)G, MATHYPRE, &ishypre)); 1449 if (ishypre) { 1450 PetscCall(PetscObjectReference((PetscObject)G)); 1451 PetscCall(MatDestroy(&jac->G)); 1452 jac->G = G; 1453 } else { 1454 PetscCall(MatDestroy(&jac->G)); 1455 PetscCall(MatConvert(G, MATHYPRE, MAT_INITIAL_MATRIX, &jac->G)); 1456 } 1457 PetscFunctionReturn(PETSC_SUCCESS); 1458 } 1459 1460 /*@ 1461 PCHYPRESetDiscreteGradient - Set discrete gradient matrix for `PCHYPRE` type of ams or ads 1462 1463 Collective 1464 1465 Input Parameters: 1466 + pc - the preconditioning context 1467 - G - the discrete gradient 1468 1469 Level: intermediate 1470 1471 Notes: 1472 G should have as many rows as the number of edges and as many columns as the number of vertices in the mesh 1473 1474 Each row of G has 2 nonzeros, with column indexes being the global indexes of edge's endpoints: matrix entries are +1 and -1 depending on edge orientation 1475 1476 Developer Notes: 1477 This automatically converts the matrix to `MATHYPRE` if it is not already of that type 1478 1479 .seealso: `PCHYPRE`, `PCHYPRESetDiscreteCurl()` 1480 @*/ 1481 PetscErrorCode PCHYPRESetDiscreteGradient(PC pc, Mat G) 1482 { 1483 PetscFunctionBegin; 1484 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1485 PetscValidHeaderSpecific(G, MAT_CLASSID, 2); 1486 PetscCheckSameComm(pc, 1, G, 2); 1487 PetscTryMethod(pc, "PCHYPRESetDiscreteGradient_C", (PC, Mat), (pc, G)); 1488 PetscFunctionReturn(PETSC_SUCCESS); 1489 } 1490 1491 static PetscErrorCode PCHYPRESetDiscreteCurl_HYPRE(PC pc, Mat C) 1492 { 1493 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1494 PetscBool ishypre; 1495 1496 PetscFunctionBegin; 1497 PetscCall(PetscObjectTypeCompare((PetscObject)C, MATHYPRE, &ishypre)); 1498 if (ishypre) { 1499 PetscCall(PetscObjectReference((PetscObject)C)); 1500 PetscCall(MatDestroy(&jac->C)); 1501 jac->C = C; 1502 } else { 1503 PetscCall(MatDestroy(&jac->C)); 1504 PetscCall(MatConvert(C, MATHYPRE, MAT_INITIAL_MATRIX, &jac->C)); 1505 } 1506 PetscFunctionReturn(PETSC_SUCCESS); 1507 } 1508 1509 /*@ 1510 PCHYPRESetDiscreteCurl - Set discrete curl matrx for `PCHYPRE` type of ads 1511 1512 Collective 1513 1514 Input Parameters: 1515 + pc - the preconditioning context 1516 - C - the discrete curl 1517 1518 Level: intermediate 1519 1520 Notes: 1521 C should have as many rows as the number of faces and as many columns as the number of edges in the mesh 1522 1523 Each row of G has as many nonzeros as the number of edges of a face, with column indexes being the global indexes of the corresponding edge: matrix entries are +1 and -1 depending on edge orientation with respect to the face orientation 1524 1525 Developer Notes: 1526 This automatically converts the matrix to `MATHYPRE` if it is not already of that type 1527 1528 If this is only for `PCHYPRE` type of ads it should be called `PCHYPREADSSetDiscreteCurl()` 1529 1530 .seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()` 1531 @*/ 1532 PetscErrorCode PCHYPRESetDiscreteCurl(PC pc, Mat C) 1533 { 1534 PetscFunctionBegin; 1535 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1536 PetscValidHeaderSpecific(C, MAT_CLASSID, 2); 1537 PetscCheckSameComm(pc, 1, C, 2); 1538 PetscTryMethod(pc, "PCHYPRESetDiscreteCurl_C", (PC, Mat), (pc, C)); 1539 PetscFunctionReturn(PETSC_SUCCESS); 1540 } 1541 1542 static PetscErrorCode PCHYPRESetInterpolations_HYPRE(PC pc, PetscInt dim, Mat RT_PiFull, Mat RT_Pi[], Mat ND_PiFull, Mat ND_Pi[]) 1543 { 1544 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1545 PetscBool ishypre; 1546 PetscInt i; 1547 PetscFunctionBegin; 1548 1549 PetscCall(MatDestroy(&jac->RT_PiFull)); 1550 PetscCall(MatDestroy(&jac->ND_PiFull)); 1551 for (i = 0; i < 3; ++i) { 1552 PetscCall(MatDestroy(&jac->RT_Pi[i])); 1553 PetscCall(MatDestroy(&jac->ND_Pi[i])); 1554 } 1555 1556 jac->dim = dim; 1557 if (RT_PiFull) { 1558 PetscCall(PetscObjectTypeCompare((PetscObject)RT_PiFull, MATHYPRE, &ishypre)); 1559 if (ishypre) { 1560 PetscCall(PetscObjectReference((PetscObject)RT_PiFull)); 1561 jac->RT_PiFull = RT_PiFull; 1562 } else { 1563 PetscCall(MatConvert(RT_PiFull, MATHYPRE, MAT_INITIAL_MATRIX, &jac->RT_PiFull)); 1564 } 1565 } 1566 if (RT_Pi) { 1567 for (i = 0; i < dim; ++i) { 1568 if (RT_Pi[i]) { 1569 PetscCall(PetscObjectTypeCompare((PetscObject)RT_Pi[i], MATHYPRE, &ishypre)); 1570 if (ishypre) { 1571 PetscCall(PetscObjectReference((PetscObject)RT_Pi[i])); 1572 jac->RT_Pi[i] = RT_Pi[i]; 1573 } else { 1574 PetscCall(MatConvert(RT_Pi[i], MATHYPRE, MAT_INITIAL_MATRIX, &jac->RT_Pi[i])); 1575 } 1576 } 1577 } 1578 } 1579 if (ND_PiFull) { 1580 PetscCall(PetscObjectTypeCompare((PetscObject)ND_PiFull, MATHYPRE, &ishypre)); 1581 if (ishypre) { 1582 PetscCall(PetscObjectReference((PetscObject)ND_PiFull)); 1583 jac->ND_PiFull = ND_PiFull; 1584 } else { 1585 PetscCall(MatConvert(ND_PiFull, MATHYPRE, MAT_INITIAL_MATRIX, &jac->ND_PiFull)); 1586 } 1587 } 1588 if (ND_Pi) { 1589 for (i = 0; i < dim; ++i) { 1590 if (ND_Pi[i]) { 1591 PetscCall(PetscObjectTypeCompare((PetscObject)ND_Pi[i], MATHYPRE, &ishypre)); 1592 if (ishypre) { 1593 PetscCall(PetscObjectReference((PetscObject)ND_Pi[i])); 1594 jac->ND_Pi[i] = ND_Pi[i]; 1595 } else { 1596 PetscCall(MatConvert(ND_Pi[i], MATHYPRE, MAT_INITIAL_MATRIX, &jac->ND_Pi[i])); 1597 } 1598 } 1599 } 1600 } 1601 1602 PetscFunctionReturn(PETSC_SUCCESS); 1603 } 1604 1605 /*@ 1606 PCHYPRESetInterpolations - Set interpolation matrices for `PCHYPRE` type of ams or ads 1607 1608 Collective 1609 1610 Input Parameters: 1611 + pc - the preconditioning context 1612 . dim - the dimension of the problem, only used in AMS 1613 . RT_PiFull - Raviart-Thomas interpolation matrix 1614 . RT_Pi - x/y/z component of Raviart-Thomas interpolation matrix 1615 . ND_PiFull - Nedelec interpolation matrix 1616 - ND_Pi - x/y/z component of Nedelec interpolation matrix 1617 1618 Level: intermediate 1619 1620 Notes: 1621 For AMS, only Nedelec interpolation matrices are needed, the Raviart-Thomas interpolation matrices can be set to NULL. 1622 1623 For ADS, both type of interpolation matrices are needed. 1624 1625 Developer Notes: 1626 This automatically converts the matrix to `MATHYPRE` if it is not already of that type 1627 1628 .seealso: `PCHYPRE` 1629 @*/ 1630 PetscErrorCode PCHYPRESetInterpolations(PC pc, PetscInt dim, Mat RT_PiFull, Mat RT_Pi[], Mat ND_PiFull, Mat ND_Pi[]) 1631 { 1632 PetscInt i; 1633 1634 PetscFunctionBegin; 1635 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1636 if (RT_PiFull) { 1637 PetscValidHeaderSpecific(RT_PiFull, MAT_CLASSID, 3); 1638 PetscCheckSameComm(pc, 1, RT_PiFull, 3); 1639 } 1640 if (RT_Pi) { 1641 PetscAssertPointer(RT_Pi, 4); 1642 for (i = 0; i < dim; ++i) { 1643 if (RT_Pi[i]) { 1644 PetscValidHeaderSpecific(RT_Pi[i], MAT_CLASSID, 4); 1645 PetscCheckSameComm(pc, 1, RT_Pi[i], 4); 1646 } 1647 } 1648 } 1649 if (ND_PiFull) { 1650 PetscValidHeaderSpecific(ND_PiFull, MAT_CLASSID, 5); 1651 PetscCheckSameComm(pc, 1, ND_PiFull, 5); 1652 } 1653 if (ND_Pi) { 1654 PetscAssertPointer(ND_Pi, 6); 1655 for (i = 0; i < dim; ++i) { 1656 if (ND_Pi[i]) { 1657 PetscValidHeaderSpecific(ND_Pi[i], MAT_CLASSID, 6); 1658 PetscCheckSameComm(pc, 1, ND_Pi[i], 6); 1659 } 1660 } 1661 } 1662 PetscTryMethod(pc, "PCHYPRESetInterpolations_C", (PC, PetscInt, Mat, Mat[], Mat, Mat[]), (pc, dim, RT_PiFull, RT_Pi, ND_PiFull, ND_Pi)); 1663 PetscFunctionReturn(PETSC_SUCCESS); 1664 } 1665 1666 static PetscErrorCode PCHYPRESetPoissonMatrix_HYPRE(PC pc, Mat A, PetscBool isalpha) 1667 { 1668 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1669 PetscBool ishypre; 1670 1671 PetscFunctionBegin; 1672 PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre)); 1673 if (ishypre) { 1674 if (isalpha) { 1675 PetscCall(PetscObjectReference((PetscObject)A)); 1676 PetscCall(MatDestroy(&jac->alpha_Poisson)); 1677 jac->alpha_Poisson = A; 1678 } else { 1679 if (A) { 1680 PetscCall(PetscObjectReference((PetscObject)A)); 1681 } else { 1682 jac->ams_beta_is_zero = PETSC_TRUE; 1683 } 1684 PetscCall(MatDestroy(&jac->beta_Poisson)); 1685 jac->beta_Poisson = A; 1686 } 1687 } else { 1688 if (isalpha) { 1689 PetscCall(MatDestroy(&jac->alpha_Poisson)); 1690 PetscCall(MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &jac->alpha_Poisson)); 1691 } else { 1692 if (A) { 1693 PetscCall(MatDestroy(&jac->beta_Poisson)); 1694 PetscCall(MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &jac->beta_Poisson)); 1695 } else { 1696 PetscCall(MatDestroy(&jac->beta_Poisson)); 1697 jac->ams_beta_is_zero = PETSC_TRUE; 1698 } 1699 } 1700 } 1701 PetscFunctionReturn(PETSC_SUCCESS); 1702 } 1703 1704 /*@ 1705 PCHYPRESetAlphaPoissonMatrix - Set vector Poisson matrix for `PCHYPRE` of type ams 1706 1707 Collective 1708 1709 Input Parameters: 1710 + pc - the preconditioning context 1711 - A - the matrix 1712 1713 Level: intermediate 1714 1715 Note: 1716 A should be obtained by discretizing the vector valued Poisson problem with linear finite elements 1717 1718 Developer Notes: 1719 This automatically converts the matrix to `MATHYPRE` if it is not already of that type 1720 1721 If this is only for `PCHYPRE` type of ams it should be called `PCHYPREAMSSetAlphaPoissonMatrix()` 1722 1723 .seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetBetaPoissonMatrix()` 1724 @*/ 1725 PetscErrorCode PCHYPRESetAlphaPoissonMatrix(PC pc, Mat A) 1726 { 1727 PetscFunctionBegin; 1728 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1729 PetscValidHeaderSpecific(A, MAT_CLASSID, 2); 1730 PetscCheckSameComm(pc, 1, A, 2); 1731 PetscTryMethod(pc, "PCHYPRESetPoissonMatrix_C", (PC, Mat, PetscBool), (pc, A, PETSC_TRUE)); 1732 PetscFunctionReturn(PETSC_SUCCESS); 1733 } 1734 1735 /*@ 1736 PCHYPRESetBetaPoissonMatrix - Set Poisson matrix for `PCHYPRE` of type ams 1737 1738 Collective 1739 1740 Input Parameters: 1741 + pc - the preconditioning context 1742 - A - the matrix, or NULL to turn it off 1743 1744 Level: intermediate 1745 1746 Note: 1747 A should be obtained by discretizing the Poisson problem with linear finite elements. 1748 1749 Developer Notes: 1750 This automatically converts the matrix to `MATHYPRE` if it is not already of that type 1751 1752 If this is only for `PCHYPRE` type of ams it should be called `PCHYPREAMSPCHYPRESetBetaPoissonMatrix()` 1753 1754 .seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()` 1755 @*/ 1756 PetscErrorCode PCHYPRESetBetaPoissonMatrix(PC pc, Mat A) 1757 { 1758 PetscFunctionBegin; 1759 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1760 if (A) { 1761 PetscValidHeaderSpecific(A, MAT_CLASSID, 2); 1762 PetscCheckSameComm(pc, 1, A, 2); 1763 } 1764 PetscTryMethod(pc, "PCHYPRESetPoissonMatrix_C", (PC, Mat, PetscBool), (pc, A, PETSC_FALSE)); 1765 PetscFunctionReturn(PETSC_SUCCESS); 1766 } 1767 1768 static PetscErrorCode PCHYPRESetEdgeConstantVectors_HYPRE(PC pc, Vec ozz, Vec zoz, Vec zzo) 1769 { 1770 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1771 1772 PetscFunctionBegin; 1773 /* throw away any vector if already set */ 1774 PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[0])); 1775 PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[1])); 1776 PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[2])); 1777 PetscCall(VecHYPRE_IJVectorCreate(ozz->map, &jac->constants[0])); 1778 PetscCall(VecHYPRE_IJVectorCopy(ozz, jac->constants[0])); 1779 PetscCall(VecHYPRE_IJVectorCreate(zoz->map, &jac->constants[1])); 1780 PetscCall(VecHYPRE_IJVectorCopy(zoz, jac->constants[1])); 1781 jac->dim = 2; 1782 if (zzo) { 1783 PetscCall(VecHYPRE_IJVectorCreate(zzo->map, &jac->constants[2])); 1784 PetscCall(VecHYPRE_IJVectorCopy(zzo, jac->constants[2])); 1785 jac->dim++; 1786 } 1787 PetscFunctionReturn(PETSC_SUCCESS); 1788 } 1789 1790 /*@ 1791 PCHYPRESetEdgeConstantVectors - Set the representation of the constant vector fields in the edge element basis for `PCHYPRE` of type ams 1792 1793 Collective 1794 1795 Input Parameters: 1796 + pc - the preconditioning context 1797 . ozz - vector representing (1,0,0) (or (1,0) in 2D) 1798 . zoz - vector representing (0,1,0) (or (0,1) in 2D) 1799 - zzo - vector representing (0,0,1) (use NULL in 2D) 1800 1801 Level: intermediate 1802 1803 Developer Notes: 1804 If this is only for `PCHYPRE` type of ams it should be called `PCHYPREAMSSetEdgeConstantVectors()` 1805 1806 .seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()` 1807 @*/ 1808 PetscErrorCode PCHYPRESetEdgeConstantVectors(PC pc, Vec ozz, Vec zoz, Vec zzo) 1809 { 1810 PetscFunctionBegin; 1811 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1812 PetscValidHeaderSpecific(ozz, VEC_CLASSID, 2); 1813 PetscValidHeaderSpecific(zoz, VEC_CLASSID, 3); 1814 if (zzo) PetscValidHeaderSpecific(zzo, VEC_CLASSID, 4); 1815 PetscCheckSameComm(pc, 1, ozz, 2); 1816 PetscCheckSameComm(pc, 1, zoz, 3); 1817 if (zzo) PetscCheckSameComm(pc, 1, zzo, 4); 1818 PetscTryMethod(pc, "PCHYPRESetEdgeConstantVectors_C", (PC, Vec, Vec, Vec), (pc, ozz, zoz, zzo)); 1819 PetscFunctionReturn(PETSC_SUCCESS); 1820 } 1821 1822 static PetscErrorCode PCHYPREAMSSetInteriorNodes_HYPRE(PC pc, Vec interior) 1823 { 1824 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1825 1826 PetscFunctionBegin; 1827 PetscCall(VecHYPRE_IJVectorDestroy(&jac->interior)); 1828 PetscCall(VecHYPRE_IJVectorCreate(interior->map, &jac->interior)); 1829 PetscCall(VecHYPRE_IJVectorCopy(interior, jac->interior)); 1830 jac->ams_beta_is_zero_part = PETSC_TRUE; 1831 PetscFunctionReturn(PETSC_SUCCESS); 1832 } 1833 1834 /*@ 1835 PCHYPREAMSSetInteriorNodes - Set the list of interior nodes to a zero-conductivity region for `PCHYPRE` of type ams 1836 1837 Collective 1838 1839 Input Parameters: 1840 + pc - the preconditioning context 1841 - interior - vector. node is interior if its entry in the array is 1.0. 1842 1843 Level: intermediate 1844 1845 Note: 1846 This calls `HYPRE_AMSSetInteriorNodes()` 1847 1848 Developer Notes: 1849 If this is only for `PCHYPRE` type of ams it should be called `PCHYPREAMSSetInteriorNodes()` 1850 1851 .seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()` 1852 @*/ 1853 PetscErrorCode PCHYPREAMSSetInteriorNodes(PC pc, Vec interior) 1854 { 1855 PetscFunctionBegin; 1856 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1857 PetscValidHeaderSpecific(interior, VEC_CLASSID, 2); 1858 PetscCheckSameComm(pc, 1, interior, 2); 1859 PetscTryMethod(pc, "PCHYPREAMSSetInteriorNodes_C", (PC, Vec), (pc, interior)); 1860 PetscFunctionReturn(PETSC_SUCCESS); 1861 } 1862 1863 static PetscErrorCode PCSetCoordinates_HYPRE(PC pc, PetscInt dim, PetscInt nloc, PetscReal *coords) 1864 { 1865 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1866 Vec tv; 1867 PetscInt i; 1868 1869 PetscFunctionBegin; 1870 /* throw away any coordinate vector if already set */ 1871 PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[0])); 1872 PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[1])); 1873 PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[2])); 1874 jac->dim = dim; 1875 1876 /* compute IJ vector for coordinates */ 1877 PetscCall(VecCreate(PetscObjectComm((PetscObject)pc), &tv)); 1878 PetscCall(VecSetType(tv, VECSTANDARD)); 1879 PetscCall(VecSetSizes(tv, nloc, PETSC_DECIDE)); 1880 for (i = 0; i < dim; i++) { 1881 PetscScalar *array; 1882 PetscInt j; 1883 1884 PetscCall(VecHYPRE_IJVectorCreate(tv->map, &jac->coords[i])); 1885 PetscCall(VecGetArrayWrite(tv, &array)); 1886 for (j = 0; j < nloc; j++) array[j] = coords[j * dim + i]; 1887 PetscCall(VecRestoreArrayWrite(tv, &array)); 1888 PetscCall(VecHYPRE_IJVectorCopy(tv, jac->coords[i])); 1889 } 1890 PetscCall(VecDestroy(&tv)); 1891 PetscFunctionReturn(PETSC_SUCCESS); 1892 } 1893 1894 static PetscErrorCode PCHYPREGetType_HYPRE(PC pc, const char *name[]) 1895 { 1896 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1897 1898 PetscFunctionBegin; 1899 *name = jac->hypre_type; 1900 PetscFunctionReturn(PETSC_SUCCESS); 1901 } 1902 1903 static PetscErrorCode PCHYPRESetType_HYPRE(PC pc, const char name[]) 1904 { 1905 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1906 PetscBool flag; 1907 1908 PetscFunctionBegin; 1909 if (jac->hypre_type) { 1910 PetscCall(PetscStrcmp(jac->hypre_type, name, &flag)); 1911 PetscCheck(flag, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Cannot reset the HYPRE preconditioner type once it has been set"); 1912 PetscFunctionReturn(PETSC_SUCCESS); 1913 } else { 1914 PetscCall(PetscStrallocpy(name, &jac->hypre_type)); 1915 } 1916 1917 jac->maxiter = PETSC_DEFAULT; 1918 jac->tol = PETSC_DEFAULT; 1919 jac->printstatistics = PetscLogPrintInfo; 1920 1921 PetscCall(PetscStrcmp("pilut", jac->hypre_type, &flag)); 1922 if (flag) { 1923 PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre)); 1924 PetscCallExternal(HYPRE_ParCSRPilutCreate, jac->comm_hypre, &jac->hsolver); 1925 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_Pilut; 1926 pc->ops->view = PCView_HYPRE_Pilut; 1927 jac->destroy = HYPRE_ParCSRPilutDestroy; 1928 jac->setup = HYPRE_ParCSRPilutSetup; 1929 jac->solve = HYPRE_ParCSRPilutSolve; 1930 jac->factorrowsize = PETSC_DEFAULT; 1931 PetscFunctionReturn(PETSC_SUCCESS); 1932 } 1933 PetscCall(PetscStrcmp("euclid", jac->hypre_type, &flag)); 1934 if (flag) { 1935 #if defined(PETSC_USE_64BIT_INDICES) 1936 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Hypre Euclid does not support 64-bit indices"); 1937 #endif 1938 PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre)); 1939 PetscCallExternal(HYPRE_EuclidCreate, jac->comm_hypre, &jac->hsolver); 1940 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_Euclid; 1941 pc->ops->view = PCView_HYPRE_Euclid; 1942 jac->destroy = HYPRE_EuclidDestroy; 1943 jac->setup = HYPRE_EuclidSetup; 1944 jac->solve = HYPRE_EuclidSolve; 1945 jac->factorrowsize = PETSC_DEFAULT; 1946 jac->eu_level = PETSC_DEFAULT; /* default */ 1947 PetscFunctionReturn(PETSC_SUCCESS); 1948 } 1949 PetscCall(PetscStrcmp("parasails", jac->hypre_type, &flag)); 1950 if (flag) { 1951 PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre)); 1952 PetscCallExternal(HYPRE_ParaSailsCreate, jac->comm_hypre, &jac->hsolver); 1953 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ParaSails; 1954 pc->ops->view = PCView_HYPRE_ParaSails; 1955 jac->destroy = HYPRE_ParaSailsDestroy; 1956 jac->setup = HYPRE_ParaSailsSetup; 1957 jac->solve = HYPRE_ParaSailsSolve; 1958 /* initialize */ 1959 jac->nlevels = 1; 1960 jac->threshold = .1; 1961 jac->filter = .1; 1962 jac->loadbal = 0; 1963 if (PetscLogPrintInfo) jac->logging = (int)PETSC_TRUE; 1964 else jac->logging = (int)PETSC_FALSE; 1965 1966 jac->ruse = (int)PETSC_FALSE; 1967 jac->symt = 0; 1968 PetscCallExternal(HYPRE_ParaSailsSetParams, jac->hsolver, jac->threshold, jac->nlevels); 1969 PetscCallExternal(HYPRE_ParaSailsSetFilter, jac->hsolver, jac->filter); 1970 PetscCallExternal(HYPRE_ParaSailsSetLoadbal, jac->hsolver, jac->loadbal); 1971 PetscCallExternal(HYPRE_ParaSailsSetLogging, jac->hsolver, jac->logging); 1972 PetscCallExternal(HYPRE_ParaSailsSetReuse, jac->hsolver, jac->ruse); 1973 PetscCallExternal(HYPRE_ParaSailsSetSym, jac->hsolver, jac->symt); 1974 PetscFunctionReturn(PETSC_SUCCESS); 1975 } 1976 PetscCall(PetscStrcmp("boomeramg", jac->hypre_type, &flag)); 1977 if (flag) { 1978 PetscCallExternal(HYPRE_BoomerAMGCreate, &jac->hsolver); 1979 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_BoomerAMG; 1980 pc->ops->view = PCView_HYPRE_BoomerAMG; 1981 pc->ops->applytranspose = PCApplyTranspose_HYPRE_BoomerAMG; 1982 pc->ops->applyrichardson = PCApplyRichardson_HYPRE_BoomerAMG; 1983 pc->ops->matapply = PCMatApply_HYPRE_BoomerAMG; 1984 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", PCGetInterpolations_BoomerAMG)); 1985 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", PCGetCoarseOperators_BoomerAMG)); 1986 jac->destroy = HYPRE_BoomerAMGDestroy; 1987 jac->setup = HYPRE_BoomerAMGSetup; 1988 jac->solve = HYPRE_BoomerAMGSolve; 1989 jac->applyrichardson = PETSC_FALSE; 1990 /* these defaults match the hypre defaults */ 1991 jac->cycletype = 1; 1992 jac->maxlevels = 25; 1993 jac->maxiter = 1; 1994 jac->tol = 0.0; /* tolerance of zero indicates use as preconditioner (suppresses convergence errors) */ 1995 jac->truncfactor = 0.0; 1996 jac->strongthreshold = .25; 1997 jac->maxrowsum = .9; 1998 jac->coarsentype = 6; 1999 jac->measuretype = 0; 2000 jac->gridsweeps[0] = jac->gridsweeps[1] = jac->gridsweeps[2] = 1; 2001 jac->smoothtype = -1; /* Not set by default */ 2002 jac->smoothnumlevels = 25; 2003 jac->eu_level = 0; 2004 jac->eu_droptolerance = 0; 2005 jac->eu_bj = 0; 2006 jac->relaxtype[0] = jac->relaxtype[1] = 6; /* Defaults to SYMMETRIC since in PETSc we are using a PC - most likely with CG */ 2007 jac->relaxtype[2] = 9; /*G.E. */ 2008 jac->relaxweight = 1.0; 2009 jac->outerrelaxweight = 1.0; 2010 jac->relaxorder = 1; 2011 jac->interptype = 0; 2012 jac->Rtype = 0; 2013 jac->Rstrongthreshold = 0.25; 2014 jac->Rfilterthreshold = 0.0; 2015 jac->Adroptype = -1; 2016 jac->Adroptol = 0.0; 2017 jac->agg_nl = 0; 2018 jac->agg_interptype = 4; 2019 jac->pmax = 0; 2020 jac->truncfactor = 0.0; 2021 jac->agg_num_paths = 1; 2022 jac->maxc = 9; 2023 jac->minc = 1; 2024 jac->nodal_coarsening = 0; 2025 jac->nodal_coarsening_diag = 0; 2026 jac->vec_interp_variant = 0; 2027 jac->vec_interp_qmax = 0; 2028 jac->vec_interp_smooth = PETSC_FALSE; 2029 jac->interp_refine = 0; 2030 jac->nodal_relax = PETSC_FALSE; 2031 jac->nodal_relax_levels = 1; 2032 jac->rap2 = 0; 2033 2034 /* GPU defaults 2035 from https://hypre.readthedocs.io/en/latest/solvers-boomeramg.html#gpu-supported-options 2036 and /src/parcsr_ls/par_amg.c */ 2037 #if defined(PETSC_HAVE_HYPRE_DEVICE) 2038 jac->keeptranspose = PETSC_TRUE; 2039 jac->mod_rap2 = 1; 2040 jac->coarsentype = 8; 2041 jac->relaxorder = 0; 2042 jac->interptype = 6; 2043 jac->relaxtype[0] = 18; 2044 jac->relaxtype[1] = 18; 2045 jac->agg_interptype = 7; 2046 #else 2047 jac->keeptranspose = PETSC_FALSE; 2048 jac->mod_rap2 = 0; 2049 #endif 2050 PetscCallExternal(HYPRE_BoomerAMGSetCycleType, jac->hsolver, jac->cycletype); 2051 PetscCallExternal(HYPRE_BoomerAMGSetMaxLevels, jac->hsolver, jac->maxlevels); 2052 PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter); 2053 PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol); 2054 PetscCallExternal(HYPRE_BoomerAMGSetTruncFactor, jac->hsolver, jac->truncfactor); 2055 PetscCallExternal(HYPRE_BoomerAMGSetStrongThreshold, jac->hsolver, jac->strongthreshold); 2056 PetscCallExternal(HYPRE_BoomerAMGSetMaxRowSum, jac->hsolver, jac->maxrowsum); 2057 PetscCallExternal(HYPRE_BoomerAMGSetCoarsenType, jac->hsolver, jac->coarsentype); 2058 PetscCallExternal(HYPRE_BoomerAMGSetMeasureType, jac->hsolver, jac->measuretype); 2059 PetscCallExternal(HYPRE_BoomerAMGSetRelaxOrder, jac->hsolver, jac->relaxorder); 2060 PetscCallExternal(HYPRE_BoomerAMGSetInterpType, jac->hsolver, jac->interptype); 2061 PetscCallExternal(HYPRE_BoomerAMGSetAggNumLevels, jac->hsolver, jac->agg_nl); 2062 PetscCallExternal(HYPRE_BoomerAMGSetAggInterpType, jac->hsolver, jac->agg_interptype); 2063 PetscCallExternal(HYPRE_BoomerAMGSetPMaxElmts, jac->hsolver, jac->pmax); 2064 PetscCallExternal(HYPRE_BoomerAMGSetNumPaths, jac->hsolver, jac->agg_num_paths); 2065 PetscCallExternal(HYPRE_BoomerAMGSetRelaxType, jac->hsolver, jac->relaxtype[0]); /* defaults coarse to 9 */ 2066 PetscCallExternal(HYPRE_BoomerAMGSetNumSweeps, jac->hsolver, jac->gridsweeps[0]); /* defaults coarse to 1 */ 2067 PetscCallExternal(HYPRE_BoomerAMGSetMaxCoarseSize, jac->hsolver, jac->maxc); 2068 PetscCallExternal(HYPRE_BoomerAMGSetMinCoarseSize, jac->hsolver, jac->minc); 2069 /* GPU */ 2070 #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0) 2071 PetscCallExternal(HYPRE_BoomerAMGSetKeepTranspose, jac->hsolver, jac->keeptranspose ? 1 : 0); 2072 PetscCallExternal(HYPRE_BoomerAMGSetRAP2, jac->hsolver, jac->rap2); 2073 PetscCallExternal(HYPRE_BoomerAMGSetModuleRAP2, jac->hsolver, jac->mod_rap2); 2074 #endif 2075 2076 /* AIR */ 2077 #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0) 2078 PetscCallExternal(HYPRE_BoomerAMGSetRestriction, jac->hsolver, jac->Rtype); 2079 PetscCallExternal(HYPRE_BoomerAMGSetStrongThresholdR, jac->hsolver, jac->Rstrongthreshold); 2080 PetscCallExternal(HYPRE_BoomerAMGSetFilterThresholdR, jac->hsolver, jac->Rfilterthreshold); 2081 PetscCallExternal(HYPRE_BoomerAMGSetADropTol, jac->hsolver, jac->Adroptol); 2082 PetscCallExternal(HYPRE_BoomerAMGSetADropType, jac->hsolver, jac->Adroptype); 2083 #endif 2084 PetscFunctionReturn(PETSC_SUCCESS); 2085 } 2086 PetscCall(PetscStrcmp("ams", jac->hypre_type, &flag)); 2087 if (flag) { 2088 PetscCallExternal(HYPRE_AMSCreate, &jac->hsolver); 2089 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_AMS; 2090 pc->ops->view = PCView_HYPRE_AMS; 2091 jac->destroy = HYPRE_AMSDestroy; 2092 jac->setup = HYPRE_AMSSetup; 2093 jac->solve = HYPRE_AMSSolve; 2094 jac->coords[0] = NULL; 2095 jac->coords[1] = NULL; 2096 jac->coords[2] = NULL; 2097 jac->interior = NULL; 2098 /* solver parameters: these are borrowed from mfem package, and they are not the default values from HYPRE AMS */ 2099 jac->as_print = 0; 2100 jac->as_max_iter = 1; /* used as a preconditioner */ 2101 jac->as_tol = 0.; /* used as a preconditioner */ 2102 jac->ams_cycle_type = 13; 2103 /* Smoothing options */ 2104 jac->as_relax_type = 2; 2105 jac->as_relax_times = 1; 2106 jac->as_relax_weight = 1.0; 2107 jac->as_omega = 1.0; 2108 /* Vector valued Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */ 2109 jac->as_amg_alpha_opts[0] = 10; 2110 jac->as_amg_alpha_opts[1] = 1; 2111 jac->as_amg_alpha_opts[2] = 6; 2112 jac->as_amg_alpha_opts[3] = 6; 2113 jac->as_amg_alpha_opts[4] = 4; 2114 jac->as_amg_alpha_theta = 0.25; 2115 /* Scalar Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */ 2116 jac->as_amg_beta_opts[0] = 10; 2117 jac->as_amg_beta_opts[1] = 1; 2118 jac->as_amg_beta_opts[2] = 6; 2119 jac->as_amg_beta_opts[3] = 6; 2120 jac->as_amg_beta_opts[4] = 4; 2121 jac->as_amg_beta_theta = 0.25; 2122 PetscCallExternal(HYPRE_AMSSetPrintLevel, jac->hsolver, jac->as_print); 2123 PetscCallExternal(HYPRE_AMSSetMaxIter, jac->hsolver, jac->as_max_iter); 2124 PetscCallExternal(HYPRE_AMSSetCycleType, jac->hsolver, jac->ams_cycle_type); 2125 PetscCallExternal(HYPRE_AMSSetTol, jac->hsolver, jac->as_tol); 2126 PetscCallExternal(HYPRE_AMSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega); 2127 PetscCallExternal(HYPRE_AMSSetAlphaAMGOptions, jac->hsolver, jac->as_amg_alpha_opts[0], /* AMG coarsen type */ 2128 jac->as_amg_alpha_opts[1], /* AMG agg_levels */ 2129 jac->as_amg_alpha_opts[2], /* AMG relax_type */ 2130 jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3], /* AMG interp_type */ 2131 jac->as_amg_alpha_opts[4]); /* AMG Pmax */ 2132 PetscCallExternal(HYPRE_AMSSetBetaAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */ 2133 jac->as_amg_beta_opts[1], /* AMG agg_levels */ 2134 jac->as_amg_beta_opts[2], /* AMG relax_type */ 2135 jac->as_amg_beta_theta, jac->as_amg_beta_opts[3], /* AMG interp_type */ 2136 jac->as_amg_beta_opts[4]); /* AMG Pmax */ 2137 /* Zero conductivity */ 2138 jac->ams_beta_is_zero = PETSC_FALSE; 2139 jac->ams_beta_is_zero_part = PETSC_FALSE; 2140 PetscFunctionReturn(PETSC_SUCCESS); 2141 } 2142 PetscCall(PetscStrcmp("ads", jac->hypre_type, &flag)); 2143 if (flag) { 2144 PetscCallExternal(HYPRE_ADSCreate, &jac->hsolver); 2145 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ADS; 2146 pc->ops->view = PCView_HYPRE_ADS; 2147 jac->destroy = HYPRE_ADSDestroy; 2148 jac->setup = HYPRE_ADSSetup; 2149 jac->solve = HYPRE_ADSSolve; 2150 jac->coords[0] = NULL; 2151 jac->coords[1] = NULL; 2152 jac->coords[2] = NULL; 2153 /* solver parameters: these are borrowed from mfem package, and they are not the default values from HYPRE ADS */ 2154 jac->as_print = 0; 2155 jac->as_max_iter = 1; /* used as a preconditioner */ 2156 jac->as_tol = 0.; /* used as a preconditioner */ 2157 jac->ads_cycle_type = 13; 2158 /* Smoothing options */ 2159 jac->as_relax_type = 2; 2160 jac->as_relax_times = 1; 2161 jac->as_relax_weight = 1.0; 2162 jac->as_omega = 1.0; 2163 /* AMS solver parameters: cycle_type, coarsen type, agg_levels, relax_type, interp_type, Pmax */ 2164 jac->ams_cycle_type = 14; 2165 jac->as_amg_alpha_opts[0] = 10; 2166 jac->as_amg_alpha_opts[1] = 1; 2167 jac->as_amg_alpha_opts[2] = 6; 2168 jac->as_amg_alpha_opts[3] = 6; 2169 jac->as_amg_alpha_opts[4] = 4; 2170 jac->as_amg_alpha_theta = 0.25; 2171 /* Vector Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */ 2172 jac->as_amg_beta_opts[0] = 10; 2173 jac->as_amg_beta_opts[1] = 1; 2174 jac->as_amg_beta_opts[2] = 6; 2175 jac->as_amg_beta_opts[3] = 6; 2176 jac->as_amg_beta_opts[4] = 4; 2177 jac->as_amg_beta_theta = 0.25; 2178 PetscCallExternal(HYPRE_ADSSetPrintLevel, jac->hsolver, jac->as_print); 2179 PetscCallExternal(HYPRE_ADSSetMaxIter, jac->hsolver, jac->as_max_iter); 2180 PetscCallExternal(HYPRE_ADSSetCycleType, jac->hsolver, jac->ams_cycle_type); 2181 PetscCallExternal(HYPRE_ADSSetTol, jac->hsolver, jac->as_tol); 2182 PetscCallExternal(HYPRE_ADSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega); 2183 PetscCallExternal(HYPRE_ADSSetAMSOptions, jac->hsolver, jac->ams_cycle_type, /* AMG coarsen type */ 2184 jac->as_amg_alpha_opts[0], /* AMG coarsen type */ 2185 jac->as_amg_alpha_opts[1], /* AMG agg_levels */ 2186 jac->as_amg_alpha_opts[2], /* AMG relax_type */ 2187 jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3], /* AMG interp_type */ 2188 jac->as_amg_alpha_opts[4]); /* AMG Pmax */ 2189 PetscCallExternal(HYPRE_ADSSetAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */ 2190 jac->as_amg_beta_opts[1], /* AMG agg_levels */ 2191 jac->as_amg_beta_opts[2], /* AMG relax_type */ 2192 jac->as_amg_beta_theta, jac->as_amg_beta_opts[3], /* AMG interp_type */ 2193 jac->as_amg_beta_opts[4]); /* AMG Pmax */ 2194 PetscFunctionReturn(PETSC_SUCCESS); 2195 } 2196 PetscCall(PetscFree(jac->hypre_type)); 2197 2198 jac->hypre_type = NULL; 2199 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown HYPRE preconditioner %s; Choices are euclid, pilut, parasails, boomeramg, ams", name); 2200 } 2201 2202 /* 2203 It only gets here if the HYPRE type has not been set before the call to 2204 ...SetFromOptions() which actually is most of the time 2205 */ 2206 static PetscErrorCode PCSetFromOptions_HYPRE(PC pc, PetscOptionItems *PetscOptionsObject) 2207 { 2208 PetscInt indx; 2209 const char *type[] = {"euclid", "pilut", "parasails", "boomeramg", "ams", "ads"}; 2210 PetscBool flg; 2211 2212 PetscFunctionBegin; 2213 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE preconditioner options"); 2214 PetscCall(PetscOptionsEList("-pc_hypre_type", "HYPRE preconditioner type", "PCHYPRESetType", type, PETSC_STATIC_ARRAY_LENGTH(type), "boomeramg", &indx, &flg)); 2215 if (flg) { 2216 PetscCall(PCHYPRESetType_HYPRE(pc, type[indx])); 2217 } else { 2218 PetscCall(PCHYPRESetType_HYPRE(pc, "boomeramg")); 2219 } 2220 PetscTryTypeMethod(pc, setfromoptions, PetscOptionsObject); 2221 PetscOptionsHeadEnd(); 2222 PetscFunctionReturn(PETSC_SUCCESS); 2223 } 2224 2225 /*@C 2226 PCHYPRESetType - Sets which hypre preconditioner you wish to use 2227 2228 Input Parameters: 2229 + pc - the preconditioner context 2230 - name - either euclid, pilut, parasails, boomeramg, ams, ads 2231 2232 Options Database Key: 2233 . pc_hypre_type - One of euclid, pilut, parasails, boomeramg, ams, ads 2234 2235 Level: intermediate 2236 2237 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHYPRE` 2238 @*/ 2239 PetscErrorCode PCHYPRESetType(PC pc, const char name[]) 2240 { 2241 PetscFunctionBegin; 2242 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2243 PetscAssertPointer(name, 2); 2244 PetscTryMethod(pc, "PCHYPRESetType_C", (PC, const char[]), (pc, name)); 2245 PetscFunctionReturn(PETSC_SUCCESS); 2246 } 2247 2248 /*@C 2249 PCHYPREGetType - Gets which hypre preconditioner you are using 2250 2251 Input Parameter: 2252 . pc - the preconditioner context 2253 2254 Output Parameter: 2255 . name - either euclid, pilut, parasails, boomeramg, ams, ads 2256 2257 Level: intermediate 2258 2259 .seealso: `PCCreate()`, `PCHYPRESetType()`, `PCType`, `PC`, `PCHYPRE` 2260 @*/ 2261 PetscErrorCode PCHYPREGetType(PC pc, const char *name[]) 2262 { 2263 PetscFunctionBegin; 2264 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2265 PetscAssertPointer(name, 2); 2266 PetscTryMethod(pc, "PCHYPREGetType_C", (PC, const char *[]), (pc, name)); 2267 PetscFunctionReturn(PETSC_SUCCESS); 2268 } 2269 2270 /*@C 2271 PCMGGalerkinSetMatProductAlgorithm - Set type of SpGEMM for hypre to use on GPUs 2272 2273 Logically Collective 2274 2275 Input Parameters: 2276 + pc - the hypre context 2277 - name - one of 'cusparse', 'hypre' 2278 2279 Options Database Key: 2280 . -pc_mg_galerkin_mat_product_algorithm <cusparse,hypre> - Type of SpGEMM to use in hypre 2281 2282 Level: intermediate 2283 2284 Developer Notes: 2285 How the name starts with `PCMG`, should it not be `PCHYPREBoomerAMG`? 2286 2287 .seealso: `PCHYPRE`, `PCMGGalerkinGetMatProductAlgorithm()` 2288 @*/ 2289 PetscErrorCode PCMGGalerkinSetMatProductAlgorithm(PC pc, const char name[]) 2290 { 2291 PetscFunctionBegin; 2292 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2293 PetscTryMethod(pc, "PCMGGalerkinSetMatProductAlgorithm_C", (PC, const char[]), (pc, name)); 2294 PetscFunctionReturn(PETSC_SUCCESS); 2295 } 2296 2297 /*@C 2298 PCMGGalerkinGetMatProductAlgorithm - Get type of SpGEMM for hypre to use on GPUs 2299 2300 Not Collective 2301 2302 Input Parameter: 2303 . pc - the multigrid context 2304 2305 Output Parameter: 2306 . name - one of 'cusparse', 'hypre' 2307 2308 Level: intermediate 2309 2310 .seealso: `PCHYPRE`, ``PCMGGalerkinSetMatProductAlgorithm()` 2311 @*/ 2312 PetscErrorCode PCMGGalerkinGetMatProductAlgorithm(PC pc, const char *name[]) 2313 { 2314 PetscFunctionBegin; 2315 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2316 PetscTryMethod(pc, "PCMGGalerkinGetMatProductAlgorithm_C", (PC, const char *[]), (pc, name)); 2317 PetscFunctionReturn(PETSC_SUCCESS); 2318 } 2319 2320 /*MC 2321 PCHYPRE - Allows you to use the matrix element based preconditioners in the LLNL package hypre as PETSc `PC` 2322 2323 Options Database Keys: 2324 + -pc_hypre_type - One of `euclid`, `pilut`, `parasails`, `boomeramg`, `ams`, or `ads` 2325 . -pc_hypre_boomeramg_nodal_coarsen <n> - where n is from 1 to 6 (see `HYPRE_BOOMERAMGSetNodal()`) 2326 . -pc_hypre_boomeramg_vec_interp_variant <v> - where v is from 1 to 3 (see `HYPRE_BoomerAMGSetInterpVecVariant()`) 2327 - Many others, run with `-pc_type hypre` `-pc_hypre_type XXX` `-help` to see options for the XXX preconditioner 2328 2329 Level: intermediate 2330 2331 Notes: 2332 Apart from `-pc_hypre_type` (for which there is `PCHYPRESetType()`), 2333 the many hypre options can ONLY be set via the options database (e.g. the command line 2334 or with `PetscOptionsSetValue()`, there are no functions to set them) 2335 2336 The options `-pc_hypre_boomeramg_max_iter` and `-pc_hypre_boomeramg_tol` refer to the number of iterations 2337 (V-cycles) and tolerance that boomerAMG does EACH time it is called. So for example, if 2338 `-pc_hypre_boomeramg_max_iter` is set to 2 then 2-V-cycles are being used to define the preconditioner 2339 (`-pc_hypre_boomeramg_tol` should be set to 0.0 - the default - to strictly use a fixed number of 2340 iterations per hypre call). `-ksp_max_it` and `-ksp_rtol` STILL determine the total number of iterations 2341 and tolerance for the Krylov solver. For example, if `-pc_hypre_boomeramg_max_iter` is 2 and `-ksp_max_it` is 10 2342 then AT MOST twenty V-cycles of boomeramg will be used. 2343 2344 Note that the option `-pc_hypre_boomeramg_relax_type_all` defaults to symmetric relaxation 2345 (symmetric-SOR/Jacobi), which is required for Krylov solvers like CG that expect symmetry. 2346 Otherwise, you may want to use `-pc_hypre_boomeramg_relax_type_all SOR/Jacobi`. 2347 2348 `MatSetNearNullSpace()` - if you provide a near null space to your matrix it is ignored by hypre UNLESS you also use 2349 the following two options: `-pc_hypre_boomeramg_nodal_coarsen <n> -pc_hypre_boomeramg_vec_interp_variant <v>` 2350 2351 See `PCPFMG`, `PCSMG`, and `PCSYSPFMG` for access to hypre's other (nonalgebraic) multigrid solvers 2352 2353 For `PCHYPRE` type of `ams` or `ads` auxiliary data must be provided to the preconditioner with `PCHYPRESetDiscreteGradient()`, 2354 `PCHYPRESetDiscreteCurl()`, `PCHYPRESetInterpolations()`, `PCHYPRESetAlphaPoissonMatrix()`, `PCHYPRESetBetaPoissonMatrix()`, `PCHYPRESetEdgeConstantVectors()`, 2355 `PCHYPREAMSSetInteriorNodes()` 2356 2357 Sometimes people want to try algebraic multigrid as a "standalone" solver, that is not accelerating it with a Krylov method. Though we generally do not recommend this 2358 since it is usually slower, one should use a `KSPType` of `KSPRICHARDSON` 2359 (or equivalently `-ksp_type richardson`) to achieve this. Using `KSPPREONLY` will not work since it only applies a single cycle of multigrid. 2360 2361 PETSc provides its own geometric and algebraic multigrid solvers `PCMG` and `PCGAMG`, also see `PCHMG` which is useful for certain multicomponent problems 2362 2363 GPU Notes: 2364 To configure hypre BoomerAMG so that it can utilize NVIDIA GPUs run ./configure --download-hypre --with-cuda 2365 Then pass `VECCUDA` vectors and `MATAIJCUSPARSE` matrices to the solvers and PETSc will automatically utilize hypre's GPU solvers. 2366 2367 To configure hypre BoomerAMG so that it can utilize AMD GPUs run ./configure --download-hypre --with-hip 2368 Then pass `VECHIP` vectors to the solvers and PETSc will automatically utilize hypre's GPU solvers. 2369 2370 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHYPRESetType()`, `PCPFMG`, `PCGAMG`, `PCSYSPFMG`, `PCSMG`, `PCHYPRESetDiscreteGradient()`, 2371 `PCHYPRESetDiscreteCurl()`, `PCHYPRESetInterpolations()`, `PCHYPRESetAlphaPoissonMatrix()`, `PCHYPRESetBetaPoissonMatrix()`, `PCHYPRESetEdgeConstantVectors()`, 2372 PCHYPREAMSSetInteriorNodes() 2373 M*/ 2374 2375 PETSC_EXTERN PetscErrorCode PCCreate_HYPRE(PC pc) 2376 { 2377 PC_HYPRE *jac; 2378 2379 PetscFunctionBegin; 2380 PetscCall(PetscNew(&jac)); 2381 2382 pc->data = jac; 2383 pc->ops->reset = PCReset_HYPRE; 2384 pc->ops->destroy = PCDestroy_HYPRE; 2385 pc->ops->setfromoptions = PCSetFromOptions_HYPRE; 2386 pc->ops->setup = PCSetUp_HYPRE; 2387 pc->ops->apply = PCApply_HYPRE; 2388 jac->comm_hypre = MPI_COMM_NULL; 2389 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetType_C", PCHYPRESetType_HYPRE)); 2390 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetType_C", PCHYPREGetType_HYPRE)); 2391 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_HYPRE)); 2392 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteGradient_C", PCHYPRESetDiscreteGradient_HYPRE)); 2393 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteCurl_C", PCHYPRESetDiscreteCurl_HYPRE)); 2394 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetInterpolations_C", PCHYPRESetInterpolations_HYPRE)); 2395 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetEdgeConstantVectors_C", PCHYPRESetEdgeConstantVectors_HYPRE)); 2396 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREAMSSetInteriorNodes_C", PCHYPREAMSSetInteriorNodes_HYPRE)); 2397 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetPoissonMatrix_C", PCHYPRESetPoissonMatrix_HYPRE)); 2398 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinSetMatProductAlgorithm_C", PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG)); 2399 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinGetMatProductAlgorithm_C", PCMGGalerkinGetMatProductAlgorithm_HYPRE_BoomerAMG)); 2400 #if defined(PETSC_HAVE_HYPRE_DEVICE) 2401 #if defined(HYPRE_USING_HIP) 2402 PetscCall(PetscDeviceInitialize(PETSC_DEVICE_HIP)); 2403 #endif 2404 #if defined(HYPRE_USING_CUDA) 2405 PetscCall(PetscDeviceInitialize(PETSC_DEVICE_CUDA)); 2406 #endif 2407 #endif 2408 PetscHYPREInitialize(); 2409 PetscFunctionReturn(PETSC_SUCCESS); 2410 } 2411 2412 typedef struct { 2413 MPI_Comm hcomm; /* does not share comm with HYPRE_StructMatrix because need to create solver before getting matrix */ 2414 HYPRE_StructSolver hsolver; 2415 2416 /* keep copy of PFMG options used so may view them */ 2417 PetscInt its; 2418 double tol; 2419 PetscInt relax_type; 2420 PetscInt rap_type; 2421 PetscInt num_pre_relax, num_post_relax; 2422 PetscInt max_levels; 2423 PetscInt skip_relax; 2424 PetscBool print_statistics; 2425 } PC_PFMG; 2426 2427 static PetscErrorCode PCDestroy_PFMG(PC pc) 2428 { 2429 PC_PFMG *ex = (PC_PFMG *)pc->data; 2430 2431 PetscFunctionBegin; 2432 if (ex->hsolver) PetscCallExternal(HYPRE_StructPFMGDestroy, ex->hsolver); 2433 PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm)); 2434 PetscCall(PetscFree(pc->data)); 2435 PetscFunctionReturn(PETSC_SUCCESS); 2436 } 2437 2438 static const char *PFMGRelaxType[] = {"Jacobi", "Weighted-Jacobi", "symmetric-Red/Black-Gauss-Seidel", "Red/Black-Gauss-Seidel"}; 2439 static const char *PFMGRAPType[] = {"Galerkin", "non-Galerkin"}; 2440 2441 static PetscErrorCode PCView_PFMG(PC pc, PetscViewer viewer) 2442 { 2443 PetscBool iascii; 2444 PC_PFMG *ex = (PC_PFMG *)pc->data; 2445 2446 PetscFunctionBegin; 2447 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 2448 if (iascii) { 2449 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE PFMG preconditioning\n")); 2450 PetscCall(PetscViewerASCIIPrintf(viewer, " max iterations %" PetscInt_FMT "\n", ex->its)); 2451 PetscCall(PetscViewerASCIIPrintf(viewer, " tolerance %g\n", ex->tol)); 2452 PetscCall(PetscViewerASCIIPrintf(viewer, " relax type %s\n", PFMGRelaxType[ex->relax_type])); 2453 PetscCall(PetscViewerASCIIPrintf(viewer, " RAP type %s\n", PFMGRAPType[ex->rap_type])); 2454 PetscCall(PetscViewerASCIIPrintf(viewer, " number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax)); 2455 PetscCall(PetscViewerASCIIPrintf(viewer, " max levels %" PetscInt_FMT "\n", ex->max_levels)); 2456 PetscCall(PetscViewerASCIIPrintf(viewer, " skip relax %" PetscInt_FMT "\n", ex->skip_relax)); 2457 } 2458 PetscFunctionReturn(PETSC_SUCCESS); 2459 } 2460 2461 static PetscErrorCode PCSetFromOptions_PFMG(PC pc, PetscOptionItems *PetscOptionsObject) 2462 { 2463 PC_PFMG *ex = (PC_PFMG *)pc->data; 2464 2465 PetscFunctionBegin; 2466 PetscOptionsHeadBegin(PetscOptionsObject, "PFMG options"); 2467 PetscCall(PetscOptionsBool("-pc_pfmg_print_statistics", "Print statistics", "HYPRE_StructPFMGSetPrintLevel", ex->print_statistics, &ex->print_statistics, NULL)); 2468 PetscCall(PetscOptionsInt("-pc_pfmg_its", "Number of iterations of PFMG to use as preconditioner", "HYPRE_StructPFMGSetMaxIter", ex->its, &ex->its, NULL)); 2469 PetscCallExternal(HYPRE_StructPFMGSetMaxIter, ex->hsolver, ex->its); 2470 PetscCall(PetscOptionsInt("-pc_pfmg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_StructPFMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL)); 2471 PetscCallExternal(HYPRE_StructPFMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax); 2472 PetscCall(PetscOptionsInt("-pc_pfmg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_StructPFMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL)); 2473 PetscCallExternal(HYPRE_StructPFMGSetNumPostRelax, ex->hsolver, ex->num_post_relax); 2474 2475 PetscCall(PetscOptionsInt("-pc_pfmg_max_levels", "Max Levels for MG hierarchy", "HYPRE_StructPFMGSetMaxLevels", ex->max_levels, &ex->max_levels, NULL)); 2476 PetscCallExternal(HYPRE_StructPFMGSetMaxLevels, ex->hsolver, ex->max_levels); 2477 2478 PetscCall(PetscOptionsReal("-pc_pfmg_tol", "Tolerance of PFMG", "HYPRE_StructPFMGSetTol", ex->tol, &ex->tol, NULL)); 2479 PetscCallExternal(HYPRE_StructPFMGSetTol, ex->hsolver, ex->tol); 2480 PetscCall(PetscOptionsEList("-pc_pfmg_relax_type", "Relax type for the up and down cycles", "HYPRE_StructPFMGSetRelaxType", PFMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(PFMGRelaxType), PFMGRelaxType[ex->relax_type], &ex->relax_type, NULL)); 2481 PetscCallExternal(HYPRE_StructPFMGSetRelaxType, ex->hsolver, ex->relax_type); 2482 PetscCall(PetscOptionsEList("-pc_pfmg_rap_type", "RAP type", "HYPRE_StructPFMGSetRAPType", PFMGRAPType, PETSC_STATIC_ARRAY_LENGTH(PFMGRAPType), PFMGRAPType[ex->rap_type], &ex->rap_type, NULL)); 2483 PetscCallExternal(HYPRE_StructPFMGSetRAPType, ex->hsolver, ex->rap_type); 2484 PetscCall(PetscOptionsInt("-pc_pfmg_skip_relax", "Skip relaxation on certain grids for isotropic problems. This can greatly improve efficiency by eliminating unnecessary relaxations when the underlying problem is isotropic", "HYPRE_StructPFMGSetSkipRelax", ex->skip_relax, &ex->skip_relax, NULL)); 2485 PetscCallExternal(HYPRE_StructPFMGSetSkipRelax, ex->hsolver, ex->skip_relax); 2486 PetscOptionsHeadEnd(); 2487 PetscFunctionReturn(PETSC_SUCCESS); 2488 } 2489 2490 static PetscErrorCode PCApply_PFMG(PC pc, Vec x, Vec y) 2491 { 2492 PC_PFMG *ex = (PC_PFMG *)pc->data; 2493 PetscScalar *yy; 2494 const PetscScalar *xx; 2495 PetscInt ilower[3], iupper[3]; 2496 HYPRE_Int hlower[3], hupper[3]; 2497 Mat_HYPREStruct *mx = (Mat_HYPREStruct *)(pc->pmat->data); 2498 2499 PetscFunctionBegin; 2500 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 2501 PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2])); 2502 /* when HYPRE_MIXEDINT is defined, sizeof(HYPRE_Int) == 32 */ 2503 iupper[0] += ilower[0] - 1; 2504 iupper[1] += ilower[1] - 1; 2505 iupper[2] += ilower[2] - 1; 2506 hlower[0] = (HYPRE_Int)ilower[0]; 2507 hlower[1] = (HYPRE_Int)ilower[1]; 2508 hlower[2] = (HYPRE_Int)ilower[2]; 2509 hupper[0] = (HYPRE_Int)iupper[0]; 2510 hupper[1] = (HYPRE_Int)iupper[1]; 2511 hupper[2] = (HYPRE_Int)iupper[2]; 2512 2513 /* copy x values over to hypre */ 2514 PetscCallExternal(HYPRE_StructVectorSetConstantValues, mx->hb, 0.0); 2515 PetscCall(VecGetArrayRead(x, &xx)); 2516 PetscCallExternal(HYPRE_StructVectorSetBoxValues, mx->hb, hlower, hupper, (HYPRE_Complex *)xx); 2517 PetscCall(VecRestoreArrayRead(x, &xx)); 2518 PetscCallExternal(HYPRE_StructVectorAssemble, mx->hb); 2519 PetscCallExternal(HYPRE_StructPFMGSolve, ex->hsolver, mx->hmat, mx->hb, mx->hx); 2520 2521 /* copy solution values back to PETSc */ 2522 PetscCall(VecGetArray(y, &yy)); 2523 PetscCallExternal(HYPRE_StructVectorGetBoxValues, mx->hx, hlower, hupper, (HYPRE_Complex *)yy); 2524 PetscCall(VecRestoreArray(y, &yy)); 2525 PetscFunctionReturn(PETSC_SUCCESS); 2526 } 2527 2528 static PetscErrorCode PCApplyRichardson_PFMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason) 2529 { 2530 PC_PFMG *jac = (PC_PFMG *)pc->data; 2531 HYPRE_Int oits; 2532 2533 PetscFunctionBegin; 2534 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 2535 PetscCallExternal(HYPRE_StructPFMGSetMaxIter, jac->hsolver, its * jac->its); 2536 PetscCallExternal(HYPRE_StructPFMGSetTol, jac->hsolver, rtol); 2537 2538 PetscCall(PCApply_PFMG(pc, b, y)); 2539 PetscCallExternal(HYPRE_StructPFMGGetNumIterations, jac->hsolver, &oits); 2540 *outits = oits; 2541 if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS; 2542 else *reason = PCRICHARDSON_CONVERGED_RTOL; 2543 PetscCallExternal(HYPRE_StructPFMGSetTol, jac->hsolver, jac->tol); 2544 PetscCallExternal(HYPRE_StructPFMGSetMaxIter, jac->hsolver, jac->its); 2545 PetscFunctionReturn(PETSC_SUCCESS); 2546 } 2547 2548 static PetscErrorCode PCSetUp_PFMG(PC pc) 2549 { 2550 PC_PFMG *ex = (PC_PFMG *)pc->data; 2551 Mat_HYPREStruct *mx = (Mat_HYPREStruct *)(pc->pmat->data); 2552 PetscBool flg; 2553 2554 PetscFunctionBegin; 2555 PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESTRUCT, &flg)); 2556 PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESTRUCT with this preconditioner"); 2557 2558 /* create the hypre solver object and set its information */ 2559 if (ex->hsolver) PetscCallExternal(HYPRE_StructPFMGDestroy, ex->hsolver); 2560 PetscCallExternal(HYPRE_StructPFMGCreate, ex->hcomm, &ex->hsolver); 2561 2562 // Print Hypre statistics about the solve process 2563 if (ex->print_statistics) PetscCallExternal(HYPRE_StructPFMGSetPrintLevel, ex->hsolver, 3); 2564 2565 // The hypre options must be repeated here because the StructPFMG was destroyed and recreated 2566 PetscCallExternal(HYPRE_StructPFMGSetMaxIter, ex->hsolver, ex->its); 2567 PetscCallExternal(HYPRE_StructPFMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax); 2568 PetscCallExternal(HYPRE_StructPFMGSetNumPostRelax, ex->hsolver, ex->num_post_relax); 2569 PetscCallExternal(HYPRE_StructPFMGSetMaxLevels, ex->hsolver, ex->max_levels); 2570 PetscCallExternal(HYPRE_StructPFMGSetTol, ex->hsolver, ex->tol); 2571 PetscCallExternal(HYPRE_StructPFMGSetRelaxType, ex->hsolver, ex->relax_type); 2572 PetscCallExternal(HYPRE_StructPFMGSetRAPType, ex->hsolver, ex->rap_type); 2573 2574 PetscCallExternal(HYPRE_StructPFMGSetup, ex->hsolver, mx->hmat, mx->hb, mx->hx); 2575 PetscCallExternal(HYPRE_StructPFMGSetZeroGuess, ex->hsolver); 2576 PetscFunctionReturn(PETSC_SUCCESS); 2577 } 2578 2579 /*MC 2580 PCPFMG - the hypre PFMG multigrid solver 2581 2582 Options Database Keys: 2583 + -pc_pfmg_its <its> - number of iterations of PFMG to use as preconditioner 2584 . -pc_pfmg_num_pre_relax <steps> - number of smoothing steps before coarse grid solve 2585 . -pc_pfmg_num_post_relax <steps> - number of smoothing steps after coarse grid solve 2586 . -pc_pfmg_tol <tol> - tolerance of PFMG 2587 . -pc_pfmg_relax_type - relaxation type for the up and down cycles, one of Jacobi,Weighted-Jacobi,symmetric-Red/Black-Gauss-Seidel,Red/Black-Gauss-Seidel 2588 . -pc_pfmg_rap_type - type of coarse matrix generation, one of Galerkin,non-Galerkin 2589 - -pc_pfmg_skip_relax - skip relaxation on certain grids for isotropic problems. This can greatly improve efficiency by eliminating unnecessary relaxations 2590 when the underlying problem is isotropic, one of 0,1 2591 2592 Level: advanced 2593 2594 Notes: 2595 This is for CELL-centered descretizations 2596 2597 See `PCSYSPFMG` for a version suitable for systems of PDEs, and `PCSMG` 2598 2599 See `PCHYPRE` for hypre's BoomerAMG algebraic multigrid solver 2600 2601 This must be used with the `MATHYPRESTRUCT` matrix type. 2602 2603 This provides only some of the functionality of PFMG, it supports only one block per process defined by a PETSc `DMDA`. 2604 2605 .seealso: `PCMG`, `MATHYPRESTRUCT`, `PCHYPRE`, `PCGAMG`, `PCSYSPFMG`, `PCSMG` 2606 M*/ 2607 2608 PETSC_EXTERN PetscErrorCode PCCreate_PFMG(PC pc) 2609 { 2610 PC_PFMG *ex; 2611 2612 PetscFunctionBegin; 2613 PetscCall(PetscNew(&ex)); 2614 pc->data = ex; 2615 2616 ex->its = 1; 2617 ex->tol = 1.e-8; 2618 ex->relax_type = 1; 2619 ex->rap_type = 0; 2620 ex->num_pre_relax = 1; 2621 ex->num_post_relax = 1; 2622 ex->max_levels = 0; 2623 ex->skip_relax = 0; 2624 ex->print_statistics = PETSC_FALSE; 2625 2626 pc->ops->setfromoptions = PCSetFromOptions_PFMG; 2627 pc->ops->view = PCView_PFMG; 2628 pc->ops->destroy = PCDestroy_PFMG; 2629 pc->ops->apply = PCApply_PFMG; 2630 pc->ops->applyrichardson = PCApplyRichardson_PFMG; 2631 pc->ops->setup = PCSetUp_PFMG; 2632 2633 PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm)); 2634 PetscHYPREInitialize(); 2635 PetscCallExternal(HYPRE_StructPFMGCreate, ex->hcomm, &ex->hsolver); 2636 PetscFunctionReturn(PETSC_SUCCESS); 2637 } 2638 2639 /* we know we are working with a HYPRE_SStructMatrix */ 2640 typedef struct { 2641 MPI_Comm hcomm; /* does not share comm with HYPRE_SStructMatrix because need to create solver before getting matrix */ 2642 HYPRE_SStructSolver ss_solver; 2643 2644 /* keep copy of SYSPFMG options used so may view them */ 2645 PetscInt its; 2646 double tol; 2647 PetscInt relax_type; 2648 PetscInt num_pre_relax, num_post_relax; 2649 } PC_SysPFMG; 2650 2651 static PetscErrorCode PCDestroy_SysPFMG(PC pc) 2652 { 2653 PC_SysPFMG *ex = (PC_SysPFMG *)pc->data; 2654 2655 PetscFunctionBegin; 2656 if (ex->ss_solver) PetscCallExternal(HYPRE_SStructSysPFMGDestroy, ex->ss_solver); 2657 PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm)); 2658 PetscCall(PetscFree(pc->data)); 2659 PetscFunctionReturn(PETSC_SUCCESS); 2660 } 2661 2662 static const char *SysPFMGRelaxType[] = {"Weighted-Jacobi", "Red/Black-Gauss-Seidel"}; 2663 2664 static PetscErrorCode PCView_SysPFMG(PC pc, PetscViewer viewer) 2665 { 2666 PetscBool iascii; 2667 PC_SysPFMG *ex = (PC_SysPFMG *)pc->data; 2668 2669 PetscFunctionBegin; 2670 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 2671 if (iascii) { 2672 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE SysPFMG preconditioning\n")); 2673 PetscCall(PetscViewerASCIIPrintf(viewer, " max iterations %" PetscInt_FMT "\n", ex->its)); 2674 PetscCall(PetscViewerASCIIPrintf(viewer, " tolerance %g\n", ex->tol)); 2675 PetscCall(PetscViewerASCIIPrintf(viewer, " relax type %s\n", PFMGRelaxType[ex->relax_type])); 2676 PetscCall(PetscViewerASCIIPrintf(viewer, " number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax)); 2677 } 2678 PetscFunctionReturn(PETSC_SUCCESS); 2679 } 2680 2681 static PetscErrorCode PCSetFromOptions_SysPFMG(PC pc, PetscOptionItems *PetscOptionsObject) 2682 { 2683 PC_SysPFMG *ex = (PC_SysPFMG *)pc->data; 2684 PetscBool flg = PETSC_FALSE; 2685 2686 PetscFunctionBegin; 2687 PetscOptionsHeadBegin(PetscOptionsObject, "SysPFMG options"); 2688 PetscCall(PetscOptionsBool("-pc_syspfmg_print_statistics", "Print statistics", "HYPRE_SStructSysPFMGSetPrintLevel", flg, &flg, NULL)); 2689 if (flg) PetscCallExternal(HYPRE_SStructSysPFMGSetPrintLevel, ex->ss_solver, 3); 2690 PetscCall(PetscOptionsInt("-pc_syspfmg_its", "Number of iterations of SysPFMG to use as preconditioner", "HYPRE_SStructSysPFMGSetMaxIter", ex->its, &ex->its, NULL)); 2691 PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, ex->ss_solver, ex->its); 2692 PetscCall(PetscOptionsInt("-pc_syspfmg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_SStructSysPFMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL)); 2693 PetscCallExternal(HYPRE_SStructSysPFMGSetNumPreRelax, ex->ss_solver, ex->num_pre_relax); 2694 PetscCall(PetscOptionsInt("-pc_syspfmg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_SStructSysPFMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL)); 2695 PetscCallExternal(HYPRE_SStructSysPFMGSetNumPostRelax, ex->ss_solver, ex->num_post_relax); 2696 2697 PetscCall(PetscOptionsReal("-pc_syspfmg_tol", "Tolerance of SysPFMG", "HYPRE_SStructSysPFMGSetTol", ex->tol, &ex->tol, NULL)); 2698 PetscCallExternal(HYPRE_SStructSysPFMGSetTol, ex->ss_solver, ex->tol); 2699 PetscCall(PetscOptionsEList("-pc_syspfmg_relax_type", "Relax type for the up and down cycles", "HYPRE_SStructSysPFMGSetRelaxType", SysPFMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(SysPFMGRelaxType), SysPFMGRelaxType[ex->relax_type], &ex->relax_type, NULL)); 2700 PetscCallExternal(HYPRE_SStructSysPFMGSetRelaxType, ex->ss_solver, ex->relax_type); 2701 PetscOptionsHeadEnd(); 2702 PetscFunctionReturn(PETSC_SUCCESS); 2703 } 2704 2705 static PetscErrorCode PCApply_SysPFMG(PC pc, Vec x, Vec y) 2706 { 2707 PC_SysPFMG *ex = (PC_SysPFMG *)pc->data; 2708 PetscScalar *yy; 2709 const PetscScalar *xx; 2710 PetscInt ilower[3], iupper[3]; 2711 HYPRE_Int hlower[3], hupper[3]; 2712 Mat_HYPRESStruct *mx = (Mat_HYPRESStruct *)(pc->pmat->data); 2713 PetscInt ordering = mx->dofs_order; 2714 PetscInt nvars = mx->nvars; 2715 PetscInt part = 0; 2716 PetscInt size; 2717 PetscInt i; 2718 2719 PetscFunctionBegin; 2720 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 2721 PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2])); 2722 /* when HYPRE_MIXEDINT is defined, sizeof(HYPRE_Int) == 32 */ 2723 iupper[0] += ilower[0] - 1; 2724 iupper[1] += ilower[1] - 1; 2725 iupper[2] += ilower[2] - 1; 2726 hlower[0] = (HYPRE_Int)ilower[0]; 2727 hlower[1] = (HYPRE_Int)ilower[1]; 2728 hlower[2] = (HYPRE_Int)ilower[2]; 2729 hupper[0] = (HYPRE_Int)iupper[0]; 2730 hupper[1] = (HYPRE_Int)iupper[1]; 2731 hupper[2] = (HYPRE_Int)iupper[2]; 2732 2733 size = 1; 2734 for (i = 0; i < 3; i++) size *= (iupper[i] - ilower[i] + 1); 2735 2736 /* copy x values over to hypre for variable ordering */ 2737 if (ordering) { 2738 PetscCallExternal(HYPRE_SStructVectorSetConstantValues, mx->ss_b, 0.0); 2739 PetscCall(VecGetArrayRead(x, &xx)); 2740 for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorSetBoxValues, mx->ss_b, part, hlower, hupper, i, (HYPRE_Complex *)(xx + (size * i))); 2741 PetscCall(VecRestoreArrayRead(x, &xx)); 2742 PetscCallExternal(HYPRE_SStructVectorAssemble, mx->ss_b); 2743 PetscCallExternal(HYPRE_SStructMatrixMatvec, 1.0, mx->ss_mat, mx->ss_b, 0.0, mx->ss_x); 2744 PetscCallExternal(HYPRE_SStructSysPFMGSolve, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x); 2745 2746 /* copy solution values back to PETSc */ 2747 PetscCall(VecGetArray(y, &yy)); 2748 for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorGetBoxValues, mx->ss_x, part, hlower, hupper, i, (HYPRE_Complex *)(yy + (size * i))); 2749 PetscCall(VecRestoreArray(y, &yy)); 2750 } else { /* nodal ordering must be mapped to variable ordering for sys_pfmg */ 2751 PetscScalar *z; 2752 PetscInt j, k; 2753 2754 PetscCall(PetscMalloc1(nvars * size, &z)); 2755 PetscCallExternal(HYPRE_SStructVectorSetConstantValues, mx->ss_b, 0.0); 2756 PetscCall(VecGetArrayRead(x, &xx)); 2757 2758 /* transform nodal to hypre's variable ordering for sys_pfmg */ 2759 for (i = 0; i < size; i++) { 2760 k = i * nvars; 2761 for (j = 0; j < nvars; j++) z[j * size + i] = xx[k + j]; 2762 } 2763 for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorSetBoxValues, mx->ss_b, part, hlower, hupper, i, (HYPRE_Complex *)(z + (size * i))); 2764 PetscCall(VecRestoreArrayRead(x, &xx)); 2765 PetscCallExternal(HYPRE_SStructVectorAssemble, mx->ss_b); 2766 PetscCallExternal(HYPRE_SStructSysPFMGSolve, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x); 2767 2768 /* copy solution values back to PETSc */ 2769 PetscCall(VecGetArray(y, &yy)); 2770 for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorGetBoxValues, mx->ss_x, part, hlower, hupper, i, (HYPRE_Complex *)(z + (size * i))); 2771 /* transform hypre's variable ordering for sys_pfmg to nodal ordering */ 2772 for (i = 0; i < size; i++) { 2773 k = i * nvars; 2774 for (j = 0; j < nvars; j++) yy[k + j] = z[j * size + i]; 2775 } 2776 PetscCall(VecRestoreArray(y, &yy)); 2777 PetscCall(PetscFree(z)); 2778 } 2779 PetscFunctionReturn(PETSC_SUCCESS); 2780 } 2781 2782 static PetscErrorCode PCApplyRichardson_SysPFMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason) 2783 { 2784 PC_SysPFMG *jac = (PC_SysPFMG *)pc->data; 2785 HYPRE_Int oits; 2786 2787 PetscFunctionBegin; 2788 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 2789 PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, jac->ss_solver, its * jac->its); 2790 PetscCallExternal(HYPRE_SStructSysPFMGSetTol, jac->ss_solver, rtol); 2791 PetscCall(PCApply_SysPFMG(pc, b, y)); 2792 PetscCallExternal(HYPRE_SStructSysPFMGGetNumIterations, jac->ss_solver, &oits); 2793 *outits = oits; 2794 if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS; 2795 else *reason = PCRICHARDSON_CONVERGED_RTOL; 2796 PetscCallExternal(HYPRE_SStructSysPFMGSetTol, jac->ss_solver, jac->tol); 2797 PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, jac->ss_solver, jac->its); 2798 PetscFunctionReturn(PETSC_SUCCESS); 2799 } 2800 2801 static PetscErrorCode PCSetUp_SysPFMG(PC pc) 2802 { 2803 PC_SysPFMG *ex = (PC_SysPFMG *)pc->data; 2804 Mat_HYPRESStruct *mx = (Mat_HYPRESStruct *)(pc->pmat->data); 2805 PetscBool flg; 2806 2807 PetscFunctionBegin; 2808 PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESSTRUCT, &flg)); 2809 PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESSTRUCT with this preconditioner"); 2810 2811 /* create the hypre sstruct solver object and set its information */ 2812 if (ex->ss_solver) PetscCallExternal(HYPRE_SStructSysPFMGDestroy, ex->ss_solver); 2813 PetscCallExternal(HYPRE_SStructSysPFMGCreate, ex->hcomm, &ex->ss_solver); 2814 PetscCallExternal(HYPRE_SStructSysPFMGSetZeroGuess, ex->ss_solver); 2815 PetscCallExternal(HYPRE_SStructSysPFMGSetup, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x); 2816 PetscFunctionReturn(PETSC_SUCCESS); 2817 } 2818 2819 /*MC 2820 PCSYSPFMG - the hypre SysPFMG multigrid solver 2821 2822 Level: advanced 2823 2824 Options Database Keys: 2825 + -pc_syspfmg_its <its> - number of iterations of SysPFMG to use as preconditioner 2826 . -pc_syspfmg_num_pre_relax <steps> - number of smoothing steps before coarse grid 2827 . -pc_syspfmg_num_post_relax <steps> - number of smoothing steps after coarse grid 2828 . -pc_syspfmg_tol <tol> - tolerance of SysPFMG 2829 - -pc_syspfmg_relax_type <Weighted-Jacobi,Red/Black-Gauss-Seidel> - relaxation type for the up and down cycles 2830 2831 Notes: 2832 See `PCPFMG` for hypre's PFMG that works for a scalar PDE and `PCSMG` 2833 2834 See `PCHYPRE` for hypre's BoomerAMG algebraic multigrid solver 2835 2836 This is for CELL-centered descretizations 2837 2838 This must be used with the `MATHYPRESSTRUCT` matrix type. 2839 2840 This does not give access to all the functionality of hypres SysPFMG, it supports only one part, and one block per process defined by a PETSc `DMDA`. 2841 2842 .seealso: `PCMG`, `MATHYPRESSTRUCT`, `PCPFMG`, `PCHYPRE`, `PCGAMG`, `PCSMG` 2843 M*/ 2844 2845 PETSC_EXTERN PetscErrorCode PCCreate_SysPFMG(PC pc) 2846 { 2847 PC_SysPFMG *ex; 2848 2849 PetscFunctionBegin; 2850 PetscCall(PetscNew(&ex)); 2851 pc->data = ex; 2852 2853 ex->its = 1; 2854 ex->tol = 1.e-8; 2855 ex->relax_type = 1; 2856 ex->num_pre_relax = 1; 2857 ex->num_post_relax = 1; 2858 2859 pc->ops->setfromoptions = PCSetFromOptions_SysPFMG; 2860 pc->ops->view = PCView_SysPFMG; 2861 pc->ops->destroy = PCDestroy_SysPFMG; 2862 pc->ops->apply = PCApply_SysPFMG; 2863 pc->ops->applyrichardson = PCApplyRichardson_SysPFMG; 2864 pc->ops->setup = PCSetUp_SysPFMG; 2865 2866 PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm)); 2867 PetscHYPREInitialize(); 2868 PetscCallExternal(HYPRE_SStructSysPFMGCreate, ex->hcomm, &ex->ss_solver); 2869 PetscFunctionReturn(PETSC_SUCCESS); 2870 } 2871 2872 /* PC SMG */ 2873 typedef struct { 2874 MPI_Comm hcomm; /* does not share comm with HYPRE_StructMatrix because need to create solver before getting matrix */ 2875 HYPRE_StructSolver hsolver; 2876 PetscInt its; /* keep copy of SMG options used so may view them */ 2877 double tol; 2878 PetscBool print_statistics; 2879 PetscInt num_pre_relax, num_post_relax; 2880 } PC_SMG; 2881 2882 static PetscErrorCode PCDestroy_SMG(PC pc) 2883 { 2884 PC_SMG *ex = (PC_SMG *)pc->data; 2885 2886 PetscFunctionBegin; 2887 if (ex->hsolver) PetscCallExternal(HYPRE_StructSMGDestroy, ex->hsolver); 2888 PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm)); 2889 PetscCall(PetscFree(pc->data)); 2890 PetscFunctionReturn(PETSC_SUCCESS); 2891 } 2892 2893 static PetscErrorCode PCView_SMG(PC pc, PetscViewer viewer) 2894 { 2895 PetscBool iascii; 2896 PC_SMG *ex = (PC_SMG *)pc->data; 2897 2898 PetscFunctionBegin; 2899 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 2900 if (iascii) { 2901 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE SMG preconditioning\n")); 2902 PetscCall(PetscViewerASCIIPrintf(viewer, " max iterations %" PetscInt_FMT "\n", ex->its)); 2903 PetscCall(PetscViewerASCIIPrintf(viewer, " tolerance %g\n", ex->tol)); 2904 PetscCall(PetscViewerASCIIPrintf(viewer, " number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax)); 2905 } 2906 PetscFunctionReturn(PETSC_SUCCESS); 2907 } 2908 2909 static PetscErrorCode PCSetFromOptions_SMG(PC pc, PetscOptionItems *PetscOptionsObject) 2910 { 2911 PC_SMG *ex = (PC_SMG *)pc->data; 2912 2913 PetscFunctionBegin; 2914 PetscOptionsHeadBegin(PetscOptionsObject, "SMG options"); 2915 2916 PetscCall(PetscOptionsInt("-pc_smg_its", "Number of iterations of SMG to use as preconditioner", "HYPRE_StructSMGSetMaxIter", ex->its, &ex->its, NULL)); 2917 PetscCall(PetscOptionsInt("-pc_smg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_StructSMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL)); 2918 PetscCall(PetscOptionsInt("-pc_smg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_StructSMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL)); 2919 PetscCall(PetscOptionsReal("-pc_smg_tol", "Tolerance of SMG", "HYPRE_StructSMGSetTol", ex->tol, &ex->tol, NULL)); 2920 2921 PetscOptionsHeadEnd(); 2922 PetscFunctionReturn(PETSC_SUCCESS); 2923 } 2924 2925 static PetscErrorCode PCApply_SMG(PC pc, Vec x, Vec y) 2926 { 2927 PC_SMG *ex = (PC_SMG *)pc->data; 2928 PetscScalar *yy; 2929 const PetscScalar *xx; 2930 PetscInt ilower[3], iupper[3]; 2931 HYPRE_Int hlower[3], hupper[3]; 2932 Mat_HYPREStruct *mx = (Mat_HYPREStruct *)(pc->pmat->data); 2933 2934 PetscFunctionBegin; 2935 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 2936 PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2])); 2937 /* when HYPRE_MIXEDINT is defined, sizeof(HYPRE_Int) == 32 */ 2938 iupper[0] += ilower[0] - 1; 2939 iupper[1] += ilower[1] - 1; 2940 iupper[2] += ilower[2] - 1; 2941 hlower[0] = (HYPRE_Int)ilower[0]; 2942 hlower[1] = (HYPRE_Int)ilower[1]; 2943 hlower[2] = (HYPRE_Int)ilower[2]; 2944 hupper[0] = (HYPRE_Int)iupper[0]; 2945 hupper[1] = (HYPRE_Int)iupper[1]; 2946 hupper[2] = (HYPRE_Int)iupper[2]; 2947 2948 /* copy x values over to hypre */ 2949 PetscCallExternal(HYPRE_StructVectorSetConstantValues, mx->hb, 0.0); 2950 PetscCall(VecGetArrayRead(x, &xx)); 2951 PetscCallExternal(HYPRE_StructVectorSetBoxValues, mx->hb, hlower, hupper, (HYPRE_Complex *)xx); 2952 PetscCall(VecRestoreArrayRead(x, &xx)); 2953 PetscCallExternal(HYPRE_StructVectorAssemble, mx->hb); 2954 PetscCallExternal(HYPRE_StructSMGSolve, ex->hsolver, mx->hmat, mx->hb, mx->hx); 2955 2956 /* copy solution values back to PETSc */ 2957 PetscCall(VecGetArray(y, &yy)); 2958 PetscCallExternal(HYPRE_StructVectorGetBoxValues, mx->hx, hlower, hupper, (HYPRE_Complex *)yy); 2959 PetscCall(VecRestoreArray(y, &yy)); 2960 PetscFunctionReturn(PETSC_SUCCESS); 2961 } 2962 2963 static PetscErrorCode PCApplyRichardson_SMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason) 2964 { 2965 PC_SMG *jac = (PC_SMG *)pc->data; 2966 HYPRE_Int oits; 2967 2968 PetscFunctionBegin; 2969 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 2970 PetscCallExternal(HYPRE_StructSMGSetMaxIter, jac->hsolver, its * jac->its); 2971 PetscCallExternal(HYPRE_StructSMGSetTol, jac->hsolver, rtol); 2972 2973 PetscCall(PCApply_SMG(pc, b, y)); 2974 PetscCallExternal(HYPRE_StructSMGGetNumIterations, jac->hsolver, &oits); 2975 *outits = oits; 2976 if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS; 2977 else *reason = PCRICHARDSON_CONVERGED_RTOL; 2978 PetscCallExternal(HYPRE_StructSMGSetTol, jac->hsolver, jac->tol); 2979 PetscCallExternal(HYPRE_StructSMGSetMaxIter, jac->hsolver, jac->its); 2980 PetscFunctionReturn(PETSC_SUCCESS); 2981 } 2982 2983 static PetscErrorCode PCSetUp_SMG(PC pc) 2984 { 2985 PetscInt i, dim; 2986 PC_SMG *ex = (PC_SMG *)pc->data; 2987 Mat_HYPREStruct *mx = (Mat_HYPREStruct *)(pc->pmat->data); 2988 PetscBool flg; 2989 DMBoundaryType p[3]; 2990 PetscInt M[3]; 2991 2992 PetscFunctionBegin; 2993 PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESTRUCT, &flg)); 2994 PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESTRUCT with this preconditioner"); 2995 2996 PetscCall(DMDAGetInfo(mx->da, &dim, &M[0], &M[1], &M[2], 0, 0, 0, 0, 0, &p[0], &p[1], &p[2], 0)); 2997 // Check if power of 2 in periodic directions 2998 for (i = 0; i < dim; i++) { 2999 if (((M[i] & (M[i] - 1)) != 0) && (p[i] == DM_BOUNDARY_PERIODIC)) { 3000 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "With SMG, the number of points in a periodic direction must be a power of 2, but is here %" PetscInt_FMT ".", M[i]); 3001 } 3002 } 3003 3004 /* create the hypre solver object and set its information */ 3005 if (ex->hsolver) PetscCallExternal(HYPRE_StructSMGDestroy, (ex->hsolver)); 3006 PetscCallExternal(HYPRE_StructSMGCreate, ex->hcomm, &ex->hsolver); 3007 // The hypre options must be set here and not in SetFromOptions because it is created here! 3008 PetscCallExternal(HYPRE_StructSMGSetMaxIter, ex->hsolver, ex->its); 3009 PetscCallExternal(HYPRE_StructSMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax); 3010 PetscCallExternal(HYPRE_StructSMGSetNumPostRelax, ex->hsolver, ex->num_post_relax); 3011 PetscCallExternal(HYPRE_StructSMGSetTol, ex->hsolver, ex->tol); 3012 3013 PetscCallExternal(HYPRE_StructSMGSetup, ex->hsolver, mx->hmat, mx->hb, mx->hx); 3014 PetscCallExternal(HYPRE_StructSMGSetZeroGuess, ex->hsolver); 3015 PetscFunctionReturn(PETSC_SUCCESS); 3016 } 3017 3018 /*MC 3019 PCSMG - the hypre (structured grid) SMG multigrid solver 3020 3021 Level: advanced 3022 3023 Options Database Keys: 3024 + -pc_smg_its <its> - number of iterations of SMG to use as preconditioner 3025 . -pc_smg_num_pre_relax <steps> - number of smoothing steps before coarse grid 3026 . -pc_smg_num_post_relax <steps> - number of smoothing steps after coarse grid 3027 - -pc_smg_tol <tol> - tolerance of SMG 3028 3029 Notes: 3030 This is for CELL-centered descretizations 3031 3032 This must be used with the `MATHYPRESTRUCT` `MatType`. 3033 3034 This does not provide all the functionality of hypre's SMG solver, it supports only one block per process defined by a PETSc `DMDA`. 3035 3036 See `PCSYSPFMG`, `PCSMG`, `PCPFMG`, and `PCHYPRE` for access to hypre's other preconditioners 3037 3038 .seealso: `PCMG`, `MATHYPRESTRUCT`, `PCPFMG`, `PCSYSPFMG`, `PCHYPRE`, `PCGAMG` 3039 M*/ 3040 3041 PETSC_EXTERN PetscErrorCode PCCreate_SMG(PC pc) 3042 { 3043 PC_SMG *ex; 3044 3045 PetscFunctionBegin; 3046 PetscCall(PetscNew(&ex)); 3047 pc->data = ex; 3048 3049 ex->its = 1; 3050 ex->tol = 1.e-8; 3051 ex->num_pre_relax = 1; 3052 ex->num_post_relax = 1; 3053 3054 pc->ops->setfromoptions = PCSetFromOptions_SMG; 3055 pc->ops->view = PCView_SMG; 3056 pc->ops->destroy = PCDestroy_SMG; 3057 pc->ops->apply = PCApply_SMG; 3058 pc->ops->applyrichardson = PCApplyRichardson_SMG; 3059 pc->ops->setup = PCSetUp_SMG; 3060 3061 PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm)); 3062 PetscHYPREInitialize(); 3063 PetscCallExternal(HYPRE_StructSMGCreate, ex->hcomm, &ex->hsolver); 3064 PetscFunctionReturn(PETSC_SUCCESS); 3065 } 3066