1 /* 2 Defines the multigrid preconditioner interface. 3 */ 4 #include <petsc/private/pcmgimpl.h> /*I "petscksp.h" I*/ 5 #include <petsc/private/kspimpl.h> 6 #include <petscdm.h> 7 PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC, PetscBool *); 8 9 /* 10 Contains the list of registered coarse space construction routines 11 */ 12 PetscFunctionList PCMGCoarseList = NULL; 13 14 PetscErrorCode PCMGMCycle_Private(PC pc, PC_MG_Levels **mglevelsin, PetscBool transpose, PetscBool matapp, PCRichardsonConvergedReason *reason) 15 { 16 PC_MG *mg = (PC_MG *)pc->data; 17 PC_MG_Levels *mgc, *mglevels = *mglevelsin; 18 PetscInt cycles = (mglevels->level == 1) ? 1 : mglevels->cycles; 19 20 PetscFunctionBegin; 21 if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0)); 22 if (!transpose) { 23 if (matapp) { 24 PetscCall(KSPMatSolve(mglevels->smoothd, mglevels->B, mglevels->X)); /* pre-smooth */ 25 PetscCall(KSPCheckSolve(mglevels->smoothd, pc, NULL)); 26 } else { 27 PetscCall(KSPSolve(mglevels->smoothd, mglevels->b, mglevels->x)); /* pre-smooth */ 28 PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x)); 29 } 30 } else { 31 PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported"); 32 PetscCall(KSPSolveTranspose(mglevels->smoothu, mglevels->b, mglevels->x)); /* transpose of post-smooth */ 33 PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x)); 34 } 35 if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0)); 36 if (mglevels->level) { /* not the coarsest grid */ 37 if (mglevels->eventresidual) PetscCall(PetscLogEventBegin(mglevels->eventresidual, 0, 0, 0, 0)); 38 if (matapp && !mglevels->R) PetscCall(MatDuplicate(mglevels->B, MAT_DO_NOT_COPY_VALUES, &mglevels->R)); 39 if (!transpose) { 40 if (matapp) PetscCall((*mglevels->matresidual)(mglevels->A, mglevels->B, mglevels->X, mglevels->R)); 41 else PetscCall((*mglevels->residual)(mglevels->A, mglevels->b, 
mglevels->x, mglevels->r)); 42 } else { 43 if (matapp) PetscCall((*mglevels->matresidualtranspose)(mglevels->A, mglevels->B, mglevels->X, mglevels->R)); 44 else PetscCall((*mglevels->residualtranspose)(mglevels->A, mglevels->b, mglevels->x, mglevels->r)); 45 } 46 if (mglevels->eventresidual) PetscCall(PetscLogEventEnd(mglevels->eventresidual, 0, 0, 0, 0)); 47 48 /* if on finest level and have convergence criteria set */ 49 if (mglevels->level == mglevels->levels - 1 && mg->ttol && reason) { 50 PetscReal rnorm; 51 PetscCall(VecNorm(mglevels->r, NORM_2, &rnorm)); 52 if (rnorm <= mg->ttol) { 53 if (rnorm < mg->abstol) { 54 *reason = PCRICHARDSON_CONVERGED_ATOL; 55 PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than absolute tolerance %g\n", (double)rnorm, (double)mg->abstol)); 56 } else { 57 *reason = PCRICHARDSON_CONVERGED_RTOL; 58 PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than relative tolerance times initial residual norm %g\n", (double)rnorm, (double)mg->ttol)); 59 } 60 PetscFunctionReturn(PETSC_SUCCESS); 61 } 62 } 63 64 mgc = *(mglevelsin - 1); 65 if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0)); 66 if (!transpose) { 67 if (matapp) PetscCall(MatMatRestrict(mglevels->restrct, mglevels->R, &mgc->B)); 68 else PetscCall(MatRestrict(mglevels->restrct, mglevels->r, mgc->b)); 69 } else { 70 if (matapp) PetscCall(MatMatRestrict(mglevels->interpolate, mglevels->R, &mgc->B)); 71 else PetscCall(MatRestrict(mglevels->interpolate, mglevels->r, mgc->b)); 72 } 73 if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0)); 74 if (matapp) { 75 if (!mgc->X) { 76 PetscCall(MatDuplicate(mgc->B, MAT_DO_NOT_COPY_VALUES, &mgc->X)); 77 } else { 78 PetscCall(MatZeroEntries(mgc->X)); 79 } 80 } else { 81 PetscCall(VecZeroEntries(mgc->x)); 82 } 83 while (cycles--) PetscCall(PCMGMCycle_Private(pc, mglevelsin - 1, 
transpose, matapp, reason)); 84 if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0)); 85 if (!transpose) { 86 if (matapp) PetscCall(MatMatInterpolateAdd(mglevels->interpolate, mgc->X, mglevels->X, &mglevels->X)); 87 else PetscCall(MatInterpolateAdd(mglevels->interpolate, mgc->x, mglevels->x, mglevels->x)); 88 } else { 89 PetscCall(MatInterpolateAdd(mglevels->restrct, mgc->x, mglevels->x, mglevels->x)); 90 } 91 if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0)); 92 if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0)); 93 if (!transpose) { 94 if (matapp) { 95 PetscCall(KSPMatSolve(mglevels->smoothu, mglevels->B, mglevels->X)); /* post smooth */ 96 PetscCall(KSPCheckSolve(mglevels->smoothu, pc, NULL)); 97 } else { 98 PetscCall(KSPSolve(mglevels->smoothu, mglevels->b, mglevels->x)); /* post smooth */ 99 PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x)); 100 } 101 } else { 102 PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported"); 103 PetscCall(KSPSolveTranspose(mglevels->smoothd, mglevels->b, mglevels->x)); /* post smooth */ 104 PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x)); 105 } 106 if (mglevels->cr) { 107 Mat crA; 108 109 PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported"); 110 /* TODO Turn on copy and turn off noisy if we have an exact solution 111 PetscCall(VecCopy(mglevels->x, mglevels->crx)); 112 PetscCall(VecCopy(mglevels->b, mglevels->crb)); */ 113 PetscCall(KSPGetOperators(mglevels->cr, &crA, NULL)); 114 PetscCall(KSPSetNoisy_Private(crA, mglevels->crx)); 115 PetscCall(KSPSolve(mglevels->cr, mglevels->crb, mglevels->crx)); /* compatible relaxation */ 116 PetscCall(KSPCheckSolve(mglevels->cr, pc, mglevels->crx)); 117 } 118 if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 
0, 0)); 119 } 120 PetscFunctionReturn(PETSC_SUCCESS); 121 } 122 123 static PetscErrorCode PCApplyRichardson_MG(PC pc, Vec b, Vec x, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason) 124 { 125 PC_MG *mg = (PC_MG *)pc->data; 126 PC_MG_Levels **mglevels = mg->levels; 127 PC tpc; 128 PetscBool changeu, changed; 129 PetscInt levels = mglevels[0]->levels, i; 130 131 PetscFunctionBegin; 132 /* When the DM is supplying the matrix then it will not exist until here */ 133 for (i = 0; i < levels; i++) { 134 if (!mglevels[i]->A) { 135 PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL)); 136 PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A)); 137 } 138 } 139 140 PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc)); 141 PetscCall(PCPreSolveChangeRHS(tpc, &changed)); 142 PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc)); 143 PetscCall(PCPreSolveChangeRHS(tpc, &changeu)); 144 if (!changed && !changeu) { 145 PetscCall(VecDestroy(&mglevels[levels - 1]->b)); 146 mglevels[levels - 1]->b = b; 147 } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */ 148 if (!mglevels[levels - 1]->b) { 149 Vec *vec; 150 151 PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL)); 152 mglevels[levels - 1]->b = *vec; 153 PetscCall(PetscFree(vec)); 154 } 155 PetscCall(VecCopy(b, mglevels[levels - 1]->b)); 156 } 157 mglevels[levels - 1]->x = x; 158 159 mg->rtol = rtol; 160 mg->abstol = abstol; 161 mg->dtol = dtol; 162 if (rtol) { 163 /* compute initial residual norm for relative convergence test */ 164 PetscReal rnorm; 165 if (zeroguess) { 166 PetscCall(VecNorm(b, NORM_2, &rnorm)); 167 } else { 168 PetscCall((*mglevels[levels - 1]->residual)(mglevels[levels - 1]->A, b, x, w)); 169 PetscCall(VecNorm(w, NORM_2, &rnorm)); 170 } 171 mg->ttol = PetscMax(rtol * rnorm, abstol); 172 } else if (abstol) mg->ttol = abstol; 
173 else mg->ttol = 0.0; 174 175 /* since smoother is applied to full system, not just residual we need to make sure that smoothers don't 176 stop prematurely due to small residual */ 177 for (i = 1; i < levels; i++) { 178 PetscCall(KSPSetTolerances(mglevels[i]->smoothu, 0, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT)); 179 if (mglevels[i]->smoothu != mglevels[i]->smoothd) { 180 /* For Richardson the initial guess is nonzero since it is solving in each cycle the original system not just applying as a preconditioner */ 181 PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE)); 182 PetscCall(KSPSetTolerances(mglevels[i]->smoothd, 0, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT)); 183 } 184 } 185 186 *reason = PCRICHARDSON_NOT_SET; 187 for (i = 0; i < its; i++) { 188 PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, PETSC_FALSE, PETSC_FALSE, reason)); 189 if (*reason) break; 190 } 191 if (*reason == PCRICHARDSON_NOT_SET) *reason = PCRICHARDSON_CONVERGED_ITS; 192 *outits = i; 193 if (!changed && !changeu) mglevels[levels - 1]->b = NULL; 194 PetscFunctionReturn(PETSC_SUCCESS); 195 } 196 197 PetscErrorCode PCReset_MG(PC pc) 198 { 199 PC_MG *mg = (PC_MG *)pc->data; 200 PC_MG_Levels **mglevels = mg->levels; 201 PetscInt i, n; 202 203 PetscFunctionBegin; 204 if (mglevels) { 205 n = mglevels[0]->levels; 206 for (i = 0; i < n - 1; i++) { 207 PetscCall(VecDestroy(&mglevels[i + 1]->r)); 208 PetscCall(VecDestroy(&mglevels[i]->b)); 209 PetscCall(VecDestroy(&mglevels[i]->x)); 210 PetscCall(MatDestroy(&mglevels[i + 1]->R)); 211 PetscCall(MatDestroy(&mglevels[i]->B)); 212 PetscCall(MatDestroy(&mglevels[i]->X)); 213 PetscCall(VecDestroy(&mglevels[i]->crx)); 214 PetscCall(VecDestroy(&mglevels[i]->crb)); 215 PetscCall(MatDestroy(&mglevels[i + 1]->restrct)); 216 PetscCall(MatDestroy(&mglevels[i + 1]->interpolate)); 217 PetscCall(MatDestroy(&mglevels[i + 1]->inject)); 218 PetscCall(VecDestroy(&mglevels[i + 1]->rscale)); 219 } 220 PetscCall(VecDestroy(&mglevels[n - 
1]->crx)); 221 PetscCall(VecDestroy(&mglevels[n - 1]->crb)); 222 /* this is not null only if the smoother on the finest level 223 changes the rhs during PreSolve */ 224 PetscCall(VecDestroy(&mglevels[n - 1]->b)); 225 PetscCall(MatDestroy(&mglevels[n - 1]->B)); 226 227 for (i = 0; i < n; i++) { 228 PetscCall(MatDestroy(&mglevels[i]->coarseSpace)); 229 PetscCall(MatDestroy(&mglevels[i]->A)); 230 if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPReset(mglevels[i]->smoothd)); 231 PetscCall(KSPReset(mglevels[i]->smoothu)); 232 if (mglevels[i]->cr) PetscCall(KSPReset(mglevels[i]->cr)); 233 } 234 mg->Nc = 0; 235 } 236 PetscFunctionReturn(PETSC_SUCCESS); 237 } 238 239 /* Implementing CR 240 241 We only want to make corrections that ``do not change'' the coarse solution. What we mean by not changing is that if I prolong my coarse solution to the fine grid and then inject that fine solution back to the coarse grid, I get the same answer. Injection is what Brannick calls R. We want the complementary projector to Inj, which we will call S, after Brannick, so that Inj S = 0. Now the orthogonal projector onto the range of Inj^T is 242 243 Inj^T (Inj Inj^T)^{-1} Inj 244 245 and if Inj is a VecScatter, as it is now in PETSc, we have 246 247 Inj^T Inj 248 249 and 250 251 S = I - Inj^T Inj 252 253 since 254 255 Inj S = Inj - (Inj Inj^T) Inj = 0. 256 257 Brannick suggests 258 259 A \to S^T A S \qquad\mathrm{and}\qquad M \to S^T M S 260 261 but I do not think his :math:`S^T S = I` is correct. Our S is an orthogonal projector, so :math:`S^T S = S^2 = S`. We will use 262 263 M^{-1} A \to S M^{-1} A S 264 265 In fact, since it is somewhat hard in PETSc to do the symmetric application, we will just apply S on the left. 
266 267 Check: || Inj P - I ||_F < tol 268 Check: In general, Inj Inj^T = I 269 */ 270 271 typedef struct { 272 PC mg; /* The PCMG object */ 273 PetscInt l; /* The multigrid level for this solver */ 274 Mat Inj; /* The injection matrix */ 275 Mat S; /* I - Inj^T Inj */ 276 } CRContext; 277 278 static PetscErrorCode CRSetup_Private(PC pc) 279 { 280 CRContext *ctx; 281 Mat It; 282 283 PetscFunctionBeginUser; 284 PetscCall(PCShellGetContext(pc, &ctx)); 285 PetscCall(PCMGGetInjection(ctx->mg, ctx->l, &It)); 286 PetscCheck(It, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "CR requires that injection be defined for this PCMG"); 287 PetscCall(MatCreateTranspose(It, &ctx->Inj)); 288 PetscCall(MatCreateNormal(ctx->Inj, &ctx->S)); 289 PetscCall(MatScale(ctx->S, -1.0)); 290 PetscCall(MatShift(ctx->S, 1.0)); 291 PetscFunctionReturn(PETSC_SUCCESS); 292 } 293 294 static PetscErrorCode CRApply_Private(PC pc, Vec x, Vec y) 295 { 296 CRContext *ctx; 297 298 PetscFunctionBeginUser; 299 PetscCall(PCShellGetContext(pc, &ctx)); 300 PetscCall(MatMult(ctx->S, x, y)); 301 PetscFunctionReturn(PETSC_SUCCESS); 302 } 303 304 static PetscErrorCode CRDestroy_Private(PC pc) 305 { 306 CRContext *ctx; 307 308 PetscFunctionBeginUser; 309 PetscCall(PCShellGetContext(pc, &ctx)); 310 PetscCall(MatDestroy(&ctx->Inj)); 311 PetscCall(MatDestroy(&ctx->S)); 312 PetscCall(PetscFree(ctx)); 313 PetscCall(PCShellSetContext(pc, NULL)); 314 PetscFunctionReturn(PETSC_SUCCESS); 315 } 316 317 static PetscErrorCode CreateCR_Private(PC pc, PetscInt l, PC *cr) 318 { 319 CRContext *ctx; 320 321 PetscFunctionBeginUser; 322 PetscCall(PCCreate(PetscObjectComm((PetscObject)pc), cr)); 323 PetscCall(PetscObjectSetName((PetscObject)*cr, "S (complementary projector to injection)")); 324 PetscCall(PetscCalloc1(1, &ctx)); 325 ctx->mg = pc; 326 ctx->l = l; 327 PetscCall(PCSetType(*cr, PCSHELL)); 328 PetscCall(PCShellSetContext(*cr, ctx)); 329 PetscCall(PCShellSetApply(*cr, CRApply_Private)); 330 
PetscCall(PCShellSetSetUp(*cr, CRSetup_Private)); 331 PetscCall(PCShellSetDestroy(*cr, CRDestroy_Private)); 332 PetscFunctionReturn(PETSC_SUCCESS); 333 } 334 335 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char[], const char[], const char *[], const char *[], PetscBool *); 336 337 PetscErrorCode PCMGSetLevels_MG(PC pc, PetscInt levels, MPI_Comm *comms) 338 { 339 PC_MG *mg = (PC_MG *)pc->data; 340 MPI_Comm comm; 341 PC_MG_Levels **mglevels = mg->levels; 342 PCMGType mgtype = mg->am; 343 PetscInt mgctype = (PetscInt)PC_MG_CYCLE_V; 344 PetscInt i; 345 PetscMPIInt size; 346 const char *prefix; 347 PC ipc; 348 PetscInt n; 349 350 PetscFunctionBegin; 351 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 352 PetscValidLogicalCollectiveInt(pc, levels, 2); 353 if (mg->nlevels == levels) PetscFunctionReturn(PETSC_SUCCESS); 354 PetscCall(PetscObjectGetComm((PetscObject)pc, &comm)); 355 if (mglevels) { 356 mgctype = mglevels[0]->cycles; 357 /* changing the number of levels so free up the previous stuff */ 358 PetscCall(PCReset_MG(pc)); 359 n = mglevels[0]->levels; 360 for (i = 0; i < n; i++) { 361 if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd)); 362 PetscCall(KSPDestroy(&mglevels[i]->smoothu)); 363 PetscCall(KSPDestroy(&mglevels[i]->cr)); 364 PetscCall(PetscFree(mglevels[i])); 365 } 366 PetscCall(PetscFree(mg->levels)); 367 } 368 369 mg->nlevels = levels; 370 371 PetscCall(PetscMalloc1(levels, &mglevels)); 372 373 PetscCall(PCGetOptionsPrefix(pc, &prefix)); 374 375 mg->stageApply = 0; 376 for (i = 0; i < levels; i++) { 377 PetscCall(PetscNew(&mglevels[i])); 378 379 mglevels[i]->level = i; 380 mglevels[i]->levels = levels; 381 mglevels[i]->cycles = mgctype; 382 mg->default_smoothu = 2; 383 mg->default_smoothd = 2; 384 mglevels[i]->eventsmoothsetup = 0; 385 mglevels[i]->eventsmoothsolve = 0; 386 mglevels[i]->eventresidual = 0; 387 mglevels[i]->eventinterprestrict = 0; 388 389 if (comms) comm = 
comms[i]; 390 if (comm != MPI_COMM_NULL) { 391 PetscCall(KSPCreate(comm, &mglevels[i]->smoothd)); 392 PetscCall(KSPSetNestLevel(mglevels[i]->smoothd, pc->kspnestlevel)); 393 PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->smoothd, pc->erroriffailure)); 394 PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->smoothd, (PetscObject)pc, levels - i)); 395 PetscCall(KSPSetOptionsPrefix(mglevels[i]->smoothd, prefix)); 396 PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->smoothd, PetscMGLevelId, mglevels[i]->level)); 397 if (i == 0 && levels > 1) { // coarse grid 398 PetscCall(KSPAppendOptionsPrefix(mglevels[0]->smoothd, "mg_coarse_")); 399 400 /* coarse solve is (redundant) LU by default; set shifttype NONZERO to avoid annoying zero-pivot in LU preconditioner */ 401 PetscCall(KSPSetType(mglevels[0]->smoothd, KSPPREONLY)); 402 PetscCall(KSPGetPC(mglevels[0]->smoothd, &ipc)); 403 PetscCallMPI(MPI_Comm_size(comm, &size)); 404 if (size > 1) { 405 PetscCall(PCSetType(ipc, PCREDUNDANT)); 406 } else { 407 PetscCall(PCSetType(ipc, PCLU)); 408 } 409 PetscCall(PCFactorSetShiftType(ipc, MAT_SHIFT_INBLOCKS)); 410 } else { 411 char tprefix[128]; 412 413 PetscCall(KSPSetType(mglevels[i]->smoothd, KSPCHEBYSHEV)); 414 PetscCall(KSPSetConvergenceTest(mglevels[i]->smoothd, KSPConvergedSkip, NULL, NULL)); 415 PetscCall(KSPSetNormType(mglevels[i]->smoothd, KSP_NORM_NONE)); 416 PetscCall(KSPGetPC(mglevels[i]->smoothd, &ipc)); 417 PetscCall(PCSetType(ipc, PCSOR)); 418 PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, mg->default_smoothd)); 419 420 if (i == levels - 1 && levels > 1) { // replace 'mg_finegrid_' with 'mg_levels_X_' 421 PetscBool set; 422 PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)mglevels[i]->smoothd)->options, ((PetscObject)mglevels[i]->smoothd)->prefix, "-mg_fine_", NULL, NULL, &set)); 423 if (set) { 424 if (prefix) PetscCall(PetscSNPrintf(tprefix, 128, "%smg_fine_", prefix)); 425 else 
PetscCall(PetscSNPrintf(tprefix, 128, "mg_fine_")); 426 PetscCall(KSPSetOptionsPrefix(mglevels[i]->smoothd, tprefix)); 427 } else { 428 PetscCall(PetscSNPrintf(tprefix, 128, "mg_levels_%" PetscInt_FMT "_", i)); 429 PetscCall(KSPAppendOptionsPrefix(mglevels[i]->smoothd, tprefix)); 430 } 431 } else { 432 PetscCall(PetscSNPrintf(tprefix, 128, "mg_levels_%" PetscInt_FMT "_", i)); 433 PetscCall(KSPAppendOptionsPrefix(mglevels[i]->smoothd, tprefix)); 434 } 435 } 436 } 437 mglevels[i]->smoothu = mglevels[i]->smoothd; 438 mg->rtol = 0.0; 439 mg->abstol = 0.0; 440 mg->dtol = 0.0; 441 mg->ttol = 0.0; 442 mg->cyclesperpcapply = 1; 443 } 444 mg->levels = mglevels; 445 PetscCall(PCMGSetType(pc, mgtype)); 446 PetscFunctionReturn(PETSC_SUCCESS); 447 } 448 449 /*@C 450 PCMGSetLevels - Sets the number of levels to use with `PCMG`. 451 Must be called before any other `PCMG` routine. 452 453 Logically Collective 454 455 Input Parameters: 456 + pc - the preconditioner context 457 . levels - the number of levels 458 - comms - optional communicators for each level; this is to allow solving the coarser problems 459 on smaller sets of processes. For processes that are not included in the computation 460 you must pass `MPI_COMM_NULL`. Use comms = `NULL` to specify that all processes 461 should participate in each level of problem. 462 463 Level: intermediate 464 465 Notes: 466 If the number of levels is one then the multigrid uses the `-mg_levels` prefix 467 for setting the level options rather than the `-mg_coarse` or `-mg_fine` prefix. 468 469 You can free the information in comms after this routine is called. 470 471 The array of MPI communicators must contain `MPI_COMM_NULL` for those ranks that at each level 472 are not participating in the coarser solve. 
For example, with 2 levels and 1 and 2 ranks on 473 the two levels, rank 0 in the original communicator will pass in an array of 2 communicators 474 of size 2 and 1, while rank 1 in the original communicator will pass in array of 2 communicators 475 the first of size 2 and the second of value `MPI_COMM_NULL` since the rank 1 does not participate 476 in the coarse grid solve. 477 478 Since each coarser level may have a new `MPI_Comm` with fewer ranks than the previous, one 479 must take special care in providing the restriction and interpolation operation. We recommend 480 providing these as two step operations; first perform a standard restriction or interpolation on 481 the full number of ranks for that level and then use an MPI call to copy the resulting vector 482 array entries (after calls to VecGetArray()) to the smaller or larger number of ranks, note in both 483 cases the MPI calls must be made on the larger of the two communicators. Traditional MPI send and 484 receives or `MPI_AlltoAllv()` could be used to do the reshuffling of the vector entries. 485 486 Fortran Notes: 487 Use comms = `PETSC_NULL_MPI_COMM` as the equivalent of `NULL` in the C interface. Note `PETSC_NULL_MPI_COMM` 488 is not `MPI_COMM_NULL`. It is more like `PETSC_NULL_INTEGER`, `PETSC_NULL_REAL` etc. 
489 490 .seealso: [](ch_ksp), `PCMGSetType()`, `PCMGGetLevels()` 491 @*/ 492 PetscErrorCode PCMGSetLevels(PC pc, PetscInt levels, MPI_Comm *comms) 493 { 494 PetscFunctionBegin; 495 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 496 if (comms) PetscAssertPointer(comms, 3); 497 PetscTryMethod(pc, "PCMGSetLevels_C", (PC, PetscInt, MPI_Comm *), (pc, levels, comms)); 498 PetscFunctionReturn(PETSC_SUCCESS); 499 } 500 501 PetscErrorCode PCDestroy_MG(PC pc) 502 { 503 PC_MG *mg = (PC_MG *)pc->data; 504 PC_MG_Levels **mglevels = mg->levels; 505 PetscInt i, n; 506 507 PetscFunctionBegin; 508 PetscCall(PCReset_MG(pc)); 509 if (mglevels) { 510 n = mglevels[0]->levels; 511 for (i = 0; i < n; i++) { 512 if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd)); 513 PetscCall(KSPDestroy(&mglevels[i]->smoothu)); 514 PetscCall(KSPDestroy(&mglevels[i]->cr)); 515 PetscCall(PetscFree(mglevels[i])); 516 } 517 PetscCall(PetscFree(mg->levels)); 518 } 519 PetscCall(PetscFree(pc->data)); 520 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL)); 521 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL)); 522 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", NULL)); 523 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", NULL)); 524 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", NULL)); 525 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL)); 526 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL)); 527 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", NULL)); 528 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", NULL)); 529 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", NULL)); 530 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", 
NULL)); 531 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", NULL)); 532 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", NULL)); 533 PetscFunctionReturn(PETSC_SUCCESS); 534 } 535 536 /* 537 PCApply_MG - Runs either an additive, multiplicative, Kaskadic 538 or full cycle of multigrid. 539 540 Note: 541 A simple wrapper which calls PCMGMCycle(),PCMGACycle(), or PCMGFCycle(). 542 */ 543 static PetscErrorCode PCApply_MG_Internal(PC pc, Vec b, Vec x, Mat B, Mat X, PetscBool transpose) 544 { 545 PC_MG *mg = (PC_MG *)pc->data; 546 PC_MG_Levels **mglevels = mg->levels; 547 PC tpc; 548 PetscInt levels = mglevels[0]->levels, i; 549 PetscBool changeu, changed, matapp; 550 551 PetscFunctionBegin; 552 matapp = (PetscBool)(B && X); 553 if (mg->stageApply) PetscCall(PetscLogStagePush(mg->stageApply)); 554 /* When the DM is supplying the matrix then it will not exist until here */ 555 for (i = 0; i < levels; i++) { 556 if (!mglevels[i]->A) { 557 PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL)); 558 PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A)); 559 } 560 } 561 562 PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc)); 563 PetscCall(PCPreSolveChangeRHS(tpc, &changed)); 564 PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc)); 565 PetscCall(PCPreSolveChangeRHS(tpc, &changeu)); 566 if (!changeu && !changed) { 567 if (matapp) { 568 PetscCall(MatDestroy(&mglevels[levels - 1]->B)); 569 mglevels[levels - 1]->B = B; 570 } else { 571 PetscCall(VecDestroy(&mglevels[levels - 1]->b)); 572 mglevels[levels - 1]->b = b; 573 } 574 } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */ 575 if (matapp) { 576 if (mglevels[levels - 1]->B) { 577 PetscInt N1, N2; 578 PetscBool flg; 579 580 PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &N1)); 581 PetscCall(MatGetSize(B, NULL, &N2)); 582 
PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 1]->B, ((PetscObject)B)->type_name, &flg)); 583 if (N1 != N2 || !flg) PetscCall(MatDestroy(&mglevels[levels - 1]->B)); 584 } 585 if (!mglevels[levels - 1]->B) { 586 PetscCall(MatDuplicate(B, MAT_COPY_VALUES, &mglevels[levels - 1]->B)); 587 } else { 588 PetscCall(MatCopy(B, mglevels[levels - 1]->B, SAME_NONZERO_PATTERN)); 589 } 590 } else { 591 if (!mglevels[levels - 1]->b) { 592 Vec *vec; 593 594 PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL)); 595 mglevels[levels - 1]->b = *vec; 596 PetscCall(PetscFree(vec)); 597 } 598 PetscCall(VecCopy(b, mglevels[levels - 1]->b)); 599 } 600 } 601 if (matapp) { 602 mglevels[levels - 1]->X = X; 603 } else { 604 mglevels[levels - 1]->x = x; 605 } 606 607 /* If coarser Xs are present, it means we have already block applied the PC at least once 608 Reset operators if sizes/type do no match */ 609 if (matapp && levels > 1 && mglevels[levels - 2]->X) { 610 PetscInt Xc, Bc; 611 PetscBool flg; 612 613 PetscCall(MatGetSize(mglevels[levels - 2]->X, NULL, &Xc)); 614 PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &Bc)); 615 PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 2]->X, ((PetscObject)mglevels[levels - 1]->X)->type_name, &flg)); 616 if (Xc != Bc || !flg) { 617 PetscCall(MatDestroy(&mglevels[levels - 1]->R)); 618 for (i = 0; i < levels - 1; i++) { 619 PetscCall(MatDestroy(&mglevels[i]->R)); 620 PetscCall(MatDestroy(&mglevels[i]->B)); 621 PetscCall(MatDestroy(&mglevels[i]->X)); 622 } 623 } 624 } 625 626 if (mg->am == PC_MG_MULTIPLICATIVE) { 627 if (matapp) PetscCall(MatZeroEntries(X)); 628 else PetscCall(VecZeroEntries(x)); 629 for (i = 0; i < mg->cyclesperpcapply; i++) PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, transpose, matapp, NULL)); 630 } else if (mg->am == PC_MG_ADDITIVE) { 631 PetscCall(PCMGACycle_Private(pc, mglevels, transpose, matapp)); 632 } else if (mg->am == PC_MG_KASKADE) { 633 
PetscCall(PCMGKCycle_Private(pc, mglevels, transpose, matapp)); 634 } else { 635 PetscCall(PCMGFCycle_Private(pc, mglevels, transpose, matapp)); 636 } 637 if (mg->stageApply) PetscCall(PetscLogStagePop()); 638 if (!changeu && !changed) { 639 if (matapp) { 640 mglevels[levels - 1]->B = NULL; 641 } else { 642 mglevels[levels - 1]->b = NULL; 643 } 644 } 645 PetscFunctionReturn(PETSC_SUCCESS); 646 } 647 648 static PetscErrorCode PCApply_MG(PC pc, Vec b, Vec x) 649 { 650 PetscFunctionBegin; 651 PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_FALSE)); 652 PetscFunctionReturn(PETSC_SUCCESS); 653 } 654 655 static PetscErrorCode PCApplyTranspose_MG(PC pc, Vec b, Vec x) 656 { 657 PetscFunctionBegin; 658 PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_TRUE)); 659 PetscFunctionReturn(PETSC_SUCCESS); 660 } 661 662 static PetscErrorCode PCMatApply_MG(PC pc, Mat b, Mat x) 663 { 664 PetscFunctionBegin; 665 PetscCall(PCApply_MG_Internal(pc, NULL, NULL, b, x, PETSC_FALSE)); 666 PetscFunctionReturn(PETSC_SUCCESS); 667 } 668 669 static PetscErrorCode PCMatApplyTranspose_MG(PC pc, Mat b, Mat x) 670 { 671 PetscFunctionBegin; 672 PetscCall(PCApply_MG_Internal(pc, NULL, NULL, b, x, PETSC_TRUE)); 673 PetscFunctionReturn(PETSC_SUCCESS); 674 } 675 676 PetscErrorCode PCSetFromOptions_MG(PC pc, PetscOptionItems PetscOptionsObject) 677 { 678 PetscInt levels, cycles; 679 PetscBool flg, flg2; 680 PC_MG *mg = (PC_MG *)pc->data; 681 PC_MG_Levels **mglevels; 682 PCMGType mgtype; 683 PCMGCycleType mgctype; 684 PCMGGalerkinType gtype; 685 PCMGCoarseSpaceType coarseSpaceType; 686 687 PetscFunctionBegin; 688 levels = PetscMax(mg->nlevels, 1); 689 PetscOptionsHeadBegin(PetscOptionsObject, "Multigrid options"); 690 PetscCall(PetscOptionsInt("-pc_mg_levels", "Number of Levels", "PCMGSetLevels", levels, &levels, &flg)); 691 if (!flg && !mg->levels && pc->dm) { 692 PetscCall(DMGetRefineLevel(pc->dm, &levels)); 693 levels++; 694 mg->usedmfornumberoflevels = PETSC_TRUE; 695 } 696 
PetscCall(PCMGSetLevels(pc, levels, NULL)); 697 mglevels = mg->levels; 698 699 mgctype = (PCMGCycleType)mglevels[0]->cycles; 700 PetscCall(PetscOptionsEnum("-pc_mg_cycle_type", "V cycle or for W-cycle", "PCMGSetCycleType", PCMGCycleTypes, (PetscEnum)mgctype, (PetscEnum *)&mgctype, &flg)); 701 if (flg) PetscCall(PCMGSetCycleType(pc, mgctype)); 702 coarseSpaceType = mg->coarseSpaceType; 703 PetscCall(PetscOptionsEnum("-pc_mg_adapt_interp_coarse_space", "Type of adaptive coarse space: none, polynomial, harmonic, eigenvector, generalized_eigenvector, gdsw", "PCMGSetAdaptCoarseSpaceType", PCMGCoarseSpaceTypes, (PetscEnum)coarseSpaceType, (PetscEnum *)&coarseSpaceType, &flg)); 704 if (flg) PetscCall(PCMGSetAdaptCoarseSpaceType(pc, coarseSpaceType)); 705 PetscCall(PetscOptionsInt("-pc_mg_adapt_interp_n", "Size of the coarse space for adaptive interpolation", "PCMGSetCoarseSpace", mg->Nc, &mg->Nc, &flg)); 706 PetscCall(PetscOptionsBool("-pc_mg_mesp_monitor", "Monitor the multilevel eigensolver", "PCMGSetAdaptInterpolation", PETSC_FALSE, &mg->mespMonitor, &flg)); 707 flg2 = PETSC_FALSE; 708 PetscCall(PetscOptionsBool("-pc_mg_adapt_cr", "Monitor coarse space quality using Compatible Relaxation (CR)", "PCMGSetAdaptCR", PETSC_FALSE, &flg2, &flg)); 709 if (flg) PetscCall(PCMGSetAdaptCR(pc, flg2)); 710 flg = PETSC_FALSE; 711 PetscCall(PetscOptionsBool("-pc_mg_distinct_smoothup", "Create separate smoothup KSP and append the prefix _up", "PCMGSetDistinctSmoothUp", PETSC_FALSE, &flg, NULL)); 712 if (flg) PetscCall(PCMGSetDistinctSmoothUp(pc)); 713 PetscCall(PetscOptionsEnum("-pc_mg_galerkin", "Use Galerkin process to compute coarser operators", "PCMGSetGalerkin", PCMGGalerkinTypes, (PetscEnum)mg->galerkin, (PetscEnum *)>ype, &flg)); 714 if (flg) PetscCall(PCMGSetGalerkin(pc, gtype)); 715 mgtype = mg->am; 716 PetscCall(PetscOptionsEnum("-pc_mg_type", "Multigrid type", "PCMGSetType", PCMGTypes, (PetscEnum)mgtype, (PetscEnum *)&mgtype, &flg)); 717 if (flg) PetscCall(PCMGSetType(pc, 
mgtype)); 718 if (mg->am == PC_MG_MULTIPLICATIVE) { 719 PetscCall(PetscOptionsInt("-pc_mg_multiplicative_cycles", "Number of cycles for each preconditioner step", "PCMGMultiplicativeSetCycles", mg->cyclesperpcapply, &cycles, &flg)); 720 if (flg) PetscCall(PCMGMultiplicativeSetCycles(pc, cycles)); 721 } 722 flg = PETSC_FALSE; 723 PetscCall(PetscOptionsBool("-pc_mg_log", "Log times for each multigrid level", "None", flg, &flg, NULL)); 724 if (flg) { 725 PetscInt i; 726 char eventname[128]; 727 728 levels = mglevels[0]->levels; 729 for (i = 0; i < levels; i++) { 730 PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGSetup Level %" PetscInt_FMT, i)); 731 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsetup)); 732 PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGSmooth Level %" PetscInt_FMT, i)); 733 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsolve)); 734 if (i) { 735 PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGResid Level %" PetscInt_FMT, i)); 736 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventresidual)); 737 PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGInterp Level %" PetscInt_FMT, i)); 738 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventinterprestrict)); 739 } 740 } 741 742 if (PetscDefined(USE_LOG)) { 743 const char sname[] = "MG Apply"; 744 745 PetscCall(PetscLogStageGetId(sname, &mg->stageApply)); 746 if (mg->stageApply < 0) PetscCall(PetscLogStageRegister(sname, &mg->stageApply)); 747 } 748 } 749 PetscOptionsHeadEnd(); 750 /* Check option consistency */ 751 PetscCall(PCMGGetGalerkin(pc, >ype)); 752 PetscCall(PCMGGetAdaptInterpolation(pc, &flg)); 753 PetscCheck(!flg || !(gtype >= PC_MG_GALERKIN_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use 
Galerkin coarse operators when adapting the interpolator");
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Human-readable names for the PCMG enums; trailing entries are the enum-type name and option prefix consumed by PetscOptionsEnum() */
const char *const PCMGTypes[]            = {"MULTIPLICATIVE", "ADDITIVE", "FULL", "KASKADE", "PCMGType", "PC_MG", NULL};
const char *const PCMGCycleTypes[]       = {"invalid", "v", "w", "PCMGCycleType", "PC_MG_CYCLE", NULL};
const char *const PCMGGalerkinTypes[]    = {"both", "pmat", "mat", "none", "external", "PCMGGalerkinType", "PC_MG_GALERKIN", NULL};
const char *const PCMGCoarseSpaceTypes[] = {"none", "polynomial", "harmonic", "eigenvector", "generalized_eigenvector", "gdsw", "PCMGCoarseSpaceType", "PCMG_ADAPT_NONE", NULL};

#include <petscdraw.h>
/*
   PCView_MG - Views the multigrid preconditioner on ASCII, binary, or draw viewers.

   ASCII: prints type/levels/cycle summary, the Galerkin mode, then each level's
   down (pre), up (post), and CR smoother KSPs (coarse grid solver is level 0).
   Binary: dumps the smoother KSPs from finest to coarsest.
   Draw: lays the smoother KSPs out as a tree, finest at the top.
*/
PetscErrorCode PCView_MG(PC pc, PetscViewer viewer)
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PetscInt       levels   = mglevels ? mglevels[0]->levels : 0, i; /* levels may be 0 if PCMGSetLevels() not yet called */
  PetscBool      iascii, isbinary, isdraw;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    const char *cyclename = levels ? (mglevels[0]->cycles == PC_MG_CYCLE_V ? "v" : "w") : "unknown";
    PetscCall(PetscViewerASCIIPrintf(viewer, " type is %s, levels=%" PetscInt_FMT " cycles=%s\n", PCMGTypes[mg->am], levels, cyclename));
    if (mg->am == PC_MG_MULTIPLICATIVE) PetscCall(PetscViewerASCIIPrintf(viewer, " Cycles per PCApply=%" PetscInt_FMT "\n", mg->cyclesperpcapply));
    if (mg->galerkin == PC_MG_GALERKIN_BOTH) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices\n"));
    } else if (mg->galerkin == PC_MG_GALERKIN_PMAT) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices for pmat\n"));
    } else if (mg->galerkin == PC_MG_GALERKIN_MAT) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices for mat\n"));
    } else if (mg->galerkin == PC_MG_GALERKIN_EXTERNAL) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Using externally compute Galerkin coarse grid matrices\n"));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Not using Galerkin computed coarse grid matrices\n"));
    }
    /* allow the implementation (e.g. PCGAMG) to add its own output */
    if (mg->view) PetscCall((*mg->view)(pc, viewer));
    for (i = 0; i < levels; i++) {
      if (i) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Down solver (pre-smoother) on level %" PetscInt_FMT " -------------------------------\n", i));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Coarse grid solver -- level %" PetscInt_FMT " -------------------------------\n", i));
      }
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall(KSPView(mglevels[i]->smoothd, viewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
      if (i && mglevels[i]->smoothd == mglevels[i]->smoothu) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) same as down solver (pre-smoother)\n"));
      } else if (i) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) on level %" PetscInt_FMT " -------------------------------\n", i));
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(KSPView(mglevels[i]->smoothu, viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      }
      /* compatible-relaxation solver exists only when -pc_mg_adapt_cr was requested */
      if (i && mglevels[i]->cr) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "CR solver on level %" PetscInt_FMT " -------------------------------\n", i));
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(KSPView(mglevels[i]->cr, viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      }
    }
  } else if (isbinary) {
    for (i = levels - 1; i >= 0; i--) { /* finest to coarsest */
      PetscCall(KSPView(mglevels[i]->smoothd, viewer));
      if (i && mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPView(mglevels[i]->smoothu, viewer));
    }
  } else if (isdraw) {
    PetscDraw draw;
    PetscReal x, w, y, bottom, th;
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    PetscCall(PetscDrawStringGetSize(draw, NULL, &th));
    bottom = y - th;
    for (i = levels - 1; i >= 0; i--) {
      if (!mglevels[i]->smoothu || (mglevels[i]->smoothu == mglevels[i]->smoothd)) {
        /* single smoother: draw it centered */
        PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom));
        PetscCall(KSPView(mglevels[i]->smoothd, viewer));
        PetscCall(PetscDrawPopCurrentPoint(draw));
      } else {
        /* distinct up/down smoothers: draw side by side, offset by w */
        w = 0.5 * PetscMin(1.0 - x, x);
        PetscCall(PetscDrawPushCurrentPoint(draw, x + w, bottom));
        PetscCall(KSPView(mglevels[i]->smoothd, viewer));
        PetscCall(PetscDrawPopCurrentPoint(draw));
        PetscCall(PetscDrawPushCurrentPoint(draw, x - w, bottom));
        PetscCall(KSPView(mglevels[i]->smoothu, viewer));
        PetscCall(PetscDrawPopCurrentPoint(draw));
      }
      PetscCall(PetscDrawGetBoundingBox(draw, NULL, &bottom, NULL, NULL));
      bottom -= th;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

#include <petsc/private/kspimpl.h>

/*
   Calls setup for the KSP on each level
*/
PetscErrorCode PCSetUp_MG(PC pc)
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PetscInt       i, n;
  PC             cpc;
  PetscBool   dump = PETSC_FALSE, opsset, use_amat, missinginterpolate = PETSC_FALSE;
  Mat         dA, dB;
  Vec         tvec;
  DM         *dms;
  PetscViewer viewer = NULL;
  PetscBool   dAeqdB = PETSC_FALSE, needRestricts = PETSC_FALSE, doCR = PETSC_FALSE;
  PetscBool   adaptInterpolation = mg->adaptInterpolation;

  PetscFunctionBegin;
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels with PCMGSetLevels() before setting up");
  n = mglevels[0]->levels;
  /* FIX: Move this to PCSetFromOptions_MG? */
  if (mg->usedmfornumberoflevels) {
    PetscInt levels;
    PetscCall(DMGetRefineLevel(pc->dm, &levels));
    levels++;
    if (levels > n) { /* the problem is now being solved on a finer grid */
      PetscCall(PCMGSetLevels(pc, levels, NULL));
      n = levels;
      PetscCall(PCSetFromOptions(pc)); /* it is bad to call this here, but otherwise will never be called for the new hierarchy */
      mglevels = mg->levels;
    }
  }
  PetscCall(KSPGetPC(mglevels[0]->smoothd, &cpc));

  /* If user did not provide fine grid operators OR operator was not updated since last global KSPSetOperators() */
  /* so use those from global PC */
  /* Is this what we always want? What if user wants to keep old one? */
  PetscCall(KSPGetOperatorsSet(mglevels[n - 1]->smoothd, NULL, &opsset));
  if (opsset) {
    Mat mmat;
    PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, NULL, &mmat));
    /* same pmat as the outer PC means the user did not supply a distinct fine-grid operator */
    if (mmat == pc->pmat) opsset = PETSC_FALSE;
  }

  /* Create CR (compatible relaxation) solvers: Chebyshev wrapped around SOR + a CR projection, one per non-coarsest level */
  PetscCall(PCMGGetAdaptCR(pc, &doCR));
  if (doCR) {
    const char *prefix;

    PetscCall(PCGetOptionsPrefix(pc, &prefix));
    for (i = 1; i < n; ++i) {
      PC   ipc, cr;
      char crprefix[128];

      PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &mglevels[i]->cr));
      PetscCall(KSPSetNestLevel(mglevels[i]->cr, pc->kspnestlevel));
      PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->cr, PETSC_FALSE));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->cr, (PetscObject)pc, n - i));
      PetscCall(KSPSetOptionsPrefix(mglevels[i]->cr, prefix));
      PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->cr, PetscMGLevelId, mglevels[i]->level));
      PetscCall(KSPSetType(mglevels[i]->cr, KSPCHEBYSHEV));
      /* CR runs a fixed number of iterations; convergence is monitored elsewhere */
      PetscCall(KSPSetConvergenceTest(mglevels[i]->cr, KSPConvergedSkip, NULL, NULL));
      PetscCall(KSPSetNormType(mglevels[i]->cr, KSP_NORM_PRECONDITIONED));
      PetscCall(KSPGetPC(mglevels[i]->cr, &ipc));

      PetscCall(PCSetType(ipc, PCCOMPOSITE));
      PetscCall(PCCompositeSetType(ipc, PC_COMPOSITE_MULTIPLICATIVE));
      PetscCall(PCCompositeAddPCType(ipc, PCSOR));
      PetscCall(CreateCR_Private(pc, i, &cr));
      PetscCall(PCCompositeAddPC(ipc, cr));
      PetscCall(PCDestroy(&cr)); /* composite holds its own reference */

      PetscCall(KSPSetTolerances(mglevels[i]->cr, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, mg->default_smoothd));
      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
      PetscCall(PetscSNPrintf(crprefix, 128, "mg_levels_%" PetscInt_FMT "_cr_", i));
      PetscCall(KSPAppendOptionsPrefix(mglevels[i]->cr, crprefix));
    }
  }

  if (!opsset) {
    PetscCall(PCGetUseAmat(pc, &use_amat));
    if (use_amat) {
      PetscCall(PetscInfo(pc, "Using outer operators to define finest grid operator \n because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n"));
      PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->mat, pc->pmat));
    } else {
      PetscCall(PetscInfo(pc, "Using matrix (pmat) operators to define finest grid operator \n because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n"));
      PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->pmat, pc->pmat));
    }
  }

  /* missinginterpolate: at least one level has neither interpolation nor restriction supplied yet */
  for (i = n - 1; i > 0; i--) {
    if (!(mglevels[i]->interpolate || mglevels[i]->restrct)) {
      missinginterpolate = PETSC_TRUE;
      break;
    }
  }

  PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, &dA, &dB));
  if (dA == dB) dAeqdB = PETSC_TRUE;
  if (mg->galerkin == PC_MG_GALERKIN_NONE || ((mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_MAT) && !dAeqdB)) {
    needRestricts = PETSC_TRUE; /* user must compute either mat, pmat, or both so must restrict x to coarser levels */
  }

  if (pc->dm && !pc->setupcalled) {
    /* finest smoother also gets DM but it is not active, independent of whether galerkin==PC_MG_GALERKIN_EXTERNAL */
    PetscCall(KSPSetDM(mglevels[n - 1]->smoothd, pc->dm));
    PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothd, PETSC_FALSE));
    if (mglevels[n - 1]->smoothd != mglevels[n - 1]->smoothu) {
      PetscCall(KSPSetDM(mglevels[n - 1]->smoothu, pc->dm));
      PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothu, PETSC_FALSE));
    }
    if (mglevels[n - 1]->cr) {
      PetscCall(KSPSetDM(mglevels[n - 1]->cr, pc->dm));
      PetscCall(KSPSetDMActive(mglevels[n - 1]->cr, PETSC_FALSE));
    }
  }

  /*
   Skipping if user has provided all interpolation/restriction needed (since DM might not be able to produce them (when coming from SNES/TS)
   Skipping for externally managed hierarchy (such as ML and GAMG). Cleaner logic here would be great. Wrap ML/GAMG as DMs?
  */
  if (missinginterpolate && mg->galerkin != PC_MG_GALERKIN_EXTERNAL && !pc->setupcalled) {
    /* first see if we can compute a coarse space */
    if (mg->coarseSpaceType == PCMG_ADAPT_GDSW) {
      for (i = n - 2; i > -1; i--) {
        if (!mglevels[i + 1]->restrct && !mglevels[i + 1]->interpolate) {
          PetscCall(PCMGComputeCoarseSpace_Internal(pc, i + 1, mg->coarseSpaceType, mg->Nc, NULL, &mglevels[i + 1]->coarseSpace));
          PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->coarseSpace));
        }
      }
    } else { /* construct the interpolation from the DMs */
      Mat p;
      Vec rscale;
      PetscCall(PetscMalloc1(n, &dms));
      dms[n - 1] = pc->dm;
      /* Separately create them so we do not get DMKSP interference between levels */
      for (i = n - 2; i > -1; i--) PetscCall(DMCoarsen(dms[i + 1], MPI_COMM_NULL, &dms[i]));
      for (i = n - 2; i > -1; i--) {
        DMKSP     kdm;
        PetscBool dmhasrestrict, dmhasinject;
        PetscCall(KSPSetDM(mglevels[i]->smoothd, dms[i]));
        if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothd, PETSC_FALSE));
        if (mglevels[i]->smoothd != mglevels[i]->smoothu) {
          PetscCall(KSPSetDM(mglevels[i]->smoothu, dms[i]));
          if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothu, PETSC_FALSE));
        }
        if (mglevels[i]->cr) {
          PetscCall(KSPSetDM(mglevels[i]->cr, dms[i]));
          if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->cr, PETSC_FALSE));
        }
        PetscCall(DMGetDMKSPWrite(dms[i], &kdm));
        /* Ugly hack so that the next KSPSetUp() will use the RHS that we set. A better fix is to change dmActive to take
         * a bitwise OR of computing the matrix, RHS, and initial iterate. */
        kdm->ops->computerhs = NULL;
        kdm->rhsctx          = NULL;
        if (!mglevels[i + 1]->interpolate) {
          PetscCall(DMCreateInterpolation(dms[i], dms[i + 1], &p, &rscale));
          PetscCall(PCMGSetInterpolation(pc, i + 1, p));
          if (rscale) PetscCall(PCMGSetRScale(pc, i + 1, rscale));
          PetscCall(VecDestroy(&rscale));
          PetscCall(MatDestroy(&p));
        }
        PetscCall(DMHasCreateRestriction(dms[i], &dmhasrestrict));
        if (dmhasrestrict && !mglevels[i + 1]->restrct) {
          PetscCall(DMCreateRestriction(dms[i], dms[i + 1], &p));
          PetscCall(PCMGSetRestriction(pc, i + 1, p));
          PetscCall(MatDestroy(&p));
        }
        PetscCall(DMHasCreateInjection(dms[i], &dmhasinject));
        if (dmhasinject && !mglevels[i + 1]->inject) {
          PetscCall(DMCreateInjection(dms[i], dms[i + 1], &p));
          PetscCall(PCMGSetInjection(pc, i + 1, p));
          PetscCall(MatDestroy(&p));
        }
      }

      for (i = n - 2; i > -1; i--) PetscCall(DMDestroy(&dms[i]));
      PetscCall(PetscFree(dms));
    }
  }

  /* Galerkin coarse operators: A_c = R A P computed level by level, finest to coarsest */
  if (mg->galerkin < PC_MG_GALERKIN_NONE) {
    Mat       A, B;
    PetscBool doA = PETSC_FALSE, doB = PETSC_FALSE;
    MatReuse  reuse = MAT_INITIAL_MATRIX;

    if (mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_BOTH) doB = PETSC_TRUE;
    if (mg->galerkin == PC_MG_GALERKIN_MAT || (mg->galerkin == PC_MG_GALERKIN_BOTH && dA != dB)) doA = PETSC_TRUE;
    if (pc->setupcalled) reuse = MAT_REUSE_MATRIX;
    for (i = n - 2; i > -1; i--) {
      PetscCheck(mglevels[i + 1]->restrct || mglevels[i + 1]->interpolate, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must provide interpolation or restriction for each MG level except level 0");
      if (!mglevels[i + 1]->interpolate) PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->restrct));
      if (!mglevels[i + 1]->restrct) PetscCall(PCMGSetRestriction(pc, i + 1, mglevels[i + 1]->interpolate));
      if (reuse == MAT_REUSE_MATRIX) PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, &B));
      if (doA) PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dA, mglevels[i + 1]->interpolate, reuse, 1.0, &A));
      if (doB) PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dB, mglevels[i + 1]->interpolate, reuse, 1.0, &B));
      /* the management of the PetscObjectReference() and PetscObjecDereference() below is rather delicate */
      if (!doA && dAeqdB) {
        if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)B));
        A = B;
      } else if (!doA && reuse == MAT_INITIAL_MATRIX) {
        PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, NULL));
        PetscCall(PetscObjectReference((PetscObject)A));
      }
      if (!doB && dAeqdB) {
        if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)A));
        B = A;
      } else if (!doB && reuse == MAT_INITIAL_MATRIX) {
        PetscCall(KSPGetOperators(mglevels[i]->smoothd, NULL, &B));
        PetscCall(PetscObjectReference((PetscObject)B));
      }
      if (reuse == MAT_INITIAL_MATRIX) {
        PetscCall(KSPSetOperators(mglevels[i]->smoothd, A, B));
        PetscCall(PetscObjectDereference((PetscObject)A));
        PetscCall(PetscObjectDereference((PetscObject)B));
      }
      /* this level's operators become the fine operators for the next coarser level */
      dA = A;
      dB = B;
    }
  }

  /* Adapt interpolation matrices */
  if (adaptInterpolation) {
    for (i = 0; i < n; ++i) {
      if (!mglevels[i]->coarseSpace) PetscCall(PCMGComputeCoarseSpace_Internal(pc, i, mg->coarseSpaceType, mg->Nc, !i ? NULL : mglevels[i - 1]->coarseSpace, &mglevels[i]->coarseSpace));
      if (i) PetscCall(PCMGAdaptInterpolator_Internal(pc, i, mglevels[i - 1]->smoothu, mglevels[i]->smoothu, mglevels[i - 1]->coarseSpace, mglevels[i]->coarseSpace));
    }
    for (i = n - 2; i > -1; --i) PetscCall(PCMGRecomputeLevelOperators_Internal(pc, i));
  }

  if (needRestricts && pc->dm) {
    /* user is supplying the operators; let the DM restrict state (e.g. for nonlinear coarse operators) */
    for (i = n - 2; i >= 0; i--) {
      DM  dmfine, dmcoarse;
      Mat Restrict, Inject;
      Vec rscale;
      PetscCall(KSPGetDM(mglevels[i + 1]->smoothd, &dmfine));
      PetscCall(KSPGetDM(mglevels[i]->smoothd, &dmcoarse));
      PetscCall(PCMGGetRestriction(pc, i + 1, &Restrict));
      PetscCall(PCMGGetRScale(pc, i + 1, &rscale));
      PetscCall(PCMGGetInjection(pc, i + 1, &Inject));
      PetscCall(DMRestrict(dmfine, Restrict, rscale, Inject, dmcoarse));
    }
  }

  if (!pc->setupcalled) {
    for (i = 0; i < n; i++) PetscCall(KSPSetFromOptions(mglevels[i]->smoothd));
    for (i = 1; i < n; i++) {
      if (mglevels[i]->smoothu && (mglevels[i]->smoothu != mglevels[i]->smoothd)) PetscCall(KSPSetFromOptions(mglevels[i]->smoothu));
      if (mglevels[i]->cr) PetscCall(KSPSetFromOptions(mglevels[i]->cr));
    }
    /* insure that if either interpolation or restriction is set the other one is set */
    for (i = 1; i < n; i++) {
      PetscCall(PCMGGetInterpolation(pc, i, NULL));
      PetscCall(PCMGGetRestriction(pc, i, NULL));
    }
    /* create the work vectors (rhs b, residual r, solution x) on every level but the finest */
    for (i = 0; i < n - 1; i++) {
      if (!mglevels[i]->b) {
        Vec *vec;
        PetscCall(KSPCreateVecs(mglevels[i]->smoothd, 1, &vec, 0, NULL));
        PetscCall(PCMGSetRhs(pc, i, *vec));
        PetscCall(VecDestroy(vec));
        PetscCall(PetscFree(vec));
      }
      if (!mglevels[i]->r && i) {
        PetscCall(VecDuplicate(mglevels[i]->b, &tvec));
        PetscCall(PCMGSetR(pc, i, tvec));
        PetscCall(VecDestroy(&tvec));
      }
      if (!mglevels[i]->x) {
        PetscCall(VecDuplicate(mglevels[i]->b, &tvec));
        PetscCall(PCMGSetX(pc, i, tvec));
        PetscCall(VecDestroy(&tvec));
      }
      if (doCR) {
        PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crx));
        PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crb));
        PetscCall(VecZeroEntries(mglevels[i]->crb));
      }
    }
    if (n != 1 && !mglevels[n - 1]->r) {
      /* PCMGSetR() on the finest level if user did not supply it */
      Vec *vec;
      PetscCall(KSPCreateVecs(mglevels[n - 1]->smoothd, 1, &vec, 0, NULL));
      PetscCall(PCMGSetR(pc, n - 1, *vec));
      PetscCall(VecDestroy(vec));
      PetscCall(PetscFree(vec));
    }
    if (doCR) {
      PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crx));
      PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crb));
      PetscCall(VecZeroEntries(mglevels[n - 1]->crb));
    }
  }

  if (pc->dm) {
    /* need to tell all the coarser levels to rebuild the matrix using the DM for that level */
    for (i = 0; i < n - 1; i++) {
      if (mglevels[i]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[i]->smoothd->setupstage = KSP_SETUP_NEWMATRIX;
    }
  }
  // We got here (PCSetUp_MG) because the matrix has changed, which means the smoother needs to be set up again (e.g.,
  // new diagonal for Jacobi). Setting it here allows it to be logged under PCSetUp rather than deep inside a PCApply.
  if (mglevels[n - 1]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[n - 1]->smoothd->setupstage = KSP_SETUP_NEWMATRIX;

  for (i = 1; i < n; i++) {
    if (mglevels[i]->smoothu == mglevels[i]->smoothd || mg->am == PC_MG_FULL || mg->am == PC_MG_KASKADE || mg->cyclesperpcapply > 1) {
      /* if doing only down then initial guess is zero */
      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE));
    }
    if (mglevels[i]->cr) PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
    if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
    PetscCall(KSPSetUp(mglevels[i]->smoothd));
    if (mglevels[i]->smoothd->reason) pc->failedreason = PC_SUBPC_ERROR;
    if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
    /* default residual evaluation uses the level's smoother operator */
    if (!mglevels[i]->residual) {
      Mat mat;
      PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL));
      PetscCall(PCMGSetResidual(pc, i, PCMGResidualDefault, mat));
    }
    if (!mglevels[i]->residualtranspose) {
      Mat mat;
      PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL));
      PetscCall(PCMGSetResidualTranspose(pc, i, PCMGResidualTransposeDefault, mat));
    }
  }
  for (i = 1; i < n; i++) {
    if (mglevels[i]->smoothu && mglevels[i]->smoothu != mglevels[i]->smoothd) {
      Mat downmat, downpmat;

      /* check if operators have been set for up, if not use down operators to set them */
      PetscCall(KSPGetOperatorsSet(mglevels[i]->smoothu, &opsset, NULL));
      if (!opsset) {
        PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat));
        PetscCall(KSPSetOperators(mglevels[i]->smoothu, downmat, downpmat));
      }

      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothu, PETSC_TRUE));
      if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
      PetscCall(KSPSetUp(mglevels[i]->smoothu));
      if (mglevels[i]->smoothu->reason) pc->failedreason = PC_SUBPC_ERROR;
      if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
    }
    if (mglevels[i]->cr) {
      Mat downmat, downpmat;

      /* check if operators have been set for up, if not use down operators to set them */
      PetscCall(KSPGetOperatorsSet(mglevels[i]->cr, &opsset, NULL));
      if (!opsset) {
        PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat));
        PetscCall(KSPSetOperators(mglevels[i]->cr, downmat, downpmat));
      }

      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
      if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
      PetscCall(KSPSetUp(mglevels[i]->cr));
      if (mglevels[i]->cr->reason) pc->failedreason = PC_SUBPC_ERROR;
      if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
    }
  }

  if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0));
  PetscCall(KSPSetUp(mglevels[0]->smoothd));
  if (mglevels[0]->smoothd->reason) pc->failedreason = PC_SUBPC_ERROR;
  if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0));

  /*
     Dump the interpolation/restriction matrices plus the
     Jacobian/stiffness on each level. This allows MATLAB users to
     easily check if the Galerkin condition A_c = R A_f R^T is satisfied.

     Only support one or the other at the same time.
  */
#if defined(PETSC_USE_SOCKET_VIEWER)
  PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_matlab", &dump, NULL));
  if (dump) viewer = PETSC_VIEWER_SOCKET_(PetscObjectComm((PetscObject)pc));
  dump = PETSC_FALSE;
#endif
  PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_binary", &dump, NULL));
  if (dump) viewer = PETSC_VIEWER_BINARY_(PetscObjectComm((PetscObject)pc));

  if (viewer) {
    for (i = 1; i < n; i++) PetscCall(MatView(mglevels[i]->restrct, viewer));
    for (i = 0; i < n; i++) {
      /* NOTE(review): the 'pc' parameter is clobbered here and used as a loop temporary;
         harmless only because nothing after this loop uses the original pc — verify before adding code below */
      PetscCall(KSPGetPC(mglevels[i]->smoothd, &pc));
      PetscCall(MatView(pc->mat, viewer));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCMGGetLevels(); simply reads the stored level count */
PetscErrorCode PCMGGetLevels_MG(PC pc, PetscInt *levels)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  *levels = mg->nlevels;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGGetLevels - Gets the number of levels to use with `PCMG`.

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. levels - the number of levels

  Level: advanced

.seealso: [](ch_ksp), `PCMG`, `PCMGSetLevels()`
@*/
PetscErrorCode PCMGGetLevels(PC pc, PetscInt *levels)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(levels, 2);
  *levels = 0;
  PetscTryMethod(pc, "PCMGGetLevels_C", (PC, PetscInt *), (pc, levels));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGGetGridComplexity - compute operator and grid complexity of the `PCMG` hierarchy

  Input Parameter:
. 
pc - the preconditioner context 1278 1279 Output Parameters: 1280 + gc - grid complexity = sum_i(n_i) / n_0 1281 - oc - operator complexity = sum_i(nnz_i) / nnz_0 1282 1283 Level: advanced 1284 1285 Note: 1286 This is often call the operator complexity in multigrid literature 1287 1288 .seealso: [](ch_ksp), `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()` 1289 @*/ 1290 PetscErrorCode PCMGGetGridComplexity(PC pc, PetscReal *gc, PetscReal *oc) 1291 { 1292 PC_MG *mg = (PC_MG *)pc->data; 1293 PC_MG_Levels **mglevels = mg->levels; 1294 PetscInt lev, N; 1295 PetscLogDouble nnz0 = 0, sgc = 0, soc = 0, n0 = 0; 1296 MatInfo info; 1297 1298 PetscFunctionBegin; 1299 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1300 if (gc) PetscAssertPointer(gc, 2); 1301 if (oc) PetscAssertPointer(oc, 3); 1302 if (!pc->setupcalled) { 1303 if (gc) *gc = 0; 1304 if (oc) *oc = 0; 1305 PetscFunctionReturn(PETSC_SUCCESS); 1306 } 1307 PetscCheck(mg->nlevels > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MG has no levels"); 1308 for (lev = 0; lev < mg->nlevels; lev++) { 1309 Mat dB; 1310 PetscCall(KSPGetOperators(mglevels[lev]->smoothd, NULL, &dB)); 1311 PetscCall(MatGetInfo(dB, MAT_GLOBAL_SUM, &info)); /* global reduction */ 1312 PetscCall(MatGetSize(dB, &N, NULL)); 1313 sgc += N; 1314 soc += info.nz_used; 1315 if (lev == mg->nlevels - 1) { 1316 nnz0 = info.nz_used; 1317 n0 = N; 1318 } 1319 } 1320 PetscCheck(n0 > 0 && gc, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number for grid points on finest level is not available"); 1321 *gc = (PetscReal)(sgc / n0); 1322 if (nnz0 > 0 && oc) *oc = (PetscReal)(soc / nnz0); 1323 PetscFunctionReturn(PETSC_SUCCESS); 1324 } 1325 1326 /*@ 1327 PCMGSetType - Determines the form of multigrid to use, either 1328 multiplicative, additive, full, or the Kaskade algorithm. 
1329 1330 Logically Collective 1331 1332 Input Parameters: 1333 + pc - the preconditioner context 1334 - form - multigrid form, one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, `PC_MG_KASKADE` 1335 1336 Options Database Key: 1337 . -pc_mg_type <form> - Sets <form>, one of multiplicative, additive, full, kaskade 1338 1339 Level: advanced 1340 1341 .seealso: [](ch_ksp), `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGGetType()`, `PCMGCycleType` 1342 @*/ 1343 PetscErrorCode PCMGSetType(PC pc, PCMGType form) 1344 { 1345 PC_MG *mg = (PC_MG *)pc->data; 1346 1347 PetscFunctionBegin; 1348 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1349 PetscValidLogicalCollectiveEnum(pc, form, 2); 1350 mg->am = form; 1351 if (form == PC_MG_MULTIPLICATIVE) pc->ops->applyrichardson = PCApplyRichardson_MG; 1352 else pc->ops->applyrichardson = NULL; 1353 PetscFunctionReturn(PETSC_SUCCESS); 1354 } 1355 1356 /*@ 1357 PCMGGetType - Finds the form of multigrid the `PCMG` is using multiplicative, additive, full, or the Kaskade algorithm. 1358 1359 Logically Collective 1360 1361 Input Parameter: 1362 . pc - the preconditioner context 1363 1364 Output Parameter: 1365 . type - one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, `PC_MG_KASKADE`, `PCMGCycleType` 1366 1367 Level: advanced 1368 1369 .seealso: [](ch_ksp), `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGSetType()` 1370 @*/ 1371 PetscErrorCode PCMGGetType(PC pc, PCMGType *type) 1372 { 1373 PC_MG *mg = (PC_MG *)pc->data; 1374 1375 PetscFunctionBegin; 1376 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1377 *type = mg->am; 1378 PetscFunctionReturn(PETSC_SUCCESS); 1379 } 1380 1381 /*@ 1382 PCMGSetCycleType - Sets the type cycles to use. Use `PCMGSetCycleTypeOnLevel()` for more 1383 complicated cycling. 1384 1385 Logically Collective 1386 1387 Input Parameters: 1388 + pc - the multigrid context 1389 - n - either `PC_MG_CYCLE_V` or `PC_MG_CYCLE_W` 1390 1391 Options Database Key: 1392 . 
-pc_mg_cycle_type <v,w> - provide the cycle desired 1393 1394 Level: advanced 1395 1396 .seealso: [](ch_ksp), `PCMG`, `PCMGSetCycleTypeOnLevel()`, `PCMGType`, `PCMGCycleType` 1397 @*/ 1398 PetscErrorCode PCMGSetCycleType(PC pc, PCMGCycleType n) 1399 { 1400 PC_MG *mg = (PC_MG *)pc->data; 1401 PC_MG_Levels **mglevels = mg->levels; 1402 PetscInt i, levels; 1403 1404 PetscFunctionBegin; 1405 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1406 PetscValidLogicalCollectiveEnum(pc, n, 2); 1407 PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling"); 1408 levels = mglevels[0]->levels; 1409 for (i = 0; i < levels; i++) mglevels[i]->cycles = n; 1410 PetscFunctionReturn(PETSC_SUCCESS); 1411 } 1412 1413 /*@ 1414 PCMGMultiplicativeSetCycles - Sets the number of cycles to use for each preconditioner step 1415 of multigrid when `PCMGType` is `PC_MG_MULTIPLICATIVE` 1416 1417 Logically Collective 1418 1419 Input Parameters: 1420 + pc - the multigrid context 1421 - n - number of cycles (default is 1) 1422 1423 Options Database Key: 1424 . 
-pc_mg_multiplicative_cycles n - set the number of cycles

  Level: advanced

  Note:
  This is not associated with setting a v or w cycle, that is set with `PCMGSetCycleType()`

.seealso: [](ch_ksp), `PCMGSetCycleTypeOnLevel()`, `PCMGSetCycleType()`, `PCMGCycleType`, `PCMGType`
@*/
PetscErrorCode PCMGMultiplicativeSetCycles(PC pc, PetscInt n)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, n, 2);
  mg->cyclesperpcapply = n;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Type-specific implementation behind PCMGSetGalerkin(); simply records the requested Galerkin mode */
static PetscErrorCode PCMGSetGalerkin_MG(PC pc, PCMGGalerkinType use)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  mg->galerkin = use;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGSetGalerkin - Causes the coarser grid matrices to be computed from the
  finest grid via the Galerkin process: A_i-1 = r_i * A_i * p_i

  Logically Collective

  Input Parameters:
+ pc  - the multigrid context
- use - one of `PC_MG_GALERKIN_BOTH`, `PC_MG_GALERKIN_PMAT`, `PC_MG_GALERKIN_MAT`, or `PC_MG_GALERKIN_NONE`

  Options Database Key:
. -pc_mg_galerkin <both,pmat,mat,none> - set the matrices to form via the Galerkin process

  Level: intermediate

  Note:
  Some codes that use `PCMG` such as `PCGAMG` use Galerkin internally while constructing the hierarchy and thus do not
  use the `PCMG` construction of the coarser grids.

.seealso: [](ch_ksp), `PCMG`, `PCMGGetGalerkin()`, `PCMGGalerkinType`
@*/
PetscErrorCode PCMGSetGalerkin(PC pc, PCMGGalerkinType use)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCMGSetGalerkin_C", (PC, PCMGGalerkinType), (pc, use));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGGetGalerkin - Checks if Galerkin multigrid is being used, i.e. A_i-1 = r_i * A_i * p_i

  Not Collective

  Input Parameter:
. pc - the multigrid context

  Output Parameter:
. galerkin - one of `PC_MG_GALERKIN_BOTH`, `PC_MG_GALERKIN_PMAT`, `PC_MG_GALERKIN_MAT`, `PC_MG_GALERKIN_NONE`, or `PC_MG_GALERKIN_EXTERNAL`

  Level: intermediate

.seealso: [](ch_ksp), `PCMG`, `PCMGSetGalerkin()`, `PCMGGalerkinType`
@*/
PetscErrorCode PCMGGetGalerkin(PC pc, PCMGGalerkinType *galerkin)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  *galerkin = mg->galerkin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Type-specific implementation behind PCMGSetAdaptInterpolation() */
static PetscErrorCode PCMGSetAdaptInterpolation_MG(PC pc, PetscBool adapt)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  mg->adaptInterpolation = adapt;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Type-specific implementation behind PCMGGetAdaptInterpolation() */
static PetscErrorCode PCMGGetAdaptInterpolation_MG(PC pc, PetscBool *adapt)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  *adapt = mg->adaptInterpolation;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Type-specific implementation behind PCMGSetAdaptCoarseSpaceType(); any type other than
   PCMG_ADAPT_NONE implies adaptive interpolation and forces Galerkin coarse operators */
static PetscErrorCode PCMGSetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType ctype)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  mg->adaptInterpolation = ctype != PCMG_ADAPT_NONE ? PETSC_TRUE : PETSC_FALSE;
  mg->coarseSpaceType    = ctype;
  PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_BOTH));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Type-specific implementation behind PCMGGetAdaptCoarseSpaceType() */
static PetscErrorCode PCMGGetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType *ctype)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  *ctype = mg->coarseSpaceType;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Type-specific implementation behind PCMGSetAdaptCR() */
static PetscErrorCode PCMGSetAdaptCR_MG(PC pc, PetscBool cr)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  mg->compatibleRelaxation = cr;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Type-specific implementation behind PCMGGetAdaptCR() */
static PetscErrorCode PCMGGetAdaptCR_MG(PC pc, PetscBool *cr)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  *cr = mg->compatibleRelaxation;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGSetAdaptCoarseSpaceType - Set the type of adaptive coarse space.

  Adapts or creates the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated.

  Logically Collective

  Input Parameters:
+ pc    - the multigrid context
- ctype - the type of coarse space

  Options Database Keys:
+ -pc_mg_adapt_interp_n <int>              - The number of modes to use
- -pc_mg_adapt_interp_coarse_space <type>  - The type of coarse space: none, `polynomial`, `harmonic`, `eigenvector`, `generalized_eigenvector`, `gdsw`

  Level: intermediate

  Note:
  Requires a `DM` with specific functionality be attached to the `PC`.

.seealso: [](ch_ksp), `PCMG`, `PCMGCoarseSpaceType`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()`, `DM`
@*/
PetscErrorCode PCMGSetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType ctype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, ctype, 2);
  PetscTryMethod(pc, "PCMGSetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType), (pc, ctype));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGGetAdaptCoarseSpaceType - Get the type of adaptive coarse space.

  Not Collective

  Input Parameter:
. pc - the multigrid context

  Output Parameter:
. ctype - the type of coarse space

  Level: intermediate

.seealso: [](ch_ksp), `PCMG`, `PCMGCoarseSpaceType`, `PCMGSetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()`
@*/
PetscErrorCode PCMGGetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType *ctype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(ctype, 2);
  PetscUseMethod(pc, "PCMGGetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType *), (pc, ctype));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* MATT: REMOVE? */
/*@
  PCMGSetAdaptInterpolation - Adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated.

  Logically Collective

  Input Parameters:
+ pc    - the multigrid context
- adapt - flag for adaptation of the interpolator

  Options Database Keys:
+ -pc_mg_adapt_interp                      - Turn on adaptation
. -pc_mg_adapt_interp_n <int>              - The number of modes to use, should be divisible by dimension
- -pc_mg_adapt_interp_coarse_space <type>  - The type of coarse space: polynomial, harmonic, eigenvector, generalized_eigenvector

  Level: intermediate

.seealso: [](ch_ksp), `PCMG`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
@*/
PetscErrorCode PCMGSetAdaptInterpolation(PC pc, PetscBool adapt)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCMGSetAdaptInterpolation_C", (PC, PetscBool), (pc, adapt));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGGetAdaptInterpolation - Get the flag to adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh,
  and thus accurately interpolated.

  Not Collective

  Input Parameter:
. pc - the multigrid context

  Output Parameter:
. adapt - flag for adaptation of the interpolator

  Level: intermediate

.seealso: [](ch_ksp), `PCMG`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
@*/
PetscErrorCode PCMGGetAdaptInterpolation(PC pc, PetscBool *adapt)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(adapt, 2);
  PetscUseMethod(pc, "PCMGGetAdaptInterpolation_C", (PC, PetscBool *), (pc, adapt));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGSetAdaptCR - Monitor the coarse space quality using an auxiliary solve with compatible relaxation.

  Logically Collective

  Input Parameters:
+ pc - the multigrid context
- cr - flag for compatible relaxation

  Options Database Key:
. -pc_mg_adapt_cr - Turn on compatible relaxation

  Level: intermediate

.seealso: [](ch_ksp), `PCMG`, `PCMGGetAdaptCR()`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
@*/
PetscErrorCode PCMGSetAdaptCR(PC pc, PetscBool cr)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCMGSetAdaptCR_C", (PC, PetscBool), (pc, cr));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGGetAdaptCR - Get the flag to monitor coarse space quality using an auxiliary solve with compatible relaxation.

  Not Collective

  Input Parameter:
. pc - the multigrid context

  Output Parameter:
. cr - flag for compatible relaxation

  Level: intermediate

.seealso: [](ch_ksp), `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
@*/
PetscErrorCode PCMGGetAdaptCR(PC pc, PetscBool *cr)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(cr, 2);
  PetscUseMethod(pc, "PCMGGetAdaptCR_C", (PC, PetscBool *), (pc, cr));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGSetNumberSmooth - Sets the number of pre and post-smoothing steps to use
  on all levels. Use `PCMGDistinctSmoothUp()` to create separate up and down smoothers if you want different numbers of
  pre- and post-smoothing steps.

  Logically Collective

  Input Parameters:
+ pc - the multigrid context
- n  - the number of smoothing steps

  Options Database Key:
. -mg_levels_ksp_max_it <n> - Sets number of pre and post-smoothing steps

  Level: advanced

  Note:
  This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid.

.seealso: [](ch_ksp), `PCMG`, `PCMGSetDistinctSmoothUp()`
@*/
PetscErrorCode PCMGSetNumberSmooth(PC pc, PetscInt n)
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PetscInt       i, levels;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, n, 2);
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
  levels = mglevels[0]->levels;

  /* start at 1: level 0 is the coarse solve, which is deliberately left untouched (see Note above) */
  for (i = 1; i < levels; i++) {
    PetscCall(KSPSetTolerances(mglevels[i]->smoothu, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, n));
    PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, n));
    mg->default_smoothu = n;
    mg->default_smoothd = n;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGSetDistinctSmoothUp - sets the up (post) smoother to be a separate `KSP` from the down (pre) smoother on all levels
  and adds the suffix _up to the options name

  Logically Collective

  Input Parameter:
. pc - the preconditioner context

  Options Database Key:
. -pc_mg_distinct_smoothup <bool> - use distinct smoothing objects

  Level: advanced

  Note:
  This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid.

.seealso: [](ch_ksp), `PCMG`, `PCMGSetNumberSmooth()`
@*/
PetscErrorCode PCMGSetDistinctSmoothUp(PC pc)
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PetscInt       i, levels;
  KSP            subksp;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
  levels = mglevels[0]->levels;

  /* start at 1: no separate up-smoother is created on the coarsest grid (see Note above) */
  for (i = 1; i < levels; i++) {
    const char *prefix = NULL;
    /* make sure smoother up and down are different */
    PetscCall(PCMGGetSmootherUp(pc, i, &subksp));
    PetscCall(KSPGetOptionsPrefix(mglevels[i]->smoothd, &prefix));
    PetscCall(KSPSetOptionsPrefix(subksp, prefix));
    PetscCall(KSPAppendOptionsPrefix(subksp, "up_"));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* No new matrices are created, and the coarse operator matrices are the references to the original ones;
   returns the nlevels-1 interpolation matrices (mat[l-1] maps level l-1 to level l), each with an added reference */
static PetscErrorCode PCGetInterpolations_MG(PC pc, PetscInt *num_levels, Mat *interpolations[])
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  Mat           *mat;
  PetscInt       l;

  PetscFunctionBegin;
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
  PetscCall(PetscMalloc1(mg->nlevels, &mat));
  for (l = 1; l < mg->nlevels; l++) {
    mat[l - 1] = mglevels[l]->interpolate;
    PetscCall(PetscObjectReference((PetscObject)mat[l - 1]));
  }
  *num_levels     = mg->nlevels;
  *interpolations = mat;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* No new matrices are created, and the coarse operator matrices are the references to the original ones;
   returns the nlevels-1 coarse operators (the finest level's operator is excluded), each with an added reference */
static PetscErrorCode PCGetCoarseOperators_MG(PC pc, PetscInt *num_levels, Mat *coarseOperators[])
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PetscInt       l;
  Mat           *mat;

  PetscFunctionBegin;
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
  PetscCall(PetscMalloc1(mg->nlevels, &mat));
  for (l = 0; l < mg->nlevels - 1; l++) {
    PetscCall(KSPGetOperators(mglevels[l]->smoothd, NULL, &mat[l]));
    PetscCall(PetscObjectReference((PetscObject)mat[l]));
  }
  *num_levels      = mg->nlevels;
  *coarseOperators = mat;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCMGRegisterCoarseSpaceConstructor - Adds a method to the `PCMG` package for coarse space construction.

  Not Collective, No Fortran Support

  Input Parameters:
+ name     - name of the constructor
- function - constructor routine, see `PCMGCoarseSpaceConstructorFn`

  Level: advanced

  Developer Notes:
  This does not appear to be used anywhere

.seealso: [](ch_ksp), `PCMGCoarseSpaceConstructorFn`, `PCMG`, `PCMGGetCoarseSpaceConstructor()`, `PCRegister()`
@*/
PetscErrorCode PCMGRegisterCoarseSpaceConstructor(const char name[], PCMGCoarseSpaceConstructorFn *function)
{
  PetscFunctionBegin;
  PetscCall(PCInitializePackage());
  PetscCall(PetscFunctionListAdd(&PCMGCoarseList, name, function));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCMGGetCoarseSpaceConstructor - Returns the given coarse space construction method.

  Not Collective, No Fortran Support

  Input Parameter:
. name - name of the constructor

  Output Parameter:
. function - constructor routine

  Level: advanced

.seealso: [](ch_ksp), `PCMGCoarseSpaceConstructorFn`, `PCMG`, `PCMGRegisterCoarseSpaceConstructor()`, `PCRegister()`
@*/
PetscErrorCode PCMGGetCoarseSpaceConstructor(const char name[], PCMGCoarseSpaceConstructorFn **function)
{
  PetscFunctionBegin;
  PetscCall(PetscFunctionListFind(PCMGCoarseList, name, function));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   PCMG - Use multigrid preconditioning. This preconditioner requires you provide additional
   information about the coarser grid matrices and restriction/interpolation operators.

   Options Database Keys:
+  -pc_mg_levels <nlevels> - number of levels including finest
.  -pc_mg_cycle_type <v,w> - provide the cycle desired
.  -pc_mg_type <additive,multiplicative,full,kaskade> - multiplicative is the default
.  -pc_mg_log - log information about time spent on each level of the solver
.  -pc_mg_distinct_smoothup - configure up (after interpolation) and down (before restriction) smoothers separately (with different options prefixes)
.  -pc_mg_galerkin <both,pmat,mat,none> - use Galerkin process to compute coarser operators, i.e. Acoarse = R A R'
.  -pc_mg_multiplicative_cycles - number of cycles to use as the preconditioner (defaults to 1)
.  -pc_mg_dump_matlab - dumps the matrices for each level and the restriction/interpolation matrices
                        to a `PETSCVIEWERSOCKET` for reading from MATLAB.
-  -pc_mg_dump_binary - dumps the matrices for each level and the restriction/interpolation matrices
                        to the binary output file called binaryoutput

   Level: intermediate

   Notes:
   The Krylov solver (if any) and preconditioner (smoother) and their parameters are controlled from the options database with the standard
   options database keywords prefixed with `-mg_levels_` to affect all the levels but the coarsest, which is controlled with `-mg_coarse_`,
   and the finest where `-mg_fine_` can override `-mg_levels_`. One can set different preconditioners etc on specific levels with the prefix
   `-mg_levels_n_` where `n` is the level number (zero being the coarse level). For example
.vb
   -mg_levels_ksp_type gmres -mg_levels_pc_type bjacobi -mg_coarse_pc_type svd -mg_levels_2_pc_type sor
.ve
   These options also work for controlling the smoothers etc inside `PCGAMG`

   If one uses a Krylov method such as `KSPGMRES` or `KSPCG` as the smoother then one must use `KSPFGMRES`, `KSPGCR`, or `KSPRICHARDSON` as the outer Krylov method

   When run with a single level the smoother options are used on that level NOT the coarse grid solver options

   When run with `KSPRICHARDSON` the convergence test changes slightly if monitor is turned on. The iteration count may change slightly. This
   is because without monitoring the residual norm is computed WITHIN each multigrid cycle on the finest level after the pre-smoothing
   (because the residual has just been computed for the multigrid algorithm and is hence available for free) while with monitoring the
   residual is computed at the end of each cycle.

.seealso: [](sec_mg), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCMGType`, `PCEXOTIC`, `PCGAMG`, `PCML`, `PCHYPRE`
          `PCMGSetLevels()`, `PCMGGetLevels()`, `PCMGSetType()`, `PCMGSetCycleType()`,
          `PCMGSetDistinctSmoothUp()`, `PCMGGetCoarseSolve()`, `PCMGSetResidual()`, `PCMGSetInterpolation()`,
          `PCMGSetRestriction()`, `PCMGGetSmoother()`, `PCMGGetSmootherUp()`, `PCMGGetSmootherDown()`,
          `PCMGSetCycleTypeOnLevel()`, `PCMGSetRhs()`, `PCMGSetX()`, `PCMGSetR()`,
          `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
M*/

/* Creates the PCMG data structure, sets its defaults, installs the PC function table
   entries, and composes the query/set methods that back the public PCMG* API */
PETSC_EXTERN PetscErrorCode PCCreate_MG(PC pc)
{
  PC_MG *mg;

  PetscFunctionBegin;
  PetscCall(PetscNew(&mg));
  pc->data = mg;
  /* defaults: number of levels not yet chosen, multiplicative cycles, no Galerkin coarsening */
  mg->nlevels            = -1;
  mg->am                 = PC_MG_MULTIPLICATIVE;
  mg->galerkin           = PC_MG_GALERKIN_NONE;
  mg->adaptInterpolation = PETSC_FALSE;
  mg->Nc                 = -1;
  mg->eigenvalue         = -1;

  pc->useAmat = PETSC_TRUE;

  pc->ops->apply             = PCApply_MG;
  pc->ops->applytranspose    = PCApplyTranspose_MG;
  pc->ops->matapply          = PCMatApply_MG;
  pc->ops->matapplytranspose = PCMatApplyTranspose_MG;
  pc->ops->setup             = PCSetUp_MG;
  pc->ops->reset             = PCReset_MG;
  pc->ops->destroy           = PCDestroy_MG;
  pc->ops->setfromoptions    = PCSetFromOptions_MG;
  pc->ops->view              = PCView_MG;

  PetscCall(PetscObjectComposedDataRegister(&mg->eigenvalue));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", PCMGSetGalerkin_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", PCMGGetLevels_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", PCMGSetLevels_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", PCGetInterpolations_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", PCGetCoarseOperators_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", PCMGSetAdaptInterpolation_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", PCMGGetAdaptInterpolation_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", PCMGSetAdaptCR_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", PCMGGetAdaptCR_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", PCMGSetAdaptCoarseSpaceType_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", PCMGGetAdaptCoarseSpaceType_MG));
  PetscFunctionReturn(PETSC_SUCCESS);
}