1 /* 2 Defines the multigrid preconditioner interface. 3 */ 4 #include <petsc/private/pcmgimpl.h> /*I "petscksp.h" I*/ 5 #include <petsc/private/kspimpl.h> 6 #include <petscdm.h> 7 PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC, PetscBool *); 8 9 /* 10 Contains the list of registered coarse space construction routines 11 */ 12 PetscFunctionList PCMGCoarseList = NULL; 13 14 PetscErrorCode PCMGMCycle_Private(PC pc, PC_MG_Levels **mglevelsin, PetscBool transpose, PetscBool matapp, PCRichardsonConvergedReason *reason) 15 { 16 PC_MG *mg = (PC_MG *)pc->data; 17 PC_MG_Levels *mgc, *mglevels = *mglevelsin; 18 PetscInt cycles = (mglevels->level == 1) ? 1 : mglevels->cycles; 19 20 PetscFunctionBegin; 21 if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0)); 22 if (!transpose) { 23 if (matapp) { 24 PetscCall(KSPMatSolve(mglevels->smoothd, mglevels->B, mglevels->X)); /* pre-smooth */ 25 PetscCall(KSPCheckSolve(mglevels->smoothd, pc, NULL)); 26 } else { 27 PetscCall(KSPSolve(mglevels->smoothd, mglevels->b, mglevels->x)); /* pre-smooth */ 28 PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x)); 29 } 30 } else { 31 PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported"); 32 PetscCall(KSPSolveTranspose(mglevels->smoothu, mglevels->b, mglevels->x)); /* transpose of post-smooth */ 33 PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x)); 34 } 35 if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0)); 36 if (mglevels->level) { /* not the coarsest grid */ 37 if (mglevels->eventresidual) PetscCall(PetscLogEventBegin(mglevels->eventresidual, 0, 0, 0, 0)); 38 if (matapp && !mglevels->R) PetscCall(MatDuplicate(mglevels->B, MAT_DO_NOT_COPY_VALUES, &mglevels->R)); 39 if (!transpose) { 40 if (matapp) PetscCall((*mglevels->matresidual)(mglevels->A, mglevels->B, mglevels->X, mglevels->R)); 41 else PetscCall((*mglevels->residual)(mglevels->A, mglevels->b, 
mglevels->x, mglevels->r)); 42 } else { 43 if (matapp) PetscCall((*mglevels->matresidualtranspose)(mglevels->A, mglevels->B, mglevels->X, mglevels->R)); 44 else PetscCall((*mglevels->residualtranspose)(mglevels->A, mglevels->b, mglevels->x, mglevels->r)); 45 } 46 if (mglevels->eventresidual) PetscCall(PetscLogEventEnd(mglevels->eventresidual, 0, 0, 0, 0)); 47 48 /* if on finest level and have convergence criteria set */ 49 if (mglevels->level == mglevels->levels - 1 && mg->ttol && reason) { 50 PetscReal rnorm; 51 PetscCall(VecNorm(mglevels->r, NORM_2, &rnorm)); 52 if (rnorm <= mg->ttol) { 53 if (rnorm < mg->abstol) { 54 *reason = PCRICHARDSON_CONVERGED_ATOL; 55 PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than absolute tolerance %g\n", (double)rnorm, (double)mg->abstol)); 56 } else { 57 *reason = PCRICHARDSON_CONVERGED_RTOL; 58 PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than relative tolerance times initial residual norm %g\n", (double)rnorm, (double)mg->ttol)); 59 } 60 PetscFunctionReturn(PETSC_SUCCESS); 61 } 62 } 63 64 mgc = *(mglevelsin - 1); 65 if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0)); 66 if (!transpose) { 67 if (matapp) PetscCall(MatMatRestrict(mglevels->restrct, mglevels->R, &mgc->B)); 68 else PetscCall(MatRestrict(mglevels->restrct, mglevels->r, mgc->b)); 69 } else { 70 if (matapp) PetscCall(MatMatRestrict(mglevels->interpolate, mglevels->R, &mgc->B)); 71 else PetscCall(MatRestrict(mglevels->interpolate, mglevels->r, mgc->b)); 72 } 73 if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0)); 74 if (matapp) { 75 if (!mgc->X) { 76 PetscCall(MatDuplicate(mgc->B, MAT_DO_NOT_COPY_VALUES, &mgc->X)); 77 } else { 78 PetscCall(MatZeroEntries(mgc->X)); 79 } 80 } else { 81 PetscCall(VecZeroEntries(mgc->x)); 82 } 83 while (cycles--) PetscCall(PCMGMCycle_Private(pc, mglevelsin - 1, 
transpose, matapp, reason)); 84 if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0)); 85 if (!transpose) { 86 if (matapp) PetscCall(MatMatInterpolateAdd(mglevels->interpolate, mgc->X, mglevels->X, &mglevels->X)); 87 else PetscCall(MatInterpolateAdd(mglevels->interpolate, mgc->x, mglevels->x, mglevels->x)); 88 } else { 89 PetscCall(MatInterpolateAdd(mglevels->restrct, mgc->x, mglevels->x, mglevels->x)); 90 } 91 if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0)); 92 if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0)); 93 if (!transpose) { 94 if (matapp) { 95 PetscCall(KSPMatSolve(mglevels->smoothu, mglevels->B, mglevels->X)); /* post smooth */ 96 PetscCall(KSPCheckSolve(mglevels->smoothu, pc, NULL)); 97 } else { 98 PetscCall(KSPSolve(mglevels->smoothu, mglevels->b, mglevels->x)); /* post smooth */ 99 PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x)); 100 } 101 } else { 102 PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported"); 103 PetscCall(KSPSolveTranspose(mglevels->smoothd, mglevels->b, mglevels->x)); /* post smooth */ 104 PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x)); 105 } 106 if (mglevels->cr) { 107 Mat crA; 108 109 PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported"); 110 /* TODO Turn on copy and turn off noisy if we have an exact solution 111 PetscCall(VecCopy(mglevels->x, mglevels->crx)); 112 PetscCall(VecCopy(mglevels->b, mglevels->crb)); */ 113 PetscCall(KSPGetOperators(mglevels->cr, &crA, NULL)); 114 PetscCall(KSPSetNoisy_Private(crA, mglevels->crx)); 115 PetscCall(KSPSolve(mglevels->cr, mglevels->crb, mglevels->crx)); /* compatible relaxation */ 116 PetscCall(KSPCheckSolve(mglevels->cr, pc, mglevels->crx)); 117 } 118 if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 
0, 0)); 119 } 120 PetscFunctionReturn(PETSC_SUCCESS); 121 } 122 123 static PetscErrorCode PCApplyRichardson_MG(PC pc, Vec b, Vec x, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason) 124 { 125 PC_MG *mg = (PC_MG *)pc->data; 126 PC_MG_Levels **mglevels = mg->levels; 127 PC tpc; 128 PetscBool changeu, changed; 129 PetscInt levels = mglevels[0]->levels, i; 130 131 PetscFunctionBegin; 132 /* When the DM is supplying the matrix then it will not exist until here */ 133 for (i = 0; i < levels; i++) { 134 if (!mglevels[i]->A) { 135 PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL)); 136 PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A)); 137 } 138 } 139 140 PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc)); 141 PetscCall(PCPreSolveChangeRHS(tpc, &changed)); 142 PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc)); 143 PetscCall(PCPreSolveChangeRHS(tpc, &changeu)); 144 if (!changed && !changeu) { 145 PetscCall(VecDestroy(&mglevels[levels - 1]->b)); 146 mglevels[levels - 1]->b = b; 147 } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */ 148 if (!mglevels[levels - 1]->b) { 149 Vec *vec; 150 151 PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL)); 152 mglevels[levels - 1]->b = *vec; 153 PetscCall(PetscFree(vec)); 154 } 155 PetscCall(VecCopy(b, mglevels[levels - 1]->b)); 156 } 157 mglevels[levels - 1]->x = x; 158 159 mg->rtol = rtol; 160 mg->abstol = abstol; 161 mg->dtol = dtol; 162 if (rtol) { 163 /* compute initial residual norm for relative convergence test */ 164 PetscReal rnorm; 165 if (zeroguess) { 166 PetscCall(VecNorm(b, NORM_2, &rnorm)); 167 } else { 168 PetscCall((*mglevels[levels - 1]->residual)(mglevels[levels - 1]->A, b, x, w)); 169 PetscCall(VecNorm(w, NORM_2, &rnorm)); 170 } 171 mg->ttol = PetscMax(rtol * rnorm, abstol); 172 } else if (abstol) mg->ttol = abstol; 
173 else mg->ttol = 0.0; 174 175 /* since smoother is applied to full system, not just residual we need to make sure that smoothers don't 176 stop prematurely due to small residual */ 177 for (i = 1; i < levels; i++) { 178 PetscCall(KSPSetTolerances(mglevels[i]->smoothu, 0, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT)); 179 if (mglevels[i]->smoothu != mglevels[i]->smoothd) { 180 /* For Richardson the initial guess is nonzero since it is solving in each cycle the original system not just applying as a preconditioner */ 181 PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE)); 182 PetscCall(KSPSetTolerances(mglevels[i]->smoothd, 0, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT)); 183 } 184 } 185 186 *reason = PCRICHARDSON_NOT_SET; 187 for (i = 0; i < its; i++) { 188 PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, PETSC_FALSE, PETSC_FALSE, reason)); 189 if (*reason) break; 190 } 191 if (*reason == PCRICHARDSON_NOT_SET) *reason = PCRICHARDSON_CONVERGED_ITS; 192 *outits = i; 193 if (!changed && !changeu) mglevels[levels - 1]->b = NULL; 194 PetscFunctionReturn(PETSC_SUCCESS); 195 } 196 197 PetscErrorCode PCReset_MG(PC pc) 198 { 199 PC_MG *mg = (PC_MG *)pc->data; 200 PC_MG_Levels **mglevels = mg->levels; 201 PetscInt i, n; 202 203 PetscFunctionBegin; 204 if (mglevels) { 205 n = mglevels[0]->levels; 206 for (i = 0; i < n - 1; i++) { 207 PetscCall(VecDestroy(&mglevels[i + 1]->r)); 208 PetscCall(VecDestroy(&mglevels[i]->b)); 209 PetscCall(VecDestroy(&mglevels[i]->x)); 210 PetscCall(MatDestroy(&mglevels[i + 1]->R)); 211 PetscCall(MatDestroy(&mglevels[i]->B)); 212 PetscCall(MatDestroy(&mglevels[i]->X)); 213 PetscCall(VecDestroy(&mglevels[i]->crx)); 214 PetscCall(VecDestroy(&mglevels[i]->crb)); 215 PetscCall(MatDestroy(&mglevels[i + 1]->restrct)); 216 PetscCall(MatDestroy(&mglevels[i + 1]->interpolate)); 217 PetscCall(MatDestroy(&mglevels[i + 1]->inject)); 218 PetscCall(VecDestroy(&mglevels[i + 1]->rscale)); 219 } 220 PetscCall(VecDestroy(&mglevels[n - 
1]->crx)); 221 PetscCall(VecDestroy(&mglevels[n - 1]->crb)); 222 /* this is not null only if the smoother on the finest level 223 changes the rhs during PreSolve */ 224 PetscCall(VecDestroy(&mglevels[n - 1]->b)); 225 PetscCall(MatDestroy(&mglevels[n - 1]->B)); 226 227 for (i = 0; i < n; i++) { 228 PetscCall(MatDestroy(&mglevels[i]->coarseSpace)); 229 PetscCall(MatDestroy(&mglevels[i]->A)); 230 if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPReset(mglevels[i]->smoothd)); 231 PetscCall(KSPReset(mglevels[i]->smoothu)); 232 if (mglevels[i]->cr) PetscCall(KSPReset(mglevels[i]->cr)); 233 } 234 mg->Nc = 0; 235 } 236 PetscFunctionReturn(PETSC_SUCCESS); 237 } 238 239 /* Implementing CR 240 241 We only want to make corrections that ``do not change'' the coarse solution. What we mean by not changing is that if I prolong my coarse solution to the fine grid and then inject that fine solution back to the coarse grid, I get the same answer. Injection is what Brannick calls R. We want the complementary projector to Inj, which we will call S, after Brannick, so that Inj S = 0. Now the orthogonal projector onto the range of Inj^T is 242 243 Inj^T (Inj Inj^T)^{-1} Inj 244 245 and if Inj is a VecScatter, as it is now in PETSc, we have 246 247 Inj^T Inj 248 249 and 250 251 S = I - Inj^T Inj 252 253 since 254 255 Inj S = Inj - (Inj Inj^T) Inj = 0. 256 257 Brannick suggests 258 259 A \to S^T A S \qquad\mathrm{and}\qquad M \to S^T M S 260 261 but I do not think his :math:`S^T S = I` is correct. Our S is an orthogonal projector, so :math:`S^T S = S^2 = S`. We will use 262 263 M^{-1} A \to S M^{-1} A S 264 265 In fact, since it is somewhat hard in PETSc to do the symmetric application, we will just apply S on the left. 
266 267 Check: || Inj P - I ||_F < tol 268 Check: In general, Inj Inj^T = I 269 */ 270 271 typedef struct { 272 PC mg; /* The PCMG object */ 273 PetscInt l; /* The multigrid level for this solver */ 274 Mat Inj; /* The injection matrix */ 275 Mat S; /* I - Inj^T Inj */ 276 } CRContext; 277 278 static PetscErrorCode CRSetup_Private(PC pc) 279 { 280 CRContext *ctx; 281 Mat It; 282 283 PetscFunctionBeginUser; 284 PetscCall(PCShellGetContext(pc, &ctx)); 285 PetscCall(PCMGGetInjection(ctx->mg, ctx->l, &It)); 286 PetscCheck(It, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "CR requires that injection be defined for this PCMG"); 287 PetscCall(MatCreateTranspose(It, &ctx->Inj)); 288 PetscCall(MatCreateNormal(ctx->Inj, &ctx->S)); 289 PetscCall(MatScale(ctx->S, -1.0)); 290 PetscCall(MatShift(ctx->S, 1.0)); 291 PetscFunctionReturn(PETSC_SUCCESS); 292 } 293 294 static PetscErrorCode CRApply_Private(PC pc, Vec x, Vec y) 295 { 296 CRContext *ctx; 297 298 PetscFunctionBeginUser; 299 PetscCall(PCShellGetContext(pc, &ctx)); 300 PetscCall(MatMult(ctx->S, x, y)); 301 PetscFunctionReturn(PETSC_SUCCESS); 302 } 303 304 static PetscErrorCode CRDestroy_Private(PC pc) 305 { 306 CRContext *ctx; 307 308 PetscFunctionBeginUser; 309 PetscCall(PCShellGetContext(pc, &ctx)); 310 PetscCall(MatDestroy(&ctx->Inj)); 311 PetscCall(MatDestroy(&ctx->S)); 312 PetscCall(PetscFree(ctx)); 313 PetscCall(PCShellSetContext(pc, NULL)); 314 PetscFunctionReturn(PETSC_SUCCESS); 315 } 316 317 static PetscErrorCode CreateCR_Private(PC pc, PetscInt l, PC *cr) 318 { 319 CRContext *ctx; 320 321 PetscFunctionBeginUser; 322 PetscCall(PCCreate(PetscObjectComm((PetscObject)pc), cr)); 323 PetscCall(PetscObjectSetName((PetscObject)*cr, "S (complementary projector to injection)")); 324 PetscCall(PetscCalloc1(1, &ctx)); 325 ctx->mg = pc; 326 ctx->l = l; 327 PetscCall(PCSetType(*cr, PCSHELL)); 328 PetscCall(PCShellSetContext(*cr, ctx)); 329 PetscCall(PCShellSetApply(*cr, CRApply_Private)); 330 
PetscCall(PCShellSetSetUp(*cr, CRSetup_Private)); 331 PetscCall(PCShellSetDestroy(*cr, CRDestroy_Private)); 332 PetscFunctionReturn(PETSC_SUCCESS); 333 } 334 335 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char[], const char[], const char *[], const char *[], PetscBool *); 336 337 PetscErrorCode PCMGSetLevels_MG(PC pc, PetscInt levels, MPI_Comm *comms) 338 { 339 PC_MG *mg = (PC_MG *)pc->data; 340 MPI_Comm comm; 341 PC_MG_Levels **mglevels = mg->levels; 342 PCMGType mgtype = mg->am; 343 PetscInt mgctype = (PetscInt)PC_MG_CYCLE_V; 344 PetscInt i; 345 PetscMPIInt size; 346 const char *prefix; 347 PC ipc; 348 PetscInt n; 349 350 PetscFunctionBegin; 351 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 352 PetscValidLogicalCollectiveInt(pc, levels, 2); 353 if (mg->nlevels == levels) PetscFunctionReturn(PETSC_SUCCESS); 354 PetscCall(PetscObjectGetComm((PetscObject)pc, &comm)); 355 if (mglevels) { 356 mgctype = mglevels[0]->cycles; 357 /* changing the number of levels so free up the previous stuff */ 358 PetscCall(PCReset_MG(pc)); 359 n = mglevels[0]->levels; 360 for (i = 0; i < n; i++) { 361 if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd)); 362 PetscCall(KSPDestroy(&mglevels[i]->smoothu)); 363 PetscCall(KSPDestroy(&mglevels[i]->cr)); 364 PetscCall(PetscFree(mglevels[i])); 365 } 366 PetscCall(PetscFree(mg->levels)); 367 } 368 369 mg->nlevels = levels; 370 371 PetscCall(PetscMalloc1(levels, &mglevels)); 372 373 PetscCall(PCGetOptionsPrefix(pc, &prefix)); 374 375 mg->stageApply = 0; 376 for (i = 0; i < levels; i++) { 377 PetscCall(PetscNew(&mglevels[i])); 378 379 mglevels[i]->level = i; 380 mglevels[i]->levels = levels; 381 mglevels[i]->cycles = mgctype; 382 mg->default_smoothu = 2; 383 mg->default_smoothd = 2; 384 mglevels[i]->eventsmoothsetup = 0; 385 mglevels[i]->eventsmoothsolve = 0; 386 mglevels[i]->eventresidual = 0; 387 mglevels[i]->eventinterprestrict = 0; 388 389 if (comms) comm = 
comms[i]; 390 if (comm != MPI_COMM_NULL) { 391 PetscCall(KSPCreate(comm, &mglevels[i]->smoothd)); 392 PetscCall(KSPSetNestLevel(mglevels[i]->smoothd, pc->kspnestlevel)); 393 PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->smoothd, pc->erroriffailure)); 394 PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->smoothd, (PetscObject)pc, levels - i)); 395 PetscCall(KSPSetOptionsPrefix(mglevels[i]->smoothd, prefix)); 396 PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->smoothd, PetscMGLevelId, mglevels[i]->level)); 397 if (i == 0 && levels > 1) { // coarse grid 398 PetscCall(KSPAppendOptionsPrefix(mglevels[0]->smoothd, "mg_coarse_")); 399 400 /* coarse solve is (redundant) LU by default; set shifttype NONZERO to avoid annoying zero-pivot in LU preconditioner */ 401 PetscCall(KSPSetType(mglevels[0]->smoothd, KSPPREONLY)); 402 PetscCall(KSPGetPC(mglevels[0]->smoothd, &ipc)); 403 PetscCallMPI(MPI_Comm_size(comm, &size)); 404 if (size > 1) { 405 PetscCall(PCSetType(ipc, PCREDUNDANT)); 406 } else { 407 PetscCall(PCSetType(ipc, PCLU)); 408 } 409 PetscCall(PCFactorSetShiftType(ipc, MAT_SHIFT_INBLOCKS)); 410 } else { 411 char tprefix[128]; 412 413 PetscCall(KSPSetType(mglevels[i]->smoothd, KSPCHEBYSHEV)); 414 PetscCall(KSPSetConvergenceTest(mglevels[i]->smoothd, KSPConvergedSkip, NULL, NULL)); 415 PetscCall(KSPSetNormType(mglevels[i]->smoothd, KSP_NORM_NONE)); 416 PetscCall(KSPGetPC(mglevels[i]->smoothd, &ipc)); 417 PetscCall(PCSetType(ipc, PCSOR)); 418 PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, mg->default_smoothd)); 419 420 if (i == levels - 1 && levels > 1) { // replace 'mg_finegrid_' with 'mg_levels_X_' 421 PetscBool set; 422 PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)mglevels[i]->smoothd)->options, ((PetscObject)mglevels[i]->smoothd)->prefix, "-mg_fine_", NULL, NULL, &set)); 423 if (set) { 424 if (prefix) PetscCall(PetscSNPrintf(tprefix, 128, "%smg_fine_", prefix)); 425 else 
PetscCall(PetscSNPrintf(tprefix, 128, "mg_fine_")); 426 PetscCall(KSPSetOptionsPrefix(mglevels[i]->smoothd, tprefix)); 427 } else { 428 PetscCall(PetscSNPrintf(tprefix, 128, "mg_levels_%" PetscInt_FMT "_", i)); 429 PetscCall(KSPAppendOptionsPrefix(mglevels[i]->smoothd, tprefix)); 430 } 431 } else { 432 PetscCall(PetscSNPrintf(tprefix, 128, "mg_levels_%" PetscInt_FMT "_", i)); 433 PetscCall(KSPAppendOptionsPrefix(mglevels[i]->smoothd, tprefix)); 434 } 435 } 436 } 437 mglevels[i]->smoothu = mglevels[i]->smoothd; 438 mg->rtol = 0.0; 439 mg->abstol = 0.0; 440 mg->dtol = 0.0; 441 mg->ttol = 0.0; 442 mg->cyclesperpcapply = 1; 443 } 444 mg->levels = mglevels; 445 PetscCall(PCMGSetType(pc, mgtype)); 446 PetscFunctionReturn(PETSC_SUCCESS); 447 } 448 449 /*@C 450 PCMGSetLevels - Sets the number of levels to use with `PCMG`. 451 Must be called before any other `PCMG` routine. 452 453 Logically Collective 454 455 Input Parameters: 456 + pc - the preconditioner context 457 . levels - the number of levels 458 - comms - optional communicators for each level; this is to allow solving the coarser problems 459 on smaller sets of processes. For processes that are not included in the computation 460 you must pass `MPI_COMM_NULL`. Use comms = `NULL` to specify that all processes 461 should participate in each level of problem. 462 463 Level: intermediate 464 465 Notes: 466 If the number of levels is one then the multigrid uses the `-mg_levels` prefix 467 for setting the level options rather than the `-mg_coarse` or `-mg_fine` prefix. 468 469 You can free the information in comms after this routine is called. 470 471 The array of MPI communicators must contain `MPI_COMM_NULL` for those ranks that at each level 472 are not participating in the coarser solve. 
For example, with 2 levels and 1 and 2 ranks on 473 the two levels, rank 0 in the original communicator will pass in an array of 2 communicators 474 of size 2 and 1, while rank 1 in the original communicator will pass in array of 2 communicators 475 the first of size 2 and the second of value `MPI_COMM_NULL` since the rank 1 does not participate 476 in the coarse grid solve. 477 478 Since each coarser level may have a new `MPI_Comm` with fewer ranks than the previous, one 479 must take special care in providing the restriction and interpolation operation. We recommend 480 providing these as two step operations; first perform a standard restriction or interpolation on 481 the full number of ranks for that level and then use an MPI call to copy the resulting vector 482 array entries (after calls to VecGetArray()) to the smaller or larger number of ranks, note in both 483 cases the MPI calls must be made on the larger of the two communicators. Traditional MPI send and 484 receives or `MPI_AlltoAllv()` could be used to do the reshuffling of the vector entries. 485 486 Fortran Notes: 487 Use comms = `PETSC_NULL_MPI_COMM` as the equivalent of `NULL` in the C interface. Note `PETSC_NULL_MPI_COMM` 488 is not `MPI_COMM_NULL`. It is more like `PETSC_NULL_INTEGER`, `PETSC_NULL_REAL` etc. 
489 490 .seealso: [](ch_ksp), `PCMGSetType()`, `PCMGGetLevels()` 491 @*/ 492 PetscErrorCode PCMGSetLevels(PC pc, PetscInt levels, MPI_Comm *comms) 493 { 494 PetscFunctionBegin; 495 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 496 if (comms) PetscAssertPointer(comms, 3); 497 PetscTryMethod(pc, "PCMGSetLevels_C", (PC, PetscInt, MPI_Comm *), (pc, levels, comms)); 498 PetscFunctionReturn(PETSC_SUCCESS); 499 } 500 501 PetscErrorCode PCDestroy_MG(PC pc) 502 { 503 PC_MG *mg = (PC_MG *)pc->data; 504 PC_MG_Levels **mglevels = mg->levels; 505 PetscInt i, n; 506 507 PetscFunctionBegin; 508 PetscCall(PCReset_MG(pc)); 509 if (mglevels) { 510 n = mglevels[0]->levels; 511 for (i = 0; i < n; i++) { 512 if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd)); 513 PetscCall(KSPDestroy(&mglevels[i]->smoothu)); 514 PetscCall(KSPDestroy(&mglevels[i]->cr)); 515 PetscCall(PetscFree(mglevels[i])); 516 } 517 PetscCall(PetscFree(mg->levels)); 518 } 519 PetscCall(PetscFree(pc->data)); 520 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL)); 521 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL)); 522 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", NULL)); 523 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", NULL)); 524 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", NULL)); 525 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL)); 526 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL)); 527 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", NULL)); 528 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", NULL)); 529 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", NULL)); 530 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", 
NULL)); 531 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", NULL)); 532 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", NULL)); 533 PetscFunctionReturn(PETSC_SUCCESS); 534 } 535 536 /* 537 PCApply_MG - Runs either an additive, multiplicative, Kaskadic 538 or full cycle of multigrid. 539 540 Note: 541 A simple wrapper which calls PCMGMCycle(),PCMGACycle(), or PCMGFCycle(). 542 */ 543 static PetscErrorCode PCApply_MG_Internal(PC pc, Vec b, Vec x, Mat B, Mat X, PetscBool transpose) 544 { 545 PC_MG *mg = (PC_MG *)pc->data; 546 PC_MG_Levels **mglevels = mg->levels; 547 PC tpc; 548 PetscInt levels = mglevels[0]->levels, i; 549 PetscBool changeu, changed, matapp; 550 551 PetscFunctionBegin; 552 matapp = (PetscBool)(B && X); 553 if (mg->stageApply) PetscCall(PetscLogStagePush(mg->stageApply)); 554 /* When the DM is supplying the matrix then it will not exist until here */ 555 for (i = 0; i < levels; i++) { 556 if (!mglevels[i]->A) { 557 PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL)); 558 PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A)); 559 } 560 } 561 562 PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc)); 563 PetscCall(PCPreSolveChangeRHS(tpc, &changed)); 564 PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc)); 565 PetscCall(PCPreSolveChangeRHS(tpc, &changeu)); 566 if (!changeu && !changed) { 567 if (matapp) { 568 PetscCall(MatDestroy(&mglevels[levels - 1]->B)); 569 mglevels[levels - 1]->B = B; 570 } else { 571 PetscCall(VecDestroy(&mglevels[levels - 1]->b)); 572 mglevels[levels - 1]->b = b; 573 } 574 } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */ 575 if (matapp) { 576 if (mglevels[levels - 1]->B) { 577 PetscInt N1, N2; 578 PetscBool flg; 579 580 PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &N1)); 581 PetscCall(MatGetSize(B, NULL, &N2)); 582 
PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 1]->B, ((PetscObject)B)->type_name, &flg)); 583 if (N1 != N2 || !flg) PetscCall(MatDestroy(&mglevels[levels - 1]->B)); 584 } 585 if (!mglevels[levels - 1]->B) { 586 PetscCall(MatDuplicate(B, MAT_COPY_VALUES, &mglevels[levels - 1]->B)); 587 } else { 588 PetscCall(MatCopy(B, mglevels[levels - 1]->B, SAME_NONZERO_PATTERN)); 589 } 590 } else { 591 if (!mglevels[levels - 1]->b) { 592 Vec *vec; 593 594 PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL)); 595 mglevels[levels - 1]->b = *vec; 596 PetscCall(PetscFree(vec)); 597 } 598 PetscCall(VecCopy(b, mglevels[levels - 1]->b)); 599 } 600 } 601 if (matapp) { 602 mglevels[levels - 1]->X = X; 603 } else { 604 mglevels[levels - 1]->x = x; 605 } 606 607 /* If coarser Xs are present, it means we have already block applied the PC at least once 608 Reset operators if sizes/type do no match */ 609 if (matapp && levels > 1 && mglevels[levels - 2]->X) { 610 PetscInt Xc, Bc; 611 PetscBool flg; 612 613 PetscCall(MatGetSize(mglevels[levels - 2]->X, NULL, &Xc)); 614 PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &Bc)); 615 PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 2]->X, ((PetscObject)mglevels[levels - 1]->X)->type_name, &flg)); 616 if (Xc != Bc || !flg) { 617 PetscCall(MatDestroy(&mglevels[levels - 1]->R)); 618 for (i = 0; i < levels - 1; i++) { 619 PetscCall(MatDestroy(&mglevels[i]->R)); 620 PetscCall(MatDestroy(&mglevels[i]->B)); 621 PetscCall(MatDestroy(&mglevels[i]->X)); 622 } 623 } 624 } 625 626 if (mg->am == PC_MG_MULTIPLICATIVE) { 627 if (matapp) PetscCall(MatZeroEntries(X)); 628 else PetscCall(VecZeroEntries(x)); 629 for (i = 0; i < mg->cyclesperpcapply; i++) PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, transpose, matapp, NULL)); 630 } else if (mg->am == PC_MG_ADDITIVE) { 631 PetscCall(PCMGACycle_Private(pc, mglevels, transpose, matapp)); 632 } else if (mg->am == PC_MG_KASKADE) { 633 
PetscCall(PCMGKCycle_Private(pc, mglevels, transpose, matapp)); 634 } else { 635 PetscCall(PCMGFCycle_Private(pc, mglevels, transpose, matapp)); 636 } 637 if (mg->stageApply) PetscCall(PetscLogStagePop()); 638 if (!changeu && !changed) { 639 if (matapp) { 640 mglevels[levels - 1]->B = NULL; 641 } else { 642 mglevels[levels - 1]->b = NULL; 643 } 644 } 645 PetscFunctionReturn(PETSC_SUCCESS); 646 } 647 648 static PetscErrorCode PCApply_MG(PC pc, Vec b, Vec x) 649 { 650 PetscFunctionBegin; 651 PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_FALSE)); 652 PetscFunctionReturn(PETSC_SUCCESS); 653 } 654 655 static PetscErrorCode PCApplyTranspose_MG(PC pc, Vec b, Vec x) 656 { 657 PetscFunctionBegin; 658 PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_TRUE)); 659 PetscFunctionReturn(PETSC_SUCCESS); 660 } 661 662 static PetscErrorCode PCMatApply_MG(PC pc, Mat b, Mat x) 663 { 664 PetscFunctionBegin; 665 PetscCall(PCApply_MG_Internal(pc, NULL, NULL, b, x, PETSC_FALSE)); 666 PetscFunctionReturn(PETSC_SUCCESS); 667 } 668 669 PetscErrorCode PCSetFromOptions_MG(PC pc, PetscOptionItems PetscOptionsObject) 670 { 671 PetscInt levels, cycles; 672 PetscBool flg, flg2; 673 PC_MG *mg = (PC_MG *)pc->data; 674 PC_MG_Levels **mglevels; 675 PCMGType mgtype; 676 PCMGCycleType mgctype; 677 PCMGGalerkinType gtype; 678 PCMGCoarseSpaceType coarseSpaceType; 679 680 PetscFunctionBegin; 681 levels = PetscMax(mg->nlevels, 1); 682 PetscOptionsHeadBegin(PetscOptionsObject, "Multigrid options"); 683 PetscCall(PetscOptionsInt("-pc_mg_levels", "Number of Levels", "PCMGSetLevels", levels, &levels, &flg)); 684 if (!flg && !mg->levels && pc->dm) { 685 PetscCall(DMGetRefineLevel(pc->dm, &levels)); 686 levels++; 687 mg->usedmfornumberoflevels = PETSC_TRUE; 688 } 689 PetscCall(PCMGSetLevels(pc, levels, NULL)); 690 mglevels = mg->levels; 691 692 mgctype = (PCMGCycleType)mglevels[0]->cycles; 693 PetscCall(PetscOptionsEnum("-pc_mg_cycle_type", "V cycle or for W-cycle", "PCMGSetCycleType", 
PCMGCycleTypes, (PetscEnum)mgctype, (PetscEnum *)&mgctype, &flg)); 694 if (flg) PetscCall(PCMGSetCycleType(pc, mgctype)); 695 coarseSpaceType = mg->coarseSpaceType; 696 PetscCall(PetscOptionsEnum("-pc_mg_adapt_interp_coarse_space", "Type of adaptive coarse space: none, polynomial, harmonic, eigenvector, generalized_eigenvector, gdsw", "PCMGSetAdaptCoarseSpaceType", PCMGCoarseSpaceTypes, (PetscEnum)coarseSpaceType, (PetscEnum *)&coarseSpaceType, &flg)); 697 if (flg) PetscCall(PCMGSetAdaptCoarseSpaceType(pc, coarseSpaceType)); 698 PetscCall(PetscOptionsInt("-pc_mg_adapt_interp_n", "Size of the coarse space for adaptive interpolation", "PCMGSetCoarseSpace", mg->Nc, &mg->Nc, &flg)); 699 PetscCall(PetscOptionsBool("-pc_mg_mesp_monitor", "Monitor the multilevel eigensolver", "PCMGSetAdaptInterpolation", PETSC_FALSE, &mg->mespMonitor, &flg)); 700 flg2 = PETSC_FALSE; 701 PetscCall(PetscOptionsBool("-pc_mg_adapt_cr", "Monitor coarse space quality using Compatible Relaxation (CR)", "PCMGSetAdaptCR", PETSC_FALSE, &flg2, &flg)); 702 if (flg) PetscCall(PCMGSetAdaptCR(pc, flg2)); 703 flg = PETSC_FALSE; 704 PetscCall(PetscOptionsBool("-pc_mg_distinct_smoothup", "Create separate smoothup KSP and append the prefix _up", "PCMGSetDistinctSmoothUp", PETSC_FALSE, &flg, NULL)); 705 if (flg) PetscCall(PCMGSetDistinctSmoothUp(pc)); 706 PetscCall(PetscOptionsEnum("-pc_mg_galerkin", "Use Galerkin process to compute coarser operators", "PCMGSetGalerkin", PCMGGalerkinTypes, (PetscEnum)mg->galerkin, (PetscEnum *)>ype, &flg)); 707 if (flg) PetscCall(PCMGSetGalerkin(pc, gtype)); 708 mgtype = mg->am; 709 PetscCall(PetscOptionsEnum("-pc_mg_type", "Multigrid type", "PCMGSetType", PCMGTypes, (PetscEnum)mgtype, (PetscEnum *)&mgtype, &flg)); 710 if (flg) PetscCall(PCMGSetType(pc, mgtype)); 711 if (mg->am == PC_MG_MULTIPLICATIVE) { 712 PetscCall(PetscOptionsInt("-pc_mg_multiplicative_cycles", "Number of cycles for each preconditioner step", "PCMGMultiplicativeSetCycles", mg->cyclesperpcapply, &cycles, 
&flg)); 713 if (flg) PetscCall(PCMGMultiplicativeSetCycles(pc, cycles)); 714 } 715 flg = PETSC_FALSE; 716 PetscCall(PetscOptionsBool("-pc_mg_log", "Log times for each multigrid level", "None", flg, &flg, NULL)); 717 if (flg) { 718 PetscInt i; 719 char eventname[128]; 720 721 levels = mglevels[0]->levels; 722 for (i = 0; i < levels; i++) { 723 PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGSetup Level %" PetscInt_FMT, i)); 724 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsetup)); 725 PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGSmooth Level %" PetscInt_FMT, i)); 726 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsolve)); 727 if (i) { 728 PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGResid Level %" PetscInt_FMT, i)); 729 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventresidual)); 730 PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGInterp Level %" PetscInt_FMT, i)); 731 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventinterprestrict)); 732 } 733 } 734 735 if (PetscDefined(USE_LOG)) { 736 const char sname[] = "MG Apply"; 737 738 PetscCall(PetscLogStageGetId(sname, &mg->stageApply)); 739 if (mg->stageApply < 0) PetscCall(PetscLogStageRegister(sname, &mg->stageApply)); 740 } 741 } 742 PetscOptionsHeadEnd(); 743 /* Check option consistency */ 744 PetscCall(PCMGGetGalerkin(pc, >ype)); 745 PetscCall(PCMGGetAdaptInterpolation(pc, &flg)); 746 PetscCheck(!flg || !(gtype >= PC_MG_GALERKIN_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use Galerkin coarse operators when adapting the interpolator"); 747 PetscFunctionReturn(PETSC_SUCCESS); 748 } 749 750 const char *const PCMGTypes[] = {"MULTIPLICATIVE", "ADDITIVE", "FULL", "KASKADE", "PCMGType", "PC_MG", NULL}; 
const char *const PCMGCycleTypes[] = {"invalid", "v", "w", "PCMGCycleType", "PC_MG_CYCLE", NULL};
const char *const PCMGGalerkinTypes[]    = {"both", "pmat", "mat", "none", "external", "PCMGGalerkinType", "PC_MG_GALERKIN", NULL};
const char *const PCMGCoarseSpaceTypes[] = {"none", "polynomial", "harmonic", "eigenvector", "generalized_eigenvector", "gdsw", "PCMGCoarseSpaceType", "PCMG_ADAPT_NONE", NULL};

#include <petscdraw.h>
/*
   PCView_MG - Viewer for the PCMG preconditioner.

   Supports three viewer classes: ASCII (human-readable summary plus a KSPView()
   of every level's smoothers), binary (dumps the smoother KSPs fine-to-coarse),
   and draw (graphical layout of the level solvers).
*/
PetscErrorCode PCView_MG(PC pc, PetscViewer viewer)
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  /* mg->levels may be NULL if PCMGSetLevels() has not been called yet */
  PetscInt  levels = mglevels ? mglevels[0]->levels : 0, i;
  PetscBool iascii, isbinary, isdraw;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    /* all levels share one cycle setting when set by PCMGSetCycleType(); report level 0's */
    const char *cyclename = levels ? (mglevels[0]->cycles == PC_MG_CYCLE_V ? "v" : "w") : "unknown";
    PetscCall(PetscViewerASCIIPrintf(viewer, " type is %s, levels=%" PetscInt_FMT " cycles=%s\n", PCMGTypes[mg->am], levels, cyclename));
    if (mg->am == PC_MG_MULTIPLICATIVE) PetscCall(PetscViewerASCIIPrintf(viewer, " Cycles per PCApply=%" PetscInt_FMT "\n", mg->cyclesperpcapply));
    if (mg->galerkin == PC_MG_GALERKIN_BOTH) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices\n"));
    } else if (mg->galerkin == PC_MG_GALERKIN_PMAT) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices for pmat\n"));
    } else if (mg->galerkin == PC_MG_GALERKIN_MAT) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices for mat\n"));
    } else if (mg->galerkin == PC_MG_GALERKIN_EXTERNAL) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Using externally compute Galerkin coarse grid matrices\n"));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Not using Galerkin computed coarse grid matrices\n"));
    }
    /* type-specific additions (e.g. set by derived implementations) */
    if (mg->view) PetscCall((*mg->view)(pc, viewer));
    for (i = 0; i < levels; i++) {
      /* level 0 is the coarse grid solver, level > 0 are smoothers */
      if (i) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Down solver (pre-smoother) on level %" PetscInt_FMT " -------------------------------\n", i));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Coarse grid solver -- level %" PetscInt_FMT " -------------------------------\n", i));
      }
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall(KSPView(mglevels[i]->smoothd, viewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
      if (i && mglevels[i]->smoothd == mglevels[i]->smoothu) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) same as down solver (pre-smoother)\n"));
      } else if (i) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) on level %" PetscInt_FMT " -------------------------------\n", i));
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(KSPView(mglevels[i]->smoothu, viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      }
      /* optional compatible-relaxation solver (see PCMGSetAdaptCR()) */
      if (i && mglevels[i]->cr) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "CR solver on level %" PetscInt_FMT " -------------------------------\n", i));
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(KSPView(mglevels[i]->cr, viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      }
    }
  } else if (isbinary) {
    /* binary dump goes fine-to-coarse and skips a smoothu identical to smoothd */
    for (i = levels - 1; i >= 0; i--) {
      PetscCall(KSPView(mglevels[i]->smoothd, viewer));
      if (i && mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPView(mglevels[i]->smoothu, viewer));
    }
  } else if (isdraw) {
    PetscDraw draw;
    PetscReal x, w, y, bottom, th;
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    PetscCall(PetscDrawStringGetSize(draw, NULL, &th));
    bottom = y - th;
    for (i = levels - 1; i >= 0; i--) {
      if (!mglevels[i]->smoothu || (mglevels[i]->smoothu == mglevels[i]->smoothd)) {
        /* single smoother: draw centered */
        PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom));
        PetscCall(KSPView(mglevels[i]->smoothd, viewer));
        PetscCall(PetscDrawPopCurrentPoint(draw));
      } else {
        /* distinct down/up smoothers: draw side by side */
        w = 0.5 * PetscMin(1.0 - x, x);
        PetscCall(PetscDrawPushCurrentPoint(draw, x + w, bottom));
        PetscCall(KSPView(mglevels[i]->smoothd, viewer));
        PetscCall(PetscDrawPopCurrentPoint(draw));
        PetscCall(PetscDrawPushCurrentPoint(draw, x - w, bottom));
        PetscCall(KSPView(mglevels[i]->smoothu, viewer));
        PetscCall(PetscDrawPopCurrentPoint(draw));
      }
      PetscCall(PetscDrawGetBoundingBox(draw, NULL, &bottom, NULL, NULL));
      bottom -= th;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

#include <petsc/private/kspimpl.h>

/*
   Calls setup for the KSP on each level
*/
PetscErrorCode PCSetUp_MG(PC pc)
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PetscInt       i, n;
  PC             cpc;
PetscBool    dump = PETSC_FALSE, opsset, use_amat, missinginterpolate = PETSC_FALSE;
  Mat         dA, dB;
  Vec         tvec;
  DM         *dms;
  PetscViewer viewer = NULL;
  PetscBool   dAeqdB = PETSC_FALSE, needRestricts = PETSC_FALSE, doCR = PETSC_FALSE;
  PetscBool   adaptInterpolation = mg->adaptInterpolation;

  PetscFunctionBegin;
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels with PCMGSetLevels() before setting up");
  n = mglevels[0]->levels;
  /* FIX: Move this to PCSetFromOptions_MG? */
  if (mg->usedmfornumberoflevels) {
    /* the number of levels is taken from the DM's refinement level + 1 */
    PetscInt levels;
    PetscCall(DMGetRefineLevel(pc->dm, &levels));
    levels++;
    if (levels > n) { /* the problem is now being solved on a finer grid */
      PetscCall(PCMGSetLevels(pc, levels, NULL));
      n = levels;
      PetscCall(PCSetFromOptions(pc)); /* it is bad to call this here, but otherwise will never be called for the new hierarchy */
      mglevels = mg->levels;
    }
  }
  PetscCall(KSPGetPC(mglevels[0]->smoothd, &cpc));

  /* If user did not provide fine grid operators OR operator was not updated since last global KSPSetOperators() */
  /* so use those from global PC */
  /* Is this what we always want? What if user wants to keep old one? */
  PetscCall(KSPGetOperatorsSet(mglevels[n - 1]->smoothd, NULL, &opsset));
  if (opsset) {
    Mat mmat;
    PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, NULL, &mmat));
    if (mmat == pc->pmat) opsset = PETSC_FALSE;
  }

  /* Create CR solvers (compatible relaxation monitoring, see PCMGSetAdaptCR()) */
  PetscCall(PCMGGetAdaptCR(pc, &doCR));
  if (doCR) {
    const char *prefix;

    PetscCall(PCGetOptionsPrefix(pc, &prefix));
    for (i = 1; i < n; ++i) {
      PC   ipc, cr;
      char crprefix[128];

      /* each CR solver is a Chebyshev KSP preconditioned by SOR composed with the CR projector */
      PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &mglevels[i]->cr));
      PetscCall(KSPSetNestLevel(mglevels[i]->cr, pc->kspnestlevel));
      PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->cr, PETSC_FALSE));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->cr, (PetscObject)pc, n - i));
      PetscCall(KSPSetOptionsPrefix(mglevels[i]->cr, prefix));
      PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->cr, PetscMGLevelId, mglevels[i]->level));
      PetscCall(KSPSetType(mglevels[i]->cr, KSPCHEBYSHEV));
      PetscCall(KSPSetConvergenceTest(mglevels[i]->cr, KSPConvergedSkip, NULL, NULL));
      PetscCall(KSPSetNormType(mglevels[i]->cr, KSP_NORM_PRECONDITIONED));
      PetscCall(KSPGetPC(mglevels[i]->cr, &ipc));

      PetscCall(PCSetType(ipc, PCCOMPOSITE));
      PetscCall(PCCompositeSetType(ipc, PC_COMPOSITE_MULTIPLICATIVE));
      PetscCall(PCCompositeAddPCType(ipc, PCSOR));
      PetscCall(CreateCR_Private(pc, i, &cr));
      PetscCall(PCCompositeAddPC(ipc, cr));
      PetscCall(PCDestroy(&cr)); /* composite PC holds its own reference */

      PetscCall(KSPSetTolerances(mglevels[i]->cr, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, mg->default_smoothd));
      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
      PetscCall(PetscSNPrintf(crprefix, 128, "mg_levels_%" PetscInt_FMT "_cr_", i));
      PetscCall(KSPAppendOptionsPrefix(mglevels[i]->cr, crprefix));
    }
  }

  if (!opsset) {
    PetscCall(PCGetUseAmat(pc, &use_amat));
    if (use_amat) {
      PetscCall(PetscInfo(pc, "Using outer operators to define finest grid operator \n because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n"));
      PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->mat, pc->pmat));
    } else {
      PetscCall(PetscInfo(pc, "Using matrix (pmat) operators to define finest grid operator \n because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n"));
      PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->pmat, pc->pmat));
    }
  }

  /* does any level lack both interpolation and restriction? */
  for (i = n - 1; i > 0; i--) {
    if (!(mglevels[i]->interpolate || mglevels[i]->restrct)) {
      missinginterpolate = PETSC_TRUE;
      break;
    }
  }

  PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, &dA, &dB));
  if (dA == dB) dAeqdB = PETSC_TRUE;
  if (mg->galerkin == PC_MG_GALERKIN_NONE || ((mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_MAT) && !dAeqdB)) {
    needRestricts = PETSC_TRUE; /* user must compute either mat, pmat, or both so must restrict x to coarser levels */
  }

  if (pc->dm && !pc->setupcalled) {
    /* finest smoother also gets DM but it is not active, independent of whether galerkin==PC_MG_GALERKIN_EXTERNAL */
    PetscCall(KSPSetDM(mglevels[n - 1]->smoothd, pc->dm));
    PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothd, PETSC_FALSE));
    if (mglevels[n - 1]->smoothd != mglevels[n - 1]->smoothu) {
      PetscCall(KSPSetDM(mglevels[n - 1]->smoothu, pc->dm));
      PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothu, PETSC_FALSE));
    }
    if (mglevels[n - 1]->cr) {
      PetscCall(KSPSetDM(mglevels[n - 1]->cr, pc->dm));
      PetscCall(KSPSetDMActive(mglevels[n - 1]->cr, PETSC_FALSE));
    }
  }

  /*
     Skipping if user has provided all interpolation/restriction needed (since DM might not be able to produce them (when coming from SNES/TS)
     Skipping for externally managed hierarchy (such as ML and GAMG). Cleaner logic here would be great. Wrap ML/GAMG as DMs?
  */
  if (missinginterpolate && mg->galerkin != PC_MG_GALERKIN_EXTERNAL && !pc->setupcalled) {
    /* first see if we can compute a coarse space */
    if (mg->coarseSpaceType == PCMG_ADAPT_GDSW) {
      for (i = n - 2; i > -1; i--) {
        if (!mglevels[i + 1]->restrct && !mglevels[i + 1]->interpolate) {
          PetscCall(PCMGComputeCoarseSpace_Internal(pc, i + 1, mg->coarseSpaceType, mg->Nc, NULL, &mglevels[i + 1]->coarseSpace));
          PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->coarseSpace));
        }
      }
    } else { /* construct the interpolation from the DMs */
      Mat p;
      Vec rscale;
      PetscCall(PetscMalloc1(n, &dms));
      dms[n - 1] = pc->dm;
      /* Separately create them so we do not get DMKSP interference between levels */
      for (i = n - 2; i > -1; i--) PetscCall(DMCoarsen(dms[i + 1], MPI_COMM_NULL, &dms[i]));
      for (i = n - 2; i > -1; i--) {
        DMKSP     kdm;
        PetscBool dmhasrestrict, dmhasinject;
        PetscCall(KSPSetDM(mglevels[i]->smoothd, dms[i]));
        if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothd, PETSC_FALSE));
        if (mglevels[i]->smoothd != mglevels[i]->smoothu) {
          PetscCall(KSPSetDM(mglevels[i]->smoothu, dms[i]));
          if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothu, PETSC_FALSE));
        }
        if (mglevels[i]->cr) {
          PetscCall(KSPSetDM(mglevels[i]->cr, dms[i]));
          if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->cr, PETSC_FALSE));
        }
        PetscCall(DMGetDMKSPWrite(dms[i], &kdm));
        /* Ugly hack so that the next KSPSetUp() will use the RHS that we set. A better fix is to change dmActive to take
         * a bitwise OR of computing the matrix, RHS, and initial iterate. */
        kdm->ops->computerhs = NULL;
        kdm->rhsctx          = NULL;
        if (!mglevels[i + 1]->interpolate) {
          PetscCall(DMCreateInterpolation(dms[i], dms[i + 1], &p, &rscale));
          PetscCall(PCMGSetInterpolation(pc, i + 1, p));
          if (rscale) PetscCall(PCMGSetRScale(pc, i + 1, rscale));
          PetscCall(VecDestroy(&rscale));
          PetscCall(MatDestroy(&p));
        }
        PetscCall(DMHasCreateRestriction(dms[i], &dmhasrestrict));
        if (dmhasrestrict && !mglevels[i + 1]->restrct) {
          PetscCall(DMCreateRestriction(dms[i], dms[i + 1], &p));
          PetscCall(PCMGSetRestriction(pc, i + 1, p));
          PetscCall(MatDestroy(&p));
        }
        PetscCall(DMHasCreateInjection(dms[i], &dmhasinject));
        if (dmhasinject && !mglevels[i + 1]->inject) {
          PetscCall(DMCreateInjection(dms[i], dms[i + 1], &p));
          PetscCall(PCMGSetInjection(pc, i + 1, p));
          PetscCall(MatDestroy(&p));
        }
      }

      for (i = n - 2; i > -1; i--) PetscCall(DMDestroy(&dms[i]));
      PetscCall(PetscFree(dms));
    }
  }

  /* Build the coarse operators by the Galerkin triple product when requested */
  if (mg->galerkin < PC_MG_GALERKIN_NONE) {
    Mat       A, B;
    PetscBool doA = PETSC_FALSE, doB = PETSC_FALSE;
    MatReuse  reuse = MAT_INITIAL_MATRIX;

    if (mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_BOTH) doB = PETSC_TRUE;
    if (mg->galerkin == PC_MG_GALERKIN_MAT || (mg->galerkin == PC_MG_GALERKIN_BOTH && dA != dB)) doA = PETSC_TRUE;
    if (pc->setupcalled) reuse = MAT_REUSE_MATRIX;
    for (i = n - 2; i > -1; i--) {
      PetscCheck(mglevels[i + 1]->restrct || mglevels[i + 1]->interpolate, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must provide interpolation or restriction for each MG level except level 0");
      /* if only one transfer operator is given, use it for both directions */
      if (!mglevels[i + 1]->interpolate) PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->restrct));
      if (!mglevels[i + 1]->restrct) PetscCall(PCMGSetRestriction(pc, i + 1, mglevels[i + 1]->interpolate));
      if (reuse == MAT_REUSE_MATRIX) PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, &B));
      if (doA) PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dA, mglevels[i + 1]->interpolate, reuse, 1.0, &A));
      if (doB) PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dB, mglevels[i + 1]->interpolate, reuse, 1.0, &B));
      /* the management of the PetscObjectReference() and PetscObjectDereference() below is rather delicate */
      if (!doA && dAeqdB) {
        if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)B));
        A = B;
      } else if (!doA && reuse == MAT_INITIAL_MATRIX) {
        PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, NULL));
        PetscCall(PetscObjectReference((PetscObject)A));
      }
      if (!doB && dAeqdB) {
        if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)A));
        B = A;
      } else if (!doB && reuse == MAT_INITIAL_MATRIX) {
        PetscCall(KSPGetOperators(mglevels[i]->smoothd, NULL, &B));
        PetscCall(PetscObjectReference((PetscObject)B));
      }
      if (reuse == MAT_INITIAL_MATRIX) {
        PetscCall(KSPSetOperators(mglevels[i]->smoothd, A, B));
        PetscCall(PetscObjectDereference((PetscObject)A));
        PetscCall(PetscObjectDereference((PetscObject)B));
      }
      /* this level's operators become the fine operators for the next coarser level */
      dA = A;
      dB = B;
    }
  }

  /* Adapt interpolation matrices */
  if (adaptInterpolation) {
    for (i = 0; i < n; ++i) {
      if (!mglevels[i]->coarseSpace) PetscCall(PCMGComputeCoarseSpace_Internal(pc, i, mg->coarseSpaceType, mg->Nc, !i ? NULL : mglevels[i - 1]->coarseSpace, &mglevels[i]->coarseSpace));
      if (i) PetscCall(PCMGAdaptInterpolator_Internal(pc, i, mglevels[i - 1]->smoothu, mglevels[i]->smoothu, mglevels[i - 1]->coarseSpace, mglevels[i]->coarseSpace));
    }
    for (i = n - 2; i > -1; --i) PetscCall(PCMGRecomputeLevelOperators_Internal(pc, i));
  }

  if (needRestricts && pc->dm) {
    /* user is responsible for providing coarse operators; propagate fine DM state down */
    for (i = n - 2; i >= 0; i--) {
      DM  dmfine, dmcoarse;
      Mat Restrict, Inject;
      Vec rscale;
      PetscCall(KSPGetDM(mglevels[i + 1]->smoothd, &dmfine));
      PetscCall(KSPGetDM(mglevels[i]->smoothd, &dmcoarse));
      PetscCall(PCMGGetRestriction(pc, i + 1, &Restrict));
      PetscCall(PCMGGetRScale(pc, i + 1, &rscale));
      PetscCall(PCMGGetInjection(pc, i + 1, &Inject));
      PetscCall(DMRestrict(dmfine, Restrict, rscale, Inject, dmcoarse));
    }
  }

  if (!pc->setupcalled) {
    for (i = 0; i < n; i++) PetscCall(KSPSetFromOptions(mglevels[i]->smoothd));
    for (i = 1; i < n; i++) {
      if (mglevels[i]->smoothu && (mglevels[i]->smoothu != mglevels[i]->smoothd)) PetscCall(KSPSetFromOptions(mglevels[i]->smoothu));
      if (mglevels[i]->cr) PetscCall(KSPSetFromOptions(mglevels[i]->cr));
    }
    /* ensure that if either interpolation or restriction is set the other one is set */
    for (i = 1; i < n; i++) {
      PetscCall(PCMGGetInterpolation(pc, i, NULL));
      PetscCall(PCMGGetRestriction(pc, i, NULL));
    }
    /* create the work vectors (rhs b, solution x, residual r) on each level as needed */
    for (i = 0; i < n - 1; i++) {
      if (!mglevels[i]->b) {
        Vec *vec;
        PetscCall(KSPCreateVecs(mglevels[i]->smoothd, 1, &vec, 0, NULL));
        PetscCall(PCMGSetRhs(pc, i, *vec));
        PetscCall(VecDestroy(vec));
        PetscCall(PetscFree(vec));
      }
      if (!mglevels[i]->r && i) {
        PetscCall(VecDuplicate(mglevels[i]->b, &tvec));
        PetscCall(PCMGSetR(pc, i, tvec));
        PetscCall(VecDestroy(&tvec));
      }
      if (!mglevels[i]->x) {
        PetscCall(VecDuplicate(mglevels[i]->b, &tvec));
        PetscCall(PCMGSetX(pc, i, tvec));
        PetscCall(VecDestroy(&tvec));
      }
      if (doCR) {
        PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crx));
        PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crb));
        PetscCall(VecZeroEntries(mglevels[i]->crb));
      }
    }
    if (n != 1 && !mglevels[n - 1]->r) {
      /* PCMGSetR() on the finest level if user did not supply it */
      Vec *vec;
      PetscCall(KSPCreateVecs(mglevels[n - 1]->smoothd, 1, &vec, 0, NULL));
      PetscCall(PCMGSetR(pc, n - 1, *vec));
      PetscCall(VecDestroy(vec));
      PetscCall(PetscFree(vec));
    }
    if (doCR) {
      PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crx));
      PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crb));
      PetscCall(VecZeroEntries(mglevels[n - 1]->crb));
    }
  }

  if (pc->dm) {
    /* need to tell all the coarser levels to rebuild the matrix using the DM for that level */
    for (i = 0; i < n - 1; i++) {
      if (mglevels[i]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[i]->smoothd->setupstage = KSP_SETUP_NEWMATRIX;
    }
  }
  // We got here (PCSetUp_MG) because the matrix has changed, which means the smoother needs to be set up again (e.g.,
  // new diagonal for Jacobi). Setting it here allows it to be logged under PCSetUp rather than deep inside a PCApply.
  if (mglevels[n - 1]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[n - 1]->smoothd->setupstage = KSP_SETUP_NEWMATRIX;

  for (i = 1; i < n; i++) {
    if (mglevels[i]->smoothu == mglevels[i]->smoothd || mg->am == PC_MG_FULL || mg->am == PC_MG_KASKADE || mg->cyclesperpcapply > 1) {
      /* if doing only down then initial guess is zero */
      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE));
    }
    if (mglevels[i]->cr) PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
    if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
    PetscCall(KSPSetUp(mglevels[i]->smoothd));
    if (mglevels[i]->smoothd->reason) pc->failedreason = PC_SUBPC_ERROR;
    if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
    /* default residual computations use the level's system matrix */
    if (!mglevels[i]->residual) {
      Mat mat;
      PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL));
      PetscCall(PCMGSetResidual(pc, i, PCMGResidualDefault, mat));
    }
    if (!mglevels[i]->residualtranspose) {
      Mat mat;
      PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL));
      PetscCall(PCMGSetResidualTranspose(pc, i, PCMGResidualTransposeDefault, mat));
    }
  }
  for (i = 1; i < n; i++) {
    if (mglevels[i]->smoothu && mglevels[i]->smoothu != mglevels[i]->smoothd) {
      Mat downmat, downpmat;

      /* check if operators have been set for up, if not use down operators to set them */
      PetscCall(KSPGetOperatorsSet(mglevels[i]->smoothu, &opsset, NULL));
      if (!opsset) {
        PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat));
        PetscCall(KSPSetOperators(mglevels[i]->smoothu, downmat, downpmat));
      }

      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothu, PETSC_TRUE));
      if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
      PetscCall(KSPSetUp(mglevels[i]->smoothu));
      if (mglevels[i]->smoothu->reason) pc->failedreason = PC_SUBPC_ERROR;
      if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
    }
    if (mglevels[i]->cr) {
      Mat downmat, downpmat;

      /* check if operators have been set for up, if not use down operators to set them */
      PetscCall(KSPGetOperatorsSet(mglevels[i]->cr, &opsset, NULL));
      if (!opsset) {
        PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat));
        PetscCall(KSPSetOperators(mglevels[i]->cr, downmat, downpmat));
      }

      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
      if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
      PetscCall(KSPSetUp(mglevels[i]->cr));
      if (mglevels[i]->cr->reason) pc->failedreason = PC_SUBPC_ERROR;
      if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
    }
  }

  /* finally set up the coarse grid solver (level 0) */
  if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0));
  PetscCall(KSPSetUp(mglevels[0]->smoothd));
  if (mglevels[0]->smoothd->reason) pc->failedreason = PC_SUBPC_ERROR;
  if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0));

  /*
     Dump the interpolation/restriction matrices plus the
     Jacobian/stiffness on each level. This allows MATLAB users to
     easily check if the Galerkin condition A_c = R A_f R^T is satisfied.

     Only support one or the other at the same time.
  */
#if defined(PETSC_USE_SOCKET_VIEWER)
  PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_matlab", &dump, NULL));
  if (dump) viewer = PETSC_VIEWER_SOCKET_(PetscObjectComm((PetscObject)pc));
  dump = PETSC_FALSE;
#endif
  PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_binary", &dump, NULL));
  if (dump) viewer = PETSC_VIEWER_BINARY_(PetscObjectComm((PetscObject)pc));

  if (viewer) {
    for (i = 1; i < n; i++) PetscCall(MatView(mglevels[i]->restrct, viewer));
    for (i = 0; i < n; i++) {
      /* NOTE(review): pc is intentionally reused as the level's inner PC here; the outer pc is not needed afterwards */
      PetscCall(KSPGetPC(mglevels[i]->smoothd, &pc));
      PetscCall(MatView(pc->mat, viewer));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCMGGetLevels() for the PCMG type */
PetscErrorCode PCMGGetLevels_MG(PC pc, PetscInt *levels)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  *levels = mg->nlevels;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGGetLevels - Gets the number of levels to use with `PCMG`.

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. levels - the number of levels

  Level: advanced

.seealso: [](ch_ksp), `PCMG`, `PCMGSetLevels()`
@*/
PetscErrorCode PCMGGetLevels(PC pc, PetscInt *levels)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(levels, 2);
  *levels = 0;
  PetscTryMethod(pc, "PCMGGetLevels_C", (PC, PetscInt *), (pc, levels));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGGetGridComplexity - compute operator and grid complexity of the `PCMG` hierarchy

  Input Parameter:
.
pc - the preconditioner context 1271 1272 Output Parameters: 1273 + gc - grid complexity = sum_i(n_i) / n_0 1274 - oc - operator complexity = sum_i(nnz_i) / nnz_0 1275 1276 Level: advanced 1277 1278 Note: 1279 This is often call the operator complexity in multigrid literature 1280 1281 .seealso: [](ch_ksp), `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()` 1282 @*/ 1283 PetscErrorCode PCMGGetGridComplexity(PC pc, PetscReal *gc, PetscReal *oc) 1284 { 1285 PC_MG *mg = (PC_MG *)pc->data; 1286 PC_MG_Levels **mglevels = mg->levels; 1287 PetscInt lev, N; 1288 PetscLogDouble nnz0 = 0, sgc = 0, soc = 0, n0 = 0; 1289 MatInfo info; 1290 1291 PetscFunctionBegin; 1292 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1293 if (gc) PetscAssertPointer(gc, 2); 1294 if (oc) PetscAssertPointer(oc, 3); 1295 if (!pc->setupcalled) { 1296 if (gc) *gc = 0; 1297 if (oc) *oc = 0; 1298 PetscFunctionReturn(PETSC_SUCCESS); 1299 } 1300 PetscCheck(mg->nlevels > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MG has no levels"); 1301 for (lev = 0; lev < mg->nlevels; lev++) { 1302 Mat dB; 1303 PetscCall(KSPGetOperators(mglevels[lev]->smoothd, NULL, &dB)); 1304 PetscCall(MatGetInfo(dB, MAT_GLOBAL_SUM, &info)); /* global reduction */ 1305 PetscCall(MatGetSize(dB, &N, NULL)); 1306 sgc += N; 1307 soc += info.nz_used; 1308 if (lev == mg->nlevels - 1) { 1309 nnz0 = info.nz_used; 1310 n0 = N; 1311 } 1312 } 1313 PetscCheck(n0 > 0 && gc, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number for grid points on finest level is not available"); 1314 *gc = (PetscReal)(sgc / n0); 1315 if (nnz0 > 0 && oc) *oc = (PetscReal)(soc / nnz0); 1316 PetscFunctionReturn(PETSC_SUCCESS); 1317 } 1318 1319 /*@ 1320 PCMGSetType - Determines the form of multigrid to use, either 1321 multiplicative, additive, full, or the Kaskade algorithm. 
1322 1323 Logically Collective 1324 1325 Input Parameters: 1326 + pc - the preconditioner context 1327 - form - multigrid form, one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, `PC_MG_KASKADE` 1328 1329 Options Database Key: 1330 . -pc_mg_type <form> - Sets <form>, one of multiplicative, additive, full, kaskade 1331 1332 Level: advanced 1333 1334 .seealso: [](ch_ksp), `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGGetType()`, `PCMGCycleType` 1335 @*/ 1336 PetscErrorCode PCMGSetType(PC pc, PCMGType form) 1337 { 1338 PC_MG *mg = (PC_MG *)pc->data; 1339 1340 PetscFunctionBegin; 1341 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1342 PetscValidLogicalCollectiveEnum(pc, form, 2); 1343 mg->am = form; 1344 if (form == PC_MG_MULTIPLICATIVE) pc->ops->applyrichardson = PCApplyRichardson_MG; 1345 else pc->ops->applyrichardson = NULL; 1346 PetscFunctionReturn(PETSC_SUCCESS); 1347 } 1348 1349 /*@ 1350 PCMGGetType - Finds the form of multigrid the `PCMG` is using multiplicative, additive, full, or the Kaskade algorithm. 1351 1352 Logically Collective 1353 1354 Input Parameter: 1355 . pc - the preconditioner context 1356 1357 Output Parameter: 1358 . type - one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, `PC_MG_KASKADE`, `PCMGCycleType` 1359 1360 Level: advanced 1361 1362 .seealso: [](ch_ksp), `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGSetType()` 1363 @*/ 1364 PetscErrorCode PCMGGetType(PC pc, PCMGType *type) 1365 { 1366 PC_MG *mg = (PC_MG *)pc->data; 1367 1368 PetscFunctionBegin; 1369 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1370 *type = mg->am; 1371 PetscFunctionReturn(PETSC_SUCCESS); 1372 } 1373 1374 /*@ 1375 PCMGSetCycleType - Sets the type cycles to use. Use `PCMGSetCycleTypeOnLevel()` for more 1376 complicated cycling. 1377 1378 Logically Collective 1379 1380 Input Parameters: 1381 + pc - the multigrid context 1382 - n - either `PC_MG_CYCLE_V` or `PC_MG_CYCLE_W` 1383 1384 Options Database Key: 1385 . 
-pc_mg_cycle_type <v,w> - provide the cycle desired

  Level: advanced

.seealso: [](ch_ksp), `PCMG`, `PCMGSetCycleTypeOnLevel()`, `PCMGType`, `PCMGCycleType`
@*/
PetscErrorCode PCMGSetCycleType(PC pc, PCMGCycleType n)
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PetscInt       i, levels;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, n, 2);
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
  levels = mglevels[0]->levels;
  /* apply the same cycle type on every level; PCMGSetCycleTypeOnLevel() overrides per level */
  for (i = 0; i < levels; i++) mglevels[i]->cycles = n;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGMultiplicativeSetCycles - Sets the number of cycles to use for each preconditioner step
  of multigrid when `PCMGType` is `PC_MG_MULTIPLICATIVE`

  Logically Collective

  Input Parameters:
+ pc - the multigrid context
- n - number of cycles (default is 1)

  Options Database Key:
.
-pc_mg_multiplicative_cycles n - set the number of cycles 1418 1419 Level: advanced 1420 1421 Note: 1422 This is not associated with setting a v or w cycle, that is set with `PCMGSetCycleType()` 1423 1424 .seealso: [](ch_ksp), `PCMGSetCycleTypeOnLevel()`, `PCMGSetCycleType()`, `PCMGCycleType`, `PCMGType` 1425 @*/ 1426 PetscErrorCode PCMGMultiplicativeSetCycles(PC pc, PetscInt n) 1427 { 1428 PC_MG *mg = (PC_MG *)pc->data; 1429 1430 PetscFunctionBegin; 1431 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1432 PetscValidLogicalCollectiveInt(pc, n, 2); 1433 mg->cyclesperpcapply = n; 1434 PetscFunctionReturn(PETSC_SUCCESS); 1435 } 1436 1437 static PetscErrorCode PCMGSetGalerkin_MG(PC pc, PCMGGalerkinType use) 1438 { 1439 PC_MG *mg = (PC_MG *)pc->data; 1440 1441 PetscFunctionBegin; 1442 mg->galerkin = use; 1443 PetscFunctionReturn(PETSC_SUCCESS); 1444 } 1445 1446 /*@ 1447 PCMGSetGalerkin - Causes the coarser grid matrices to be computed from the 1448 finest grid via the Galerkin process: A_i-1 = r_i * A_i * p_i 1449 1450 Logically Collective 1451 1452 Input Parameters: 1453 + pc - the multigrid context 1454 - use - one of `PC_MG_GALERKIN_BOTH`, `PC_MG_GALERKIN_PMAT`, `PC_MG_GALERKIN_MAT`, or `PC_MG_GALERKIN_NONE` 1455 1456 Options Database Key: 1457 . -pc_mg_galerkin <both,pmat,mat,none> - set the matrices to form via the Galerkin process 1458 1459 Level: intermediate 1460 1461 Note: 1462 Some codes that use `PCMG` such as `PCGAMG` use Galerkin internally while constructing the hierarchy and thus do not 1463 use the `PCMG` construction of the coarser grids. 
.seealso: [](ch_ksp), `PCMG`, `PCMGGetGalerkin()`, `PCMGGalerkinType`
@*/
PetscErrorCode PCMGSetGalerkin(PC pc, PCMGGalerkinType use)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* dispatch to the type-specific implementation (no-op if the PC is not a PCMG) */
  PetscTryMethod(pc, "PCMGSetGalerkin_C", (PC, PCMGGalerkinType), (pc, use));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGGetGalerkin - Checks if Galerkin multigrid is being used, i.e. A_i-1 = r_i * A_i * p_i

  Not Collective

  Input Parameter:
. pc - the multigrid context

  Output Parameter:
. galerkin - one of `PC_MG_GALERKIN_BOTH`,`PC_MG_GALERKIN_PMAT`,`PC_MG_GALERKIN_MAT`, `PC_MG_GALERKIN_NONE`, or `PC_MG_GALERKIN_EXTERNAL`

  Level: intermediate

.seealso: [](ch_ksp), `PCMG`, `PCMGSetGalerkin()`, `PCMGGalerkinType`
@*/
PetscErrorCode PCMGGetGalerkin(PC pc, PCMGGalerkinType *galerkin)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  *galerkin = mg->galerkin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCMGSetAdaptInterpolation() for PCMG: toggles adaptive interpolation */
static PetscErrorCode PCMGSetAdaptInterpolation_MG(PC pc, PetscBool adapt)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  mg->adaptInterpolation = adapt;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCMGGetAdaptInterpolation() for PCMG */
static PetscErrorCode PCMGGetAdaptInterpolation_MG(PC pc, PetscBool *adapt)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  *adapt = mg->adaptInterpolation;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCMGSetAdaptCoarseSpaceType() for PCMG: any type other than
   PCMG_ADAPT_NONE also enables adaptive interpolation (and, below, Galerkin coarsening) */
static PetscErrorCode PCMGSetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType ctype)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  mg->adaptInterpolation = ctype != PCMG_ADAPT_NONE ?
PETSC_TRUE : PETSC_FALSE; 1524 mg->coarseSpaceType = ctype; 1525 PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_BOTH)); 1526 PetscFunctionReturn(PETSC_SUCCESS); 1527 } 1528 1529 static PetscErrorCode PCMGGetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType *ctype) 1530 { 1531 PC_MG *mg = (PC_MG *)pc->data; 1532 1533 PetscFunctionBegin; 1534 *ctype = mg->coarseSpaceType; 1535 PetscFunctionReturn(PETSC_SUCCESS); 1536 } 1537 1538 static PetscErrorCode PCMGSetAdaptCR_MG(PC pc, PetscBool cr) 1539 { 1540 PC_MG *mg = (PC_MG *)pc->data; 1541 1542 PetscFunctionBegin; 1543 mg->compatibleRelaxation = cr; 1544 PetscFunctionReturn(PETSC_SUCCESS); 1545 } 1546 1547 static PetscErrorCode PCMGGetAdaptCR_MG(PC pc, PetscBool *cr) 1548 { 1549 PC_MG *mg = (PC_MG *)pc->data; 1550 1551 PetscFunctionBegin; 1552 *cr = mg->compatibleRelaxation; 1553 PetscFunctionReturn(PETSC_SUCCESS); 1554 } 1555 1556 /*@ 1557 PCMGSetAdaptCoarseSpaceType - Set the type of adaptive coarse space. 1558 1559 Adapts or creates the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated. 1560 1561 Logically Collective 1562 1563 Input Parameters: 1564 + pc - the multigrid context 1565 - ctype - the type of coarse space 1566 1567 Options Database Keys: 1568 + -pc_mg_adapt_interp_n <int> - The number of modes to use 1569 - -pc_mg_adapt_interp_coarse_space <type> - The type of coarse space: none, `polynomial`, `harmonic`, `eigenvector`, `generalized_eigenvector`, `gdsw` 1570 1571 Level: intermediate 1572 1573 Note: 1574 Requires a `DM` with specific functionality be attached to the `PC`. 
.seealso: [](ch_ksp), `PCMG`, `PCMGCoarseSpaceType`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()`, `DM`
@*/
PetscErrorCode PCMGSetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType ctype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, ctype, 2);
  /* dispatch to the implementation composed on this PC (no-op if the PC is not a PCMG) */
  PetscTryMethod(pc, "PCMGSetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType), (pc, ctype));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGGetAdaptCoarseSpaceType - Get the type of adaptive coarse space.

  Not Collective

  Input Parameter:
. pc - the multigrid context

  Output Parameter:
. ctype - the type of coarse space

  Level: intermediate

.seealso: [](ch_ksp), `PCMG`, `PCMGCoarseSpaceType`, `PCMGSetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()`
@*/
PetscErrorCode PCMGGetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType *ctype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(ctype, 2);
  PetscUseMethod(pc, "PCMGGetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType *), (pc, ctype));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* MATT: REMOVE? */
/*@
  PCMGSetAdaptInterpolation - Adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated.

  Logically Collective

  Input Parameters:
+ pc    - the multigrid context
- adapt - flag for adaptation of the interpolator

  Options Database Keys:
+ -pc_mg_adapt_interp - Turn on adaptation
.
-pc_mg_adapt_interp_n <int>             - The number of modes to use, should be divisible by dimension
- -pc_mg_adapt_interp_coarse_space <type> - The type of coarse space: polynomial, harmonic, eigenvector, generalized_eigenvector

  Level: intermediate

.seealso: [](ch_ksp), `PCMG`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
@*/
PetscErrorCode PCMGSetAdaptInterpolation(PC pc, PetscBool adapt)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* dispatch to the implementation composed on this PC (no-op if the PC is not a PCMG) */
  PetscTryMethod(pc, "PCMGSetAdaptInterpolation_C", (PC, PetscBool), (pc, adapt));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGGetAdaptInterpolation - Get the flag to adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh,
  and thus accurately interpolated.

  Not Collective

  Input Parameter:
. pc - the multigrid context

  Output Parameter:
. adapt - flag for adaptation of the interpolator

  Level: intermediate

.seealso: [](ch_ksp), `PCMG`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
@*/
PetscErrorCode PCMGGetAdaptInterpolation(PC pc, PetscBool *adapt)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(adapt, 2);
  PetscUseMethod(pc, "PCMGGetAdaptInterpolation_C", (PC, PetscBool *), (pc, adapt));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGSetAdaptCR - Monitor the coarse space quality using an auxiliary solve with compatible relaxation.

  Logically Collective

  Input Parameters:
+ pc - the multigrid context
- cr - flag for compatible relaxation

  Options Database Key:
.
-pc_mg_adapt_cr - Turn on compatible relaxation

  Level: intermediate

.seealso: [](ch_ksp), `PCMG`, `PCMGGetAdaptCR()`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
@*/
PetscErrorCode PCMGSetAdaptCR(PC pc, PetscBool cr)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* dispatch to the implementation composed on this PC (no-op if the PC is not a PCMG) */
  PetscTryMethod(pc, "PCMGSetAdaptCR_C", (PC, PetscBool), (pc, cr));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGGetAdaptCR - Get the flag to monitor coarse space quality using an auxiliary solve with compatible relaxation.

  Not Collective

  Input Parameter:
. pc - the multigrid context

  Output Parameter:
. cr - flag for compatible relaxation

  Level: intermediate

.seealso: [](ch_ksp), `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
@*/
PetscErrorCode PCMGGetAdaptCR(PC pc, PetscBool *cr)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(cr, 2);
  PetscUseMethod(pc, "PCMGGetAdaptCR_C", (PC, PetscBool *), (pc, cr));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGSetNumberSmooth - Sets the number of pre and post-smoothing steps to use
  on all levels. Use `PCMGSetDistinctSmoothUp()` to create separate up and down smoothers if you want different numbers of
  pre- and post-smoothing steps.

  Logically Collective

  Input Parameters:
+ pc - the multigrid context
- n  - the number of smoothing steps

  Options Database Key:
. -mg_levels_ksp_max_it <n> - Sets number of pre and post-smoothing steps

  Level: advanced

  Note:
  This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid.
1729 1730 .seealso: [](ch_ksp), `PCMG`, `PCMGSetDistinctSmoothUp()` 1731 @*/ 1732 PetscErrorCode PCMGSetNumberSmooth(PC pc, PetscInt n) 1733 { 1734 PC_MG *mg = (PC_MG *)pc->data; 1735 PC_MG_Levels **mglevels = mg->levels; 1736 PetscInt i, levels; 1737 1738 PetscFunctionBegin; 1739 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1740 PetscValidLogicalCollectiveInt(pc, n, 2); 1741 PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling"); 1742 levels = mglevels[0]->levels; 1743 1744 for (i = 1; i < levels; i++) { 1745 PetscCall(KSPSetTolerances(mglevels[i]->smoothu, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, n)); 1746 PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, n)); 1747 mg->default_smoothu = n; 1748 mg->default_smoothd = n; 1749 } 1750 PetscFunctionReturn(PETSC_SUCCESS); 1751 } 1752 1753 /*@ 1754 PCMGSetDistinctSmoothUp - sets the up (post) smoother to be a separate `KSP` from the down (pre) smoother on all levels 1755 and adds the suffix _up to the options name 1756 1757 Logically Collective 1758 1759 Input Parameter: 1760 . pc - the preconditioner context 1761 1762 Options Database Key: 1763 . -pc_mg_distinct_smoothup <bool> - use distinct smoothing objects 1764 1765 Level: advanced 1766 1767 Note: 1768 This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid. 
.seealso: [](ch_ksp), `PCMG`, `PCMGSetNumberSmooth()`
@*/
PetscErrorCode PCMGSetDistinctSmoothUp(PC pc)
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PetscInt       i, levels;
  KSP            subksp;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
  levels = mglevels[0]->levels;

  /* level 0 (coarsest) is skipped: no separate up-smoother there */
  for (i = 1; i < levels; i++) {
    const char *prefix = NULL;
    /* make sure smoother up and down are different */
    PetscCall(PCMGGetSmootherUp(pc, i, &subksp));
    /* give the up smoother the down smoother's prefix plus "up_" so it is configurable separately */
    PetscCall(KSPGetOptionsPrefix(mglevels[i]->smoothd, &prefix));
    PetscCall(KSPSetOptionsPrefix(subksp, prefix));
    PetscCall(KSPAppendOptionsPrefix(subksp, "up_"));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* No new matrices are created, and the coarse operator matrices are the references to the original ones */
static PetscErrorCode PCGetInterpolations_MG(PC pc, PetscInt *num_levels, Mat *interpolations[])
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  Mat           *mat;
  PetscInt       l;

  PetscFunctionBegin;
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
  PetscCall(PetscMalloc1(mg->nlevels, &mat));
  /* mat[0 .. nlevels-2] hold the nlevels-1 interpolation operators; each is reference-counted,
     so the caller owns one reference per entry (and frees the array) */
  for (l = 1; l < mg->nlevels; l++) {
    mat[l - 1] = mglevels[l]->interpolate;
    PetscCall(PetscObjectReference((PetscObject)mat[l - 1]));
  }
  *num_levels     = mg->nlevels;
  *interpolations = mat;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* No new matrices are created, and the coarse operator matrices are the references to the original ones */
static PetscErrorCode PCGetCoarseOperators_MG(PC pc, PetscInt *num_levels, Mat *coarseOperators[])
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PetscInt       l;
  Mat           *mat;

  PetscFunctionBegin;
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
  PetscCall(PetscMalloc1(mg->nlevels, &mat));
  /* mat[0 .. nlevels-2] hold the preconditioning matrices of the nlevels-1 coarser smoothers,
     each with a new reference for the caller */
  for (l = 0; l < mg->nlevels - 1; l++) {
    PetscCall(KSPGetOperators(mglevels[l]->smoothd, NULL, &mat[l]));
    PetscCall(PetscObjectReference((PetscObject)mat[l]));
  }
  *num_levels      = mg->nlevels;
  *coarseOperators = mat;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCMGRegisterCoarseSpaceConstructor - Adds a method to the `PCMG` package for coarse space construction.

  Not Collective, No Fortran Support

  Input Parameters:
+ name     - name of the constructor
- function - constructor routine

  Calling sequence of `function`:
+ pc        - The `PC` object
. l         - The multigrid level, 0 is the coarse level
. dm        - The `DM` for this level
. smooth    - The level smoother
. Nc        - The size of the coarse space
. initGuess - Basis for an initial guess for the space
- coarseSp  - A basis for the computed coarse space

  Level: advanced

  Developer Notes:
  How come this is not used by `PCGAMG`?

.seealso: [](ch_ksp), `PCMG`, `PCMGGetCoarseSpaceConstructor()`, `PCRegister()`
@*/
PetscErrorCode PCMGRegisterCoarseSpaceConstructor(const char name[], PetscErrorCode (*function)(PC pc, PetscInt l, DM dm, KSP smooth, PetscInt Nc, Mat initGuess, Mat *coarseSp))
{
  PetscFunctionBegin;
  PetscCall(PCInitializePackage());
  /* register into the global function list declared at the top of this file */
  PetscCall(PetscFunctionListAdd(&PCMGCoarseList, name, function));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCMGGetCoarseSpaceConstructor - Returns the given coarse space construction method.

  Not Collective, No Fortran Support

  Input Parameter:
.
name - name of the constructor

  Output Parameter:
. function - constructor routine

  Level: advanced

.seealso: [](ch_ksp), `PCMG`, `PCMGRegisterCoarseSpaceConstructor()`, `PCRegister()`
@*/
PetscErrorCode PCMGGetCoarseSpaceConstructor(const char name[], PetscErrorCode (**function)(PC, PetscInt, DM, KSP, PetscInt, Mat, Mat *))
{
  PetscFunctionBegin;
  /* look up in the global list; *function is set to NULL if the name is not registered */
  PetscCall(PetscFunctionListFind(PCMGCoarseList, name, function));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  PCMG - Use multigrid preconditioning. This preconditioner requires you provide additional
  information about the coarser grid matrices and restriction/interpolation operators.

  Options Database Keys:
+ -pc_mg_levels <nlevels>                            - number of levels including finest
. -pc_mg_cycle_type <v,w>                            - provide the cycle desired
. -pc_mg_type <additive,multiplicative,full,kaskade> - multiplicative is the default
. -pc_mg_log                                         - log information about time spent on each level of the solver
. -pc_mg_distinct_smoothup                           - configure up (after interpolation) and down (before restriction) smoothers separately (with different options prefixes)
. -pc_mg_galerkin <both,pmat,mat,none>               - use Galerkin process to compute coarser operators, i.e. Acoarse = R A R'
. -pc_mg_multiplicative_cycles                       - number of cycles to use as the preconditioner (defaults to 1)
. -pc_mg_dump_matlab                                 - dumps the matrices for each level and the restriction/interpolation matrices
                                                       to a `PETSCVIEWERSOCKET` for reading from MATLAB.
- -pc_mg_dump_binary                                 - dumps the matrices for each level and the restriction/interpolation matrices
                                                       to the binary output file called binaryoutput

  Level: intermediate

  Notes:
  The Krylov solver (if any) and preconditioner (smoother) and their parameters are controlled from the options database with the standard
  options database keywords prefixed with `-mg_levels_` to affect all the levels but the coarsest, which is controlled with `-mg_coarse_`,
  and the finest where `-mg_fine_` can override `-mg_levels_`. One can set different preconditioners etc on specific levels with the prefix
  `-mg_levels_n_` where `n` is the level number (zero being the coarse level). For example
.vb
  -mg_levels_ksp_type gmres -mg_levels_pc_type bjacobi -mg_coarse_pc_type svd -mg_levels_2_pc_type sor
.ve
  These options also work for controlling the smoothers etc inside `PCGAMG`

  If one uses a Krylov method such as `KSPGMRES` or `KSPCG` as the smoother then one must use `KSPFGMRES`, `KSPGCR`, or `KSPRICHARDSON` as the outer Krylov method

  When run with a single level the smoother options are used on that level NOT the coarse grid solver options

  When run with `KSPRICHARDSON` the convergence test changes slightly if monitor is turned on. The iteration count may change slightly. This
  is because without monitoring the residual norm is computed WITHIN each multigrid cycle on the finest level after the pre-smoothing
  (because the residual has just been computed for the multigrid algorithm and is hence available for free) while with monitoring the
  residual is computed at the end of each cycle.

.seealso: [](sec_mg), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCMGType`, `PCEXOTIC`, `PCGAMG`, `PCML`, `PCHYPRE`
          `PCMGSetLevels()`, `PCMGGetLevels()`, `PCMGSetType()`, `PCMGSetCycleType()`,
          `PCMGSetDistinctSmoothUp()`, `PCMGGetCoarseSolve()`, `PCMGSetResidual()`, `PCMGSetInterpolation()`,
          `PCMGSetRestriction()`, `PCMGGetSmoother()`, `PCMGGetSmootherUp()`, `PCMGGetSmootherDown()`,
          `PCMGSetCycleTypeOnLevel()`, `PCMGSetRhs()`, `PCMGSetX()`, `PCMGSetR()`,
          `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
M*/

PETSC_EXTERN PetscErrorCode PCCreate_MG(PC pc)
{
  PC_MG *mg;

  PetscFunctionBegin;
  /* allocate and zero the implementation context, then set non-zero defaults */
  PetscCall(PetscNew(&mg));
  pc->data               = mg;
  mg->nlevels            = -1; /* -1 means levels not yet set */
  mg->am                 = PC_MG_MULTIPLICATIVE;
  mg->galerkin           = PC_MG_GALERKIN_NONE;
  mg->adaptInterpolation = PETSC_FALSE;
  mg->Nc                 = -1;
  mg->eigenvalue         = -1;

  pc->useAmat = PETSC_TRUE;

  pc->ops->apply          = PCApply_MG;
  pc->ops->applytranspose = PCApplyTranspose_MG;
  pc->ops->matapply       = PCMatApply_MG;
  pc->ops->setup          = PCSetUp_MG;
  pc->ops->reset          = PCReset_MG;
  pc->ops->destroy        = PCDestroy_MG;
  pc->ops->setfromoptions = PCSetFromOptions_MG;
  pc->ops->view           = PCView_MG;

  /* mg->eigenvalue is used as a composed-data id after registration */
  PetscCall(PetscObjectComposedDataRegister(&mg->eigenvalue));
  /* compose the implementation routines so the generic PCMG* API can dispatch to them */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", PCMGSetGalerkin_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", PCMGGetLevels_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", PCMGSetLevels_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", PCGetInterpolations_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", PCGetCoarseOperators_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", PCMGSetAdaptInterpolation_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", PCMGGetAdaptInterpolation_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", PCMGSetAdaptCR_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", PCMGGetAdaptCR_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", PCMGSetAdaptCoarseSpaceType_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", PCMGGetAdaptCoarseSpaceType_MG));
  PetscFunctionReturn(PETSC_SUCCESS);
}