/*
 Defines the multigrid preconditioner interface.
*/
#include <petsc/private/pcmgimpl.h> /*I "petscksp.h" I*/
#include <petsc/private/kspimpl.h>
#include <petscdm.h>
PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC, PetscBool *);

/*
 Contains the list of registered coarse space construction routines
*/
PetscFunctionList PCMGCoarseList = NULL;

/*
  PCMGMCycle_Private - Recursively runs one multiplicative multigrid cycle
  (V- or W-cycle, per mglevels->cycles) starting at the level *mglevelsin.

  Input Parameters:
+ pc         - the PCMG preconditioner context
. mglevelsin - pointer into the array of level structures at the current level;
               (mglevelsin - 1) is the next coarser level
. transpose  - if PETSC_TRUE, apply the transpose of the cycle: smoothers use
               KSPSolveTranspose() and the roles of restrct/interpolate swap
. matapp     - if PETSC_TRUE, operate on Mats of right-hand sides (B/X fields)
               instead of single Vecs (b/x fields); transpose is not supported
               in this mode
- reason     - optional Richardson convergence reason; only consulted on the
               finest level when a tolerance (mg->ttol) has been set

  Note: level 0 is the coarsest grid (no residual/restrict/recursion there).
*/
PetscErrorCode PCMGMCycle_Private(PC pc, PC_MG_Levels **mglevelsin, PetscBool transpose, PetscBool matapp, PCRichardsonConvergedReason *reason) {
  PC_MG        *mg = (PC_MG *)pc->data;
  PC_MG_Levels *mgc, *mglevels = *mglevelsin;
  /* on level 1 the child is the coarsest (direct) solve, so it is visited once regardless of cycle type */
  PetscInt cycles = (mglevels->level == 1) ? 1 : (PetscInt)mglevels->cycles;

  PetscFunctionBegin;
  if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0));
  if (!transpose) {
    if (matapp) {
      PetscCall(KSPMatSolve(mglevels->smoothd, mglevels->B, mglevels->X)); /* pre-smooth */
      PetscCall(KSPCheckSolve(mglevels->smoothd, pc, NULL));
    } else {
      PetscCall(KSPSolve(mglevels->smoothd, mglevels->b, mglevels->x)); /* pre-smooth */
      PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x));
    }
  } else {
    PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
    PetscCall(KSPSolveTranspose(mglevels->smoothu, mglevels->b, mglevels->x)); /* transpose of post-smooth */
    PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x));
  }
  if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0));
  if (mglevels->level) { /* not the coarsest grid */
    if (mglevels->eventresidual) PetscCall(PetscLogEventBegin(mglevels->eventresidual, 0, 0, 0, 0));
    /* lazily create the residual Mat for the multiple-RHS case */
    if (matapp && !mglevels->R) PetscCall(MatDuplicate(mglevels->B, MAT_DO_NOT_COPY_VALUES, &mglevels->R));
    if (!transpose) {
      if (matapp) PetscCall((*mglevels->matresidual)(mglevels->A, mglevels->B, mglevels->X, mglevels->R));
      else PetscCall((*mglevels->residual)(mglevels->A, mglevels->b, mglevels->x, mglevels->r));
    } else {
      if (matapp) PetscCall((*mglevels->matresidualtranspose)(mglevels->A, mglevels->B, mglevels->X, mglevels->R));
      else PetscCall((*mglevels->residualtranspose)(mglevels->A, mglevels->b, mglevels->x, mglevels->r));
    }
    if (mglevels->eventresidual) PetscCall(PetscLogEventEnd(mglevels->eventresidual, 0, 0, 0, 0));

    /* if on finest level and have convergence criteria set */
    if (mglevels->level == mglevels->levels - 1 && mg->ttol && reason) {
      PetscReal rnorm;
      PetscCall(VecNorm(mglevels->r, NORM_2, &rnorm));
      if (rnorm <= mg->ttol) {
        if (rnorm < mg->abstol) {
          *reason = PCRICHARDSON_CONVERGED_ATOL;
          PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than absolute tolerance %g\n", (double)rnorm, (double)mg->abstol));
        } else {
          *reason = PCRICHARDSON_CONVERGED_RTOL;
          PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than relative tolerance times initial residual norm %g\n", (double)rnorm, (double)mg->ttol));
        }
        PetscFunctionReturn(0);
      }
    }

    mgc = *(mglevelsin - 1); /* next coarser level */
    if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0));
    if (!transpose) {
      if (matapp) PetscCall(MatMatRestrict(mglevels->restrct, mglevels->R, &mgc->B));
      else PetscCall(MatRestrict(mglevels->restrct, mglevels->r, mgc->b));
    } else {
      /* transposed cycle: restrict with the interpolation operator */
      if (matapp) PetscCall(MatMatRestrict(mglevels->interpolate, mglevels->R, &mgc->B));
      else PetscCall(MatRestrict(mglevels->interpolate, mglevels->r, mgc->b));
    }
    if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0));
    /* zero the coarse-grid correction before recursing */
    if (matapp) {
      if (!mgc->X) {
        PetscCall(MatDuplicate(mgc->B, MAT_DO_NOT_COPY_VALUES, &mgc->X));
      } else {
        PetscCall(MatZeroEntries(mgc->X));
      }
    } else {
      PetscCall(VecZeroEntries(mgc->x));
    }
    while (cycles--) PetscCall(PCMGMCycle_Private(pc, mglevelsin - 1, transpose, matapp, reason));
    if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0));
    if (!transpose) {
      if (matapp) PetscCall(MatMatInterpolateAdd(mglevels->interpolate, mgc->X, mglevels->X, &mglevels->X));
      else PetscCall(MatInterpolateAdd(mglevels->interpolate, mgc->x, mglevels->x, mglevels->x));
    } else {
      /* transposed cycle: interpolate with the restriction operator */
      PetscCall(MatInterpolateAdd(mglevels->restrct, mgc->x, mglevels->x, mglevels->x));
    }
    if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0));
    if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0));
    if (!transpose) {
      if (matapp) {
        PetscCall(KSPMatSolve(mglevels->smoothu, mglevels->B, mglevels->X)); /* post smooth */
        PetscCall(KSPCheckSolve(mglevels->smoothu, pc, NULL));
      } else {
        PetscCall(KSPSolve(mglevels->smoothu, mglevels->b, mglevels->x)); /* post smooth */
        PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x));
      }
    } else {
      PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
      PetscCall(KSPSolveTranspose(mglevels->smoothd, mglevels->b, mglevels->x)); /* post smooth */
      PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x));
    }
    if (mglevels->cr) {
      PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
      /* TODO Turn on copy and turn off noisy if we have an exact solution
      PetscCall(VecCopy(mglevels->x, mglevels->crx));
      PetscCall(VecCopy(mglevels->b, mglevels->crb)); */
      PetscCall(KSPSetNoisy_Private(mglevels->crx));
      PetscCall(KSPSolve(mglevels->cr, mglevels->crb, mglevels->crx)); /* compatible relaxation */
      PetscCall(KSPCheckSolve(mglevels->cr, pc, mglevels->crx));
    }
    if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0));
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode
PCApplyRichardson_MG(PC pc, Vec b, Vec x, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason) {
  /* Runs up to `its` full multiplicative MG cycles as a Richardson iteration
     on the original system (not just as a preconditioner application).
     w is scratch for the initial residual; dtol is stored but divergence is
     not tested here. On exit *outits holds the loop index reached and
     *reason the convergence reason (CONVERGED_ITS if no tolerance hit). */
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PC             tpc;
  PetscBool      changeu, changed;
  PetscInt       levels = mglevels[0]->levels, i;

  PetscFunctionBegin;
  /* When the DM is supplying the matrix then it will not exist until here */
  for (i = 0; i < levels; i++) {
    if (!mglevels[i]->A) {
      PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL));
      PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A));
    }
  }

  /* ask the finest-level smoothers whether their PC mutates the rhs in PCPreSolve() */
  PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc));
  PetscCall(PCPreSolveChangeRHS(tpc, &changed));
  PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc));
  PetscCall(PCPreSolveChangeRHS(tpc, &changeu));
  if (!changed && !changeu) {
    /* safe to alias the caller's rhs directly (unaliased again before return) */
    PetscCall(VecDestroy(&mglevels[levels - 1]->b));
    mglevels[levels - 1]->b = b;
  } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */
    if (!mglevels[levels - 1]->b) {
      Vec *vec;

      PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL));
      mglevels[levels - 1]->b = *vec;
      PetscCall(PetscFree(vec));
    }
    PetscCall(VecCopy(b, mglevels[levels - 1]->b));
  }
  mglevels[levels - 1]->x = x;

  mg->rtol   = rtol;
  mg->abstol = abstol;
  mg->dtol   = dtol;
  if (rtol) {
    /* compute initial residual norm for relative convergence test */
    PetscReal rnorm;
    if (zeroguess) {
      PetscCall(VecNorm(b, NORM_2, &rnorm)); /* residual of zero guess is just b */
    } else {
      PetscCall((*mglevels[levels - 1]->residual)(mglevels[levels - 1]->A, b, x, w));
      PetscCall(VecNorm(w, NORM_2, &rnorm));
    }
    mg->ttol = PetscMax(rtol * rnorm, abstol);
  } else if (abstol) mg->ttol = abstol;
  else mg->ttol = 0.0;

  /* since smoother is applied to full system, not just residual we need to make sure that smoothers don't
     stop prematurely due to small residual */
  for (i = 1; i < levels; i++) {
    PetscCall(KSPSetTolerances(mglevels[i]->smoothu, 0, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
    if (mglevels[i]->smoothu != mglevels[i]->smoothd) {
      /* For Richardson the initial guess is nonzero since it is solving in each cycle the original system not just applying as a preconditioner */
      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE));
      PetscCall(KSPSetTolerances(mglevels[i]->smoothd, 0, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
    }
  }

  *reason = (PCRichardsonConvergedReason)0;
  for (i = 0; i < its; i++) {
    PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, PETSC_FALSE, PETSC_FALSE, reason));
    if (*reason) break;
  }
  if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
  *outits = i;
  /* undo the rhs aliasing so we do not destroy the caller's vector later */
  if (!changed && !changeu) mglevels[levels - 1]->b = NULL;
  PetscFunctionReturn(0);
}

/*
  PCReset_MG - Frees the per-level work vectors/matrices and the transfer
  operators, and resets (but does not destroy) the level KSP solvers.
*/
PetscErrorCode PCReset_MG(PC pc) {
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PetscInt       i, n;

  PetscFunctionBegin;
  if (mglevels) {
    n = mglevels[0]->levels;
    for (i = 0; i < n - 1; i++) {
      /* r, R, and the transfer operators live on the finer of each level pair (index i+1) */
      PetscCall(VecDestroy(&mglevels[i + 1]->r));
      PetscCall(VecDestroy(&mglevels[i]->b));
      PetscCall(VecDestroy(&mglevels[i]->x));
      PetscCall(MatDestroy(&mglevels[i + 1]->R));
      PetscCall(MatDestroy(&mglevels[i]->B));
      PetscCall(MatDestroy(&mglevels[i]->X));
      PetscCall(VecDestroy(&mglevels[i]->crx));
      PetscCall(VecDestroy(&mglevels[i]->crb));
      PetscCall(MatDestroy(&mglevels[i + 1]->restrct));
      PetscCall(MatDestroy(&mglevels[i + 1]->interpolate));
      PetscCall(MatDestroy(&mglevels[i + 1]->inject));
      PetscCall(VecDestroy(&mglevels[i + 1]->rscale));
    }
    PetscCall(VecDestroy(&mglevels[n - 1]->crx));
    PetscCall(VecDestroy(&mglevels[n - 1]->crb));
    /* this is not null only if the smoother on the finest level
changes the rhs during PreSolve */
    PetscCall(VecDestroy(&mglevels[n - 1]->b));
    PetscCall(MatDestroy(&mglevels[n - 1]->B));

    for (i = 0; i < n; i++) {
      PetscCall(MatDestroy(&mglevels[i]->coarseSpace));
      PetscCall(MatDestroy(&mglevels[i]->A));
      /* when smoothu aliases smoothd, reset it only once */
      if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPReset(mglevels[i]->smoothd));
      PetscCall(KSPReset(mglevels[i]->smoothu));
      if (mglevels[i]->cr) PetscCall(KSPReset(mglevels[i]->cr));
    }
    mg->Nc = 0;
  }
  PetscFunctionReturn(0);
}

/* Implementing CR

We only want to make corrections that ``do not change'' the coarse solution. What we mean by not changing
is that if I prolong my coarse solution to the fine grid and then inject that fine solution back to the
coarse grid, I get the same answer. Injection is what Brannick calls R. We want the complementary projector
to Inj, which we will call S, after Brannick, so that Inj S = 0. Now the orthogonal projector onto the
range of Inj^T is

  Inj^T (Inj Inj^T)^{-1} Inj

and if Inj is a VecScatter, as it is now in PETSc, we have

  Inj^T Inj

and

  S = I - Inj^T Inj

since

  Inj S = Inj - (Inj Inj^T) Inj = 0.

Brannick suggests

  A \to S^T A S  \qquad\mathrm{and}\qquad  M \to S^T M S

but I do not think his :math:`S^T S = I` is correct. Our S is an orthogonal projector, so
:math:`S^T S = S^2 = S`. We will use

  M^{-1} A \to S M^{-1} A S

In fact, since it is somewhat hard in PETSc to do the symmetric application, we will just apply S on the left.
 Check: || Inj P - I ||_F < tol
 Check: In general, Inj Inj^T = I
*/

/* Context for the compatible-relaxation (CR) shell PC attached to a PCMG level */
typedef struct {
  PC       mg;  /* The PCMG object */
  PetscInt l;   /* The multigrid level for this solver */
  Mat      Inj; /* The injection matrix */
  Mat      S;   /* I - Inj^T Inj */
} CRContext;

/* Builds S = I - Inj^T Inj from the injection registered on level ctx->l of the PCMG */
static PetscErrorCode CRSetup_Private(PC pc) {
  CRContext *ctx;
  Mat        It;

  PetscFunctionBeginUser;
  PetscCall(PCShellGetContext(pc, &ctx));
  PetscCall(PCMGGetInjection(ctx->mg, ctx->l, &It));
  PetscCheck(It, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "CR requires that injection be defined for this PCMG");
  /* PCMGGetInjection() returns the coarse-from-fine operator; Inj is its transpose */
  PetscCall(MatCreateTranspose(It, &ctx->Inj));
  PetscCall(MatCreateNormal(ctx->Inj, &ctx->S)); /* S = Inj^T Inj so far */
  PetscCall(MatScale(ctx->S, -1.0));
  PetscCall(MatShift(ctx->S, 1.0)); /* now S = I - Inj^T Inj */
  PetscFunctionReturn(0);
}

/* Applies the complementary projector: y = S x */
static PetscErrorCode CRApply_Private(PC pc, Vec x, Vec y) {
  CRContext *ctx;

  PetscFunctionBeginUser;
  PetscCall(PCShellGetContext(pc, &ctx));
  PetscCall(MatMult(ctx->S, x, y));
  PetscFunctionReturn(0);
}

/* Frees the CR context and clears it from the shell PC */
static PetscErrorCode CRDestroy_Private(PC pc) {
  CRContext *ctx;

  PetscFunctionBeginUser;
  PetscCall(PCShellGetContext(pc, &ctx));
  PetscCall(MatDestroy(&ctx->Inj));
  PetscCall(MatDestroy(&ctx->S));
  PetscCall(PetscFree(ctx));
  PetscCall(PCShellSetContext(pc, NULL));
  PetscFunctionReturn(0);
}

/* Creates the PCSHELL implementing S for level l; setup is deferred to CRSetup_Private() */
static PetscErrorCode CreateCR_Private(PC pc, PetscInt l, PC *cr) {
  CRContext *ctx;

  PetscFunctionBeginUser;
  PetscCall(PCCreate(PetscObjectComm((PetscObject)pc), cr));
  PetscCall(PetscObjectSetName((PetscObject)*cr, "S (complementary projector to injection)"));
  PetscCall(PetscCalloc1(1, &ctx));
  ctx->mg = pc;
  ctx->l  = l;
  PetscCall(PCSetType(*cr, PCSHELL));
  PetscCall(PCShellSetContext(*cr, ctx));
  PetscCall(PCShellSetApply(*cr, CRApply_Private));
  PetscCall(PCShellSetSetUp(*cr, CRSetup_Private));
  PetscCall(PCShellSetDestroy(*cr, CRDestroy_Private));
  PetscFunctionReturn(0);
}

/*
  PCMGSetLevels_MG - Implementation of PCMGSetLevels(): (re)allocates the
  per-level structures and creates a default down-smoother KSP on each level
  (Chebyshev/SOR on the levels, PREONLY with LU/REDUNDANT on the coarsest).
  comms, if given, supplies one communicator per level (MPI_COMM_NULL for
  ranks not participating on that level).
*/
PetscErrorCode PCMGSetLevels_MG(PC pc, PetscInt levels, MPI_Comm *comms) {
  PC_MG         *mg       = (PC_MG *)pc->data;
  MPI_Comm       comm;
  PC_MG_Levels **mglevels = mg->levels;
  PCMGType       mgtype   = mg->am;
  PetscInt       mgctype  = (PetscInt)PC_MG_CYCLE_V;
  PetscInt       i;
  PetscMPIInt    size;
  const char    *prefix;
  PC             ipc;
  PetscInt       n;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, levels, 2);
  if (mg->nlevels == levels) PetscFunctionReturn(0); /* nothing to do */
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  if (mglevels) {
    mgctype = mglevels[0]->cycles; /* preserve the previously chosen cycle type */
    /* changing the number of levels so free up the previous stuff */
    PetscCall(PCReset_MG(pc));
    n = mglevels[0]->levels;
    for (i = 0; i < n; i++) {
      if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd));
      PetscCall(KSPDestroy(&mglevels[i]->smoothu));
      PetscCall(KSPDestroy(&mglevels[i]->cr));
      PetscCall(PetscFree(mglevels[i]));
    }
    PetscCall(PetscFree(mg->levels));
  }

  mg->nlevels = levels;

  PetscCall(PetscMalloc1(levels, &mglevels));

  PetscCall(PCGetOptionsPrefix(pc, &prefix));

  mg->stageApply = 0;
  for (i = 0; i < levels; i++) {
    PetscCall(PetscNew(&mglevels[i]));

    mglevels[i]->level  = i;
    mglevels[i]->levels = levels;
    mglevels[i]->cycles = mgctype;
    mg->default_smoothu = 2;
    mg->default_smoothd = 2;
    mglevels[i]->eventsmoothsetup    = 0;
    mglevels[i]->eventsmoothsolve    = 0;
    mglevels[i]->eventresidual       = 0;
    mglevels[i]->eventinterprestrict = 0;

    if (comms) comm = comms[i];
    /* a rank with MPI_COMM_NULL does not participate on this level: no KSP is created */
    if (comm != MPI_COMM_NULL) {
      PetscCall(KSPCreate(comm, &mglevels[i]->smoothd));
      PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->smoothd, pc->erroriffailure));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->smoothd, (PetscObject)pc, levels - i));
      PetscCall(KSPSetOptionsPrefix(mglevels[i]->smoothd, prefix));
      PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->smoothd, PetscMGLevelId, mglevels[i]->level));
      /* levels == 1 is treated as a "level" (mg_levels_0_) rather than a coarse solve */
      if (i || levels == 1) {
        char tprefix[128];

        PetscCall(KSPSetType(mglevels[i]->smoothd, KSPCHEBYSHEV));
        PetscCall(KSPSetConvergenceTest(mglevels[i]->smoothd, KSPConvergedSkip, NULL, NULL));
        PetscCall(KSPSetNormType(mglevels[i]->smoothd, KSP_NORM_NONE));
        PetscCall(KSPGetPC(mglevels[i]->smoothd, &ipc));
        PetscCall(PCSetType(ipc, PCSOR));
        PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, mg->default_smoothd));

        PetscCall(PetscSNPrintf(tprefix, 128, "mg_levels_%d_", (int)i));
        PetscCall(KSPAppendOptionsPrefix(mglevels[i]->smoothd, tprefix));
      } else {
        PetscCall(KSPAppendOptionsPrefix(mglevels[0]->smoothd, "mg_coarse_"));

        /* coarse solve is (redundant) LU by default; set shifttype NONZERO to avoid annoying zero-pivot in LU preconditioner */
        PetscCall(KSPSetType(mglevels[0]->smoothd, KSPPREONLY));
        PetscCall(KSPGetPC(mglevels[0]->smoothd, &ipc));
        PetscCallMPI(MPI_Comm_size(comm, &size));
        if (size > 1) {
          PetscCall(PCSetType(ipc, PCREDUNDANT));
        } else {
          PetscCall(PCSetType(ipc, PCLU));
        }
        PetscCall(PCFactorSetShiftType(ipc, MAT_SHIFT_INBLOCKS));
      }
    }
    mglevels[i]->smoothu = mglevels[i]->smoothd; /* up smoother defaults to the same KSP as down */
    mg->rtol             = 0.0;
    mg->abstol           = 0.0;
    mg->dtol             = 0.0;
    mg->ttol             = 0.0;
    mg->cyclesperpcapply = 1;
  }
  mg->levels = mglevels;
  PetscCall(PCMGSetType(pc, mgtype));
  PetscFunctionReturn(0);
}

/*@C
  PCMGSetLevels - Sets the number of levels to use with `PCMG`.
  Must be called before any other `PCMG` routine.

  Logically Collective on pc

  Input Parameters:
+ pc - the preconditioner context
. levels - the number of levels
- comms - optional communicators for each level; this is to allow solving the coarser problems
          on smaller sets of processes. For processes that are not included in the computation
          you must pass `MPI_COMM_NULL`. Use comms = NULL to specify that all processes
          should participate in each level of problem.

  Level: intermediate

  Notes:
  If the number of levels is one then the multigrid uses the -mg_levels prefix
  for setting the level options rather than the -mg_coarse prefix.

  You can free the information in comms after this routine is called.

  The array of MPI communicators must contain `MPI_COMM_NULL` for those ranks that at each level
  are not participating in the coarser solve. For example, with 2 levels and 1 and 2 ranks on
  the two levels, rank 0 in the original communicator will pass in an array of 2 communicators
  of size 2 and 1, while rank 1 in the original communicator will pass in array of 2 communicators
  the first of size 2 and the second of value `MPI_COMM_NULL` since the rank 1 does not participate
  in the coarse grid solve.

  Since each coarser level may have a new `MPI_Comm` with fewer ranks than the previous, one
  must take special care in providing the restriction and interpolation operation. We recommend
  providing these as two step operations; first perform a standard restriction or interpolation on
  the full number of ranks for that level and then use an MPI call to copy the resulting vector
  array entries (after calls to VecGetArray()) to the smaller or larger number of ranks, note in both
  cases the MPI calls must be made on the larger of the two communicators. Traditional MPI send and
  receives or MPI_AlltoAllv() could be used to do the reshuffling of the vector entries.

  Fortran Note:
  Use comms = `PETSC_NULL_MPI_COMM` as the equivalent of NULL in the C interface.
Note `PETSC_NULL_MPI_COMM` 462 is not `MPI_COMM_NULL`. It is more like `PETSC_NULL_INTEGER`, `PETSC_NULL_REAL` etc. 463 464 .seealso: `PCMGSetType()`, `PCMGGetLevels()` 465 @*/ 466 PetscErrorCode PCMGSetLevels(PC pc, PetscInt levels, MPI_Comm *comms) { 467 PetscFunctionBegin; 468 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 469 if (comms) PetscValidPointer(comms, 3); 470 PetscTryMethod(pc, "PCMGSetLevels_C", (PC, PetscInt, MPI_Comm *), (pc, levels, comms)); 471 PetscFunctionReturn(0); 472 } 473 474 PetscErrorCode PCDestroy_MG(PC pc) { 475 PC_MG *mg = (PC_MG *)pc->data; 476 PC_MG_Levels **mglevels = mg->levels; 477 PetscInt i, n; 478 479 PetscFunctionBegin; 480 PetscCall(PCReset_MG(pc)); 481 if (mglevels) { 482 n = mglevels[0]->levels; 483 for (i = 0; i < n; i++) { 484 if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd)); 485 PetscCall(KSPDestroy(&mglevels[i]->smoothu)); 486 PetscCall(KSPDestroy(&mglevels[i]->cr)); 487 PetscCall(PetscFree(mglevels[i])); 488 } 489 PetscCall(PetscFree(mg->levels)); 490 } 491 PetscCall(PetscFree(pc->data)); 492 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL)); 493 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL)); 494 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", NULL)); 495 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", NULL)); 496 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", NULL)); 497 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL)); 498 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL)); 499 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", NULL)); 500 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", NULL)); 501 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", NULL)); 
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", NULL));
  PetscFunctionReturn(0);
}

/*
   PCApply_MG - Runs either an additive, multiplicative, Kaskadic
                or full cycle of multigrid.

  Note:
  A simple wrapper which calls PCMGMCycle(),PCMGACycle(), or PCMGFCycle().
*/
static PetscErrorCode PCApply_MG_Internal(PC pc, Vec b, Vec x, Mat B, Mat X, PetscBool transpose) {
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PC             tpc;
  PetscInt       levels = mglevels[0]->levels, i;
  PetscBool      changeu, changed, matapp;

  PetscFunctionBegin;
  matapp = (PetscBool)(B && X); /* Mat (multiple rhs) mode when both B and X given */
  if (mg->stageApply) PetscCall(PetscLogStagePush(mg->stageApply));
  /* When the DM is supplying the matrix then it will not exist until here */
  for (i = 0; i < levels; i++) {
    if (!mglevels[i]->A) {
      PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL));
      PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A));
    }
  }

  /* ask the finest-level smoothers whether their PC mutates the rhs in PCPreSolve() */
  PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc));
  PetscCall(PCPreSolveChangeRHS(tpc, &changed));
  PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc));
  PetscCall(PCPreSolveChangeRHS(tpc, &changeu));
  if (!changeu && !changed) {
    /* safe to alias the caller's rhs directly (unaliased again before return) */
    if (matapp) {
      PetscCall(MatDestroy(&mglevels[levels - 1]->B));
      mglevels[levels - 1]->B = B;
    } else {
      PetscCall(VecDestroy(&mglevels[levels - 1]->b));
      mglevels[levels - 1]->b = b;
    }
  } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */
    if (matapp) {
      if (mglevels[levels - 1]->B) {
        PetscInt  N1, N2;
        PetscBool flg;

        /* reuse the cached copy only if column count and type still match */
        PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &N1));
        PetscCall(MatGetSize(B, NULL, &N2));
        PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 1]->B, ((PetscObject)B)->type_name, &flg));
        if (N1 != N2 || !flg) PetscCall(MatDestroy(&mglevels[levels - 1]->B));
      }
      if (!mglevels[levels - 1]->B) {
        PetscCall(MatDuplicate(B, MAT_COPY_VALUES, &mglevels[levels - 1]->B));
      } else {
        PetscCall(MatCopy(B, mglevels[levels - 1]->B, SAME_NONZERO_PATTERN));
      }
    } else {
      if (!mglevels[levels - 1]->b) {
        Vec *vec;

        PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL));
        mglevels[levels - 1]->b = *vec;
        PetscCall(PetscFree(vec));
      }
      PetscCall(VecCopy(b, mglevels[levels - 1]->b));
    }
  }
  if (matapp) {
    mglevels[levels - 1]->X = X;
  } else {
    mglevels[levels - 1]->x = x;
  }

  /* If coarser Xs are present, it means we have already block applied the PC at least once
     Reset operators if sizes/type do no match */
  if (matapp && levels > 1 && mglevels[levels - 2]->X) {
    PetscInt  Xc, Bc;
    PetscBool flg;

    PetscCall(MatGetSize(mglevels[levels - 2]->X, NULL, &Xc));
    PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &Bc));
    PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 2]->X, ((PetscObject)mglevels[levels - 1]->X)->type_name, &flg));
    if (Xc != Bc || !flg) {
      PetscCall(MatDestroy(&mglevels[levels - 1]->R));
      for (i = 0; i < levels - 1; i++) {
        PetscCall(MatDestroy(&mglevels[i]->R));
        PetscCall(MatDestroy(&mglevels[i]->B));
        PetscCall(MatDestroy(&mglevels[i]->X));
      }
    }
  }

  if (mg->am == PC_MG_MULTIPLICATIVE) {
    if (matapp) PetscCall(MatZeroEntries(X));
    else PetscCall(VecZeroEntries(x));
    for (i = 0; i < mg->cyclesperpcapply; i++) PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, transpose, matapp, NULL));
  } else if (mg->am == PC_MG_ADDITIVE) {
    PetscCall(PCMGACycle_Private(pc, mglevels, transpose, matapp));
  } else if (mg->am == PC_MG_KASKADE) {
PetscCall(PCMGKCycle_Private(pc, mglevels, transpose, matapp)); 605 } else { 606 PetscCall(PCMGFCycle_Private(pc, mglevels, transpose, matapp)); 607 } 608 if (mg->stageApply) PetscCall(PetscLogStagePop()); 609 if (!changeu && !changed) { 610 if (matapp) { 611 mglevels[levels - 1]->B = NULL; 612 } else { 613 mglevels[levels - 1]->b = NULL; 614 } 615 } 616 PetscFunctionReturn(0); 617 } 618 619 static PetscErrorCode PCApply_MG(PC pc, Vec b, Vec x) { 620 PetscFunctionBegin; 621 PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_FALSE)); 622 PetscFunctionReturn(0); 623 } 624 625 static PetscErrorCode PCApplyTranspose_MG(PC pc, Vec b, Vec x) { 626 PetscFunctionBegin; 627 PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_TRUE)); 628 PetscFunctionReturn(0); 629 } 630 631 static PetscErrorCode PCMatApply_MG(PC pc, Mat b, Mat x) { 632 PetscFunctionBegin; 633 PetscCall(PCApply_MG_Internal(pc, NULL, NULL, b, x, PETSC_FALSE)); 634 PetscFunctionReturn(0); 635 } 636 637 PetscErrorCode PCSetFromOptions_MG(PC pc, PetscOptionItems *PetscOptionsObject) { 638 PetscInt levels, cycles; 639 PetscBool flg, flg2; 640 PC_MG *mg = (PC_MG *)pc->data; 641 PC_MG_Levels **mglevels; 642 PCMGType mgtype; 643 PCMGCycleType mgctype; 644 PCMGGalerkinType gtype; 645 PCMGCoarseSpaceType coarseSpaceType; 646 647 PetscFunctionBegin; 648 levels = PetscMax(mg->nlevels, 1); 649 PetscOptionsHeadBegin(PetscOptionsObject, "Multigrid options"); 650 PetscCall(PetscOptionsInt("-pc_mg_levels", "Number of Levels", "PCMGSetLevels", levels, &levels, &flg)); 651 if (!flg && !mg->levels && pc->dm) { 652 PetscCall(DMGetRefineLevel(pc->dm, &levels)); 653 levels++; 654 mg->usedmfornumberoflevels = PETSC_TRUE; 655 } 656 PetscCall(PCMGSetLevels(pc, levels, NULL)); 657 mglevels = mg->levels; 658 659 mgctype = (PCMGCycleType)mglevels[0]->cycles; 660 PetscCall(PetscOptionsEnum("-pc_mg_cycle_type", "V cycle or for W-cycle", "PCMGSetCycleType", PCMGCycleTypes, (PetscEnum)mgctype, (PetscEnum *)&mgctype, &flg)); 661 
if (flg) PetscCall(PCMGSetCycleType(pc, mgctype)); 662 gtype = mg->galerkin; 663 PetscCall(PetscOptionsEnum("-pc_mg_galerkin", "Use Galerkin process to compute coarser operators", "PCMGSetGalerkin", PCMGGalerkinTypes, (PetscEnum)gtype, (PetscEnum *)>ype, &flg)); 664 if (flg) PetscCall(PCMGSetGalerkin(pc, gtype)); 665 coarseSpaceType = mg->coarseSpaceType; 666 PetscCall(PetscOptionsEnum("-pc_mg_adapt_interp_coarse_space", "Type of adaptive coarse space: none, polynomial, harmonic, eigenvector, generalized_eigenvector, gdsw", "PCMGSetAdaptCoarseSpaceType", PCMGCoarseSpaceTypes, (PetscEnum)coarseSpaceType, (PetscEnum *)&coarseSpaceType, &flg)); 667 if (flg) PetscCall(PCMGSetAdaptCoarseSpaceType(pc, coarseSpaceType)); 668 PetscCall(PetscOptionsInt("-pc_mg_adapt_interp_n", "Size of the coarse space for adaptive interpolation", "PCMGSetCoarseSpace", mg->Nc, &mg->Nc, &flg)); 669 PetscCall(PetscOptionsBool("-pc_mg_mesp_monitor", "Monitor the multilevel eigensolver", "PCMGSetAdaptInterpolation", PETSC_FALSE, &mg->mespMonitor, &flg)); 670 flg2 = PETSC_FALSE; 671 PetscCall(PetscOptionsBool("-pc_mg_adapt_cr", "Monitor coarse space quality using Compatible Relaxation (CR)", "PCMGSetAdaptCR", PETSC_FALSE, &flg2, &flg)); 672 if (flg) PetscCall(PCMGSetAdaptCR(pc, flg2)); 673 flg = PETSC_FALSE; 674 PetscCall(PetscOptionsBool("-pc_mg_distinct_smoothup", "Create separate smoothup KSP and append the prefix _up", "PCMGSetDistinctSmoothUp", PETSC_FALSE, &flg, NULL)); 675 if (flg) PetscCall(PCMGSetDistinctSmoothUp(pc)); 676 mgtype = mg->am; 677 PetscCall(PetscOptionsEnum("-pc_mg_type", "Multigrid type", "PCMGSetType", PCMGTypes, (PetscEnum)mgtype, (PetscEnum *)&mgtype, &flg)); 678 if (flg) PetscCall(PCMGSetType(pc, mgtype)); 679 if (mg->am == PC_MG_MULTIPLICATIVE) { 680 PetscCall(PetscOptionsInt("-pc_mg_multiplicative_cycles", "Number of cycles for each preconditioner step", "PCMGMultiplicativeSetCycles", mg->cyclesperpcapply, &cycles, &flg)); 681 if (flg) 
PetscCall(PCMGMultiplicativeSetCycles(pc, cycles)); 682 } 683 flg = PETSC_FALSE; 684 PetscCall(PetscOptionsBool("-pc_mg_log", "Log times for each multigrid level", "None", flg, &flg, NULL)); 685 if (flg) { 686 PetscInt i; 687 char eventname[128]; 688 689 levels = mglevels[0]->levels; 690 for (i = 0; i < levels; i++) { 691 sprintf(eventname, "MGSetup Level %d", (int)i); 692 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsetup)); 693 sprintf(eventname, "MGSmooth Level %d", (int)i); 694 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsolve)); 695 if (i) { 696 sprintf(eventname, "MGResid Level %d", (int)i); 697 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventresidual)); 698 sprintf(eventname, "MGInterp Level %d", (int)i); 699 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventinterprestrict)); 700 } 701 } 702 703 #if defined(PETSC_USE_LOG) 704 { 705 const char *sname = "MG Apply"; 706 PetscStageLog stageLog; 707 PetscInt st; 708 709 PetscCall(PetscLogGetStageLog(&stageLog)); 710 for (st = 0; st < stageLog->numStages; ++st) { 711 PetscBool same; 712 713 PetscCall(PetscStrcmp(stageLog->stageInfo[st].name, sname, &same)); 714 if (same) mg->stageApply = st; 715 } 716 if (!mg->stageApply) PetscCall(PetscLogStageRegister(sname, &mg->stageApply)); 717 } 718 #endif 719 } 720 PetscOptionsHeadEnd(); 721 /* Check option consistency */ 722 PetscCall(PCMGGetGalerkin(pc, >ype)); 723 PetscCall(PCMGGetAdaptInterpolation(pc, &flg)); 724 PetscCheck(!flg || !(gtype >= PC_MG_GALERKIN_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use Galerkin coarse operators when adapting the interpolator"); 725 PetscFunctionReturn(0); 726 } 727 728 const char *const PCMGTypes[] = {"MULTIPLICATIVE", "ADDITIVE", "FULL", "KASKADE", "PCMGType", "PC_MG", NULL}; 729 const char *const PCMGCycleTypes[] = 
{"invalid", "v", "w", "PCMGCycleType", "PC_MG_CYCLE", NULL}; 730 const char *const PCMGGalerkinTypes[] = {"both", "pmat", "mat", "none", "external", "PCMGGalerkinType", "PC_MG_GALERKIN", NULL}; 731 const char *const PCMGCoarseSpaceTypes[] = {"none", "polynomial", "harmonic", "eigenvector", "generalized_eigenvector", "gdsw", "PCMGCoarseSpaceType", "PCMG_ADAPT_NONE", NULL}; 732 733 #include <petscdraw.h> 734 PetscErrorCode PCView_MG(PC pc, PetscViewer viewer) { 735 PC_MG *mg = (PC_MG *)pc->data; 736 PC_MG_Levels **mglevels = mg->levels; 737 PetscInt levels = mglevels ? mglevels[0]->levels : 0, i; 738 PetscBool iascii, isbinary, isdraw; 739 740 PetscFunctionBegin; 741 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 742 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary)); 743 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw)); 744 if (iascii) { 745 const char *cyclename = levels ? (mglevels[0]->cycles == PC_MG_CYCLE_V ? 
"v" : "w") : "unknown"; 746 PetscCall(PetscViewerASCIIPrintf(viewer, " type is %s, levels=%" PetscInt_FMT " cycles=%s\n", PCMGTypes[mg->am], levels, cyclename)); 747 if (mg->am == PC_MG_MULTIPLICATIVE) PetscCall(PetscViewerASCIIPrintf(viewer, " Cycles per PCApply=%" PetscInt_FMT "\n", mg->cyclesperpcapply)); 748 if (mg->galerkin == PC_MG_GALERKIN_BOTH) { 749 PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices\n")); 750 } else if (mg->galerkin == PC_MG_GALERKIN_PMAT) { 751 PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices for pmat\n")); 752 } else if (mg->galerkin == PC_MG_GALERKIN_MAT) { 753 PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices for mat\n")); 754 } else if (mg->galerkin == PC_MG_GALERKIN_EXTERNAL) { 755 PetscCall(PetscViewerASCIIPrintf(viewer, " Using externally compute Galerkin coarse grid matrices\n")); 756 } else { 757 PetscCall(PetscViewerASCIIPrintf(viewer, " Not using Galerkin computed coarse grid matrices\n")); 758 } 759 if (mg->view) PetscCall((*mg->view)(pc, viewer)); 760 for (i = 0; i < levels; i++) { 761 if (i) { 762 PetscCall(PetscViewerASCIIPrintf(viewer, "Down solver (pre-smoother) on level %" PetscInt_FMT " -------------------------------\n", i)); 763 } else { 764 PetscCall(PetscViewerASCIIPrintf(viewer, "Coarse grid solver -- level %" PetscInt_FMT " -------------------------------\n", i)); 765 } 766 PetscCall(PetscViewerASCIIPushTab(viewer)); 767 PetscCall(KSPView(mglevels[i]->smoothd, viewer)); 768 PetscCall(PetscViewerASCIIPopTab(viewer)); 769 if (i && mglevels[i]->smoothd == mglevels[i]->smoothu) { 770 PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) same as down solver (pre-smoother)\n")); 771 } else if (i) { 772 PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) on level %" PetscInt_FMT " -------------------------------\n", i)); 773 PetscCall(PetscViewerASCIIPushTab(viewer)); 774 
PetscCall(KSPView(mglevels[i]->smoothu, viewer)); 775 PetscCall(PetscViewerASCIIPopTab(viewer)); 776 } 777 if (i && mglevels[i]->cr) { 778 PetscCall(PetscViewerASCIIPrintf(viewer, "CR solver on level %" PetscInt_FMT " -------------------------------\n", i)); 779 PetscCall(PetscViewerASCIIPushTab(viewer)); 780 PetscCall(KSPView(mglevels[i]->cr, viewer)); 781 PetscCall(PetscViewerASCIIPopTab(viewer)); 782 } 783 } 784 } else if (isbinary) { 785 for (i = levels - 1; i >= 0; i--) { 786 PetscCall(KSPView(mglevels[i]->smoothd, viewer)); 787 if (i && mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPView(mglevels[i]->smoothu, viewer)); 788 } 789 } else if (isdraw) { 790 PetscDraw draw; 791 PetscReal x, w, y, bottom, th; 792 PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw)); 793 PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y)); 794 PetscCall(PetscDrawStringGetSize(draw, NULL, &th)); 795 bottom = y - th; 796 for (i = levels - 1; i >= 0; i--) { 797 if (!mglevels[i]->smoothu || (mglevels[i]->smoothu == mglevels[i]->smoothd)) { 798 PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom)); 799 PetscCall(KSPView(mglevels[i]->smoothd, viewer)); 800 PetscCall(PetscDrawPopCurrentPoint(draw)); 801 } else { 802 w = 0.5 * PetscMin(1.0 - x, x); 803 PetscCall(PetscDrawPushCurrentPoint(draw, x + w, bottom)); 804 PetscCall(KSPView(mglevels[i]->smoothd, viewer)); 805 PetscCall(PetscDrawPopCurrentPoint(draw)); 806 PetscCall(PetscDrawPushCurrentPoint(draw, x - w, bottom)); 807 PetscCall(KSPView(mglevels[i]->smoothu, viewer)); 808 PetscCall(PetscDrawPopCurrentPoint(draw)); 809 } 810 PetscCall(PetscDrawGetBoundingBox(draw, NULL, &bottom, NULL, NULL)); 811 bottom -= th; 812 } 813 } 814 PetscFunctionReturn(0); 815 } 816 817 #include <petsc/private/kspimpl.h> 818 819 /* 820 Calls setup for the KSP on each level 821 */ 822 PetscErrorCode PCSetUp_MG(PC pc) { 823 PC_MG *mg = (PC_MG *)pc->data; 824 PC_MG_Levels **mglevels = mg->levels; 825 PetscInt i, n; 826 PC cpc; 827 PetscBool dump = 
PETSC_FALSE, opsset, use_amat, missinginterpolate = PETSC_FALSE; 828 Mat dA, dB; 829 Vec tvec; 830 DM *dms; 831 PetscViewer viewer = NULL; 832 PetscBool dAeqdB = PETSC_FALSE, needRestricts = PETSC_FALSE, doCR = PETSC_FALSE; 833 PetscBool adaptInterpolation = mg->adaptInterpolation; 834 835 PetscFunctionBegin; 836 PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels with PCMGSetLevels() before setting up"); 837 n = mglevels[0]->levels; 838 /* FIX: Move this to PCSetFromOptions_MG? */ 839 if (mg->usedmfornumberoflevels) { 840 PetscInt levels; 841 PetscCall(DMGetRefineLevel(pc->dm, &levels)); 842 levels++; 843 if (levels > n) { /* the problem is now being solved on a finer grid */ 844 PetscCall(PCMGSetLevels(pc, levels, NULL)); 845 n = levels; 846 PetscCall(PCSetFromOptions(pc)); /* it is bad to call this here, but otherwise will never be called for the new hierarchy */ 847 mglevels = mg->levels; 848 } 849 } 850 PetscCall(KSPGetPC(mglevels[0]->smoothd, &cpc)); 851 852 /* If user did not provide fine grid operators OR operator was not updated since last global KSPSetOperators() */ 853 /* so use those from global PC */ 854 /* Is this what we always want? What if user wants to keep old one? 
*/ 855 PetscCall(KSPGetOperatorsSet(mglevels[n - 1]->smoothd, NULL, &opsset)); 856 if (opsset) { 857 Mat mmat; 858 PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, NULL, &mmat)); 859 if (mmat == pc->pmat) opsset = PETSC_FALSE; 860 } 861 862 /* Create CR solvers */ 863 PetscCall(PCMGGetAdaptCR(pc, &doCR)); 864 if (doCR) { 865 const char *prefix; 866 867 PetscCall(PCGetOptionsPrefix(pc, &prefix)); 868 for (i = 1; i < n; ++i) { 869 PC ipc, cr; 870 char crprefix[128]; 871 872 PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &mglevels[i]->cr)); 873 PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->cr, PETSC_FALSE)); 874 PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->cr, (PetscObject)pc, n - i)); 875 PetscCall(KSPSetOptionsPrefix(mglevels[i]->cr, prefix)); 876 PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->cr, PetscMGLevelId, mglevels[i]->level)); 877 PetscCall(KSPSetType(mglevels[i]->cr, KSPCHEBYSHEV)); 878 PetscCall(KSPSetConvergenceTest(mglevels[i]->cr, KSPConvergedSkip, NULL, NULL)); 879 PetscCall(KSPSetNormType(mglevels[i]->cr, KSP_NORM_PRECONDITIONED)); 880 PetscCall(KSPGetPC(mglevels[i]->cr, &ipc)); 881 882 PetscCall(PCSetType(ipc, PCCOMPOSITE)); 883 PetscCall(PCCompositeSetType(ipc, PC_COMPOSITE_MULTIPLICATIVE)); 884 PetscCall(PCCompositeAddPCType(ipc, PCSOR)); 885 PetscCall(CreateCR_Private(pc, i, &cr)); 886 PetscCall(PCCompositeAddPC(ipc, cr)); 887 PetscCall(PCDestroy(&cr)); 888 889 PetscCall(KSPSetTolerances(mglevels[i]->cr, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, mg->default_smoothd)); 890 PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE)); 891 PetscCall(PetscSNPrintf(crprefix, 128, "mg_levels_%d_cr_", (int)i)); 892 PetscCall(KSPAppendOptionsPrefix(mglevels[i]->cr, crprefix)); 893 } 894 } 895 896 if (!opsset) { 897 PetscCall(PCGetUseAmat(pc, &use_amat)); 898 if (use_amat) { 899 PetscCall(PetscInfo(pc, "Using outer operators to define finest grid operator \n because 
PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n")); 900 PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->mat, pc->pmat)); 901 } else { 902 PetscCall(PetscInfo(pc, "Using matrix (pmat) operators to define finest grid operator \n because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n")); 903 PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->pmat, pc->pmat)); 904 } 905 } 906 907 for (i = n - 1; i > 0; i--) { 908 if (!(mglevels[i]->interpolate || mglevels[i]->restrct)) { 909 missinginterpolate = PETSC_TRUE; 910 break; 911 } 912 } 913 914 PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, &dA, &dB)); 915 if (dA == dB) dAeqdB = PETSC_TRUE; 916 if (mg->galerkin == PC_MG_GALERKIN_NONE || ((mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_MAT) && !dAeqdB)) { 917 needRestricts = PETSC_TRUE; /* user must compute either mat, pmat, or both so must restrict x to coarser levels */ 918 } 919 920 if (pc->dm && !pc->setupcalled) { 921 /* finest smoother also gets DM but it is not active, independent of whether galerkin==PC_MG_GALERKIN_EXTERNAL */ 922 PetscCall(KSPSetDM(mglevels[n - 1]->smoothd, pc->dm)); 923 PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothd, PETSC_FALSE)); 924 if (mglevels[n - 1]->smoothd != mglevels[n - 1]->smoothu) { 925 PetscCall(KSPSetDM(mglevels[n - 1]->smoothu, pc->dm)); 926 PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothu, PETSC_FALSE)); 927 } 928 if (mglevels[n - 1]->cr) { 929 PetscCall(KSPSetDM(mglevels[n - 1]->cr, pc->dm)); 930 PetscCall(KSPSetDMActive(mglevels[n - 1]->cr, PETSC_FALSE)); 931 } 932 } 933 934 /* 935 Skipping if user has provided all interpolation/restriction needed (since DM might not be able to produce them (when coming from SNES/TS) 936 Skipping for externally managed hierarchy (such as ML and GAMG). Cleaner logic here would be great. Wrap ML/GAMG as DMs? 
937 */ 938 if (missinginterpolate && mg->galerkin != PC_MG_GALERKIN_EXTERNAL && !pc->setupcalled) { 939 /* first see if we can compute a coarse space */ 940 if (mg->coarseSpaceType == PCMG_ADAPT_GDSW) { 941 for (i = n - 2; i > -1; i--) { 942 if (!mglevels[i + 1]->restrct && !mglevels[i + 1]->interpolate) { 943 PetscCall(PCMGComputeCoarseSpace_Internal(pc, i + 1, mg->coarseSpaceType, mg->Nc, NULL, &mglevels[i + 1]->coarseSpace)); 944 PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->coarseSpace)); 945 } 946 } 947 } else { /* construct the interpolation from the DMs */ 948 Mat p; 949 Vec rscale; 950 PetscCall(PetscMalloc1(n, &dms)); 951 dms[n - 1] = pc->dm; 952 /* Separately create them so we do not get DMKSP interference between levels */ 953 for (i = n - 2; i > -1; i--) PetscCall(DMCoarsen(dms[i + 1], MPI_COMM_NULL, &dms[i])); 954 for (i = n - 2; i > -1; i--) { 955 DMKSP kdm; 956 PetscBool dmhasrestrict, dmhasinject; 957 PetscCall(KSPSetDM(mglevels[i]->smoothd, dms[i])); 958 if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothd, PETSC_FALSE)); 959 if (mglevels[i]->smoothd != mglevels[i]->smoothu) { 960 PetscCall(KSPSetDM(mglevels[i]->smoothu, dms[i])); 961 if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothu, PETSC_FALSE)); 962 } 963 if (mglevels[i]->cr) { 964 PetscCall(KSPSetDM(mglevels[i]->cr, dms[i])); 965 if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->cr, PETSC_FALSE)); 966 } 967 PetscCall(DMGetDMKSPWrite(dms[i], &kdm)); 968 /* Ugly hack so that the next KSPSetUp() will use the RHS that we set. A better fix is to change dmActive to take 969 * a bitwise OR of computing the matrix, RHS, and initial iterate. 
*/ 970 kdm->ops->computerhs = NULL; 971 kdm->rhsctx = NULL; 972 if (!mglevels[i + 1]->interpolate) { 973 PetscCall(DMCreateInterpolation(dms[i], dms[i + 1], &p, &rscale)); 974 PetscCall(PCMGSetInterpolation(pc, i + 1, p)); 975 if (rscale) PetscCall(PCMGSetRScale(pc, i + 1, rscale)); 976 PetscCall(VecDestroy(&rscale)); 977 PetscCall(MatDestroy(&p)); 978 } 979 PetscCall(DMHasCreateRestriction(dms[i], &dmhasrestrict)); 980 if (dmhasrestrict && !mglevels[i + 1]->restrct) { 981 PetscCall(DMCreateRestriction(dms[i], dms[i + 1], &p)); 982 PetscCall(PCMGSetRestriction(pc, i + 1, p)); 983 PetscCall(MatDestroy(&p)); 984 } 985 PetscCall(DMHasCreateInjection(dms[i], &dmhasinject)); 986 if (dmhasinject && !mglevels[i + 1]->inject) { 987 PetscCall(DMCreateInjection(dms[i], dms[i + 1], &p)); 988 PetscCall(PCMGSetInjection(pc, i + 1, p)); 989 PetscCall(MatDestroy(&p)); 990 } 991 } 992 993 for (i = n - 2; i > -1; i--) PetscCall(DMDestroy(&dms[i])); 994 PetscCall(PetscFree(dms)); 995 } 996 } 997 998 if (mg->galerkin < PC_MG_GALERKIN_NONE) { 999 Mat A, B; 1000 PetscBool doA = PETSC_FALSE, doB = PETSC_FALSE; 1001 MatReuse reuse = MAT_INITIAL_MATRIX; 1002 1003 if (mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_BOTH) doB = PETSC_TRUE; 1004 if (mg->galerkin == PC_MG_GALERKIN_MAT || (mg->galerkin == PC_MG_GALERKIN_BOTH && dA != dB)) doA = PETSC_TRUE; 1005 if (pc->setupcalled) reuse = MAT_REUSE_MATRIX; 1006 for (i = n - 2; i > -1; i--) { 1007 PetscCheck(mglevels[i + 1]->restrct || mglevels[i + 1]->interpolate, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must provide interpolation or restriction for each MG level except level 0"); 1008 if (!mglevels[i + 1]->interpolate) PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->restrct)); 1009 if (!mglevels[i + 1]->restrct) PetscCall(PCMGSetRestriction(pc, i + 1, mglevels[i + 1]->interpolate)); 1010 if (reuse == MAT_REUSE_MATRIX) PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, &B)); 1011 if (doA) 
PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dA, mglevels[i + 1]->interpolate, reuse, 1.0, &A)); 1012 if (doB) PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dB, mglevels[i + 1]->interpolate, reuse, 1.0, &B)); 1013 /* the management of the PetscObjectReference() and PetscObjecDereference() below is rather delicate */ 1014 if (!doA && dAeqdB) { 1015 if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)B)); 1016 A = B; 1017 } else if (!doA && reuse == MAT_INITIAL_MATRIX) { 1018 PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, NULL)); 1019 PetscCall(PetscObjectReference((PetscObject)A)); 1020 } 1021 if (!doB && dAeqdB) { 1022 if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)A)); 1023 B = A; 1024 } else if (!doB && reuse == MAT_INITIAL_MATRIX) { 1025 PetscCall(KSPGetOperators(mglevels[i]->smoothd, NULL, &B)); 1026 PetscCall(PetscObjectReference((PetscObject)B)); 1027 } 1028 if (reuse == MAT_INITIAL_MATRIX) { 1029 PetscCall(KSPSetOperators(mglevels[i]->smoothd, A, B)); 1030 PetscCall(PetscObjectDereference((PetscObject)A)); 1031 PetscCall(PetscObjectDereference((PetscObject)B)); 1032 } 1033 dA = A; 1034 dB = B; 1035 } 1036 } 1037 1038 /* Adapt interpolation matrices */ 1039 if (adaptInterpolation) { 1040 for (i = 0; i < n; ++i) { 1041 if (!mglevels[i]->coarseSpace) PetscCall(PCMGComputeCoarseSpace_Internal(pc, i, mg->coarseSpaceType, mg->Nc, !i ? 
NULL : mglevels[i - 1]->coarseSpace, &mglevels[i]->coarseSpace)); 1042 if (i) PetscCall(PCMGAdaptInterpolator_Internal(pc, i, mglevels[i - 1]->smoothu, mglevels[i]->smoothu, mglevels[i - 1]->coarseSpace, mglevels[i]->coarseSpace)); 1043 } 1044 for (i = n - 2; i > -1; --i) PetscCall(PCMGRecomputeLevelOperators_Internal(pc, i)); 1045 } 1046 1047 if (needRestricts && pc->dm) { 1048 for (i = n - 2; i >= 0; i--) { 1049 DM dmfine, dmcoarse; 1050 Mat Restrict, Inject; 1051 Vec rscale; 1052 PetscCall(KSPGetDM(mglevels[i + 1]->smoothd, &dmfine)); 1053 PetscCall(KSPGetDM(mglevels[i]->smoothd, &dmcoarse)); 1054 PetscCall(PCMGGetRestriction(pc, i + 1, &Restrict)); 1055 PetscCall(PCMGGetRScale(pc, i + 1, &rscale)); 1056 PetscCall(PCMGGetInjection(pc, i + 1, &Inject)); 1057 PetscCall(DMRestrict(dmfine, Restrict, rscale, Inject, dmcoarse)); 1058 } 1059 } 1060 1061 if (!pc->setupcalled) { 1062 for (i = 0; i < n; i++) PetscCall(KSPSetFromOptions(mglevels[i]->smoothd)); 1063 for (i = 1; i < n; i++) { 1064 if (mglevels[i]->smoothu && (mglevels[i]->smoothu != mglevels[i]->smoothd)) PetscCall(KSPSetFromOptions(mglevels[i]->smoothu)); 1065 if (mglevels[i]->cr) PetscCall(KSPSetFromOptions(mglevels[i]->cr)); 1066 } 1067 /* insure that if either interpolation or restriction is set the other other one is set */ 1068 for (i = 1; i < n; i++) { 1069 PetscCall(PCMGGetInterpolation(pc, i, NULL)); 1070 PetscCall(PCMGGetRestriction(pc, i, NULL)); 1071 } 1072 for (i = 0; i < n - 1; i++) { 1073 if (!mglevels[i]->b) { 1074 Vec *vec; 1075 PetscCall(KSPCreateVecs(mglevels[i]->smoothd, 1, &vec, 0, NULL)); 1076 PetscCall(PCMGSetRhs(pc, i, *vec)); 1077 PetscCall(VecDestroy(vec)); 1078 PetscCall(PetscFree(vec)); 1079 } 1080 if (!mglevels[i]->r && i) { 1081 PetscCall(VecDuplicate(mglevels[i]->b, &tvec)); 1082 PetscCall(PCMGSetR(pc, i, tvec)); 1083 PetscCall(VecDestroy(&tvec)); 1084 } 1085 if (!mglevels[i]->x) { 1086 PetscCall(VecDuplicate(mglevels[i]->b, &tvec)); 1087 PetscCall(PCMGSetX(pc, i, tvec)); 1088 
PetscCall(VecDestroy(&tvec)); 1089 } 1090 if (doCR) { 1091 PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crx)); 1092 PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crb)); 1093 PetscCall(VecZeroEntries(mglevels[i]->crb)); 1094 } 1095 } 1096 if (n != 1 && !mglevels[n - 1]->r) { 1097 /* PCMGSetR() on the finest level if user did not supply it */ 1098 Vec *vec; 1099 PetscCall(KSPCreateVecs(mglevels[n - 1]->smoothd, 1, &vec, 0, NULL)); 1100 PetscCall(PCMGSetR(pc, n - 1, *vec)); 1101 PetscCall(VecDestroy(vec)); 1102 PetscCall(PetscFree(vec)); 1103 } 1104 if (doCR) { 1105 PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crx)); 1106 PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crb)); 1107 PetscCall(VecZeroEntries(mglevels[n - 1]->crb)); 1108 } 1109 } 1110 1111 if (pc->dm) { 1112 /* need to tell all the coarser levels to rebuild the matrix using the DM for that level */ 1113 for (i = 0; i < n - 1; i++) { 1114 if (mglevels[i]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[i]->smoothd->setupstage = KSP_SETUP_NEWMATRIX; 1115 } 1116 } 1117 // We got here (PCSetUp_MG) because the matrix has changed, which means the smoother needs to be set up again (e.g., 1118 // new diagonal for Jacobi). Setting it here allows it to be logged under PCSetUp rather than deep inside a PCApply. 
1119 if (mglevels[n - 1]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[n - 1]->smoothd->setupstage = KSP_SETUP_NEWMATRIX; 1120 1121 for (i = 1; i < n; i++) { 1122 if (mglevels[i]->smoothu == mglevels[i]->smoothd || mg->am == PC_MG_FULL || mg->am == PC_MG_KASKADE || mg->cyclesperpcapply > 1) { 1123 /* if doing only down then initial guess is zero */ 1124 PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE)); 1125 } 1126 if (mglevels[i]->cr) PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE)); 1127 if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0)); 1128 PetscCall(KSPSetUp(mglevels[i]->smoothd)); 1129 if (mglevels[i]->smoothd->reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR; 1130 if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0)); 1131 if (!mglevels[i]->residual) { 1132 Mat mat; 1133 PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL)); 1134 PetscCall(PCMGSetResidual(pc, i, PCMGResidualDefault, mat)); 1135 } 1136 if (!mglevels[i]->residualtranspose) { 1137 Mat mat; 1138 PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL)); 1139 PetscCall(PCMGSetResidualTranspose(pc, i, PCMGResidualTransposeDefault, mat)); 1140 } 1141 } 1142 for (i = 1; i < n; i++) { 1143 if (mglevels[i]->smoothu && mglevels[i]->smoothu != mglevels[i]->smoothd) { 1144 Mat downmat, downpmat; 1145 1146 /* check if operators have been set for up, if not use down operators to set them */ 1147 PetscCall(KSPGetOperatorsSet(mglevels[i]->smoothu, &opsset, NULL)); 1148 if (!opsset) { 1149 PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat)); 1150 PetscCall(KSPSetOperators(mglevels[i]->smoothu, downmat, downpmat)); 1151 } 1152 1153 PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothu, PETSC_TRUE)); 1154 if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0)); 
1155 PetscCall(KSPSetUp(mglevels[i]->smoothu)); 1156 if (mglevels[i]->smoothu->reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR; 1157 if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0)); 1158 } 1159 if (mglevels[i]->cr) { 1160 Mat downmat, downpmat; 1161 1162 /* check if operators have been set for up, if not use down operators to set them */ 1163 PetscCall(KSPGetOperatorsSet(mglevels[i]->cr, &opsset, NULL)); 1164 if (!opsset) { 1165 PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat)); 1166 PetscCall(KSPSetOperators(mglevels[i]->cr, downmat, downpmat)); 1167 } 1168 1169 PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE)); 1170 if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0)); 1171 PetscCall(KSPSetUp(mglevels[i]->cr)); 1172 if (mglevels[i]->cr->reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR; 1173 if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0)); 1174 } 1175 } 1176 1177 if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0)); 1178 PetscCall(KSPSetUp(mglevels[0]->smoothd)); 1179 if (mglevels[0]->smoothd->reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR; 1180 if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0)); 1181 1182 /* 1183 Dump the interpolation/restriction matrices plus the 1184 Jacobian/stiffness on each level. This allows MATLAB users to 1185 easily check if the Galerkin condition A_c = R A_f R^T is satisfied. 1186 1187 Only support one or the other at the same time. 
1188 */ 1189 #if defined(PETSC_USE_SOCKET_VIEWER) 1190 PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_matlab", &dump, NULL)); 1191 if (dump) viewer = PETSC_VIEWER_SOCKET_(PetscObjectComm((PetscObject)pc)); 1192 dump = PETSC_FALSE; 1193 #endif 1194 PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_binary", &dump, NULL)); 1195 if (dump) viewer = PETSC_VIEWER_BINARY_(PetscObjectComm((PetscObject)pc)); 1196 1197 if (viewer) { 1198 for (i = 1; i < n; i++) PetscCall(MatView(mglevels[i]->restrct, viewer)); 1199 for (i = 0; i < n; i++) { 1200 PetscCall(KSPGetPC(mglevels[i]->smoothd, &pc)); 1201 PetscCall(MatView(pc->mat, viewer)); 1202 } 1203 } 1204 PetscFunctionReturn(0); 1205 } 1206 1207 /* -------------------------------------------------------------------------------------*/ 1208 1209 PetscErrorCode PCMGGetLevels_MG(PC pc, PetscInt *levels) { 1210 PC_MG *mg = (PC_MG *)pc->data; 1211 1212 PetscFunctionBegin; 1213 *levels = mg->nlevels; 1214 PetscFunctionReturn(0); 1215 } 1216 1217 /*@ 1218 PCMGGetLevels - Gets the number of levels to use with `PCMG`. 1219 1220 Not Collective 1221 1222 Input Parameter: 1223 . pc - the preconditioner context 1224 1225 Output parameter: 1226 . levels - the number of levels 1227 1228 Level: advanced 1229 1230 .seealso: `PCMG`, `PCMGSetLevels()` 1231 @*/ 1232 PetscErrorCode PCMGGetLevels(PC pc, PetscInt *levels) { 1233 PetscFunctionBegin; 1234 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1235 PetscValidIntPointer(levels, 2); 1236 *levels = 0; 1237 PetscTryMethod(pc, "PCMGGetLevels_C", (PC, PetscInt *), (pc, levels)); 1238 PetscFunctionReturn(0); 1239 } 1240 1241 /*@ 1242 PCMGGetGridComplexity - compute operator and grid complexity of the `PCMG` hierarchy 1243 1244 Input Parameter: 1245 . 
pc - the preconditioner context 1246 1247 Output Parameters: 1248 + gc - grid complexity = sum_i(n_i) / n_0 1249 - oc - operator complexity = sum_i(nnz_i) / nnz_0 1250 1251 Level: advanced 1252 1253 Note: 1254 This is often call the operator complexity in multigrid literature 1255 1256 .seealso: `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()` 1257 @*/ 1258 PetscErrorCode PCMGGetGridComplexity(PC pc, PetscReal *gc, PetscReal *oc) { 1259 PC_MG *mg = (PC_MG *)pc->data; 1260 PC_MG_Levels **mglevels = mg->levels; 1261 PetscInt lev, N; 1262 PetscLogDouble nnz0 = 0, sgc = 0, soc = 0, n0 = 0; 1263 MatInfo info; 1264 1265 PetscFunctionBegin; 1266 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1267 if (gc) PetscValidRealPointer(gc, 2); 1268 if (oc) PetscValidRealPointer(oc, 3); 1269 if (!pc->setupcalled) { 1270 if (gc) *gc = 0; 1271 if (oc) *oc = 0; 1272 PetscFunctionReturn(0); 1273 } 1274 PetscCheck(mg->nlevels > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MG has no levels"); 1275 for (lev = 0; lev < mg->nlevels; lev++) { 1276 Mat dB; 1277 PetscCall(KSPGetOperators(mglevels[lev]->smoothd, NULL, &dB)); 1278 PetscCall(MatGetInfo(dB, MAT_GLOBAL_SUM, &info)); /* global reduction */ 1279 PetscCall(MatGetSize(dB, &N, NULL)); 1280 sgc += N; 1281 soc += info.nz_used; 1282 if (lev == mg->nlevels - 1) { 1283 nnz0 = info.nz_used; 1284 n0 = N; 1285 } 1286 } 1287 if (n0 > 0 && gc) *gc = (PetscReal)(sgc / n0); 1288 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number for grid points on finest level is not available"); 1289 if (nnz0 > 0 && oc) *oc = (PetscReal)(soc / nnz0); 1290 PetscFunctionReturn(0); 1291 } 1292 1293 /*@ 1294 PCMGSetType - Determines the form of multigrid to use: 1295 multiplicative, additive, full, or the Kaskade algorithm. 1296 1297 Logically Collective on pc 1298 1299 Input Parameters: 1300 + pc - the preconditioner context 1301 - form - multigrid form, one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, `PC_MG_KASKADE` 1302 1303 Options Database Key: 1304 . 
-pc_mg_type <form> - Sets <form>, one of multiplicative, 1305 additive, full, kaskade 1306 1307 Level: advanced 1308 1309 .seealso: `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGGetType()`, `PCMGCycleType` 1310 @*/ 1311 PetscErrorCode PCMGSetType(PC pc, PCMGType form) { 1312 PC_MG *mg = (PC_MG *)pc->data; 1313 1314 PetscFunctionBegin; 1315 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1316 PetscValidLogicalCollectiveEnum(pc, form, 2); 1317 mg->am = form; 1318 if (form == PC_MG_MULTIPLICATIVE) pc->ops->applyrichardson = PCApplyRichardson_MG; 1319 else pc->ops->applyrichardson = NULL; 1320 PetscFunctionReturn(0); 1321 } 1322 1323 /*@ 1324 PCMGGetType - Finds the form of multigrid the `PCMG` is using multiplicative, additive, full, or the Kaskade algorithm. 1325 1326 Logically Collective on pc 1327 1328 Input Parameter: 1329 . pc - the preconditioner context 1330 1331 Output Parameter: 1332 . type - one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, `PC_MG_KASKADE`, `PCMGCycleType` 1333 1334 Level: advanced 1335 1336 .seealso: `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGSetType()` 1337 @*/ 1338 PetscErrorCode PCMGGetType(PC pc, PCMGType *type) { 1339 PC_MG *mg = (PC_MG *)pc->data; 1340 1341 PetscFunctionBegin; 1342 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1343 *type = mg->am; 1344 PetscFunctionReturn(0); 1345 } 1346 1347 /*@ 1348 PCMGSetCycleType - Sets the type cycles to use. Use `PCMGSetCycleTypeOnLevel()` for more 1349 complicated cycling. 1350 1351 Logically Collective on pc 1352 1353 Input Parameters: 1354 + pc - the multigrid context 1355 - n - either `PC_MG_CYCLE_V` or `PC_MG_CYCLE_W` 1356 1357 Options Database Key: 1358 . 
-pc_mg_cycle_type <v,w> - provide the cycle desired 1359 1360 Level: advanced 1361 1362 .seealso: `PCMG`, `PCMGSetCycleTypeOnLevel()`, `PCMGType`, `PCMGCycleType` 1363 @*/ 1364 PetscErrorCode PCMGSetCycleType(PC pc, PCMGCycleType n) { 1365 PC_MG *mg = (PC_MG *)pc->data; 1366 PC_MG_Levels **mglevels = mg->levels; 1367 PetscInt i, levels; 1368 1369 PetscFunctionBegin; 1370 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1371 PetscValidLogicalCollectiveEnum(pc, n, 2); 1372 PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling"); 1373 levels = mglevels[0]->levels; 1374 for (i = 0; i < levels; i++) mglevels[i]->cycles = n; 1375 PetscFunctionReturn(0); 1376 } 1377 1378 /*@ 1379 PCMGMultiplicativeSetCycles - Sets the number of cycles to use for each preconditioner step 1380 of multigrid when `PCMGType` is `PC_MG_MULTIPLICATIVE` 1381 1382 Logically Collective on pc 1383 1384 Input Parameters: 1385 + pc - the multigrid context 1386 - n - number of cycles (default is 1) 1387 1388 Options Database Key: 1389 . 
-pc_mg_multiplicative_cycles n - set the number of cycles 1390 1391 Level: advanced 1392 1393 Note: 1394 This is not associated with setting a v or w cycle, that is set with `PCMGSetCycleType()` 1395 1396 .seealso: `PCMGSetCycleTypeOnLevel()`, `PCMGSetCycleType()`, `PCMGCycleType`, `PCMGType` 1397 @*/ 1398 PetscErrorCode PCMGMultiplicativeSetCycles(PC pc, PetscInt n) { 1399 PC_MG *mg = (PC_MG *)pc->data; 1400 1401 PetscFunctionBegin; 1402 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1403 PetscValidLogicalCollectiveInt(pc, n, 2); 1404 mg->cyclesperpcapply = n; 1405 PetscFunctionReturn(0); 1406 } 1407 1408 PetscErrorCode PCMGSetGalerkin_MG(PC pc, PCMGGalerkinType use) { 1409 PC_MG *mg = (PC_MG *)pc->data; 1410 1411 PetscFunctionBegin; 1412 mg->galerkin = use; 1413 PetscFunctionReturn(0); 1414 } 1415 1416 /*@ 1417 PCMGSetGalerkin - Causes the coarser grid matrices to be computed from the 1418 finest grid via the Galerkin process: A_i-1 = r_i * A_i * p_i 1419 1420 Logically Collective on pc 1421 1422 Input Parameters: 1423 + pc - the multigrid context 1424 - use - one of `PC_MG_GALERKIN_BOTH`, `PC_MG_GALERKIN_PMAT`, `PC_MG_GALERKIN_MAT`, or `PC_MG_GALERKIN_NONE` 1425 1426 Options Database Key: 1427 . -pc_mg_galerkin <both,pmat,mat,none> - set the matrices to form via the Galerkin process 1428 1429 Level: intermediate 1430 1431 Note: 1432 Some codes that use `PCMG` such as `PCGAMG` use Galerkin internally while constructing the hierarchy and thus do not 1433 use the `PCMG` construction of the coarser grids. 1434 1435 .seealso: `PCMG`, `PCMGGetGalerkin()`, `PCMGGalerkinType` 1436 @*/ 1437 PetscErrorCode PCMGSetGalerkin(PC pc, PCMGGalerkinType use) { 1438 PetscFunctionBegin; 1439 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1440 PetscTryMethod(pc, "PCMGSetGalerkin_C", (PC, PCMGGalerkinType), (pc, use)); 1441 PetscFunctionReturn(0); 1442 } 1443 1444 /*@ 1445 PCMGGetGalerkin - Checks if Galerkin multigrid is being used, i.e. 
A_i-1 = r_i * A_i * p_i 1446 1447 Not Collective 1448 1449 Input Parameter: 1450 . pc - the multigrid context 1451 1452 Output Parameter: 1453 . galerkin - one of `PC_MG_GALERKIN_BOTH`,`PC_MG_GALERKIN_PMAT`,`PC_MG_GALERKIN_MAT`, `PC_MG_GALERKIN_NONE`, or `PC_MG_GALERKIN_EXTERNAL` 1454 1455 Level: intermediate 1456 1457 .seealso: `PCMG`, `PCMGSetGalerkin()`, `PCMGGalerkinType` 1458 @*/ 1459 PetscErrorCode PCMGGetGalerkin(PC pc, PCMGGalerkinType *galerkin) { 1460 PC_MG *mg = (PC_MG *)pc->data; 1461 1462 PetscFunctionBegin; 1463 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1464 *galerkin = mg->galerkin; 1465 PetscFunctionReturn(0); 1466 } 1467 1468 PetscErrorCode PCMGSetAdaptInterpolation_MG(PC pc, PetscBool adapt) { 1469 PC_MG *mg = (PC_MG *)pc->data; 1470 1471 PetscFunctionBegin; 1472 mg->adaptInterpolation = adapt; 1473 PetscFunctionReturn(0); 1474 } 1475 1476 PetscErrorCode PCMGGetAdaptInterpolation_MG(PC pc, PetscBool *adapt) { 1477 PC_MG *mg = (PC_MG *)pc->data; 1478 1479 PetscFunctionBegin; 1480 *adapt = mg->adaptInterpolation; 1481 PetscFunctionReturn(0); 1482 } 1483 1484 PetscErrorCode PCMGSetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType ctype) { 1485 PC_MG *mg = (PC_MG *)pc->data; 1486 1487 PetscFunctionBegin; 1488 mg->adaptInterpolation = ctype != PCMG_ADAPT_NONE ? 
PETSC_TRUE : PETSC_FALSE; 1489 mg->coarseSpaceType = ctype; 1490 PetscFunctionReturn(0); 1491 } 1492 1493 PetscErrorCode PCMGGetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType *ctype) { 1494 PC_MG *mg = (PC_MG *)pc->data; 1495 1496 PetscFunctionBegin; 1497 *ctype = mg->coarseSpaceType; 1498 PetscFunctionReturn(0); 1499 } 1500 1501 PetscErrorCode PCMGSetAdaptCR_MG(PC pc, PetscBool cr) { 1502 PC_MG *mg = (PC_MG *)pc->data; 1503 1504 PetscFunctionBegin; 1505 mg->compatibleRelaxation = cr; 1506 PetscFunctionReturn(0); 1507 } 1508 1509 PetscErrorCode PCMGGetAdaptCR_MG(PC pc, PetscBool *cr) { 1510 PC_MG *mg = (PC_MG *)pc->data; 1511 1512 PetscFunctionBegin; 1513 *cr = mg->compatibleRelaxation; 1514 PetscFunctionReturn(0); 1515 } 1516 1517 /*@C 1518 PCMGSetAdaptCoarseSpaceType - Set the type of adaptive coarse space. 1519 1520 Adapts or creates the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated. 1521 1522 Logically Collective on pc 1523 1524 Input Parameters: 1525 + pc - the multigrid context 1526 - ctype - the type of coarse space 1527 1528 Options Database Keys: 1529 + -pc_mg_adapt_interp_n <int> - The number of modes to use 1530 - -pc_mg_adapt_interp_coarse_space <type> - The type of coarse space: none, polynomial, harmonic, eigenvector, generalized_eigenvector, gdsw 1531 1532 Level: intermediate 1533 1534 .seealso: `PCMG`, `PCMGCoarseSpaceType`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()` 1535 @*/ 1536 PetscErrorCode PCMGSetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType ctype) { 1537 PetscFunctionBegin; 1538 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1539 PetscValidLogicalCollectiveEnum(pc, ctype, 2); 1540 PetscTryMethod(pc, "PCMGSetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType), (pc, ctype)); 1541 PetscFunctionReturn(0); 1542 } 1543 1544 /*@C 1545 PCMGGetAdaptCoarseSpaceType - Get the type of adaptive coarse space. 
1546 1547 Not Collective 1548 1549 Input Parameter: 1550 . pc - the multigrid context 1551 1552 Output Parameter: 1553 . ctype - the type of coarse space 1554 1555 Level: intermediate 1556 1557 .seealso: `PCMG`, `PCMGCoarseSpaceType`, `PCMGSetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()` 1558 @*/ 1559 PetscErrorCode PCMGGetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType *ctype) { 1560 PetscFunctionBegin; 1561 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1562 PetscValidPointer(ctype, 2); 1563 PetscUseMethod(pc, "PCMGGetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType *), (pc, ctype)); 1564 PetscFunctionReturn(0); 1565 } 1566 1567 /* MATT: REMOVE? */ 1568 /*@ 1569 PCMGSetAdaptInterpolation - Adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated. 1570 1571 Logically Collective on pc 1572 1573 Input Parameters: 1574 + pc - the multigrid context 1575 - adapt - flag for adaptation of the interpolator 1576 1577 Options Database Keys: 1578 + -pc_mg_adapt_interp - Turn on adaptation 1579 . 
-pc_mg_adapt_interp_n <int> - The number of modes to use, should be divisible by dimension 1580 - -pc_mg_adapt_interp_coarse_space <type> - The type of coarse space: polynomial, harmonic, eigenvector, generalized_eigenvector 1581 1582 Level: intermediate 1583 1584 .seealso: `PCMG`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()` 1585 @*/ 1586 PetscErrorCode PCMGSetAdaptInterpolation(PC pc, PetscBool adapt) { 1587 PetscFunctionBegin; 1588 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1589 PetscTryMethod(pc, "PCMGSetAdaptInterpolation_C", (PC, PetscBool), (pc, adapt)); 1590 PetscFunctionReturn(0); 1591 } 1592 1593 /*@ 1594 PCMGGetAdaptInterpolation - Get the flag to adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, 1595 and thus accurately interpolated. 1596 1597 Not collective 1598 1599 Input Parameter: 1600 . pc - the multigrid context 1601 1602 Output Parameter: 1603 . adapt - flag for adaptation of the interpolator 1604 1605 Level: intermediate 1606 1607 .seealso: `PCMG`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()` 1608 @*/ 1609 PetscErrorCode PCMGGetAdaptInterpolation(PC pc, PetscBool *adapt) { 1610 PetscFunctionBegin; 1611 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1612 PetscValidBoolPointer(adapt, 2); 1613 PetscUseMethod(pc, "PCMGGetAdaptInterpolation_C", (PC, PetscBool *), (pc, adapt)); 1614 PetscFunctionReturn(0); 1615 } 1616 1617 /*@ 1618 PCMGSetAdaptCR - Monitor the coarse space quality using an auxiliary solve with compatible relaxation. 1619 1620 Logically Collective on pc 1621 1622 Input Parameters: 1623 + pc - the multigrid context 1624 - cr - flag for compatible relaxation 1625 1626 Options Database Key: 1627 . 
-pc_mg_adapt_cr - Turn on compatible relaxation 1628 1629 Level: intermediate 1630 1631 .seealso: `PCMG`, `PCMGGetAdaptCR()`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()` 1632 @*/ 1633 PetscErrorCode PCMGSetAdaptCR(PC pc, PetscBool cr) { 1634 PetscFunctionBegin; 1635 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1636 PetscTryMethod(pc, "PCMGSetAdaptCR_C", (PC, PetscBool), (pc, cr)); 1637 PetscFunctionReturn(0); 1638 } 1639 1640 /*@ 1641 PCMGGetAdaptCR - Get the flag to monitor coarse space quality using an auxiliary solve with compatible relaxation. 1642 1643 Not collective 1644 1645 Input Parameter: 1646 . pc - the multigrid context 1647 1648 Output Parameter: 1649 . cr - flag for compatible relaxaion 1650 1651 Level: intermediate 1652 1653 .seealso: `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()` 1654 @*/ 1655 PetscErrorCode PCMGGetAdaptCR(PC pc, PetscBool *cr) { 1656 PetscFunctionBegin; 1657 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1658 PetscValidBoolPointer(cr, 2); 1659 PetscUseMethod(pc, "PCMGGetAdaptCR_C", (PC, PetscBool *), (pc, cr)); 1660 PetscFunctionReturn(0); 1661 } 1662 1663 /*@ 1664 PCMGSetNumberSmooth - Sets the number of pre and post-smoothing steps to use 1665 on all levels. Use `PCMGDistinctSmoothUp()` to create separate up and down smoothers if you want different numbers of 1666 pre- and post-smoothing steps. 1667 1668 Logically Collective on pc 1669 1670 Input Parameters: 1671 + mg - the multigrid context 1672 - n - the number of smoothing steps 1673 1674 Options Database Key: 1675 . -mg_levels_ksp_max_it <n> - Sets number of pre and post-smoothing steps 1676 1677 Level: advanced 1678 1679 Note: 1680 This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid. 
1681 1682 .seealso: `PCMG`, `PCMGSetDistinctSmoothUp()` 1683 @*/ 1684 PetscErrorCode PCMGSetNumberSmooth(PC pc, PetscInt n) { 1685 PC_MG *mg = (PC_MG *)pc->data; 1686 PC_MG_Levels **mglevels = mg->levels; 1687 PetscInt i, levels; 1688 1689 PetscFunctionBegin; 1690 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1691 PetscValidLogicalCollectiveInt(pc, n, 2); 1692 PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling"); 1693 levels = mglevels[0]->levels; 1694 1695 for (i = 1; i < levels; i++) { 1696 PetscCall(KSPSetTolerances(mglevels[i]->smoothu, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, n)); 1697 PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, n)); 1698 mg->default_smoothu = n; 1699 mg->default_smoothd = n; 1700 } 1701 PetscFunctionReturn(0); 1702 } 1703 1704 /*@ 1705 PCMGSetDistinctSmoothUp - sets the up (post) smoother to be a separate `KSP` from the down (pre) smoother on all levels 1706 and adds the suffix _up to the options name 1707 1708 Logically Collective on pc 1709 1710 Input Parameter: 1711 . pc - the preconditioner context 1712 1713 Options Database Key: 1714 . -pc_mg_distinct_smoothup <bool> - use distinct smoothing objects 1715 1716 Level: advanced 1717 1718 Note: 1719 This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid. 

.seealso: `PCMG`, `PCMGSetNumberSmooth()`
@*/
PetscErrorCode PCMGSetDistinctSmoothUp(PC pc) {
  PC_MG *mg = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PetscInt i, levels;
  KSP subksp;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
  levels = mglevels[0]->levels;

  /* level 0 is the coarse solve, which has no separate up smoother */
  for (i = 1; i < levels; i++) {
    const char *prefix = NULL;
    /* make sure smoother up and down are different */
    PetscCall(PCMGGetSmootherUp(pc, i, &subksp));
    /* the up smoother inherits the down smoother's prefix plus an "up_" suffix */
    PetscCall(KSPGetOptionsPrefix(mglevels[i]->smoothd, &prefix));
    PetscCall(KSPSetOptionsPrefix(subksp, prefix));
    PetscCall(KSPAppendOptionsPrefix(subksp, "up_"));
  }
  PetscFunctionReturn(0);
}

/* No new matrices are created, and the coarse operator matrices are the references to the original ones.
   The returned array has nlevels entries allocated but only nlevels-1 filled (interpolations between
   consecutive levels); *num_levels reports nlevels. The caller owns the array and one reference per Mat. */
PetscErrorCode PCGetInterpolations_MG(PC pc, PetscInt *num_levels, Mat *interpolations[]) {
  PC_MG *mg = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  Mat *mat;
  PetscInt l;

  PetscFunctionBegin;
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
  PetscCall(PetscMalloc1(mg->nlevels, &mat));
  /* mat[l-1] is the interpolation from level l-1 to level l; level 0 has none */
  for (l = 1; l < mg->nlevels; l++) {
    mat[l - 1] = mglevels[l]->interpolate;
    PetscCall(PetscObjectReference((PetscObject)mat[l - 1]));
  }
  *num_levels = mg->nlevels;
  *interpolations = mat;
  PetscFunctionReturn(0);
}

/* No new matrices are created, and the coarse operator matrices are the references to the original ones.
   mat[l] is the smoother Pmat on level l for l = 0..nlevels-2; the finest level operator is not included. */
PetscErrorCode PCGetCoarseOperators_MG(PC pc, PetscInt *num_levels, Mat *coarseOperators[]) {
  PC_MG *mg = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PetscInt l;
  Mat *mat;

  PetscFunctionBegin;
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
  PetscCall(PetscMalloc1(mg->nlevels, &mat));
  for (l = 0; l < mg->nlevels - 1; l++) {
    /* take the preconditioning matrix from each level's down smoother */
    PetscCall(KSPGetOperators(mglevels[l]->smoothd, NULL, &(mat[l])));
    PetscCall(PetscObjectReference((PetscObject)mat[l]));
  }
  *num_levels = mg->nlevels;
  *coarseOperators = mat;
  PetscFunctionReturn(0);
}

/*@C
  PCMGRegisterCoarseSpaceConstructor - Adds a method to the `PCMG` package for coarse space construction.

  Not collective

  Input Parameters:
+ name - name of the constructor
- function - constructor routine

  Calling sequence for the routine:
$ my_csp(PC pc, PetscInt l, DM dm, KSP smooth, PetscInt Nc, Mat initGuess, Mat *coarseSp)
+ pc - The PC object
. l - The multigrid level, 0 is the coarse level
. dm - The DM for this level
. smooth - The level smoother
. Nc - The size of the coarse space
. initGuess - Basis for an initial guess for the space
- coarseSp - A basis for the computed coarse space

  Level: advanced

  Developer Note:
  How come this is not used by `PCGAMG`?

.seealso: `PCMG`, `PCMGGetCoarseSpaceConstructor()`, `PCRegister()`
@*/
PetscErrorCode PCMGRegisterCoarseSpaceConstructor(const char name[], PetscErrorCode (*function)(PC, PetscInt, DM, KSP, PetscInt, Mat, Mat *)) {
  PetscFunctionBegin;
  /* ensure the package (and hence the function list) is initialized before registering */
  PetscCall(PCInitializePackage());
  PetscCall(PetscFunctionListAdd(&PCMGCoarseList, name, function));
  PetscFunctionReturn(0);
}

/*@C
  PCMGGetCoarseSpaceConstructor - Returns the given coarse space construction method.

  Not collective

  Input Parameter:
. name - name of the constructor

  Output Parameter:
.
function - constructor routine

  Level: advanced

.seealso: `PCMG`, `PCMGRegisterCoarseSpaceConstructor()`, `PCRegister()`
@*/
PetscErrorCode PCMGGetCoarseSpaceConstructor(const char name[], PetscErrorCode (**function)(PC, PetscInt, DM, KSP, PetscInt, Mat, Mat *)) {
  PetscFunctionBegin;
  /* look the constructor up in the global registry; *function is NULL if the name is not registered */
  PetscCall(PetscFunctionListFind(PCMGCoarseList, name, function));
  PetscFunctionReturn(0);
}

/*MC
   PCMG - Use multigrid preconditioning. This preconditioner requires you provide additional
   information about the coarser grid matrices and restriction/interpolation operators.

   Options Database Keys:
+  -pc_mg_levels <nlevels> - number of levels including finest
.  -pc_mg_cycle_type <v,w> - provide the cycle desired
.  -pc_mg_type <additive,multiplicative,full,kaskade> - multiplicative is the default
.  -pc_mg_log - log information about time spent on each level of the solver
.  -pc_mg_distinct_smoothup - configure up (after interpolation) and down (before restriction) smoothers separately (with different options prefixes)
.  -pc_mg_galerkin <both,pmat,mat,none> - use Galerkin process to compute coarser operators, i.e. Acoarse = R A R'
.  -pc_mg_multiplicative_cycles - number of cycles to use as the preconditioner (defaults to 1)
.  -pc_mg_dump_matlab - dumps the matrices for each level and the restriction/interpolation matrices
                        to the Socket viewer for reading from MATLAB.
-  -pc_mg_dump_binary - dumps the matrices for each level and the restriction/interpolation matrices
                        to the binary output file called binaryoutput

   Notes:
   If one uses a Krylov method such as `KSPGMRES` or `KSPCG` as the smoother then one must use `KSPFGMRES`, `KSPGCR`, or `KSPRICHARDSON` as the outer Krylov method

   When run with a single level the smoother options are used on that level NOT the coarse grid solver options

   When run with `KSPRICHARDSON` the convergence test changes slightly if monitor is turned on. The iteration count may change slightly. This
   is because without monitoring the residual norm is computed WITHIN each multigrid cycle on the finest level after the pre-smoothing
   (because the residual has just been computed for the multigrid algorithm and is hence available for free) while with monitoring the
   residual is computed at the end of each cycle.

   Level: intermediate

.seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCMGType`, `PCEXOTIC`, `PCGAMG`, `PCML`, `PCHYPRE`
          `PCMGSetLevels()`, `PCMGGetLevels()`, `PCMGSetType()`, `PCMGSetCycleType()`,
          `PCMGSetDistinctSmoothUp()`, `PCMGGetCoarseSolve()`, `PCMGSetResidual()`, `PCMGSetInterpolation()`,
          `PCMGSetRestriction()`, `PCMGGetSmoother()`, `PCMGGetSmootherUp()`, `PCMGGetSmootherDown()`,
          `PCMGSetCycleTypeOnLevel()`, `PCMGSetRhs()`, `PCMGSetX()`, `PCMGSetR()`,
          `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
M*/

/* Constructor invoked by PCSetType(pc, PCMG): allocates the PC_MG context, sets defaults,
   installs the function table, and composes the PCMG-specific methods onto the PC object */
PETSC_EXTERN PetscErrorCode PCCreate_MG(PC pc) {
  PC_MG *mg;

  PetscFunctionBegin;
  PetscCall(PetscNew(&mg));
  pc->data = mg;
  /* defaults: -1 means "not yet set" for nlevels/Nc/eigenvalue */
  mg->nlevels = -1;
  mg->am = PC_MG_MULTIPLICATIVE;
  mg->galerkin = PC_MG_GALERKIN_NONE;
  mg->adaptInterpolation = PETSC_FALSE;
  mg->Nc = -1;
  mg->eigenvalue = -1;

  pc->useAmat = PETSC_TRUE;

  pc->ops->apply = PCApply_MG;
  pc->ops->applytranspose = PCApplyTranspose_MG;
  pc->ops->matapply = PCMatApply_MG;
  pc->ops->setup = PCSetUp_MG;
  pc->ops->reset = PCReset_MG;
  pc->ops->destroy = PCDestroy_MG;
  pc->ops->setfromoptions = PCSetFromOptions_MG;
  pc->ops->view = PCView_MG;

  /* composed-data slot used to stash eigenvalue estimates on objects */
  PetscCall(PetscObjectComposedDataRegister(&mg->eigenvalue));
  /* composed functions dispatched by the public PCMG* API routines */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", PCMGSetGalerkin_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", PCMGGetLevels_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", PCMGSetLevels_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", PCGetInterpolations_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", PCGetCoarseOperators_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", PCMGSetAdaptInterpolation_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", PCMGGetAdaptInterpolation_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", PCMGSetAdaptCR_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", PCMGGetAdaptCR_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", PCMGSetAdaptCoarseSpaceType_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", PCMGGetAdaptCoarseSpaceType_MG));
  PetscFunctionReturn(0);
}