/*
     Defines the multigrid preconditioner interface.
*/
#include <petsc/private/pcmgimpl.h> /*I "petscksp.h" I*/
#include <petsc/private/kspimpl.h>
#include <petscdm.h>
PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC, PetscBool *);

/*
   Contains the list of registered coarse space construction routines
*/
PetscFunctionList PCMGCoarseList = NULL;

/*
   PCMGMCycle_Private - Recursively runs one multiplicative multigrid cycle starting at the
   level pointed to by mglevelsin: pre-smooth, form the residual, restrict it, recurse on the
   next-coarser level (once below level 1 for a V-cycle, mglevels->cycles times for a W-cycle),
   interpolate-and-add the coarse correction, then post-smooth.

   Input Parameters:
+  pc         - the PCMG preconditioner context
.  mglevelsin - pointer into the array of PC_MG_Levels; the coarser level is *(mglevelsin - 1)
.  transpose  - run the transposed cycle (smoothers applied transposed and in reverse roles,
                restriction done with the interpolation operator); not supported with matapp
-  matapp     - operate on blocks of right-hand sides stored as Mats (KSPMatSolve/MatMatRestrict)
                instead of single Vecs

   Output Parameter:
.  reason - if non-NULL and a tolerance was set (mg->ttol), receives the Richardson convergence
            reason when the finest-level residual test passes; the cycle then returns early
*/
PetscErrorCode PCMGMCycle_Private(PC pc, PC_MG_Levels **mglevelsin, PetscBool transpose, PetscBool matapp, PCRichardsonConvergedReason *reason)
{
  PC_MG        *mg = (PC_MG *)pc->data;
  PC_MG_Levels *mgc, *mglevels = *mglevelsin;
  /* one coarse visit below level 1 (V-cycle bottom), otherwise the per-level cycle count (W-cycle) */
  PetscInt      cycles = (mglevels->level == 1) ? 1 : (PetscInt)mglevels->cycles;

  PetscFunctionBegin;
  if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0));
  if (!transpose) {
    if (matapp) {
      PetscCall(KSPMatSolve(mglevels->smoothd, mglevels->B, mglevels->X)); /* pre-smooth */
      PetscCall(KSPCheckSolve(mglevels->smoothd, pc, NULL));
    } else {
      PetscCall(KSPSolve(mglevels->smoothd, mglevels->b, mglevels->x)); /* pre-smooth */
      PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x));
    }
  } else {
    PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
    /* transposed cycle: the up-smoother runs first */
    PetscCall(KSPSolveTranspose(mglevels->smoothu, mglevels->b, mglevels->x)); /* transpose of post-smooth */
    PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x));
  }
  if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0));
  if (mglevels->level) { /* not the coarsest grid */
    if (mglevels->eventresidual) PetscCall(PetscLogEventBegin(mglevels->eventresidual, 0, 0, 0, 0));
    /* lazily create the block-residual Mat the first time block (matapp) solves are used */
    if (matapp && !mglevels->R) PetscCall(MatDuplicate(mglevels->B, MAT_DO_NOT_COPY_VALUES, &mglevels->R));
    if (!transpose) {
      if (matapp) PetscCall((*mglevels->matresidual)(mglevels->A, mglevels->B, mglevels->X, mglevels->R));
      else PetscCall((*mglevels->residual)(mglevels->A, mglevels->b, mglevels->x, mglevels->r));
    } else {
      if (matapp) PetscCall((*mglevels->matresidualtranspose)(mglevels->A, mglevels->B, mglevels->X, mglevels->R));
      else PetscCall((*mglevels->residualtranspose)(mglevels->A, mglevels->b, mglevels->x, mglevels->r));
    }
    if (mglevels->eventresidual) PetscCall(PetscLogEventEnd(mglevels->eventresidual, 0, 0, 0, 0));

    /* if on finest level and have convergence criteria set */
    if (mglevels->level == mglevels->levels - 1 && mg->ttol && reason) {
      PetscReal rnorm;
      PetscCall(VecNorm(mglevels->r, NORM_2, &rnorm));
      if (rnorm <= mg->ttol) {
        if (rnorm < mg->abstol) {
          *reason = PCRICHARDSON_CONVERGED_ATOL;
          PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than absolute tolerance %g\n", (double)rnorm, (double)mg->abstol));
        } else {
          *reason = PCRICHARDSON_CONVERGED_RTOL;
          PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than relative tolerance times initial residual norm %g\n", (double)rnorm, (double)mg->ttol));
        }
        PetscFunctionReturn(PETSC_SUCCESS);
      }
    }

    mgc = *(mglevelsin - 1);
    if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0));
    if (!transpose) {
      if (matapp) PetscCall(MatMatRestrict(mglevels->restrct, mglevels->R, &mgc->B));
      else PetscCall(MatRestrict(mglevels->restrct, mglevels->r, mgc->b));
    } else {
      /* transposed cycle restricts with the interpolation operator (applied transposed by MatRestrict) */
      if (matapp) PetscCall(MatMatRestrict(mglevels->interpolate, mglevels->R, &mgc->B));
      else PetscCall(MatRestrict(mglevels->interpolate, mglevels->r, mgc->b));
    }
    if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0));
    /* zero the initial guess on the coarser level before recursing */
    if (matapp) {
      if (!mgc->X) {
        PetscCall(MatDuplicate(mgc->B, MAT_DO_NOT_COPY_VALUES, &mgc->X));
      } else {
        PetscCall(MatZeroEntries(mgc->X));
      }
    } else {
      PetscCall(VecZeroEntries(mgc->x));
    }
    while (cycles--) PetscCall(PCMGMCycle_Private(pc, mglevelsin - 1, transpose, matapp, reason));
    if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0));
    if (!transpose) {
      if (matapp) PetscCall(MatMatInterpolateAdd(mglevels->interpolate, mgc->X, mglevels->X, &mglevels->X));
      else PetscCall(MatInterpolateAdd(mglevels->interpolate, mgc->x, mglevels->x, mglevels->x));
    } else {
      PetscCall(MatInterpolateAdd(mglevels->restrct, mgc->x, mglevels->x, mglevels->x));
    }
    if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0));
    if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0));
    if (!transpose) {
      if (matapp) {
        PetscCall(KSPMatSolve(mglevels->smoothu, mglevels->B, mglevels->X)); /* post smooth */
        PetscCall(KSPCheckSolve(mglevels->smoothu, pc, NULL));
      } else {
        PetscCall(KSPSolve(mglevels->smoothu, mglevels->b, mglevels->x)); /* post smooth */
        PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x));
      }
    } else {
      PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
      PetscCall(KSPSolveTranspose(mglevels->smoothd, mglevels->b, mglevels->x)); /* post smooth */
      PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x));
    }
    if (mglevels->cr) {
      PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
      /* TODO Turn on copy and turn off noisy if we have an exact solution
      PetscCall(VecCopy(mglevels->x, mglevels->crx));
      PetscCall(VecCopy(mglevels->b, mglevels->crb)); */
      PetscCall(KSPSetNoisy_Private(mglevels->crx));
      PetscCall(KSPSolve(mglevels->cr, mglevels->crb, mglevels->crx)); /* compatible relaxation */
      PetscCall(KSPCheckSolve(mglevels->cr, pc, mglevels->crx));
    }
    if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   PCApplyRichardson_MG - Applies the multigrid cycle as a Richardson iteration on the full
   system A x = b (not merely as a preconditioner application): up to `its` cycles are run,
   stopping early when the finest-level residual test in PCMGMCycle_Private() fires.

   Input Parameters:
+  pc        - the PCMG context
.  b         - right-hand side
.  w         - work vector used to hold the initial residual when zeroguess is false
.  rtol/abstol/dtol - Richardson tolerances; combined into mg->ttol below
.  its       - maximum number of cycles
-  zeroguess - whether x starts as zero (then ||b|| is the initial residual norm)

   Output Parameters:
+  x      - solution (in/out)
.  outits - number of cycles actually performed
-  reason - convergence reason
*/
static PetscErrorCode PCApplyRichardson_MG(PC pc, Vec b, Vec x, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason)
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PC             tpc;
  PetscBool      changeu, changed;
  PetscInt       levels = mglevels[0]->levels, i;

  PetscFunctionBegin;
  /* When the DM is supplying the matrix then it will not exist until here */
  for (i = 0; i < levels; i++) {
    if (!mglevels[i]->A) {
      PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL));
      PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A));
    }
  }

  /* ask the finest-level smoothers whether their PC modifies the rhs during PreSolve */
  PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc));
  PetscCall(PCPreSolveChangeRHS(tpc, &changed));
  PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc));
  PetscCall(PCPreSolveChangeRHS(tpc, &changeu));
  if (!changed && !changeu) {
    /* safe to alias the caller's rhs; the reference is cleared again before returning */
    PetscCall(VecDestroy(&mglevels[levels - 1]->b));
    mglevels[levels - 1]->b = b;
  } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */
    if (!mglevels[levels - 1]->b) {
      Vec *vec;

      PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL));
      mglevels[levels - 1]->b = *vec;
      PetscCall(PetscFree(vec));
    }
    PetscCall(VecCopy(b, mglevels[levels - 1]->b));
  }
  mglevels[levels - 1]->x = x;

  mg->rtol   = rtol;
  mg->abstol = abstol;
  mg->dtol   = dtol;
  if (rtol) {
    /* compute initial residual norm for relative convergence test */
    PetscReal rnorm;
    if (zeroguess) {
      PetscCall(VecNorm(b, NORM_2, &rnorm));
    } else {
      PetscCall((*mglevels[levels - 1]->residual)(mglevels[levels - 1]->A, b, x, w));
      PetscCall(VecNorm(w, NORM_2, &rnorm));
    }
    mg->ttol = PetscMax(rtol * rnorm, abstol);
  } else if (abstol) mg->ttol = abstol;
  else mg->ttol = 0.0;

  /* since smoother is applied to full system, not just residual we need to make sure that smoothers don't
     stop prematurely due to small residual */
  for (i = 1; i < levels; i++) {
    PetscCall(KSPSetTolerances(mglevels[i]->smoothu, 0, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
    if (mglevels[i]->smoothu != mglevels[i]->smoothd) {
      /* For Richardson the initial guess is nonzero since it is solving in each cycle the original system not just applying as a preconditioner */
      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE));
      PetscCall(KSPSetTolerances(mglevels[i]->smoothd, 0, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
    }
  }

  *reason = (PCRichardsonConvergedReason)0;
  for (i = 0; i < its; i++) {
    PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, PETSC_FALSE, PETSC_FALSE, reason));
    if (*reason) break;
  }
  if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
  *outits = i;
  /* drop the borrowed reference to the caller's rhs */
  if (!changed && !changeu) mglevels[levels - 1]->b = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   PCReset_MG - Frees the per-level work vectors/matrices and transfer operators and resets
   the level KSPs, but keeps the level structure itself (see PCDestroy_MG for full teardown).
*/
PetscErrorCode PCReset_MG(PC pc)
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PetscInt       i, n;

  PetscFunctionBegin;
  if (mglevels) {
    n = mglevels[0]->levels;
    for (i = 0; i < n - 1; i++) {
      /* residual/rhs/solution storage: r and R live on levels 1..n-1, b/x/B/X on levels 0..n-2 */
      PetscCall(VecDestroy(&mglevels[i + 1]->r));
      PetscCall(VecDestroy(&mglevels[i]->b));
      PetscCall(VecDestroy(&mglevels[i]->x));
      PetscCall(MatDestroy(&mglevels[i + 1]->R));
      PetscCall(MatDestroy(&mglevels[i]->B));
      PetscCall(MatDestroy(&mglevels[i]->X));
      PetscCall(VecDestroy(&mglevels[i]->crx));
      PetscCall(VecDestroy(&mglevels[i]->crb));
      PetscCall(MatDestroy(&mglevels[i + 1]->restrct));
      PetscCall(MatDestroy(&mglevels[i + 1]->interpolate));
      PetscCall(MatDestroy(&mglevels[i + 1]->inject));
      PetscCall(VecDestroy(&mglevels[i + 1]->rscale));
    }
    PetscCall(VecDestroy(&mglevels[n - 1]->crx));
    PetscCall(VecDestroy(&mglevels[n - 1]->crb));
    /* this is not null only if the smoother on the finest level
       changes the rhs during PreSolve */
    PetscCall(VecDestroy(&mglevels[n - 1]->b));
    PetscCall(MatDestroy(&mglevels[n - 1]->B));

    for (i = 0; i < n; i++) {
      PetscCall(MatDestroy(&mglevels[i]->coarseSpace));
      PetscCall(MatDestroy(&mglevels[i]->A));
      if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPReset(mglevels[i]->smoothd));
      PetscCall(KSPReset(mglevels[i]->smoothu));
      if (mglevels[i]->cr) PetscCall(KSPReset(mglevels[i]->cr));
    }
    mg->Nc = 0;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementing CR

   We only want to make corrections that ``do not change'' the coarse solution. What we mean by not changing is that if I prolong my coarse solution to the fine grid and then inject that fine solution back to the coarse grid, I get the same answer. Injection is what Brannick calls R. We want the complementary projector to Inj, which we will call S, after Brannick, so that Inj S = 0. Now the orthogonal projector onto the range of Inj^T is

     Inj^T (Inj Inj^T)^{-1} Inj

   and if Inj is a VecScatter, as it is now in PETSc, we have

     Inj^T Inj

   and

     S = I - Inj^T Inj

   since

     Inj S = Inj - (Inj Inj^T) Inj = 0.

   Brannick suggests

     A \to S^T A S \qquad\mathrm{and}\qquad M \to S^T M S

   but I do not think his :math:`S^T S = I` is correct. Our S is an orthogonal projector, so :math:`S^T S = S^2 = S`. We will use

     M^{-1} A \to S M^{-1} A S

   In fact, since it is somewhat hard in PETSc to do the symmetric application, we will just apply S on the left.

   Check: || Inj P - I ||_F < tol
   Check: In general, Inj Inj^T = I
*/

/* Context for the compatible-relaxation (CR) shell PC built by CreateCR_Private() */
typedef struct {
  PC       mg;  /* The PCMG object */
  PetscInt l;   /* The multigrid level for this solver */
  Mat      Inj; /* The injection matrix */
  Mat      S;   /* I - Inj^T Inj */
} CRContext;

/* Builds S = I - Inj^T Inj from the PCMG injection operator for level ctx->l */
static PetscErrorCode CRSetup_Private(PC pc)
{
  CRContext *ctx;
  Mat        It;

  PetscFunctionBeginUser;
  PetscCall(PCShellGetContext(pc, &ctx));
  PetscCall(PCMGGetInjection(ctx->mg, ctx->l, &It));
  PetscCheck(It, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "CR requires that injection be defined for this PCMG");
  /* PCMG stores injection as coarse-from-fine; its transpose is Brannick's Inj */
  PetscCall(MatCreateTranspose(It, &ctx->Inj));
  /* normal matrix Inj^T Inj, then scale/shift to form I - Inj^T Inj */
  PetscCall(MatCreateNormal(ctx->Inj, &ctx->S));
  PetscCall(MatScale(ctx->S, -1.0));
  PetscCall(MatShift(ctx->S, 1.0));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Applies the complementary projector: y = S x */
static PetscErrorCode CRApply_Private(PC pc, Vec x, Vec y)
{
  CRContext *ctx;

  PetscFunctionBeginUser;
  PetscCall(PCShellGetContext(pc, &ctx));
  PetscCall(MatMult(ctx->S, x, y));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Frees the CR shell context and clears it from the PC */
static PetscErrorCode CRDestroy_Private(PC pc)
{
  CRContext *ctx;

  PetscFunctionBeginUser;
  PetscCall(PCShellGetContext(pc, &ctx));
  PetscCall(MatDestroy(&ctx->Inj));
  PetscCall(MatDestroy(&ctx->S));
  PetscCall(PetscFree(ctx));
  PetscCall(PCShellSetContext(pc, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Creates the PCSHELL applying S = I - Inj^T Inj for compatible relaxation on level l */
static PetscErrorCode CreateCR_Private(PC pc, PetscInt l, PC *cr)
{
  CRContext *ctx;

  PetscFunctionBeginUser;
  PetscCall(PCCreate(PetscObjectComm((PetscObject)pc), cr));
  PetscCall(PetscObjectSetName((PetscObject)*cr, "S (complementary projector to injection)"));
  PetscCall(PetscCalloc1(1, &ctx));
  ctx->mg = pc;
  ctx->l  = l;
  PetscCall(PCSetType(*cr, PCSHELL));
  PetscCall(PCShellSetContext(*cr, ctx));
  PetscCall(PCShellSetApply(*cr, CRApply_Private));
  PetscCall(PCShellSetSetUp(*cr, CRSetup_Private));
  PetscCall(PCShellSetDestroy(*cr, CRDestroy_Private));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   PCMGSetLevels_MG - Implementation of PCMGSetLevels(): (re)allocates the per-level
   structures and creates the level smoother KSPs with their default types
   (Chebyshev/SOR on the levels, PREONLY with (redundant) LU on the coarsest).
   A no-op when the requested number of levels equals the current one.
*/
PetscErrorCode PCMGSetLevels_MG(PC pc, PetscInt levels, MPI_Comm *comms)
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  MPI_Comm       comm;
  PC_MG_Levels **mglevels = mg->levels;
  PCMGType       mgtype   = mg->am;
  PetscInt       mgctype  = (PetscInt)PC_MG_CYCLE_V;
  PetscInt       i;
  PetscMPIInt    size;
  const char    *prefix;
  PC             ipc;
  PetscInt       n;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, levels, 2);
  if (mg->nlevels == levels) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  if (mglevels) {
    mgctype = mglevels[0]->cycles;
    /* changing the number of levels so free up the previous stuff */
    PetscCall(PCReset_MG(pc));
    n = mglevels[0]->levels;
    for (i = 0; i < n; i++) {
      if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd));
      PetscCall(KSPDestroy(&mglevels[i]->smoothu));
      PetscCall(KSPDestroy(&mglevels[i]->cr));
      PetscCall(PetscFree(mglevels[i]));
    }
    PetscCall(PetscFree(mg->levels));
  }

  mg->nlevels = levels;

  PetscCall(PetscMalloc1(levels, &mglevels));

  PetscCall(PCGetOptionsPrefix(pc, &prefix));

  mg->stageApply = 0;
  for (i = 0; i < levels; i++) {
    PetscCall(PetscNew(&mglevels[i]));

    mglevels[i]->level               = i;
    mglevels[i]->levels              = levels;
    mglevels[i]->cycles              = mgctype;
    mg->default_smoothu              = 2;
    mg->default_smoothd              = 2;
    mglevels[i]->eventsmoothsetup    = 0;
    mglevels[i]->eventsmoothsolve    = 0;
    mglevels[i]->eventresidual       = 0;
    mglevels[i]->eventinterprestrict = 0;

    /* each level may live on its own (possibly NULL) communicator */
    if (comms) comm = comms[i];
    if (comm != MPI_COMM_NULL) {
      PetscCall(KSPCreate(comm, &mglevels[i]->smoothd));
      PetscCall(KSPSetNestLevel(mglevels[i]->smoothd, pc->kspnestlevel));
      PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->smoothd, pc->erroriffailure));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->smoothd, (PetscObject)pc, levels - i));
      PetscCall(KSPSetOptionsPrefix(mglevels[i]->smoothd, prefix));
      PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->smoothd, PetscMGLevelId, mglevels[i]->level));
      if (i || levels == 1) {
        /* a level smoother (or the only level when levels == 1): Chebyshev/SOR with fixed iteration count */
        char tprefix[128];

        PetscCall(KSPSetType(mglevels[i]->smoothd, KSPCHEBYSHEV));
        PetscCall(KSPSetConvergenceTest(mglevels[i]->smoothd, KSPConvergedSkip, NULL, NULL));
        PetscCall(KSPSetNormType(mglevels[i]->smoothd, KSP_NORM_NONE));
        PetscCall(KSPGetPC(mglevels[i]->smoothd, &ipc));
        PetscCall(PCSetType(ipc, PCSOR));
        PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, mg->default_smoothd));

        PetscCall(PetscSNPrintf(tprefix, 128, "mg_levels_%d_", (int)i));
        PetscCall(KSPAppendOptionsPrefix(mglevels[i]->smoothd, tprefix));
      } else {
        PetscCall(KSPAppendOptionsPrefix(mglevels[0]->smoothd, "mg_coarse_"));

        /* coarse solve is (redundant) LU by default; set shifttype NONZERO to avoid annoying zero-pivot in LU preconditioner */
        PetscCall(KSPSetType(mglevels[0]->smoothd, KSPPREONLY));
        PetscCall(KSPGetPC(mglevels[0]->smoothd, &ipc));
        PetscCallMPI(MPI_Comm_size(comm, &size));
        if (size > 1) {
          PetscCall(PCSetType(ipc, PCREDUNDANT));
        } else {
          PetscCall(PCSetType(ipc, PCLU));
        }
        PetscCall(PCFactorSetShiftType(ipc, MAT_SHIFT_INBLOCKS));
      }
    }
    /* up-smoother defaults to the down-smoother; PCMGSetDistinctSmoothUp() separates them */
    mglevels[i]->smoothu = mglevels[i]->smoothd;
    mg->rtol             = 0.0;
    mg->abstol           = 0.0;
    mg->dtol             = 0.0;
    mg->ttol             = 0.0;
    mg->cyclesperpcapply = 1;
  }
  mg->levels = mglevels;
  PetscCall(PCMGSetType(pc, mgtype));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCMGSetLevels - Sets the number of levels to use with `PCMG`.
434 Must be called before any other `PCMG` routine. 435 436 Logically Collective 437 438 Input Parameters: 439 + pc - the preconditioner context 440 . levels - the number of levels 441 - comms - optional communicators for each level; this is to allow solving the coarser problems 442 on smaller sets of processes. For processes that are not included in the computation 443 you must pass `MPI_COMM_NULL`. Use comms = `NULL` to specify that all processes 444 should participate in each level of problem. 445 446 Level: intermediate 447 448 Notes: 449 If the number of levels is one then the multigrid uses the `-mg_levels` prefix 450 for setting the level options rather than the `-mg_coarse` prefix. 451 452 You can free the information in comms after this routine is called. 453 454 The array of MPI communicators must contain `MPI_COMM_NULL` for those ranks that at each level 455 are not participating in the coarser solve. For example, with 2 levels and 1 and 2 ranks on 456 the two levels, rank 0 in the original communicator will pass in an array of 2 communicators 457 of size 2 and 1, while rank 1 in the original communicator will pass in array of 2 communicators 458 the first of size 2 and the second of value `MPI_COMM_NULL` since the rank 1 does not participate 459 in the coarse grid solve. 460 461 Since each coarser level may have a new `MPI_Comm` with fewer ranks than the previous, one 462 must take special care in providing the restriction and interpolation operation. We recommend 463 providing these as two step operations; first perform a standard restriction or interpolation on 464 the full number of ranks for that level and then use an MPI call to copy the resulting vector 465 array entries (after calls to VecGetArray()) to the smaller or larger number of ranks, note in both 466 cases the MPI calls must be made on the larger of the two communicators. Traditional MPI send and 467 receives or `MPI_AlltoAllv()` could be used to do the reshuffling of the vector entries. 
468 469 Fortran Notes: 470 Use comms = `PETSC_NULL_MPI_COMM` as the equivalent of `NULL` in the C interface. Note `PETSC_NULL_MPI_COMM` 471 is not `MPI_COMM_NULL`. It is more like `PETSC_NULL_INTEGER`, `PETSC_NULL_REAL` etc. 472 473 .seealso: `PCMGSetType()`, `PCMGGetLevels()` 474 @*/ 475 PetscErrorCode PCMGSetLevels(PC pc, PetscInt levels, MPI_Comm *comms) 476 { 477 PetscFunctionBegin; 478 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 479 if (comms) PetscAssertPointer(comms, 3); 480 PetscTryMethod(pc, "PCMGSetLevels_C", (PC, PetscInt, MPI_Comm *), (pc, levels, comms)); 481 PetscFunctionReturn(PETSC_SUCCESS); 482 } 483 484 PetscErrorCode PCDestroy_MG(PC pc) 485 { 486 PC_MG *mg = (PC_MG *)pc->data; 487 PC_MG_Levels **mglevels = mg->levels; 488 PetscInt i, n; 489 490 PetscFunctionBegin; 491 PetscCall(PCReset_MG(pc)); 492 if (mglevels) { 493 n = mglevels[0]->levels; 494 for (i = 0; i < n; i++) { 495 if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd)); 496 PetscCall(KSPDestroy(&mglevels[i]->smoothu)); 497 PetscCall(KSPDestroy(&mglevels[i]->cr)); 498 PetscCall(PetscFree(mglevels[i])); 499 } 500 PetscCall(PetscFree(mg->levels)); 501 } 502 PetscCall(PetscFree(pc->data)); 503 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL)); 504 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL)); 505 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", NULL)); 506 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", NULL)); 507 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", NULL)); 508 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL)); 509 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL)); 510 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", NULL)); 511 
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   PCApply_MG - Runs either an additive, multiplicative, Kaskadic
                or full cycle of multigrid.

  Note:
  A simple wrapper which calls PCMGMCycle(),PCMGACycle(), or PCMGFCycle().
*/
/*
   PCApply_MG_Internal - Shared body for PCApply_MG, PCApplyTranspose_MG, and PCMatApply_MG.
   Exactly one of (b,x) or (B,X) is provided: vector operands for a single rhs, matrix
   operands (matapp) for a block of right-hand sides.
*/
static PetscErrorCode PCApply_MG_Internal(PC pc, Vec b, Vec x, Mat B, Mat X, PetscBool transpose)
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PC             tpc;
  PetscInt       levels = mglevels[0]->levels, i;
  PetscBool      changeu, changed, matapp;

  PetscFunctionBegin;
  matapp = (PetscBool)(B && X);
  if (mg->stageApply) PetscCall(PetscLogStagePush(mg->stageApply));
  /* When the DM is supplying the matrix then it will not exist until here */
  for (i = 0; i < levels; i++) {
    if (!mglevels[i]->A) {
      PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL));
      PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A));
    }
  }

  /* can the caller's rhs be aliased, or does a finest-level PreSolve modify it? */
  PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc));
  PetscCall(PCPreSolveChangeRHS(tpc, &changed));
  PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc));
  PetscCall(PCPreSolveChangeRHS(tpc, &changeu));
  if (!changeu && !changed) {
    if (matapp) {
      PetscCall(MatDestroy(&mglevels[levels - 1]->B));
      mglevels[levels - 1]->B = B;
    } else {
      PetscCall(VecDestroy(&mglevels[levels - 1]->b));
      mglevels[levels - 1]->b = b;
    }
  } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */
    if (matapp) {
      if (mglevels[levels - 1]->B) {
        /* reuse the cached copy only if size and type still match the incoming block */
        PetscInt  N1, N2;
        PetscBool flg;

        PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &N1));
        PetscCall(MatGetSize(B, NULL, &N2));
        PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 1]->B, ((PetscObject)B)->type_name, &flg));
        if (N1 != N2 || !flg) PetscCall(MatDestroy(&mglevels[levels - 1]->B));
      }
      if (!mglevels[levels - 1]->B) {
        PetscCall(MatDuplicate(B, MAT_COPY_VALUES, &mglevels[levels - 1]->B));
      } else {
        PetscCall(MatCopy(B, mglevels[levels - 1]->B, SAME_NONZERO_PATTERN));
      }
    } else {
      if (!mglevels[levels - 1]->b) {
        Vec *vec;

        PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL));
        mglevels[levels - 1]->b = *vec;
        PetscCall(PetscFree(vec));
      }
      PetscCall(VecCopy(b, mglevels[levels - 1]->b));
    }
  }
  if (matapp) {
    mglevels[levels - 1]->X = X;
  } else {
    mglevels[levels - 1]->x = x;
  }

  /* If coarser Xs are present, it means we have already block applied the PC at least once
     Reset operators if sizes/type do no match */
  if (matapp && levels > 1 && mglevels[levels - 2]->X) {
    PetscInt  Xc, Bc;
    PetscBool flg;

    PetscCall(MatGetSize(mglevels[levels - 2]->X, NULL, &Xc));
    PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &Bc));
    PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 2]->X, ((PetscObject)mglevels[levels - 1]->X)->type_name, &flg));
    if (Xc != Bc || !flg) {
      PetscCall(MatDestroy(&mglevels[levels - 1]->R));
      for (i = 0; i < levels - 1; i++) {
        PetscCall(MatDestroy(&mglevels[i]->R));
        PetscCall(MatDestroy(&mglevels[i]->B));
        PetscCall(MatDestroy(&mglevels[i]->X));
      }
    }
  }

  /* dispatch on the multigrid type set with PCMGSetType() */
  if (mg->am == PC_MG_MULTIPLICATIVE) {
    if (matapp) PetscCall(MatZeroEntries(X));
    else PetscCall(VecZeroEntries(x));
    for (i = 0; i < mg->cyclesperpcapply; i++) PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, transpose, matapp, NULL));
  } else if (mg->am == PC_MG_ADDITIVE) {
    PetscCall(PCMGACycle_Private(pc, mglevels, transpose, matapp));
  } else if (mg->am == PC_MG_KASKADE) {
    PetscCall(PCMGKCycle_Private(pc, mglevels, transpose, matapp));
  } else {
    PetscCall(PCMGFCycle_Private(pc, mglevels, transpose, matapp));
  }
  if (mg->stageApply) PetscCall(PetscLogStagePop());
  /* drop the borrowed references to the caller's operands */
  if (!changeu && !changed) {
    if (matapp) {
      mglevels[levels - 1]->B = NULL;
    } else {
      mglevels[levels - 1]->b = NULL;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Standard (vector) multigrid application */
static PetscErrorCode PCApply_MG(PC pc, Vec b, Vec x)
{
  PetscFunctionBegin;
  PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Transposed multigrid application */
static PetscErrorCode PCApplyTranspose_MG(PC pc, Vec b, Vec x)
{
  PetscFunctionBegin;
  PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Block (multiple right-hand side) multigrid application */
static PetscErrorCode PCMatApply_MG(PC pc, Mat b, Mat x)
{
  PetscFunctionBegin;
  PetscCall(PCApply_MG_Internal(pc, NULL, NULL, b, x, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   PCSetFromOptions_MG - Processes the -pc_mg_* options database entries, creating the
   level structure first (possibly sized from the DM refinement level).
*/
PetscErrorCode PCSetFromOptions_MG(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PetscInt            levels, cycles;
  PetscBool           flg, flg2;
  PC_MG              *mg = (PC_MG *)pc->data;
  PC_MG_Levels      **mglevels;
  PCMGType            mgtype;
  PCMGCycleType       mgctype;
  PCMGGalerkinType    gtype;
  PCMGCoarseSpaceType coarseSpaceType;

  PetscFunctionBegin;
  levels = PetscMax(mg->nlevels, 1);
  PetscOptionsHeadBegin(PetscOptionsObject, "Multigrid options");
  PetscCall(PetscOptionsInt("-pc_mg_levels", "Number of Levels", "PCMGSetLevels", levels, &levels, &flg));
  if (!flg && !mg->levels && pc->dm) {
    /* no explicit level count: derive it from the DM refinement depth */
    PetscCall(DMGetRefineLevel(pc->dm, &levels));
    levels++;
    mg->usedmfornumberoflevels = PETSC_TRUE;
  }
  PetscCall(PCMGSetLevels(pc, levels, NULL));
673 mglevels = mg->levels; 674 675 mgctype = (PCMGCycleType)mglevels[0]->cycles; 676 PetscCall(PetscOptionsEnum("-pc_mg_cycle_type", "V cycle or for W-cycle", "PCMGSetCycleType", PCMGCycleTypes, (PetscEnum)mgctype, (PetscEnum *)&mgctype, &flg)); 677 if (flg) PetscCall(PCMGSetCycleType(pc, mgctype)); 678 gtype = mg->galerkin; 679 PetscCall(PetscOptionsEnum("-pc_mg_galerkin", "Use Galerkin process to compute coarser operators", "PCMGSetGalerkin", PCMGGalerkinTypes, (PetscEnum)gtype, (PetscEnum *)>ype, &flg)); 680 if (flg) PetscCall(PCMGSetGalerkin(pc, gtype)); 681 coarseSpaceType = mg->coarseSpaceType; 682 PetscCall(PetscOptionsEnum("-pc_mg_adapt_interp_coarse_space", "Type of adaptive coarse space: none, polynomial, harmonic, eigenvector, generalized_eigenvector, gdsw", "PCMGSetAdaptCoarseSpaceType", PCMGCoarseSpaceTypes, (PetscEnum)coarseSpaceType, (PetscEnum *)&coarseSpaceType, &flg)); 683 if (flg) PetscCall(PCMGSetAdaptCoarseSpaceType(pc, coarseSpaceType)); 684 PetscCall(PetscOptionsInt("-pc_mg_adapt_interp_n", "Size of the coarse space for adaptive interpolation", "PCMGSetCoarseSpace", mg->Nc, &mg->Nc, &flg)); 685 PetscCall(PetscOptionsBool("-pc_mg_mesp_monitor", "Monitor the multilevel eigensolver", "PCMGSetAdaptInterpolation", PETSC_FALSE, &mg->mespMonitor, &flg)); 686 flg2 = PETSC_FALSE; 687 PetscCall(PetscOptionsBool("-pc_mg_adapt_cr", "Monitor coarse space quality using Compatible Relaxation (CR)", "PCMGSetAdaptCR", PETSC_FALSE, &flg2, &flg)); 688 if (flg) PetscCall(PCMGSetAdaptCR(pc, flg2)); 689 flg = PETSC_FALSE; 690 PetscCall(PetscOptionsBool("-pc_mg_distinct_smoothup", "Create separate smoothup KSP and append the prefix _up", "PCMGSetDistinctSmoothUp", PETSC_FALSE, &flg, NULL)); 691 if (flg) PetscCall(PCMGSetDistinctSmoothUp(pc)); 692 mgtype = mg->am; 693 PetscCall(PetscOptionsEnum("-pc_mg_type", "Multigrid type", "PCMGSetType", PCMGTypes, (PetscEnum)mgtype, (PetscEnum *)&mgtype, &flg)); 694 if (flg) PetscCall(PCMGSetType(pc, mgtype)); 695 if (mg->am == 
PC_MG_MULTIPLICATIVE) { 696 PetscCall(PetscOptionsInt("-pc_mg_multiplicative_cycles", "Number of cycles for each preconditioner step", "PCMGMultiplicativeSetCycles", mg->cyclesperpcapply, &cycles, &flg)); 697 if (flg) PetscCall(PCMGMultiplicativeSetCycles(pc, cycles)); 698 } 699 flg = PETSC_FALSE; 700 PetscCall(PetscOptionsBool("-pc_mg_log", "Log times for each multigrid level", "None", flg, &flg, NULL)); 701 if (flg) { 702 PetscInt i; 703 char eventname[128]; 704 705 levels = mglevels[0]->levels; 706 for (i = 0; i < levels; i++) { 707 PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGSetup Level %d", (int)i)); 708 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsetup)); 709 PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGSmooth Level %d", (int)i)); 710 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsolve)); 711 if (i) { 712 PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGResid Level %d", (int)i)); 713 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventresidual)); 714 PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGInterp Level %d", (int)i)); 715 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventinterprestrict)); 716 } 717 } 718 719 #if defined(PETSC_USE_LOG) 720 { 721 const char *sname = "MG Apply"; 722 PetscStageLog stageLog; 723 PetscInt st; 724 725 PetscCall(PetscLogGetStageLog(&stageLog)); 726 for (st = 0; st < stageLog->numStages; ++st) { 727 PetscBool same; 728 729 PetscCall(PetscStrcmp(stageLog->stageInfo[st].name, sname, &same)); 730 if (same) mg->stageApply = st; 731 } 732 if (!mg->stageApply) PetscCall(PetscLogStageRegister(sname, &mg->stageApply)); 733 } 734 #endif 735 } 736 PetscOptionsHeadEnd(); 737 /* Check option consistency */ 738 PetscCall(PCMGGetGalerkin(pc, >ype)); 
739 PetscCall(PCMGGetAdaptInterpolation(pc, &flg)); 740 PetscCheck(!flg || !(gtype >= PC_MG_GALERKIN_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use Galerkin coarse operators when adapting the interpolator"); 741 PetscFunctionReturn(PETSC_SUCCESS); 742 } 743 744 const char *const PCMGTypes[] = {"MULTIPLICATIVE", "ADDITIVE", "FULL", "KASKADE", "PCMGType", "PC_MG", NULL}; 745 const char *const PCMGCycleTypes[] = {"invalid", "v", "w", "PCMGCycleType", "PC_MG_CYCLE", NULL}; 746 const char *const PCMGGalerkinTypes[] = {"both", "pmat", "mat", "none", "external", "PCMGGalerkinType", "PC_MG_GALERKIN", NULL}; 747 const char *const PCMGCoarseSpaceTypes[] = {"none", "polynomial", "harmonic", "eigenvector", "generalized_eigenvector", "gdsw", "PCMGCoarseSpaceType", "PCMG_ADAPT_NONE", NULL}; 748 749 #include <petscdraw.h> 750 PetscErrorCode PCView_MG(PC pc, PetscViewer viewer) 751 { 752 PC_MG *mg = (PC_MG *)pc->data; 753 PC_MG_Levels **mglevels = mg->levels; 754 PetscInt levels = mglevels ? mglevels[0]->levels : 0, i; 755 PetscBool iascii, isbinary, isdraw; 756 757 PetscFunctionBegin; 758 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 759 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary)); 760 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw)); 761 if (iascii) { 762 const char *cyclename = levels ? (mglevels[0]->cycles == PC_MG_CYCLE_V ? 
"v" : "w") : "unknown"; 763 PetscCall(PetscViewerASCIIPrintf(viewer, " type is %s, levels=%" PetscInt_FMT " cycles=%s\n", PCMGTypes[mg->am], levels, cyclename)); 764 if (mg->am == PC_MG_MULTIPLICATIVE) PetscCall(PetscViewerASCIIPrintf(viewer, " Cycles per PCApply=%" PetscInt_FMT "\n", mg->cyclesperpcapply)); 765 if (mg->galerkin == PC_MG_GALERKIN_BOTH) { 766 PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices\n")); 767 } else if (mg->galerkin == PC_MG_GALERKIN_PMAT) { 768 PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices for pmat\n")); 769 } else if (mg->galerkin == PC_MG_GALERKIN_MAT) { 770 PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices for mat\n")); 771 } else if (mg->galerkin == PC_MG_GALERKIN_EXTERNAL) { 772 PetscCall(PetscViewerASCIIPrintf(viewer, " Using externally compute Galerkin coarse grid matrices\n")); 773 } else { 774 PetscCall(PetscViewerASCIIPrintf(viewer, " Not using Galerkin computed coarse grid matrices\n")); 775 } 776 if (mg->view) PetscCall((*mg->view)(pc, viewer)); 777 for (i = 0; i < levels; i++) { 778 if (i) { 779 PetscCall(PetscViewerASCIIPrintf(viewer, "Down solver (pre-smoother) on level %" PetscInt_FMT " -------------------------------\n", i)); 780 } else { 781 PetscCall(PetscViewerASCIIPrintf(viewer, "Coarse grid solver -- level %" PetscInt_FMT " -------------------------------\n", i)); 782 } 783 PetscCall(PetscViewerASCIIPushTab(viewer)); 784 PetscCall(KSPView(mglevels[i]->smoothd, viewer)); 785 PetscCall(PetscViewerASCIIPopTab(viewer)); 786 if (i && mglevels[i]->smoothd == mglevels[i]->smoothu) { 787 PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) same as down solver (pre-smoother)\n")); 788 } else if (i) { 789 PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) on level %" PetscInt_FMT " -------------------------------\n", i)); 790 PetscCall(PetscViewerASCIIPushTab(viewer)); 791 
PetscCall(KSPView(mglevels[i]->smoothu, viewer)); 792 PetscCall(PetscViewerASCIIPopTab(viewer)); 793 } 794 if (i && mglevels[i]->cr) { 795 PetscCall(PetscViewerASCIIPrintf(viewer, "CR solver on level %" PetscInt_FMT " -------------------------------\n", i)); 796 PetscCall(PetscViewerASCIIPushTab(viewer)); 797 PetscCall(KSPView(mglevels[i]->cr, viewer)); 798 PetscCall(PetscViewerASCIIPopTab(viewer)); 799 } 800 } 801 } else if (isbinary) { 802 for (i = levels - 1; i >= 0; i--) { 803 PetscCall(KSPView(mglevels[i]->smoothd, viewer)); 804 if (i && mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPView(mglevels[i]->smoothu, viewer)); 805 } 806 } else if (isdraw) { 807 PetscDraw draw; 808 PetscReal x, w, y, bottom, th; 809 PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw)); 810 PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y)); 811 PetscCall(PetscDrawStringGetSize(draw, NULL, &th)); 812 bottom = y - th; 813 for (i = levels - 1; i >= 0; i--) { 814 if (!mglevels[i]->smoothu || (mglevels[i]->smoothu == mglevels[i]->smoothd)) { 815 PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom)); 816 PetscCall(KSPView(mglevels[i]->smoothd, viewer)); 817 PetscCall(PetscDrawPopCurrentPoint(draw)); 818 } else { 819 w = 0.5 * PetscMin(1.0 - x, x); 820 PetscCall(PetscDrawPushCurrentPoint(draw, x + w, bottom)); 821 PetscCall(KSPView(mglevels[i]->smoothd, viewer)); 822 PetscCall(PetscDrawPopCurrentPoint(draw)); 823 PetscCall(PetscDrawPushCurrentPoint(draw, x - w, bottom)); 824 PetscCall(KSPView(mglevels[i]->smoothu, viewer)); 825 PetscCall(PetscDrawPopCurrentPoint(draw)); 826 } 827 PetscCall(PetscDrawGetBoundingBox(draw, NULL, &bottom, NULL, NULL)); 828 bottom -= th; 829 } 830 } 831 PetscFunctionReturn(PETSC_SUCCESS); 832 } 833 834 #include <petsc/private/kspimpl.h> 835 836 /* 837 Calls setup for the KSP on each level 838 */ 839 PetscErrorCode PCSetUp_MG(PC pc) 840 { 841 PC_MG *mg = (PC_MG *)pc->data; 842 PC_MG_Levels **mglevels = mg->levels; 843 PetscInt i, n; 844 PC cpc; 845 
  PetscBool    dump = PETSC_FALSE, opsset, use_amat, missinginterpolate = PETSC_FALSE;
  Mat          dA, dB;
  Vec          tvec;
  DM          *dms;
  PetscViewer  viewer = NULL;
  PetscBool    dAeqdB = PETSC_FALSE, needRestricts = PETSC_FALSE, doCR = PETSC_FALSE;
  PetscBool    adaptInterpolation = mg->adaptInterpolation;

  PetscFunctionBegin;
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels with PCMGSetLevels() before setting up");
  n = mglevels[0]->levels;
  /* FIX: Move this to PCSetFromOptions_MG? */
  if (mg->usedmfornumberoflevels) {
    PetscInt levels;
    PetscCall(DMGetRefineLevel(pc->dm, &levels));
    levels++;
    if (levels > n) { /* the problem is now being solved on a finer grid */
      PetscCall(PCMGSetLevels(pc, levels, NULL));
      n = levels;
      PetscCall(PCSetFromOptions(pc)); /* it is bad to call this here, but otherwise will never be called for the new hierarchy */
      mglevels = mg->levels;
    }
  }
  PetscCall(KSPGetPC(mglevels[0]->smoothd, &cpc));

  /* If user did not provide fine grid operators OR operator was not updated since last global KSPSetOperators() */
  /* so use those from global PC */
  /* Is this what we always want? What if user wants to keep old one? */
  PetscCall(KSPGetOperatorsSet(mglevels[n - 1]->smoothd, NULL, &opsset));
  if (opsset) {
    Mat mmat;
    PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, NULL, &mmat));
    if (mmat == pc->pmat) opsset = PETSC_FALSE;
  }

  /* Create CR (compatible relaxation) solvers, one per non-coarsest level */
  PetscCall(PCMGGetAdaptCR(pc, &doCR));
  if (doCR) {
    const char *prefix;

    PetscCall(PCGetOptionsPrefix(pc, &prefix));
    for (i = 1; i < n; ++i) {
      PC   ipc, cr;
      char crprefix[128];

      PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &mglevels[i]->cr));
      PetscCall(KSPSetNestLevel(mglevels[i]->cr, pc->kspnestlevel));
      PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->cr, PETSC_FALSE));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->cr, (PetscObject)pc, n - i));
      PetscCall(KSPSetOptionsPrefix(mglevels[i]->cr, prefix));
      PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->cr, PetscMGLevelId, mglevels[i]->level));
      PetscCall(KSPSetType(mglevels[i]->cr, KSPCHEBYSHEV));
      PetscCall(KSPSetConvergenceTest(mglevels[i]->cr, KSPConvergedSkip, NULL, NULL));
      PetscCall(KSPSetNormType(mglevels[i]->cr, KSP_NORM_PRECONDITIONED));
      PetscCall(KSPGetPC(mglevels[i]->cr, &ipc));

      /* CR preconditioner is a multiplicative composite of SOR with the CR projector */
      PetscCall(PCSetType(ipc, PCCOMPOSITE));
      PetscCall(PCCompositeSetType(ipc, PC_COMPOSITE_MULTIPLICATIVE));
      PetscCall(PCCompositeAddPCType(ipc, PCSOR));
      PetscCall(CreateCR_Private(pc, i, &cr));
      PetscCall(PCCompositeAddPC(ipc, cr));
      PetscCall(PCDestroy(&cr));

      PetscCall(KSPSetTolerances(mglevels[i]->cr, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, mg->default_smoothd));
      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
      PetscCall(PetscSNPrintf(crprefix, 128, "mg_levels_%d_cr_", (int)i));
      PetscCall(KSPAppendOptionsPrefix(mglevels[i]->cr, crprefix));
    }
  }

  if (!opsset) {
    PetscCall(PCGetUseAmat(pc, &use_amat));
    if (use_amat) {
      PetscCall(PetscInfo(pc, "Using outer operators to define finest grid operator \n because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n"));
      PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->mat, pc->pmat));
    } else {
      PetscCall(PetscInfo(pc, "Using matrix (pmat) operators to define finest grid operator \n because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n"));
      PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->pmat, pc->pmat));
    }
  }

  /* detect whether any level is still missing both interpolation and restriction */
  for (i = n - 1; i > 0; i--) {
    if (!(mglevels[i]->interpolate || mglevels[i]->restrct)) {
      missinginterpolate = PETSC_TRUE;
      break;
    }
  }

  PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, &dA, &dB));
  if (dA == dB) dAeqdB = PETSC_TRUE;
  if (mg->galerkin == PC_MG_GALERKIN_NONE || ((mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_MAT) && !dAeqdB)) {
    needRestricts = PETSC_TRUE; /* user must compute either mat, pmat, or both so must restrict x to coarser levels */
  }

  if (pc->dm && !pc->setupcalled) {
    /* finest smoother also gets DM but it is not active, independent of whether galerkin==PC_MG_GALERKIN_EXTERNAL */
    PetscCall(KSPSetDM(mglevels[n - 1]->smoothd, pc->dm));
    PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothd, PETSC_FALSE));
    if (mglevels[n - 1]->smoothd != mglevels[n - 1]->smoothu) {
      PetscCall(KSPSetDM(mglevels[n - 1]->smoothu, pc->dm));
      PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothu, PETSC_FALSE));
    }
    if (mglevels[n - 1]->cr) {
      PetscCall(KSPSetDM(mglevels[n - 1]->cr, pc->dm));
      PetscCall(KSPSetDMActive(mglevels[n - 1]->cr, PETSC_FALSE));
    }
  }

  /*
     Skipping if user has provided all interpolation/restriction needed (since DM might not be able to produce them (when coming from SNES/TS)
     Skipping for externally managed hierarchy (such as ML and GAMG). Cleaner logic here would be great. Wrap ML/GAMG as DMs?
  */
  if (missinginterpolate && mg->galerkin != PC_MG_GALERKIN_EXTERNAL && !pc->setupcalled) {
    /* first see if we can compute a coarse space */
    if (mg->coarseSpaceType == PCMG_ADAPT_GDSW) {
      for (i = n - 2; i > -1; i--) {
        if (!mglevels[i + 1]->restrct && !mglevels[i + 1]->interpolate) {
          PetscCall(PCMGComputeCoarseSpace_Internal(pc, i + 1, mg->coarseSpaceType, mg->Nc, NULL, &mglevels[i + 1]->coarseSpace));
          PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->coarseSpace));
        }
      }
    } else { /* construct the interpolation from the DMs */
      Mat p;
      Vec rscale;
      PetscCall(PetscMalloc1(n, &dms));
      dms[n - 1] = pc->dm;
      /* Separately create them so we do not get DMKSP interference between levels */
      for (i = n - 2; i > -1; i--) PetscCall(DMCoarsen(dms[i + 1], MPI_COMM_NULL, &dms[i]));
      for (i = n - 2; i > -1; i--) {
        DMKSP     kdm;
        PetscBool dmhasrestrict, dmhasinject;
        PetscCall(KSPSetDM(mglevels[i]->smoothd, dms[i]));
        if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothd, PETSC_FALSE));
        if (mglevels[i]->smoothd != mglevels[i]->smoothu) {
          PetscCall(KSPSetDM(mglevels[i]->smoothu, dms[i]));
          if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothu, PETSC_FALSE));
        }
        if (mglevels[i]->cr) {
          PetscCall(KSPSetDM(mglevels[i]->cr, dms[i]));
          if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->cr, PETSC_FALSE));
        }
        PetscCall(DMGetDMKSPWrite(dms[i], &kdm));
        /* Ugly hack so that the next KSPSetUp() will use the RHS that we set. A better fix is to change dmActive to take
         * a bitwise OR of computing the matrix, RHS, and initial iterate. */
        kdm->ops->computerhs = NULL;
        kdm->rhsctx          = NULL;
        if (!mglevels[i + 1]->interpolate) {
          PetscCall(DMCreateInterpolation(dms[i], dms[i + 1], &p, &rscale));
          PetscCall(PCMGSetInterpolation(pc, i + 1, p));
          if (rscale) PetscCall(PCMGSetRScale(pc, i + 1, rscale));
          PetscCall(VecDestroy(&rscale));
          PetscCall(MatDestroy(&p));
        }
        PetscCall(DMHasCreateRestriction(dms[i], &dmhasrestrict));
        if (dmhasrestrict && !mglevels[i + 1]->restrct) {
          PetscCall(DMCreateRestriction(dms[i], dms[i + 1], &p));
          PetscCall(PCMGSetRestriction(pc, i + 1, p));
          PetscCall(MatDestroy(&p));
        }
        PetscCall(DMHasCreateInjection(dms[i], &dmhasinject));
        if (dmhasinject && !mglevels[i + 1]->inject) {
          PetscCall(DMCreateInjection(dms[i], dms[i + 1], &p));
          PetscCall(PCMGSetInjection(pc, i + 1, p));
          PetscCall(MatDestroy(&p));
        }
      }

      for (i = n - 2; i > -1; i--) PetscCall(DMDestroy(&dms[i]));
      PetscCall(PetscFree(dms));
    }
  }

  /* Compute the coarse-level operators by the Galerkin process where requested */
  if (mg->galerkin < PC_MG_GALERKIN_NONE) {
    Mat       A, B;
    PetscBool doA = PETSC_FALSE, doB = PETSC_FALSE;
    MatReuse  reuse = MAT_INITIAL_MATRIX;

    if (mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_BOTH) doB = PETSC_TRUE;
    if (mg->galerkin == PC_MG_GALERKIN_MAT || (mg->galerkin == PC_MG_GALERKIN_BOTH && dA != dB)) doA = PETSC_TRUE;
    if (pc->setupcalled) reuse = MAT_REUSE_MATRIX;
    for (i = n - 2; i > -1; i--) {
      PetscCheck(mglevels[i + 1]->restrct || mglevels[i + 1]->interpolate, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must provide interpolation or restriction for each MG level except level 0");
      if (!mglevels[i + 1]->interpolate) PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->restrct));
      if (!mglevels[i + 1]->restrct) PetscCall(PCMGSetRestriction(pc, i + 1, mglevels[i + 1]->interpolate));
      if (reuse == MAT_REUSE_MATRIX) PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, &B));
      if (doA) PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dA, mglevels[i + 1]->interpolate, reuse, 1.0, &A));
      if (doB) PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dB, mglevels[i + 1]->interpolate, reuse, 1.0, &B));
      /* the management of the PetscObjectReference() and PetscObjectDereference() below is rather delicate */
      if (!doA && dAeqdB) {
        if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)B));
        A = B;
      } else if (!doA && reuse == MAT_INITIAL_MATRIX) {
        PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, NULL));
        PetscCall(PetscObjectReference((PetscObject)A));
      }
      if (!doB && dAeqdB) {
        if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)A));
        B = A;
      } else if (!doB && reuse == MAT_INITIAL_MATRIX) {
        PetscCall(KSPGetOperators(mglevels[i]->smoothd, NULL, &B));
        PetscCall(PetscObjectReference((PetscObject)B));
      }
      if (reuse == MAT_INITIAL_MATRIX) {
        PetscCall(KSPSetOperators(mglevels[i]->smoothd, A, B));
        PetscCall(PetscObjectDereference((PetscObject)A));
        PetscCall(PetscObjectDereference((PetscObject)B));
      }
      dA = A;
      dB = B;
    }
  }

  /* Adapt interpolation matrices */
  if (adaptInterpolation) {
    for (i = 0; i < n; ++i) {
      if (!mglevels[i]->coarseSpace) PetscCall(PCMGComputeCoarseSpace_Internal(pc, i, mg->coarseSpaceType, mg->Nc, !i ? NULL : mglevels[i - 1]->coarseSpace, &mglevels[i]->coarseSpace));
      if (i) PetscCall(PCMGAdaptInterpolator_Internal(pc, i, mglevels[i - 1]->smoothu, mglevels[i]->smoothu, mglevels[i - 1]->coarseSpace, mglevels[i]->coarseSpace));
    }
    for (i = n - 2; i > -1; --i) PetscCall(PCMGRecomputeLevelOperators_Internal(pc, i));
  }

  if (needRestricts && pc->dm) {
    for (i = n - 2; i >= 0; i--) {
      DM  dmfine, dmcoarse;
      Mat Restrict, Inject;
      Vec rscale;
      PetscCall(KSPGetDM(mglevels[i + 1]->smoothd, &dmfine));
      PetscCall(KSPGetDM(mglevels[i]->smoothd, &dmcoarse));
      PetscCall(PCMGGetRestriction(pc, i + 1, &Restrict));
      PetscCall(PCMGGetRScale(pc, i + 1, &rscale));
      PetscCall(PCMGGetInjection(pc, i + 1, &Inject));
      PetscCall(DMRestrict(dmfine, Restrict, rscale, Inject, dmcoarse));
    }
  }

  /* First PCSetUp(): let the options database act on the level solvers and create any missing work vectors */
  if (!pc->setupcalled) {
    for (i = 0; i < n; i++) PetscCall(KSPSetFromOptions(mglevels[i]->smoothd));
    for (i = 1; i < n; i++) {
      if (mglevels[i]->smoothu && (mglevels[i]->smoothu != mglevels[i]->smoothd)) PetscCall(KSPSetFromOptions(mglevels[i]->smoothu));
      if (mglevels[i]->cr) PetscCall(KSPSetFromOptions(mglevels[i]->cr));
    }
    /* ensure that if either interpolation or restriction is set the other one is set */
    for (i = 1; i < n; i++) {
      PetscCall(PCMGGetInterpolation(pc, i, NULL));
      PetscCall(PCMGGetRestriction(pc, i, NULL));
    }
    for (i = 0; i < n - 1; i++) {
      if (!mglevels[i]->b) {
        Vec *vec;
        PetscCall(KSPCreateVecs(mglevels[i]->smoothd, 1, &vec, 0, NULL));
        PetscCall(PCMGSetRhs(pc, i, *vec));
        PetscCall(VecDestroy(vec));
        PetscCall(PetscFree(vec));
      }
      if (!mglevels[i]->r && i) {
        PetscCall(VecDuplicate(mglevels[i]->b, &tvec));
        PetscCall(PCMGSetR(pc, i, tvec));
        PetscCall(VecDestroy(&tvec));
      }
      if (!mglevels[i]->x) {
        PetscCall(VecDuplicate(mglevels[i]->b, &tvec));
        PetscCall(PCMGSetX(pc, i, tvec));
        PetscCall(VecDestroy(&tvec));
      }
      if (doCR) {
        PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crx));
        PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crb));
        PetscCall(VecZeroEntries(mglevels[i]->crb));
      }
    }
    if (n != 1 && !mglevels[n - 1]->r) {
      /* PCMGSetR() on the finest level if user did not supply it */
      Vec *vec;
      PetscCall(KSPCreateVecs(mglevels[n - 1]->smoothd, 1, &vec, 0, NULL));
      PetscCall(PCMGSetR(pc, n - 1, *vec));
      PetscCall(VecDestroy(vec));
      PetscCall(PetscFree(vec));
    }
    if (doCR) {
      PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crx));
      PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crb));
      PetscCall(VecZeroEntries(mglevels[n - 1]->crb));
    }
  }

  if (pc->dm) {
    /* need to tell all the coarser levels to rebuild the matrix using the DM for that level */
    for (i = 0; i < n - 1; i++) {
      if (mglevels[i]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[i]->smoothd->setupstage = KSP_SETUP_NEWMATRIX;
    }
  }
  // We got here (PCSetUp_MG) because the matrix has changed, which means the smoother needs to be set up again (e.g.,
  // new diagonal for Jacobi). Setting it here allows it to be logged under PCSetUp rather than deep inside a PCApply.
  if (mglevels[n - 1]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[n - 1]->smoothd->setupstage = KSP_SETUP_NEWMATRIX;

  for (i = 1; i < n; i++) {
    if (mglevels[i]->smoothu == mglevels[i]->smoothd || mg->am == PC_MG_FULL || mg->am == PC_MG_KASKADE || mg->cyclesperpcapply > 1) {
      /* if doing only down then initial guess is zero */
      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE));
    }
    if (mglevels[i]->cr) PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
    if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
    PetscCall(KSPSetUp(mglevels[i]->smoothd));
    if (mglevels[i]->smoothd->reason) pc->failedreason = PC_SUBPC_ERROR;
    if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
    if (!mglevels[i]->residual) {
      Mat mat;
      PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL));
      PetscCall(PCMGSetResidual(pc, i, PCMGResidualDefault, mat));
    }
    if (!mglevels[i]->residualtranspose) {
      Mat mat;
      PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL));
      PetscCall(PCMGSetResidualTranspose(pc, i, PCMGResidualTransposeDefault, mat));
    }
  }
  for (i = 1; i < n; i++) {
    if (mglevels[i]->smoothu && mglevels[i]->smoothu != mglevels[i]->smoothd) {
      Mat downmat, downpmat;

      /* check if operators have been set for up, if not use down operators to set them */
      PetscCall(KSPGetOperatorsSet(mglevels[i]->smoothu, &opsset, NULL));
      if (!opsset) {
        PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat));
        PetscCall(KSPSetOperators(mglevels[i]->smoothu, downmat, downpmat));
      }

      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothu, PETSC_TRUE));
      if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
      PetscCall(KSPSetUp(mglevels[i]->smoothu));
      if (mglevels[i]->smoothu->reason) pc->failedreason = PC_SUBPC_ERROR;
      if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
    }
    if (mglevels[i]->cr) {
      Mat downmat, downpmat;

      /* check if operators have been set for up, if not use down operators to set them */
      PetscCall(KSPGetOperatorsSet(mglevels[i]->cr, &opsset, NULL));
      if (!opsset) {
        PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat));
        PetscCall(KSPSetOperators(mglevels[i]->cr, downmat, downpmat));
      }

      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
      if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
      PetscCall(KSPSetUp(mglevels[i]->cr));
      if (mglevels[i]->cr->reason) pc->failedreason = PC_SUBPC_ERROR;
      if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
    }
  }

  if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0));
  PetscCall(KSPSetUp(mglevels[0]->smoothd));
  if (mglevels[0]->smoothd->reason) pc->failedreason = PC_SUBPC_ERROR;
  if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0));

  /*
     Dump the interpolation/restriction matrices plus the
     Jacobian/stiffness on each level. This allows MATLAB users to
     easily check if the Galerkin condition A_c = R A_f R^T is satisfied.

     Only support one or the other at the same time.
  */
#if defined(PETSC_USE_SOCKET_VIEWER)
  PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_matlab", &dump, NULL));
  if (dump) viewer = PETSC_VIEWER_SOCKET_(PetscObjectComm((PetscObject)pc));
  dump = PETSC_FALSE;
#endif
  PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_binary", &dump, NULL));
  if (dump) viewer = PETSC_VIEWER_BINARY_(PetscObjectComm((PetscObject)pc));

  if (viewer) {
    for (i = 1; i < n; i++) PetscCall(MatView(mglevels[i]->restrct, viewer));
    for (i = 0; i < n; i++) {
      PetscCall(KSPGetPC(mglevels[i]->smoothd, &pc));
      PetscCall(MatView(pc->mat, viewer));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* -------------------------------------------------------------------------------------*/

PetscErrorCode PCMGGetLevels_MG(PC pc, PetscInt *levels)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  *levels = mg->nlevels;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGGetLevels - Gets the number of levels to use with `PCMG`.

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. levels - the number of levels

  Level: advanced

.seealso: `PCMG`, `PCMGSetLevels()`
@*/
PetscErrorCode PCMGGetLevels(PC pc, PetscInt *levels)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(levels, 2);
  *levels = 0;
  PetscTryMethod(pc, "PCMGGetLevels_C", (PC, PetscInt *), (pc, levels));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGGetGridComplexity - compute operator and grid complexity of the `PCMG` hierarchy

  Input Parameter:
.
pc - the preconditioner context 1267 1268 Output Parameters: 1269 + gc - grid complexity = sum_i(n_i) / n_0 1270 - oc - operator complexity = sum_i(nnz_i) / nnz_0 1271 1272 Level: advanced 1273 1274 Note: 1275 This is often call the operator complexity in multigrid literature 1276 1277 .seealso: `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()` 1278 @*/ 1279 PetscErrorCode PCMGGetGridComplexity(PC pc, PetscReal *gc, PetscReal *oc) 1280 { 1281 PC_MG *mg = (PC_MG *)pc->data; 1282 PC_MG_Levels **mglevels = mg->levels; 1283 PetscInt lev, N; 1284 PetscLogDouble nnz0 = 0, sgc = 0, soc = 0, n0 = 0; 1285 MatInfo info; 1286 1287 PetscFunctionBegin; 1288 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1289 if (gc) PetscAssertPointer(gc, 2); 1290 if (oc) PetscAssertPointer(oc, 3); 1291 if (!pc->setupcalled) { 1292 if (gc) *gc = 0; 1293 if (oc) *oc = 0; 1294 PetscFunctionReturn(PETSC_SUCCESS); 1295 } 1296 PetscCheck(mg->nlevels > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MG has no levels"); 1297 for (lev = 0; lev < mg->nlevels; lev++) { 1298 Mat dB; 1299 PetscCall(KSPGetOperators(mglevels[lev]->smoothd, NULL, &dB)); 1300 PetscCall(MatGetInfo(dB, MAT_GLOBAL_SUM, &info)); /* global reduction */ 1301 PetscCall(MatGetSize(dB, &N, NULL)); 1302 sgc += N; 1303 soc += info.nz_used; 1304 if (lev == mg->nlevels - 1) { 1305 nnz0 = info.nz_used; 1306 n0 = N; 1307 } 1308 } 1309 PetscCheck(n0 > 0 && gc, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number for grid points on finest level is not available"); 1310 *gc = (PetscReal)(sgc / n0); 1311 if (nnz0 > 0 && oc) *oc = (PetscReal)(soc / nnz0); 1312 PetscFunctionReturn(PETSC_SUCCESS); 1313 } 1314 1315 /*@ 1316 PCMGSetType - Determines the form of multigrid to use: 1317 multiplicative, additive, full, or the Kaskade algorithm. 
1318 1319 Logically Collective 1320 1321 Input Parameters: 1322 + pc - the preconditioner context 1323 - form - multigrid form, one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, `PC_MG_KASKADE` 1324 1325 Options Database Key: 1326 . -pc_mg_type <form> - Sets <form>, one of multiplicative, additive, full, kaskade 1327 1328 Level: advanced 1329 1330 .seealso: `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGGetType()`, `PCMGCycleType` 1331 @*/ 1332 PetscErrorCode PCMGSetType(PC pc, PCMGType form) 1333 { 1334 PC_MG *mg = (PC_MG *)pc->data; 1335 1336 PetscFunctionBegin; 1337 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1338 PetscValidLogicalCollectiveEnum(pc, form, 2); 1339 mg->am = form; 1340 if (form == PC_MG_MULTIPLICATIVE) pc->ops->applyrichardson = PCApplyRichardson_MG; 1341 else pc->ops->applyrichardson = NULL; 1342 PetscFunctionReturn(PETSC_SUCCESS); 1343 } 1344 1345 /*@ 1346 PCMGGetType - Finds the form of multigrid the `PCMG` is using multiplicative, additive, full, or the Kaskade algorithm. 1347 1348 Logically Collective 1349 1350 Input Parameter: 1351 . pc - the preconditioner context 1352 1353 Output Parameter: 1354 . type - one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, `PC_MG_KASKADE`, `PCMGCycleType` 1355 1356 Level: advanced 1357 1358 .seealso: `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGSetType()` 1359 @*/ 1360 PetscErrorCode PCMGGetType(PC pc, PCMGType *type) 1361 { 1362 PC_MG *mg = (PC_MG *)pc->data; 1363 1364 PetscFunctionBegin; 1365 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1366 *type = mg->am; 1367 PetscFunctionReturn(PETSC_SUCCESS); 1368 } 1369 1370 /*@ 1371 PCMGSetCycleType - Sets the type cycles to use. Use `PCMGSetCycleTypeOnLevel()` for more 1372 complicated cycling. 1373 1374 Logically Collective 1375 1376 Input Parameters: 1377 + pc - the multigrid context 1378 - n - either `PC_MG_CYCLE_V` or `PC_MG_CYCLE_W` 1379 1380 Options Database Key: 1381 . 
-pc_mg_cycle_type <v,w> - provide the cycle desired 1382 1383 Level: advanced 1384 1385 .seealso: `PCMG`, `PCMGSetCycleTypeOnLevel()`, `PCMGType`, `PCMGCycleType` 1386 @*/ 1387 PetscErrorCode PCMGSetCycleType(PC pc, PCMGCycleType n) 1388 { 1389 PC_MG *mg = (PC_MG *)pc->data; 1390 PC_MG_Levels **mglevels = mg->levels; 1391 PetscInt i, levels; 1392 1393 PetscFunctionBegin; 1394 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1395 PetscValidLogicalCollectiveEnum(pc, n, 2); 1396 PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling"); 1397 levels = mglevels[0]->levels; 1398 for (i = 0; i < levels; i++) mglevels[i]->cycles = n; 1399 PetscFunctionReturn(PETSC_SUCCESS); 1400 } 1401 1402 /*@ 1403 PCMGMultiplicativeSetCycles - Sets the number of cycles to use for each preconditioner step 1404 of multigrid when `PCMGType` is `PC_MG_MULTIPLICATIVE` 1405 1406 Logically Collective 1407 1408 Input Parameters: 1409 + pc - the multigrid context 1410 - n - number of cycles (default is 1) 1411 1412 Options Database Key: 1413 . 
-pc_mg_multiplicative_cycles n - set the number of cycles 1414 1415 Level: advanced 1416 1417 Note: 1418 This is not associated with setting a v or w cycle, that is set with `PCMGSetCycleType()` 1419 1420 .seealso: `PCMGSetCycleTypeOnLevel()`, `PCMGSetCycleType()`, `PCMGCycleType`, `PCMGType` 1421 @*/ 1422 PetscErrorCode PCMGMultiplicativeSetCycles(PC pc, PetscInt n) 1423 { 1424 PC_MG *mg = (PC_MG *)pc->data; 1425 1426 PetscFunctionBegin; 1427 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1428 PetscValidLogicalCollectiveInt(pc, n, 2); 1429 mg->cyclesperpcapply = n; 1430 PetscFunctionReturn(PETSC_SUCCESS); 1431 } 1432 1433 PetscErrorCode PCMGSetGalerkin_MG(PC pc, PCMGGalerkinType use) 1434 { 1435 PC_MG *mg = (PC_MG *)pc->data; 1436 1437 PetscFunctionBegin; 1438 mg->galerkin = use; 1439 PetscFunctionReturn(PETSC_SUCCESS); 1440 } 1441 1442 /*@ 1443 PCMGSetGalerkin - Causes the coarser grid matrices to be computed from the 1444 finest grid via the Galerkin process: A_i-1 = r_i * A_i * p_i 1445 1446 Logically Collective 1447 1448 Input Parameters: 1449 + pc - the multigrid context 1450 - use - one of `PC_MG_GALERKIN_BOTH`, `PC_MG_GALERKIN_PMAT`, `PC_MG_GALERKIN_MAT`, or `PC_MG_GALERKIN_NONE` 1451 1452 Options Database Key: 1453 . -pc_mg_galerkin <both,pmat,mat,none> - set the matrices to form via the Galerkin process 1454 1455 Level: intermediate 1456 1457 Note: 1458 Some codes that use `PCMG` such as `PCGAMG` use Galerkin internally while constructing the hierarchy and thus do not 1459 use the `PCMG` construction of the coarser grids. 1460 1461 .seealso: `PCMG`, `PCMGGetGalerkin()`, `PCMGGalerkinType` 1462 @*/ 1463 PetscErrorCode PCMGSetGalerkin(PC pc, PCMGGalerkinType use) 1464 { 1465 PetscFunctionBegin; 1466 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1467 PetscTryMethod(pc, "PCMGSetGalerkin_C", (PC, PCMGGalerkinType), (pc, use)); 1468 PetscFunctionReturn(PETSC_SUCCESS); 1469 } 1470 1471 /*@ 1472 PCMGGetGalerkin - Checks if Galerkin multigrid is being used, i.e. 
   A_i-1 = r_i * A_i * p_i

  Not Collective

  Input Parameter:
. pc - the multigrid context

  Output Parameter:
. galerkin - one of `PC_MG_GALERKIN_BOTH`,`PC_MG_GALERKIN_PMAT`,`PC_MG_GALERKIN_MAT`, `PC_MG_GALERKIN_NONE`, or `PC_MG_GALERKIN_EXTERNAL`

  Level: intermediate

.seealso: `PCMG`, `PCMGSetGalerkin()`, `PCMGGalerkinType`
@*/
PetscErrorCode PCMGGetGalerkin(PC pc, PCMGGalerkinType *galerkin)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  *galerkin = mg->galerkin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* PCMG implementation of PCMGSetAdaptInterpolation(); composed as "PCMGSetAdaptInterpolation_C" in PCCreate_MG() */
PetscErrorCode PCMGSetAdaptInterpolation_MG(PC pc, PetscBool adapt)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  mg->adaptInterpolation = adapt;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* PCMG implementation of PCMGGetAdaptInterpolation() */
PetscErrorCode PCMGGetAdaptInterpolation_MG(PC pc, PetscBool *adapt)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  *adapt = mg->adaptInterpolation;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* PCMG implementation of PCMGSetAdaptCoarseSpaceType(); any type other than PCMG_ADAPT_NONE also turns on interpolation adaptation */
PetscErrorCode PCMGSetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType ctype)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  mg->adaptInterpolation = ctype != PCMG_ADAPT_NONE ? PETSC_TRUE : PETSC_FALSE;
  mg->coarseSpaceType    = ctype;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* PCMG implementation of PCMGGetAdaptCoarseSpaceType() */
PetscErrorCode PCMGGetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType *ctype)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  *ctype = mg->coarseSpaceType;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* PCMG implementation of PCMGSetAdaptCR() */
PetscErrorCode PCMGSetAdaptCR_MG(PC pc, PetscBool cr)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  mg->compatibleRelaxation = cr;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* PCMG implementation of PCMGGetAdaptCR() */
PetscErrorCode PCMGGetAdaptCR_MG(PC pc, PetscBool *cr)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  *cr = mg->compatibleRelaxation;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCMGSetAdaptCoarseSpaceType - Set the type of adaptive coarse space.

  Adapts or creates the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated.

  Logically Collective

  Input Parameters:
+ pc    - the multigrid context
- ctype - the type of coarse space

  Options Database Keys:
+ -pc_mg_adapt_interp_n <int>              - The number of modes to use
- -pc_mg_adapt_interp_coarse_space <type>  - The type of coarse space: none, polynomial, harmonic, eigenvector, generalized_eigenvector, gdsw

  Level: intermediate

.seealso: `PCMG`, `PCMGCoarseSpaceType`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()`
@*/
PetscErrorCode PCMGSetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType ctype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, ctype, 2);
  PetscTryMethod(pc, "PCMGSetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType), (pc, ctype));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCMGGetAdaptCoarseSpaceType - Get the type of adaptive coarse space.

  Not Collective

  Input Parameter:
. pc - the multigrid context

  Output Parameter:
. ctype - the type of coarse space

  Level: intermediate

.seealso: `PCMG`, `PCMGCoarseSpaceType`, `PCMGSetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()`
@*/
PetscErrorCode PCMGGetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType *ctype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(ctype, 2);
  PetscUseMethod(pc, "PCMGGetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType *), (pc, ctype));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* MATT: REMOVE? */
/*@
  PCMGSetAdaptInterpolation - Adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated.

  Logically Collective

  Input Parameters:
+ pc    - the multigrid context
- adapt - flag for adaptation of the interpolator

  Options Database Keys:
+ -pc_mg_adapt_interp                      - Turn on adaptation
. -pc_mg_adapt_interp_n <int>              - The number of modes to use, should be divisible by dimension
- -pc_mg_adapt_interp_coarse_space <type>  - The type of coarse space: polynomial, harmonic, eigenvector, generalized_eigenvector

  Level: intermediate

.seealso: `PCMG`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
@*/
PetscErrorCode PCMGSetAdaptInterpolation(PC pc, PetscBool adapt)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCMGSetAdaptInterpolation_C", (PC, PetscBool), (pc, adapt));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGGetAdaptInterpolation - Get the flag to adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh,
  and thus accurately interpolated.

  Not Collective

  Input Parameter:
. pc - the multigrid context

  Output Parameter:
. adapt - flag for adaptation of the interpolator

  Level: intermediate

.seealso: `PCMG`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
@*/
PetscErrorCode PCMGGetAdaptInterpolation(PC pc, PetscBool *adapt)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(adapt, 2);
  PetscUseMethod(pc, "PCMGGetAdaptInterpolation_C", (PC, PetscBool *), (pc, adapt));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGSetAdaptCR - Monitor the coarse space quality using an auxiliary solve with compatible relaxation.

  Logically Collective

  Input Parameters:
+ pc - the multigrid context
- cr - flag for compatible relaxation

  Options Database Key:
. -pc_mg_adapt_cr - Turn on compatible relaxation

  Level: intermediate

.seealso: `PCMG`, `PCMGGetAdaptCR()`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
@*/
PetscErrorCode PCMGSetAdaptCR(PC pc, PetscBool cr)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCMGSetAdaptCR_C", (PC, PetscBool), (pc, cr));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGGetAdaptCR - Get the flag to monitor coarse space quality using an auxiliary solve with compatible relaxation.

  Not Collective

  Input Parameter:
. pc - the multigrid context

  Output Parameter:
. cr - flag for compatible relaxation

  Level: intermediate

.seealso: `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
@*/
PetscErrorCode PCMGGetAdaptCR(PC pc, PetscBool *cr)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(cr, 2);
  PetscUseMethod(pc, "PCMGGetAdaptCR_C", (PC, PetscBool *), (pc, cr));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGSetNumberSmooth - Sets the number of pre and post-smoothing steps to use
  on all levels. Use `PCMGSetDistinctSmoothUp()` to create separate up and down smoothers if you want different numbers of
  pre- and post-smoothing steps.

  Logically Collective

  Input Parameters:
+ pc - the multigrid context
- n  - the number of smoothing steps

  Options Database Key:
. -mg_levels_ksp_max_it <n> - Sets number of pre and post-smoothing steps

  Level: advanced

  Note:
  This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid.

.seealso: `PCMG`, `PCMGSetDistinctSmoothUp()`
@*/
PetscErrorCode PCMGSetNumberSmooth(PC pc, PetscInt n)
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PetscInt       i, levels;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, n, 2);
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
  levels = mglevels[0]->levels;

  /* level 0 (the coarsest grid) is intentionally skipped; see Note above */
  for (i = 1; i < levels; i++) {
    PetscCall(KSPSetTolerances(mglevels[i]->smoothu, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, n));
    PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, n));
    mg->default_smoothu = n;
    mg->default_smoothd = n;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMGSetDistinctSmoothUp - sets the up (post) smoother to be a separate `KSP` from the down (pre) smoother on all levels
  and adds the suffix _up to the options name

  Logically Collective

  Input Parameter:
. pc - the preconditioner context

  Options Database Key:
. -pc_mg_distinct_smoothup <bool> - use distinct smoothing objects

  Level: advanced

  Note:
  This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid.

.seealso: `PCMG`, `PCMGSetNumberSmooth()`
@*/
PetscErrorCode PCMGSetDistinctSmoothUp(PC pc)
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PetscInt       i, levels;
  KSP            subksp;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
  levels = mglevels[0]->levels;

  for (i = 1; i < levels; i++) {
    const char *prefix = NULL;
    /* make sure smoother up and down are different */
    PetscCall(PCMGGetSmootherUp(pc, i, &subksp));
    PetscCall(KSPGetOptionsPrefix(mglevels[i]->smoothd, &prefix));
    PetscCall(KSPSetOptionsPrefix(subksp, prefix));
    PetscCall(KSPAppendOptionsPrefix(subksp, "up_"));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* No new matrices are created; the returned interpolation matrices are references to the original ones.
   Caller is responsible for the array and for dereferencing each matrix. */
PetscErrorCode PCGetInterpolations_MG(PC pc, PetscInt *num_levels, Mat *interpolations[])
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  Mat           *mat;
  PetscInt       l;

  PetscFunctionBegin;
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
  PetscCall(PetscMalloc1(mg->nlevels, &mat));
  /* mat[l-1] interpolates from level l-1 to level l; only nlevels-1 entries are filled */
  for (l = 1; l < mg->nlevels; l++) {
    mat[l - 1] = mglevels[l]->interpolate;
    PetscCall(PetscObjectReference((PetscObject)mat[l - 1]));
  }
  *num_levels     = mg->nlevels;
  *interpolations = mat;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* No new matrices are created, and the coarse operator matrices are the references to the original ones */
PetscErrorCode PCGetCoarseOperators_MG(PC pc, PetscInt *num_levels, Mat *coarseOperators[])
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PetscInt       l;
  Mat           *mat;

  PetscFunctionBegin;
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
  PetscCall(PetscMalloc1(mg->nlevels, &mat));
  /* operators for the coarser levels only; the finest-level operator is excluded */
  for (l = 0; l < mg->nlevels - 1; l++) {
    PetscCall(KSPGetOperators(mglevels[l]->smoothd, NULL, &(mat[l])));
    PetscCall(PetscObjectReference((PetscObject)mat[l]));
  }
  *num_levels      = mg->nlevels;
  *coarseOperators = mat;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCMGRegisterCoarseSpaceConstructor - Adds a method to the `PCMG` package for coarse space construction.

  Not Collective

  Input Parameters:
+ name     - name of the constructor
- function - constructor routine

  Calling sequence of `function`:
$ PetscErrorCode my_csp(PC pc, PetscInt l, DM dm, KSP smooth, PetscInt Nc, Mat initGuess, Mat *coarseSp)
+ pc        - The `PC` object
. l         - The multigrid level, 0 is the coarse level
. dm        - The `DM` for this level
. smooth    - The level smoother
. Nc        - The size of the coarse space
. initGuess - Basis for an initial guess for the space
- coarseSp  - A basis for the computed coarse space

  Level: advanced

  Developer Notes:
  How come this is not used by `PCGAMG`?

.seealso: `PCMG`, `PCMGGetCoarseSpaceConstructor()`, `PCRegister()`
@*/
PetscErrorCode PCMGRegisterCoarseSpaceConstructor(const char name[], PetscErrorCode (*function)(PC, PetscInt, DM, KSP, PetscInt, Mat, Mat *))
{
  PetscFunctionBegin;
  PetscCall(PCInitializePackage());
  PetscCall(PetscFunctionListAdd(&PCMGCoarseList, name, function));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCMGGetCoarseSpaceConstructor - Returns the given coarse space construction method.

  Not Collective

  Input Parameter:
. name - name of the constructor

  Output Parameter:
. function - constructor routine

  Level: advanced

.seealso: `PCMG`, `PCMGRegisterCoarseSpaceConstructor()`, `PCRegister()`
@*/
PetscErrorCode PCMGGetCoarseSpaceConstructor(const char name[], PetscErrorCode (**function)(PC, PetscInt, DM, KSP, PetscInt, Mat, Mat *))
{
  PetscFunctionBegin;
  PetscCall(PetscFunctionListFind(PCMGCoarseList, name, function));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   PCMG - Use multigrid preconditioning. This preconditioner requires you provide additional
   information about the coarser grid matrices and restriction/interpolation operators.

   Options Database Keys:
+  -pc_mg_levels <nlevels> - number of levels including finest
.  -pc_mg_cycle_type <v,w> - provide the cycle desired
.  -pc_mg_type <additive,multiplicative,full,kaskade> - multiplicative is the default
.  -pc_mg_log - log information about time spent on each level of the solver
.  -pc_mg_distinct_smoothup - configure up (after interpolation) and down (before restriction) smoothers separately (with different options prefixes)
.  -pc_mg_galerkin <both,pmat,mat,none> - use Galerkin process to compute coarser operators, i.e. Acoarse = R A R'
.  -pc_mg_multiplicative_cycles - number of cycles to use as the preconditioner (defaults to 1)
.  -pc_mg_dump_matlab - dumps the matrices for each level and the restriction/interpolation matrices
                        to the Socket viewer for reading from MATLAB.
-  -pc_mg_dump_binary - dumps the matrices for each level and the restriction/interpolation matrices
                        to the binary output file called binaryoutput

   Level: intermediate

   Notes:
   If one uses a Krylov method such `KSPGMRES` or `KSPCG` as the smoother then one must use `KSPFGMRES`, `KSPGCR`, or `KSPRICHARDSON` as the outer Krylov method

   When run with a single level the smoother options are used on that level NOT the coarse grid solver options

   When run with `KSPRICHARDSON` the convergence test changes slightly if monitor is turned on. The iteration count may change slightly. This
   is because without monitoring the residual norm is computed WITHIN each multigrid cycle on the finest level after the pre-smoothing
   (because the residual has just been computed for the multigrid algorithm and is hence available for free) while with monitoring the
   residual is computed at the end of each cycle.

.seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCMGType`, `PCEXOTIC`, `PCGAMG`, `PCML`, `PCHYPRE`
          `PCMGSetLevels()`, `PCMGGetLevels()`, `PCMGSetType()`, `PCMGSetCycleType()`,
          `PCMGSetDistinctSmoothUp()`, `PCMGGetCoarseSolve()`, `PCMGSetResidual()`, `PCMGSetInterpolation()`,
          `PCMGSetRestriction()`, `PCMGGetSmoother()`, `PCMGGetSmootherUp()`, `PCMGGetSmootherDown()`,
          `PCMGSetCycleTypeOnLevel()`, `PCMGSetRhs()`, `PCMGSetX()`, `PCMGSetR()`,
          `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
M*/

PETSC_EXTERN PetscErrorCode PCCreate_MG(PC pc)
{
  PC_MG *mg;

  PetscFunctionBegin;
  PetscCall(PetscNew(&mg));
  pc->data               = mg;
  mg->nlevels            = -1;
  mg->am                 = PC_MG_MULTIPLICATIVE;
  mg->galerkin           = PC_MG_GALERKIN_NONE;
  mg->adaptInterpolation = PETSC_FALSE;
  mg->Nc                 = -1;
  mg->eigenvalue         = -1; /* composed-data id; assigned by PetscObjectComposedDataRegister() below */

  pc->useAmat = PETSC_TRUE;

  pc->ops->apply          = PCApply_MG;
  pc->ops->applytranspose = PCApplyTranspose_MG;
  pc->ops->matapply       = PCMatApply_MG;
  pc->ops->setup          = PCSetUp_MG;
  pc->ops->reset          = PCReset_MG;
  pc->ops->destroy        = PCDestroy_MG;
  pc->ops->setfromoptions = PCSetFromOptions_MG;
  pc->ops->view           = PCView_MG;

  PetscCall(PetscObjectComposedDataRegister(&mg->eigenvalue));
  /* composed functions dispatched by the PCMGSet/Get* wrappers above via PetscTryMethod/PetscUseMethod */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", PCMGSetGalerkin_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", PCMGGetLevels_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", PCMGSetLevels_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", PCGetInterpolations_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", PCGetCoarseOperators_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", PCMGSetAdaptInterpolation_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", PCMGGetAdaptInterpolation_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", PCMGSetAdaptCR_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", PCMGGetAdaptCR_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", PCMGSetAdaptCoarseSpaceType_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", PCMGGetAdaptCoarseSpaceType_MG));
  PetscFunctionReturn(PETSC_SUCCESS);
}