1 2 /* 3 Defines the multigrid preconditioner interface. 4 */ 5 #include <petsc/private/pcmgimpl.h> /*I "petscksp.h" I*/ 6 #include <petsc/private/kspimpl.h> 7 #include <petscdm.h> 8 PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC, PetscBool *); 9 10 /* 11 Contains the list of registered coarse space construction routines 12 */ 13 PetscFunctionList PCMGCoarseList = NULL; 14 15 PetscErrorCode PCMGMCycle_Private(PC pc, PC_MG_Levels **mglevelsin, PetscBool transpose, PetscBool matapp, PCRichardsonConvergedReason *reason) { 16 PC_MG *mg = (PC_MG *)pc->data; 17 PC_MG_Levels *mgc, *mglevels = *mglevelsin; 18 PetscInt cycles = (mglevels->level == 1) ? 1 : (PetscInt)mglevels->cycles; 19 20 PetscFunctionBegin; 21 if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0)); 22 if (!transpose) { 23 if (matapp) { 24 PetscCall(KSPMatSolve(mglevels->smoothd, mglevels->B, mglevels->X)); /* pre-smooth */ 25 PetscCall(KSPCheckSolve(mglevels->smoothd, pc, NULL)); 26 } else { 27 PetscCall(KSPSolve(mglevels->smoothd, mglevels->b, mglevels->x)); /* pre-smooth */ 28 PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x)); 29 } 30 } else { 31 PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported"); 32 PetscCall(KSPSolveTranspose(mglevels->smoothu, mglevels->b, mglevels->x)); /* transpose of post-smooth */ 33 PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x)); 34 } 35 if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0)); 36 if (mglevels->level) { /* not the coarsest grid */ 37 if (mglevels->eventresidual) PetscCall(PetscLogEventBegin(mglevels->eventresidual, 0, 0, 0, 0)); 38 if (matapp && !mglevels->R) { PetscCall(MatDuplicate(mglevels->B, MAT_DO_NOT_COPY_VALUES, &mglevels->R)); } 39 if (!transpose) { 40 if (matapp) PetscCall((*mglevels->matresidual)(mglevels->A, mglevels->B, mglevels->X, mglevels->R)); 41 else 
PetscCall((*mglevels->residual)(mglevels->A, mglevels->b, mglevels->x, mglevels->r)); 42 } else { 43 if (matapp) PetscCall((*mglevels->matresidualtranspose)(mglevels->A, mglevels->B, mglevels->X, mglevels->R)); 44 else PetscCall((*mglevels->residualtranspose)(mglevels->A, mglevels->b, mglevels->x, mglevels->r)); 45 } 46 if (mglevels->eventresidual) PetscCall(PetscLogEventEnd(mglevels->eventresidual, 0, 0, 0, 0)); 47 48 /* if on finest level and have convergence criteria set */ 49 if (mglevels->level == mglevels->levels - 1 && mg->ttol && reason) { 50 PetscReal rnorm; 51 PetscCall(VecNorm(mglevels->r, NORM_2, &rnorm)); 52 if (rnorm <= mg->ttol) { 53 if (rnorm < mg->abstol) { 54 *reason = PCRICHARDSON_CONVERGED_ATOL; 55 PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than absolute tolerance %g\n", (double)rnorm, (double)mg->abstol)); 56 } else { 57 *reason = PCRICHARDSON_CONVERGED_RTOL; 58 PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than relative tolerance times initial residual norm %g\n", (double)rnorm, (double)mg->ttol)); 59 } 60 PetscFunctionReturn(0); 61 } 62 } 63 64 mgc = *(mglevelsin - 1); 65 if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0)); 66 if (!transpose) { 67 if (matapp) PetscCall(MatMatRestrict(mglevels->restrct, mglevels->R, &mgc->B)); 68 else PetscCall(MatRestrict(mglevels->restrct, mglevels->r, mgc->b)); 69 } else { 70 if (matapp) PetscCall(MatMatRestrict(mglevels->interpolate, mglevels->R, &mgc->B)); 71 else PetscCall(MatRestrict(mglevels->interpolate, mglevels->r, mgc->b)); 72 } 73 if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0)); 74 if (matapp) { 75 if (!mgc->X) { 76 PetscCall(MatDuplicate(mgc->B, MAT_DO_NOT_COPY_VALUES, &mgc->X)); 77 } else { 78 PetscCall(MatZeroEntries(mgc->X)); 79 } 80 } else { 81 PetscCall(VecZeroEntries(mgc->x)); 82 } 83 while (cycles--) { 
PetscCall(PCMGMCycle_Private(pc, mglevelsin - 1, transpose, matapp, reason)); } 84 if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0)); 85 if (!transpose) { 86 if (matapp) PetscCall(MatMatInterpolateAdd(mglevels->interpolate, mgc->X, mglevels->X, &mglevels->X)); 87 else PetscCall(MatInterpolateAdd(mglevels->interpolate, mgc->x, mglevels->x, mglevels->x)); 88 } else { 89 PetscCall(MatInterpolateAdd(mglevels->restrct, mgc->x, mglevels->x, mglevels->x)); 90 } 91 if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0)); 92 if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0)); 93 if (!transpose) { 94 if (matapp) { 95 PetscCall(KSPMatSolve(mglevels->smoothu, mglevels->B, mglevels->X)); /* post smooth */ 96 PetscCall(KSPCheckSolve(mglevels->smoothu, pc, NULL)); 97 } else { 98 PetscCall(KSPSolve(mglevels->smoothu, mglevels->b, mglevels->x)); /* post smooth */ 99 PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x)); 100 } 101 } else { 102 PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported"); 103 PetscCall(KSPSolveTranspose(mglevels->smoothd, mglevels->b, mglevels->x)); /* post smooth */ 104 PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x)); 105 } 106 if (mglevels->cr) { 107 PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported"); 108 /* TODO Turn on copy and turn off noisy if we have an exact solution 109 PetscCall(VecCopy(mglevels->x, mglevels->crx)); 110 PetscCall(VecCopy(mglevels->b, mglevels->crb)); */ 111 PetscCall(KSPSetNoisy_Private(mglevels->crx)); 112 PetscCall(KSPSolve(mglevels->cr, mglevels->crb, mglevels->crx)); /* compatible relaxation */ 113 PetscCall(KSPCheckSolve(mglevels->cr, pc, mglevels->crx)); 114 } 115 if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0)); 116 } 117 
PetscFunctionReturn(0); 118 } 119 120 static PetscErrorCode PCApplyRichardson_MG(PC pc, Vec b, Vec x, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason) { 121 PC_MG *mg = (PC_MG *)pc->data; 122 PC_MG_Levels **mglevels = mg->levels; 123 PC tpc; 124 PetscBool changeu, changed; 125 PetscInt levels = mglevels[0]->levels, i; 126 127 PetscFunctionBegin; 128 /* When the DM is supplying the matrix then it will not exist until here */ 129 for (i = 0; i < levels; i++) { 130 if (!mglevels[i]->A) { 131 PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL)); 132 PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A)); 133 } 134 } 135 136 PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc)); 137 PetscCall(PCPreSolveChangeRHS(tpc, &changed)); 138 PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc)); 139 PetscCall(PCPreSolveChangeRHS(tpc, &changeu)); 140 if (!changed && !changeu) { 141 PetscCall(VecDestroy(&mglevels[levels - 1]->b)); 142 mglevels[levels - 1]->b = b; 143 } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */ 144 if (!mglevels[levels - 1]->b) { 145 Vec *vec; 146 147 PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL)); 148 mglevels[levels - 1]->b = *vec; 149 PetscCall(PetscFree(vec)); 150 } 151 PetscCall(VecCopy(b, mglevels[levels - 1]->b)); 152 } 153 mglevels[levels - 1]->x = x; 154 155 mg->rtol = rtol; 156 mg->abstol = abstol; 157 mg->dtol = dtol; 158 if (rtol) { 159 /* compute initial residual norm for relative convergence test */ 160 PetscReal rnorm; 161 if (zeroguess) { 162 PetscCall(VecNorm(b, NORM_2, &rnorm)); 163 } else { 164 PetscCall((*mglevels[levels - 1]->residual)(mglevels[levels - 1]->A, b, x, w)); 165 PetscCall(VecNorm(w, NORM_2, &rnorm)); 166 } 167 mg->ttol = PetscMax(rtol * rnorm, abstol); 168 } else if (abstol) mg->ttol = abstol; 169 else mg->ttol = 0.0; 170 171 /* 
since smoother is applied to full system, not just residual we need to make sure that smoothers don't 172 stop prematurely due to small residual */ 173 for (i = 1; i < levels; i++) { 174 PetscCall(KSPSetTolerances(mglevels[i]->smoothu, 0, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT)); 175 if (mglevels[i]->smoothu != mglevels[i]->smoothd) { 176 /* For Richardson the initial guess is nonzero since it is solving in each cycle the original system not just applying as a preconditioner */ 177 PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE)); 178 PetscCall(KSPSetTolerances(mglevels[i]->smoothd, 0, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT)); 179 } 180 } 181 182 *reason = (PCRichardsonConvergedReason)0; 183 for (i = 0; i < its; i++) { 184 PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, PETSC_FALSE, PETSC_FALSE, reason)); 185 if (*reason) break; 186 } 187 if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS; 188 *outits = i; 189 if (!changed && !changeu) mglevels[levels - 1]->b = NULL; 190 PetscFunctionReturn(0); 191 } 192 193 PetscErrorCode PCReset_MG(PC pc) { 194 PC_MG *mg = (PC_MG *)pc->data; 195 PC_MG_Levels **mglevels = mg->levels; 196 PetscInt i, n; 197 198 PetscFunctionBegin; 199 if (mglevels) { 200 n = mglevels[0]->levels; 201 for (i = 0; i < n - 1; i++) { 202 PetscCall(VecDestroy(&mglevels[i + 1]->r)); 203 PetscCall(VecDestroy(&mglevels[i]->b)); 204 PetscCall(VecDestroy(&mglevels[i]->x)); 205 PetscCall(MatDestroy(&mglevels[i + 1]->R)); 206 PetscCall(MatDestroy(&mglevels[i]->B)); 207 PetscCall(MatDestroy(&mglevels[i]->X)); 208 PetscCall(VecDestroy(&mglevels[i]->crx)); 209 PetscCall(VecDestroy(&mglevels[i]->crb)); 210 PetscCall(MatDestroy(&mglevels[i + 1]->restrct)); 211 PetscCall(MatDestroy(&mglevels[i + 1]->interpolate)); 212 PetscCall(MatDestroy(&mglevels[i + 1]->inject)); 213 PetscCall(VecDestroy(&mglevels[i + 1]->rscale)); 214 } 215 PetscCall(VecDestroy(&mglevels[n - 1]->crx)); 216 PetscCall(VecDestroy(&mglevels[n - 1]->crb)); 217 
/* this is not null only if the smoother on the finest level 218 changes the rhs during PreSolve */ 219 PetscCall(VecDestroy(&mglevels[n - 1]->b)); 220 PetscCall(MatDestroy(&mglevels[n - 1]->B)); 221 222 for (i = 0; i < n; i++) { 223 PetscCall(MatDestroy(&mglevels[i]->coarseSpace)); 224 PetscCall(MatDestroy(&mglevels[i]->A)); 225 if (mglevels[i]->smoothd != mglevels[i]->smoothu) { PetscCall(KSPReset(mglevels[i]->smoothd)); } 226 PetscCall(KSPReset(mglevels[i]->smoothu)); 227 if (mglevels[i]->cr) PetscCall(KSPReset(mglevels[i]->cr)); 228 } 229 mg->Nc = 0; 230 } 231 PetscFunctionReturn(0); 232 } 233 234 /* Implementing CR 235 236 We only want to make corrections that ``do not change'' the coarse solution. What we mean by not changing is that if I prolong my coarse solution to the fine grid and then inject that fine solution back to the coarse grid, I get the same answer. Injection is what Brannick calls R. We want the complementary projector to Inj, which we will call S, after Brannick, so that Inj S = 0. Now the orthogonal projector onto the range of Inj^T is 237 238 Inj^T (Inj Inj^T)^{-1} Inj 239 240 and if Inj is a VecScatter, as it is now in PETSc, we have 241 242 Inj^T Inj 243 244 and 245 246 S = I - Inj^T Inj 247 248 since 249 250 Inj S = Inj - (Inj Inj^T) Inj = 0. 251 252 Brannick suggests 253 254 A \to S^T A S \qquad\mathrm{and}\qquad M \to S^T M S 255 256 but I do not think his :math:`S^T S = I` is correct. Our S is an orthogonal projector, so :math:`S^T S = S^2 = S`. We will use 257 258 M^{-1} A \to S M^{-1} A S 259 260 In fact, since it is somewhat hard in PETSc to do the symmetric application, we will just apply S on the left. 
261 262 Check: || Inj P - I ||_F < tol 263 Check: In general, Inj Inj^T = I 264 */ 265 266 typedef struct { 267 PC mg; /* The PCMG object */ 268 PetscInt l; /* The multigrid level for this solver */ 269 Mat Inj; /* The injection matrix */ 270 Mat S; /* I - Inj^T Inj */ 271 } CRContext; 272 273 static PetscErrorCode CRSetup_Private(PC pc) { 274 CRContext *ctx; 275 Mat It; 276 277 PetscFunctionBeginUser; 278 PetscCall(PCShellGetContext(pc, &ctx)); 279 PetscCall(PCMGGetInjection(ctx->mg, ctx->l, &It)); 280 PetscCheck(It, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "CR requires that injection be defined for this PCMG"); 281 PetscCall(MatCreateTranspose(It, &ctx->Inj)); 282 PetscCall(MatCreateNormal(ctx->Inj, &ctx->S)); 283 PetscCall(MatScale(ctx->S, -1.0)); 284 PetscCall(MatShift(ctx->S, 1.0)); 285 PetscFunctionReturn(0); 286 } 287 288 static PetscErrorCode CRApply_Private(PC pc, Vec x, Vec y) { 289 CRContext *ctx; 290 291 PetscFunctionBeginUser; 292 PetscCall(PCShellGetContext(pc, &ctx)); 293 PetscCall(MatMult(ctx->S, x, y)); 294 PetscFunctionReturn(0); 295 } 296 297 static PetscErrorCode CRDestroy_Private(PC pc) { 298 CRContext *ctx; 299 300 PetscFunctionBeginUser; 301 PetscCall(PCShellGetContext(pc, &ctx)); 302 PetscCall(MatDestroy(&ctx->Inj)); 303 PetscCall(MatDestroy(&ctx->S)); 304 PetscCall(PetscFree(ctx)); 305 PetscCall(PCShellSetContext(pc, NULL)); 306 PetscFunctionReturn(0); 307 } 308 309 static PetscErrorCode CreateCR_Private(PC pc, PetscInt l, PC *cr) { 310 CRContext *ctx; 311 312 PetscFunctionBeginUser; 313 PetscCall(PCCreate(PetscObjectComm((PetscObject)pc), cr)); 314 PetscCall(PetscObjectSetName((PetscObject)*cr, "S (complementary projector to injection)")); 315 PetscCall(PetscCalloc1(1, &ctx)); 316 ctx->mg = pc; 317 ctx->l = l; 318 PetscCall(PCSetType(*cr, PCSHELL)); 319 PetscCall(PCShellSetContext(*cr, ctx)); 320 PetscCall(PCShellSetApply(*cr, CRApply_Private)); 321 PetscCall(PCShellSetSetUp(*cr, CRSetup_Private)); 322 
PetscCall(PCShellSetDestroy(*cr, CRDestroy_Private)); 323 PetscFunctionReturn(0); 324 } 325 326 PetscErrorCode PCMGSetLevels_MG(PC pc, PetscInt levels, MPI_Comm *comms) { 327 PC_MG *mg = (PC_MG *)pc->data; 328 MPI_Comm comm; 329 PC_MG_Levels **mglevels = mg->levels; 330 PCMGType mgtype = mg->am; 331 PetscInt mgctype = (PetscInt)PC_MG_CYCLE_V; 332 PetscInt i; 333 PetscMPIInt size; 334 const char *prefix; 335 PC ipc; 336 PetscInt n; 337 338 PetscFunctionBegin; 339 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 340 PetscValidLogicalCollectiveInt(pc, levels, 2); 341 if (mg->nlevels == levels) PetscFunctionReturn(0); 342 PetscCall(PetscObjectGetComm((PetscObject)pc, &comm)); 343 if (mglevels) { 344 mgctype = mglevels[0]->cycles; 345 /* changing the number of levels so free up the previous stuff */ 346 PetscCall(PCReset_MG(pc)); 347 n = mglevels[0]->levels; 348 for (i = 0; i < n; i++) { 349 if (mglevels[i]->smoothd != mglevels[i]->smoothu) { PetscCall(KSPDestroy(&mglevels[i]->smoothd)); } 350 PetscCall(KSPDestroy(&mglevels[i]->smoothu)); 351 PetscCall(KSPDestroy(&mglevels[i]->cr)); 352 PetscCall(PetscFree(mglevels[i])); 353 } 354 PetscCall(PetscFree(mg->levels)); 355 } 356 357 mg->nlevels = levels; 358 359 PetscCall(PetscMalloc1(levels, &mglevels)); 360 PetscCall(PetscLogObjectMemory((PetscObject)pc, levels * (sizeof(PC_MG *)))); 361 362 PetscCall(PCGetOptionsPrefix(pc, &prefix)); 363 364 mg->stageApply = 0; 365 for (i = 0; i < levels; i++) { 366 PetscCall(PetscNewLog(pc, &mglevels[i])); 367 368 mglevels[i]->level = i; 369 mglevels[i]->levels = levels; 370 mglevels[i]->cycles = mgctype; 371 mg->default_smoothu = 2; 372 mg->default_smoothd = 2; 373 mglevels[i]->eventsmoothsetup = 0; 374 mglevels[i]->eventsmoothsolve = 0; 375 mglevels[i]->eventresidual = 0; 376 mglevels[i]->eventinterprestrict = 0; 377 378 if (comms) comm = comms[i]; 379 if (comm != MPI_COMM_NULL) { 380 PetscCall(KSPCreate(comm, &mglevels[i]->smoothd)); 381 
PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->smoothd, pc->erroriffailure)); 382 PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->smoothd, (PetscObject)pc, levels - i)); 383 PetscCall(KSPSetOptionsPrefix(mglevels[i]->smoothd, prefix)); 384 PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->smoothd, PetscMGLevelId, mglevels[i]->level)); 385 if (i || levels == 1) { 386 char tprefix[128]; 387 388 PetscCall(KSPSetType(mglevels[i]->smoothd, KSPCHEBYSHEV)); 389 PetscCall(KSPSetConvergenceTest(mglevels[i]->smoothd, KSPConvergedSkip, NULL, NULL)); 390 PetscCall(KSPSetNormType(mglevels[i]->smoothd, KSP_NORM_NONE)); 391 PetscCall(KSPGetPC(mglevels[i]->smoothd, &ipc)); 392 PetscCall(PCSetType(ipc, PCSOR)); 393 PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, mg->default_smoothd)); 394 395 PetscCall(PetscSNPrintf(tprefix, 128, "mg_levels_%d_", (int)i)); 396 PetscCall(KSPAppendOptionsPrefix(mglevels[i]->smoothd, tprefix)); 397 } else { 398 PetscCall(KSPAppendOptionsPrefix(mglevels[0]->smoothd, "mg_coarse_")); 399 400 /* coarse solve is (redundant) LU by default; set shifttype NONZERO to avoid annoying zero-pivot in LU preconditioner */ 401 PetscCall(KSPSetType(mglevels[0]->smoothd, KSPPREONLY)); 402 PetscCall(KSPGetPC(mglevels[0]->smoothd, &ipc)); 403 PetscCallMPI(MPI_Comm_size(comm, &size)); 404 if (size > 1) { 405 PetscCall(PCSetType(ipc, PCREDUNDANT)); 406 } else { 407 PetscCall(PCSetType(ipc, PCLU)); 408 } 409 PetscCall(PCFactorSetShiftType(ipc, MAT_SHIFT_INBLOCKS)); 410 } 411 PetscCall(PetscLogObjectParent((PetscObject)pc, (PetscObject)mglevels[i]->smoothd)); 412 } 413 mglevels[i]->smoothu = mglevels[i]->smoothd; 414 mg->rtol = 0.0; 415 mg->abstol = 0.0; 416 mg->dtol = 0.0; 417 mg->ttol = 0.0; 418 mg->cyclesperpcapply = 1; 419 } 420 mg->levels = mglevels; 421 PetscCall(PCMGSetType(pc, mgtype)); 422 PetscFunctionReturn(0); 423 } 424 425 /*@C 426 PCMGSetLevels - Sets the number of levels to use 
with MG. 427 Must be called before any other MG routine. 428 429 Logically Collective on PC 430 431 Input Parameters: 432 + pc - the preconditioner context 433 . levels - the number of levels 434 - comms - optional communicators for each level; this is to allow solving the coarser problems 435 on smaller sets of processes. For processes that are not included in the computation 436 you must pass MPI_COMM_NULL. Use comms = NULL to specify that all processes 437 should participate in each level of problem. 438 439 Level: intermediate 440 441 Notes: 442 If the number of levels is one then the multigrid uses the -mg_levels prefix 443 for setting the level options rather than the -mg_coarse prefix. 444 445 You can free the information in comms after this routine is called. 446 447 The array of MPI communicators must contain MPI_COMM_NULL for those ranks that at each level 448 are not participating in the coarser solve. For example, with 2 levels and 1 and 2 ranks on 449 the two levels, rank 0 in the original communicator will pass in an array of 2 communicators 450 of size 2 and 1, while rank 1 in the original communicator will pass in array of 2 communicators 451 the first of size 2 and the second of value MPI_COMM_NULL since the rank 1 does not participate 452 in the coarse grid solve. 453 454 Since each coarser level may have a new MPI_Comm with fewer ranks than the previous, one 455 must take special care in providing the restriction and interpolation operation. We recommend 456 providing these as two step operations; first perform a standard restriction or interpolation on 457 the full number of ranks for that level and then use an MPI call to copy the resulting vector 458 array entries (after calls to VecGetArray()) to the smaller or larger number of ranks, note in both 459 cases the MPI calls must be made on the larger of the two communicators. Traditional MPI send and 460 recieves or MPI_AlltoAllv() could be used to do the reshuffling of the vector entries. 
461 462 Fortran Notes: 463 Use comms = PETSC_NULL_MPI_COMM as the equivalent of NULL in the C interface. Note PETSC_NULL_MPI_COMM 464 is not MPI_COMM_NULL. It is more like PETSC_NULL_INTEGER, PETSC_NULL_REAL etc. 465 466 .seealso: `PCMGSetType()`, `PCMGGetLevels()` 467 @*/ 468 PetscErrorCode PCMGSetLevels(PC pc, PetscInt levels, MPI_Comm *comms) { 469 PetscFunctionBegin; 470 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 471 if (comms) PetscValidPointer(comms, 3); 472 PetscTryMethod(pc, "PCMGSetLevels_C", (PC, PetscInt, MPI_Comm *), (pc, levels, comms)); 473 PetscFunctionReturn(0); 474 } 475 476 PetscErrorCode PCDestroy_MG(PC pc) { 477 PC_MG *mg = (PC_MG *)pc->data; 478 PC_MG_Levels **mglevels = mg->levels; 479 PetscInt i, n; 480 481 PetscFunctionBegin; 482 PetscCall(PCReset_MG(pc)); 483 if (mglevels) { 484 n = mglevels[0]->levels; 485 for (i = 0; i < n; i++) { 486 if (mglevels[i]->smoothd != mglevels[i]->smoothu) { PetscCall(KSPDestroy(&mglevels[i]->smoothd)); } 487 PetscCall(KSPDestroy(&mglevels[i]->smoothu)); 488 PetscCall(KSPDestroy(&mglevels[i]->cr)); 489 PetscCall(PetscFree(mglevels[i])); 490 } 491 PetscCall(PetscFree(mg->levels)); 492 } 493 PetscCall(PetscFree(pc->data)); 494 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL)); 495 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL)); 496 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", NULL)); 497 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", NULL)); 498 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", NULL)); 499 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL)); 500 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL)); 501 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", NULL)); 502 PetscCall(PetscObjectComposeFunction((PetscObject)pc, 
"PCMGGetAdaptInterpolation_C", NULL)); 503 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", NULL)); 504 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", NULL)); 505 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", NULL)); 506 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", NULL)); 507 PetscFunctionReturn(0); 508 } 509 510 /* 511 PCApply_MG - Runs either an additive, multiplicative, Kaskadic 512 or full cycle of multigrid. 513 514 Note: 515 A simple wrapper which calls PCMGMCycle(),PCMGACycle(), or PCMGFCycle(). 516 */ 517 static PetscErrorCode PCApply_MG_Internal(PC pc, Vec b, Vec x, Mat B, Mat X, PetscBool transpose) { 518 PC_MG *mg = (PC_MG *)pc->data; 519 PC_MG_Levels **mglevels = mg->levels; 520 PC tpc; 521 PetscInt levels = mglevels[0]->levels, i; 522 PetscBool changeu, changed, matapp; 523 524 PetscFunctionBegin; 525 matapp = (PetscBool)(B && X); 526 if (mg->stageApply) PetscCall(PetscLogStagePush(mg->stageApply)); 527 /* When the DM is supplying the matrix then it will not exist until here */ 528 for (i = 0; i < levels; i++) { 529 if (!mglevels[i]->A) { 530 PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL)); 531 PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A)); 532 } 533 } 534 535 PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc)); 536 PetscCall(PCPreSolveChangeRHS(tpc, &changed)); 537 PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc)); 538 PetscCall(PCPreSolveChangeRHS(tpc, &changeu)); 539 if (!changeu && !changed) { 540 if (matapp) { 541 PetscCall(MatDestroy(&mglevels[levels - 1]->B)); 542 mglevels[levels - 1]->B = B; 543 } else { 544 PetscCall(VecDestroy(&mglevels[levels - 1]->b)); 545 mglevels[levels - 1]->b = b; 546 } 547 } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */ 548 if (matapp) { 549 if (mglevels[levels - 1]->B) { 550 PetscInt N1, 
N2; 551 PetscBool flg; 552 553 PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &N1)); 554 PetscCall(MatGetSize(B, NULL, &N2)); 555 PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 1]->B, ((PetscObject)B)->type_name, &flg)); 556 if (N1 != N2 || !flg) { PetscCall(MatDestroy(&mglevels[levels - 1]->B)); } 557 } 558 if (!mglevels[levels - 1]->B) { 559 PetscCall(MatDuplicate(B, MAT_COPY_VALUES, &mglevels[levels - 1]->B)); 560 } else { 561 PetscCall(MatCopy(B, mglevels[levels - 1]->B, SAME_NONZERO_PATTERN)); 562 } 563 } else { 564 if (!mglevels[levels - 1]->b) { 565 Vec *vec; 566 567 PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL)); 568 mglevels[levels - 1]->b = *vec; 569 PetscCall(PetscFree(vec)); 570 } 571 PetscCall(VecCopy(b, mglevels[levels - 1]->b)); 572 } 573 } 574 if (matapp) { 575 mglevels[levels - 1]->X = X; 576 } else { 577 mglevels[levels - 1]->x = x; 578 } 579 580 /* If coarser Xs are present, it means we have already block applied the PC at least once 581 Reset operators if sizes/type do no match */ 582 if (matapp && levels > 1 && mglevels[levels - 2]->X) { 583 PetscInt Xc, Bc; 584 PetscBool flg; 585 586 PetscCall(MatGetSize(mglevels[levels - 2]->X, NULL, &Xc)); 587 PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &Bc)); 588 PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 2]->X, ((PetscObject)mglevels[levels - 1]->X)->type_name, &flg)); 589 if (Xc != Bc || !flg) { 590 PetscCall(MatDestroy(&mglevels[levels - 1]->R)); 591 for (i = 0; i < levels - 1; i++) { 592 PetscCall(MatDestroy(&mglevels[i]->R)); 593 PetscCall(MatDestroy(&mglevels[i]->B)); 594 PetscCall(MatDestroy(&mglevels[i]->X)); 595 } 596 } 597 } 598 599 if (mg->am == PC_MG_MULTIPLICATIVE) { 600 if (matapp) PetscCall(MatZeroEntries(X)); 601 else PetscCall(VecZeroEntries(x)); 602 for (i = 0; i < mg->cyclesperpcapply; i++) { PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, transpose, matapp, NULL)); } 603 } else if (mg->am == 
PC_MG_ADDITIVE) { 604 PetscCall(PCMGACycle_Private(pc, mglevels, transpose, matapp)); 605 } else if (mg->am == PC_MG_KASKADE) { 606 PetscCall(PCMGKCycle_Private(pc, mglevels, transpose, matapp)); 607 } else { 608 PetscCall(PCMGFCycle_Private(pc, mglevels, transpose, matapp)); 609 } 610 if (mg->stageApply) PetscCall(PetscLogStagePop()); 611 if (!changeu && !changed) { 612 if (matapp) { 613 mglevels[levels - 1]->B = NULL; 614 } else { 615 mglevels[levels - 1]->b = NULL; 616 } 617 } 618 PetscFunctionReturn(0); 619 } 620 621 static PetscErrorCode PCApply_MG(PC pc, Vec b, Vec x) { 622 PetscFunctionBegin; 623 PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_FALSE)); 624 PetscFunctionReturn(0); 625 } 626 627 static PetscErrorCode PCApplyTranspose_MG(PC pc, Vec b, Vec x) { 628 PetscFunctionBegin; 629 PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_TRUE)); 630 PetscFunctionReturn(0); 631 } 632 633 static PetscErrorCode PCMatApply_MG(PC pc, Mat b, Mat x) { 634 PetscFunctionBegin; 635 PetscCall(PCApply_MG_Internal(pc, NULL, NULL, b, x, PETSC_FALSE)); 636 PetscFunctionReturn(0); 637 } 638 639 PetscErrorCode PCSetFromOptions_MG(PC pc, PetscOptionItems *PetscOptionsObject) { 640 PetscInt levels, cycles; 641 PetscBool flg, flg2; 642 PC_MG *mg = (PC_MG *)pc->data; 643 PC_MG_Levels **mglevels; 644 PCMGType mgtype; 645 PCMGCycleType mgctype; 646 PCMGGalerkinType gtype; 647 PCMGCoarseSpaceType coarseSpaceType; 648 649 PetscFunctionBegin; 650 levels = PetscMax(mg->nlevels, 1); 651 PetscOptionsHeadBegin(PetscOptionsObject, "Multigrid options"); 652 PetscCall(PetscOptionsInt("-pc_mg_levels", "Number of Levels", "PCMGSetLevels", levels, &levels, &flg)); 653 if (!flg && !mg->levels && pc->dm) { 654 PetscCall(DMGetRefineLevel(pc->dm, &levels)); 655 levels++; 656 mg->usedmfornumberoflevels = PETSC_TRUE; 657 } 658 PetscCall(PCMGSetLevels(pc, levels, NULL)); 659 mglevels = mg->levels; 660 661 mgctype = (PCMGCycleType)mglevels[0]->cycles; 662 
PetscCall(PetscOptionsEnum("-pc_mg_cycle_type", "V cycle or for W-cycle", "PCMGSetCycleType", PCMGCycleTypes, (PetscEnum)mgctype, (PetscEnum *)&mgctype, &flg)); 663 if (flg) PetscCall(PCMGSetCycleType(pc, mgctype)); 664 gtype = mg->galerkin; 665 PetscCall(PetscOptionsEnum("-pc_mg_galerkin", "Use Galerkin process to compute coarser operators", "PCMGSetGalerkin", PCMGGalerkinTypes, (PetscEnum)gtype, (PetscEnum *)>ype, &flg)); 666 if (flg) PetscCall(PCMGSetGalerkin(pc, gtype)); 667 coarseSpaceType = mg->coarseSpaceType; 668 PetscCall(PetscOptionsEnum("-pc_mg_adapt_interp_coarse_space", "Type of adaptive coarse space: none, polynomial, harmonic, eigenvector, generalized_eigenvector, gdsw", "PCMGSetAdaptCoarseSpaceType", PCMGCoarseSpaceTypes, (PetscEnum)coarseSpaceType, (PetscEnum *)&coarseSpaceType, &flg)); 669 if (flg) PetscCall(PCMGSetAdaptCoarseSpaceType(pc, coarseSpaceType)); 670 PetscCall(PetscOptionsInt("-pc_mg_adapt_interp_n", "Size of the coarse space for adaptive interpolation", "PCMGSetCoarseSpace", mg->Nc, &mg->Nc, &flg)); 671 PetscCall(PetscOptionsBool("-pc_mg_mesp_monitor", "Monitor the multilevel eigensolver", "PCMGSetAdaptInterpolation", PETSC_FALSE, &mg->mespMonitor, &flg)); 672 flg2 = PETSC_FALSE; 673 PetscCall(PetscOptionsBool("-pc_mg_adapt_cr", "Monitor coarse space quality using Compatible Relaxation (CR)", "PCMGSetAdaptCR", PETSC_FALSE, &flg2, &flg)); 674 if (flg) PetscCall(PCMGSetAdaptCR(pc, flg2)); 675 flg = PETSC_FALSE; 676 PetscCall(PetscOptionsBool("-pc_mg_distinct_smoothup", "Create separate smoothup KSP and append the prefix _up", "PCMGSetDistinctSmoothUp", PETSC_FALSE, &flg, NULL)); 677 if (flg) PetscCall(PCMGSetDistinctSmoothUp(pc)); 678 mgtype = mg->am; 679 PetscCall(PetscOptionsEnum("-pc_mg_type", "Multigrid type", "PCMGSetType", PCMGTypes, (PetscEnum)mgtype, (PetscEnum *)&mgtype, &flg)); 680 if (flg) PetscCall(PCMGSetType(pc, mgtype)); 681 if (mg->am == PC_MG_MULTIPLICATIVE) { 682 
PetscCall(PetscOptionsInt("-pc_mg_multiplicative_cycles", "Number of cycles for each preconditioner step", "PCMGMultiplicativeSetCycles", mg->cyclesperpcapply, &cycles, &flg)); 683 if (flg) PetscCall(PCMGMultiplicativeSetCycles(pc, cycles)); 684 } 685 flg = PETSC_FALSE; 686 PetscCall(PetscOptionsBool("-pc_mg_log", "Log times for each multigrid level", "None", flg, &flg, NULL)); 687 if (flg) { 688 PetscInt i; 689 char eventname[128]; 690 691 levels = mglevels[0]->levels; 692 for (i = 0; i < levels; i++) { 693 sprintf(eventname, "MGSetup Level %d", (int)i); 694 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsetup)); 695 sprintf(eventname, "MGSmooth Level %d", (int)i); 696 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsolve)); 697 if (i) { 698 sprintf(eventname, "MGResid Level %d", (int)i); 699 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventresidual)); 700 sprintf(eventname, "MGInterp Level %d", (int)i); 701 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventinterprestrict)); 702 } 703 } 704 705 #if defined(PETSC_USE_LOG) 706 { 707 const char *sname = "MG Apply"; 708 PetscStageLog stageLog; 709 PetscInt st; 710 711 PetscCall(PetscLogGetStageLog(&stageLog)); 712 for (st = 0; st < stageLog->numStages; ++st) { 713 PetscBool same; 714 715 PetscCall(PetscStrcmp(stageLog->stageInfo[st].name, sname, &same)); 716 if (same) mg->stageApply = st; 717 } 718 if (!mg->stageApply) { PetscCall(PetscLogStageRegister(sname, &mg->stageApply)); } 719 } 720 #endif 721 } 722 PetscOptionsHeadEnd(); 723 /* Check option consistency */ 724 PetscCall(PCMGGetGalerkin(pc, >ype)); 725 PetscCall(PCMGGetAdaptInterpolation(pc, &flg)); 726 PetscCheck(!flg || !(gtype >= PC_MG_GALERKIN_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use Galerkin coarse operators when adapting the interpolator"); 727 
PetscFunctionReturn(0); 728 } 729 730 const char *const PCMGTypes[] = {"MULTIPLICATIVE", "ADDITIVE", "FULL", "KASKADE", "PCMGType", "PC_MG", NULL}; 731 const char *const PCMGCycleTypes[] = {"invalid", "v", "w", "PCMGCycleType", "PC_MG_CYCLE", NULL}; 732 const char *const PCMGGalerkinTypes[] = {"both", "pmat", "mat", "none", "external", "PCMGGalerkinType", "PC_MG_GALERKIN", NULL}; 733 const char *const PCMGCoarseSpaceTypes[] = {"none", "polynomial", "harmonic", "eigenvector", "generalized_eigenvector", "gdsw", "PCMGCoarseSpaceType", "PCMG_ADAPT_NONE", NULL}; 734 735 #include <petscdraw.h> 736 PetscErrorCode PCView_MG(PC pc, PetscViewer viewer) { 737 PC_MG *mg = (PC_MG *)pc->data; 738 PC_MG_Levels **mglevels = mg->levels; 739 PetscInt levels = mglevels ? mglevels[0]->levels : 0, i; 740 PetscBool iascii, isbinary, isdraw; 741 742 PetscFunctionBegin; 743 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 744 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary)); 745 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw)); 746 if (iascii) { 747 const char *cyclename = levels ? (mglevels[0]->cycles == PC_MG_CYCLE_V ? 
"v" : "w") : "unknown"; 748 PetscCall(PetscViewerASCIIPrintf(viewer, " type is %s, levels=%" PetscInt_FMT " cycles=%s\n", PCMGTypes[mg->am], levels, cyclename)); 749 if (mg->am == PC_MG_MULTIPLICATIVE) { PetscCall(PetscViewerASCIIPrintf(viewer, " Cycles per PCApply=%" PetscInt_FMT "\n", mg->cyclesperpcapply)); } 750 if (mg->galerkin == PC_MG_GALERKIN_BOTH) { 751 PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices\n")); 752 } else if (mg->galerkin == PC_MG_GALERKIN_PMAT) { 753 PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices for pmat\n")); 754 } else if (mg->galerkin == PC_MG_GALERKIN_MAT) { 755 PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices for mat\n")); 756 } else if (mg->galerkin == PC_MG_GALERKIN_EXTERNAL) { 757 PetscCall(PetscViewerASCIIPrintf(viewer, " Using externally compute Galerkin coarse grid matrices\n")); 758 } else { 759 PetscCall(PetscViewerASCIIPrintf(viewer, " Not using Galerkin computed coarse grid matrices\n")); 760 } 761 if (mg->view) PetscCall((*mg->view)(pc, viewer)); 762 for (i = 0; i < levels; i++) { 763 if (i) { 764 PetscCall(PetscViewerASCIIPrintf(viewer, "Down solver (pre-smoother) on level %" PetscInt_FMT " -------------------------------\n", i)); 765 } else { 766 PetscCall(PetscViewerASCIIPrintf(viewer, "Coarse grid solver -- level %" PetscInt_FMT " -------------------------------\n", i)); 767 } 768 PetscCall(PetscViewerASCIIPushTab(viewer)); 769 PetscCall(KSPView(mglevels[i]->smoothd, viewer)); 770 PetscCall(PetscViewerASCIIPopTab(viewer)); 771 if (i && mglevels[i]->smoothd == mglevels[i]->smoothu) { 772 PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) same as down solver (pre-smoother)\n")); 773 } else if (i) { 774 PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) on level %" PetscInt_FMT " -------------------------------\n", i)); 775 PetscCall(PetscViewerASCIIPushTab(viewer)); 776 
PetscCall(KSPView(mglevels[i]->smoothu, viewer)); 777 PetscCall(PetscViewerASCIIPopTab(viewer)); 778 } 779 if (i && mglevels[i]->cr) { 780 PetscCall(PetscViewerASCIIPrintf(viewer, "CR solver on level %" PetscInt_FMT " -------------------------------\n", i)); 781 PetscCall(PetscViewerASCIIPushTab(viewer)); 782 PetscCall(KSPView(mglevels[i]->cr, viewer)); 783 PetscCall(PetscViewerASCIIPopTab(viewer)); 784 } 785 } 786 } else if (isbinary) { 787 for (i = levels - 1; i >= 0; i--) { 788 PetscCall(KSPView(mglevels[i]->smoothd, viewer)); 789 if (i && mglevels[i]->smoothd != mglevels[i]->smoothu) { PetscCall(KSPView(mglevels[i]->smoothu, viewer)); } 790 } 791 } else if (isdraw) { 792 PetscDraw draw; 793 PetscReal x, w, y, bottom, th; 794 PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw)); 795 PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y)); 796 PetscCall(PetscDrawStringGetSize(draw, NULL, &th)); 797 bottom = y - th; 798 for (i = levels - 1; i >= 0; i--) { 799 if (!mglevels[i]->smoothu || (mglevels[i]->smoothu == mglevels[i]->smoothd)) { 800 PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom)); 801 PetscCall(KSPView(mglevels[i]->smoothd, viewer)); 802 PetscCall(PetscDrawPopCurrentPoint(draw)); 803 } else { 804 w = 0.5 * PetscMin(1.0 - x, x); 805 PetscCall(PetscDrawPushCurrentPoint(draw, x + w, bottom)); 806 PetscCall(KSPView(mglevels[i]->smoothd, viewer)); 807 PetscCall(PetscDrawPopCurrentPoint(draw)); 808 PetscCall(PetscDrawPushCurrentPoint(draw, x - w, bottom)); 809 PetscCall(KSPView(mglevels[i]->smoothu, viewer)); 810 PetscCall(PetscDrawPopCurrentPoint(draw)); 811 } 812 PetscCall(PetscDrawGetBoundingBox(draw, NULL, &bottom, NULL, NULL)); 813 bottom -= th; 814 } 815 } 816 PetscFunctionReturn(0); 817 } 818 819 #include <petsc/private/kspimpl.h> 820 821 /* 822 Calls setup for the KSP on each level 823 */ 824 PetscErrorCode PCSetUp_MG(PC pc) { 825 PC_MG *mg = (PC_MG *)pc->data; 826 PC_MG_Levels **mglevels = mg->levels; 827 PetscInt i, n; 828 PC cpc; 829 PetscBool dump = 
PETSC_FALSE, opsset, use_amat, missinginterpolate = PETSC_FALSE;
  Mat dA, dB;
  Vec tvec;
  DM *dms;
  PetscViewer viewer = NULL;
  PetscBool dAeqdB = PETSC_FALSE, needRestricts = PETSC_FALSE, doCR = PETSC_FALSE;
  PetscBool adaptInterpolation = mg->adaptInterpolation;

  PetscFunctionBegin;
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels with PCMGSetLevels() before setting up");
  n = mglevels[0]->levels;
  /* FIX: Move this to PCSetFromOptions_MG? */
  if (mg->usedmfornumberoflevels) {
    /* the number of levels tracks the DM's refinement level; grow the hierarchy if the DM was refined */
    PetscInt levels;
    PetscCall(DMGetRefineLevel(pc->dm, &levels));
    levels++;
    if (levels > n) { /* the problem is now being solved on a finer grid */
      PetscCall(PCMGSetLevels(pc, levels, NULL));
      n = levels;
      PetscCall(PCSetFromOptions(pc)); /* it is bad to call this here, but otherwise will never be called for the new hierarchy */
      mglevels = mg->levels;
    }
  }
  PetscCall(KSPGetPC(mglevels[0]->smoothd, &cpc));

  /* If user did not provide fine grid operators OR operator was not updated since last global KSPSetOperators() */
  /* so use those from global PC */
  /* Is this what we always want? What if user wants to keep old one?
  */
  PetscCall(KSPGetOperatorsSet(mglevels[n - 1]->smoothd, NULL, &opsset));
  if (opsset) {
    /* treat operators identical to the outer pmat as "not separately set" so they are refreshed below */
    Mat mmat;
    PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, NULL, &mmat));
    if (mmat == pc->pmat) opsset = PETSC_FALSE;
  }

  /* Create CR solvers */
  PetscCall(PCMGGetAdaptCR(pc, &doCR));
  if (doCR) {
    const char *prefix;

    PetscCall(PCGetOptionsPrefix(pc, &prefix));
    for (i = 1; i < n; ++i) {
      PC ipc, cr;
      char crprefix[128];

      /* each level above the coarsest gets a Chebyshev KSP whose PC is (SOR x CR) composed multiplicatively */
      PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &mglevels[i]->cr));
      PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->cr, PETSC_FALSE));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->cr, (PetscObject)pc, n - i));
      PetscCall(KSPSetOptionsPrefix(mglevels[i]->cr, prefix));
      PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->cr, PetscMGLevelId, mglevels[i]->level));
      PetscCall(KSPSetType(mglevels[i]->cr, KSPCHEBYSHEV));
      PetscCall(KSPSetConvergenceTest(mglevels[i]->cr, KSPConvergedSkip, NULL, NULL));
      PetscCall(KSPSetNormType(mglevels[i]->cr, KSP_NORM_PRECONDITIONED));
      PetscCall(KSPGetPC(mglevels[i]->cr, &ipc));

      PetscCall(PCSetType(ipc, PCCOMPOSITE));
      PetscCall(PCCompositeSetType(ipc, PC_COMPOSITE_MULTIPLICATIVE));
      PetscCall(PCCompositeAddPCType(ipc, PCSOR));
      PetscCall(CreateCR_Private(pc, i, &cr));
      PetscCall(PCCompositeAddPC(ipc, cr));
      PetscCall(PCDestroy(&cr)); /* the composite PC now holds the reference */

      PetscCall(KSPSetTolerances(mglevels[i]->cr, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, mg->default_smoothd));
      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
      PetscCall(PetscSNPrintf(crprefix, 128, "mg_levels_%d_cr_", (int)i));
      PetscCall(KSPAppendOptionsPrefix(mglevels[i]->cr, crprefix));
      PetscCall(PetscLogObjectParent((PetscObject)pc, (PetscObject)mglevels[i]->cr));
    }
  }

  if (!opsset) {
    /* propagate the outer operators (Amat or Pmat depending on -pc_use_amat) to the finest-level smoother */
    PetscCall(PCGetUseAmat(pc, &use_amat));
    if (use_amat) {
      PetscCall(PetscInfo(pc, "Using outer operators to define finest grid operator \n because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n"));
      PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->mat, pc->pmat));
    } else {
      PetscCall(PetscInfo(pc, "Using matrix (pmat) operators to define finest grid operator \n because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n"));
      PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->pmat, pc->pmat));
    }
  }

  /* does any level lack both an interpolation and a restriction? */
  for (i = n - 1; i > 0; i--) {
    if (!(mglevels[i]->interpolate || mglevels[i]->restrct)) {
      missinginterpolate = PETSC_TRUE;
      break;
    }
  }

  PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, &dA, &dB));
  if (dA == dB) dAeqdB = PETSC_TRUE;
  if (mg->galerkin == PC_MG_GALERKIN_NONE || ((mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_MAT) && !dAeqdB)) {
    needRestricts = PETSC_TRUE; /* user must compute either mat, pmat, or both so must restrict x to coarser levels */
  }

  if (pc->dm && !pc->setupcalled) {
    /* finest smoother also gets DM but it is not active, independent of whether galerkin==PC_MG_GALERKIN_EXTERNAL */
    PetscCall(KSPSetDM(mglevels[n - 1]->smoothd, pc->dm));
    PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothd, PETSC_FALSE));
    if (mglevels[n - 1]->smoothd != mglevels[n - 1]->smoothu) {
      PetscCall(KSPSetDM(mglevels[n - 1]->smoothu, pc->dm));
      PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothu, PETSC_FALSE));
    }
    if (mglevels[n - 1]->cr) {
      PetscCall(KSPSetDM(mglevels[n - 1]->cr, pc->dm));
      PetscCall(KSPSetDMActive(mglevels[n - 1]->cr, PETSC_FALSE));
    }
  }

  /*
     Skipping if user has provided all interpolation/restriction needed (since DM might not be able to produce them (when coming from SNES/TS)
     Skipping for externally managed hierarchy (such as ML and GAMG). Cleaner logic here would be great. Wrap ML/GAMG as DMs?
  */
  if (missinginterpolate && mg->galerkin != PC_MG_GALERKIN_EXTERNAL && !pc->setupcalled) {
    /* first see if we can compute a coarse space */
    if (mg->coarseSpaceType == PCMG_ADAPT_GDSW) {
      for (i = n - 2; i > -1; i--) {
        if (!mglevels[i + 1]->restrct && !mglevels[i + 1]->interpolate) {
          PetscCall(PCMGComputeCoarseSpace_Internal(pc, i + 1, mg->coarseSpaceType, mg->Nc, NULL, &mglevels[i + 1]->coarseSpace));
          PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->coarseSpace));
        }
      }
    } else { /* construct the interpolation from the DMs */
      Mat p;
      Vec rscale;
      PetscCall(PetscMalloc1(n, &dms));
      dms[n - 1] = pc->dm;
      /* Separately create them so we do not get DMKSP interference between levels */
      for (i = n - 2; i > -1; i--) PetscCall(DMCoarsen(dms[i + 1], MPI_COMM_NULL, &dms[i]));
      for (i = n - 2; i > -1; i--) {
        DMKSP kdm;
        PetscBool dmhasrestrict, dmhasinject;
        PetscCall(KSPSetDM(mglevels[i]->smoothd, dms[i]));
        if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothd, PETSC_FALSE));
        if (mglevels[i]->smoothd != mglevels[i]->smoothu) {
          PetscCall(KSPSetDM(mglevels[i]->smoothu, dms[i]));
          if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothu, PETSC_FALSE));
        }
        if (mglevels[i]->cr) {
          PetscCall(KSPSetDM(mglevels[i]->cr, dms[i]));
          if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->cr, PETSC_FALSE));
        }
        PetscCall(DMGetDMKSPWrite(dms[i], &kdm));
        /* Ugly hack so that the next KSPSetUp() will use the RHS that we set. A better fix is to change dmActive to take
         * a bitwise OR of computing the matrix, RHS, and initial iterate.
         */
        kdm->ops->computerhs = NULL;
        kdm->rhsctx = NULL;
        if (!mglevels[i + 1]->interpolate) {
          PetscCall(DMCreateInterpolation(dms[i], dms[i + 1], &p, &rscale));
          PetscCall(PCMGSetInterpolation(pc, i + 1, p));
          if (rscale) PetscCall(PCMGSetRScale(pc, i + 1, rscale));
          PetscCall(VecDestroy(&rscale));
          PetscCall(MatDestroy(&p)); /* PCMGSetInterpolation() took its own reference */
        }
        PetscCall(DMHasCreateRestriction(dms[i], &dmhasrestrict));
        if (dmhasrestrict && !mglevels[i + 1]->restrct) {
          PetscCall(DMCreateRestriction(dms[i], dms[i + 1], &p));
          PetscCall(PCMGSetRestriction(pc, i + 1, p));
          PetscCall(MatDestroy(&p));
        }
        PetscCall(DMHasCreateInjection(dms[i], &dmhasinject));
        if (dmhasinject && !mglevels[i + 1]->inject) {
          PetscCall(DMCreateInjection(dms[i], dms[i + 1], &p));
          PetscCall(PCMGSetInjection(pc, i + 1, p));
          PetscCall(MatDestroy(&p));
        }
      }

      for (i = n - 2; i > -1; i--) PetscCall(DMDestroy(&dms[i]));
      PetscCall(PetscFree(dms));
    }
  }

  if (mg->galerkin < PC_MG_GALERKIN_NONE) {
    /* compute coarse operators via the Galerkin product R A P for mat (doA) and/or pmat (doB) */
    Mat A, B;
    PetscBool doA = PETSC_FALSE, doB = PETSC_FALSE;
    MatReuse reuse = MAT_INITIAL_MATRIX;

    if (mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_BOTH) doB = PETSC_TRUE;
    if (mg->galerkin == PC_MG_GALERKIN_MAT || (mg->galerkin == PC_MG_GALERKIN_BOTH && dA != dB)) doA = PETSC_TRUE;
    if (pc->setupcalled) reuse = MAT_REUSE_MATRIX;
    for (i = n - 2; i > -1; i--) {
      PetscCheck(mglevels[i + 1]->restrct || mglevels[i + 1]->interpolate, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must provide interpolation or restriction for each MG level except level 0");
      /* if only one of interpolation/restriction was supplied, use it for both roles */
      if (!mglevels[i + 1]->interpolate) { PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->restrct)); }
      if (!mglevels[i + 1]->restrct) { PetscCall(PCMGSetRestriction(pc, i + 1, mglevels[i + 1]->interpolate)); }
      if (reuse == MAT_REUSE_MATRIX) { PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, &B)); }
      if (doA) { PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dA, mglevels[i + 1]->interpolate, reuse, 1.0, &A)); }
      if (doB) { PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dB, mglevels[i + 1]->interpolate, reuse, 1.0, &B)); }
      /* the management of the PetscObjectReference() and PetscObjecDereference() below is rather delicate */
      if (!doA && dAeqdB) {
        if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)B));
        A = B;
      } else if (!doA && reuse == MAT_INITIAL_MATRIX) {
        PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, NULL));
        PetscCall(PetscObjectReference((PetscObject)A));
      }
      if (!doB && dAeqdB) {
        if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)A));
        B = A;
      } else if (!doB && reuse == MAT_INITIAL_MATRIX) {
        PetscCall(KSPGetOperators(mglevels[i]->smoothd, NULL, &B));
        PetscCall(PetscObjectReference((PetscObject)B));
      }
      if (reuse == MAT_INITIAL_MATRIX) {
        PetscCall(KSPSetOperators(mglevels[i]->smoothd, A, B));
        PetscCall(PetscObjectDereference((PetscObject)A));
        PetscCall(PetscObjectDereference((PetscObject)B));
      }
      /* this level's operators become the fine operators for the next (coarser) iteration */
      dA = A;
      dB = B;
    }
  }

  /* Adapt interpolation matrices */
  if (adaptInterpolation) {
    for (i = 0; i < n; ++i) {
      /* the coarse space on level i is built from the one on level i-1 (NULL on the coarsest level) */
      if (!mglevels[i]->coarseSpace) { PetscCall(PCMGComputeCoarseSpace_Internal(pc, i, mg->coarseSpaceType, mg->Nc, !i ? NULL : mglevels[i - 1]->coarseSpace, &mglevels[i]->coarseSpace)); }
      if (i) PetscCall(PCMGAdaptInterpolator_Internal(pc, i, mglevels[i - 1]->smoothu, mglevels[i]->smoothu, mglevels[i - 1]->coarseSpace, mglevels[i]->coarseSpace));
    }
    for (i = n - 2; i > -1; --i) { PetscCall(PCMGRecomputeLevelOperators_Internal(pc, i)); }
  }

  if (needRestricts && pc->dm) {
    /* let the DM move auxiliary data (e.g. coefficients) from fine to coarse levels */
    for (i = n - 2; i >= 0; i--) {
      DM dmfine, dmcoarse;
      Mat Restrict, Inject;
      Vec rscale;
      PetscCall(KSPGetDM(mglevels[i + 1]->smoothd, &dmfine));
      PetscCall(KSPGetDM(mglevels[i]->smoothd, &dmcoarse));
      PetscCall(PCMGGetRestriction(pc, i + 1, &Restrict));
      PetscCall(PCMGGetRScale(pc, i + 1, &rscale));
      PetscCall(PCMGGetInjection(pc, i + 1, &Inject));
      PetscCall(DMRestrict(dmfine, Restrict, rscale, Inject, dmcoarse));
    }
  }

  if (!pc->setupcalled) {
    for (i = 0; i < n; i++) { PetscCall(KSPSetFromOptions(mglevels[i]->smoothd)); }
    for (i = 1; i < n; i++) {
      if (mglevels[i]->smoothu && (mglevels[i]->smoothu != mglevels[i]->smoothd)) { PetscCall(KSPSetFromOptions(mglevels[i]->smoothu)); }
      if (mglevels[i]->cr) { PetscCall(KSPSetFromOptions(mglevels[i]->cr)); }
    }
    /* insure that if either interpolation or restriction is set the other other one is set */
    for (i = 1; i < n; i++) {
      PetscCall(PCMGGetInterpolation(pc, i, NULL));
      PetscCall(PCMGGetRestriction(pc, i, NULL));
    }
    /* create the work vectors (rhs b, residual r, solution x) on every level below the finest */
    for (i = 0; i < n - 1; i++) {
      if (!mglevels[i]->b) {
        Vec *vec;
        PetscCall(KSPCreateVecs(mglevels[i]->smoothd, 1, &vec, 0, NULL));
        PetscCall(PCMGSetRhs(pc, i, *vec));
        PetscCall(VecDestroy(vec));
        PetscCall(PetscFree(vec));
      }
      if (!mglevels[i]->r && i) {
        PetscCall(VecDuplicate(mglevels[i]->b, &tvec));
        PetscCall(PCMGSetR(pc, i, tvec));
        PetscCall(VecDestroy(&tvec));
      }
      if (!mglevels[i]->x) {
        PetscCall(VecDuplicate(mglevels[i]->b, &tvec));
        PetscCall(PCMGSetX(pc, i, tvec));
        PetscCall(VecDestroy(&tvec));
      }
      if (doCR) {
        PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crx));
        PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crb));
        PetscCall(VecZeroEntries(mglevels[i]->crb));
      }
    }
    if (n != 1 && !mglevels[n - 1]->r) {
      /* PCMGSetR() on the finest level if user did not supply it */
      Vec *vec;
      PetscCall(KSPCreateVecs(mglevels[n - 1]->smoothd, 1, &vec, 0, NULL));
      PetscCall(PCMGSetR(pc, n - 1, *vec));
      PetscCall(VecDestroy(vec));
      PetscCall(PetscFree(vec));
    }
    if (doCR) {
      PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crx));
      PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crb));
      PetscCall(VecZeroEntries(mglevels[n - 1]->crb));
    }
  }

  if (pc->dm) {
    /* need to tell all the coarser levels to rebuild the matrix using the DM for that level */
    for (i = 0; i < n - 1; i++) {
      if (mglevels[i]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[i]->smoothd->setupstage = KSP_SETUP_NEWMATRIX;
    }
  }
  // We got here (PCSetUp_MG) because the matrix has changed, which means the smoother needs to be set up again (e.g.,
  // new diagonal for Jacobi). Setting it here allows it to be logged under PCSetUp rather than deep inside a PCApply.
  if (mglevels[n - 1]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[n - 1]->smoothd->setupstage = KSP_SETUP_NEWMATRIX;

  for (i = 1; i < n; i++) {
    if (mglevels[i]->smoothu == mglevels[i]->smoothd || mg->am == PC_MG_FULL || mg->am == PC_MG_KASKADE || mg->cyclesperpcapply > 1) {
      /* if doing only down then initial guess is zero */
      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE));
    }
    if (mglevels[i]->cr) PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
    if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
    PetscCall(KSPSetUp(mglevels[i]->smoothd));
    if (mglevels[i]->smoothd->reason == KSP_DIVERGED_PC_FAILED) { pc->failedreason = PC_SUBPC_ERROR; }
    if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
    /* default residual computations use the smoother's (now final) operator */
    if (!mglevels[i]->residual) {
      Mat mat;
      PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL));
      PetscCall(PCMGSetResidual(pc, i, PCMGResidualDefault, mat));
    }
    if (!mglevels[i]->residualtranspose) {
      Mat mat;
      PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL));
      PetscCall(PCMGSetResidualTranspose(pc, i, PCMGResidualTransposeDefault, mat));
    }
  }
  for (i = 1; i < n; i++) {
    if (mglevels[i]->smoothu && mglevels[i]->smoothu != mglevels[i]->smoothd) {
      Mat downmat, downpmat;

      /* check if operators have been set for up, if not use down operators to set them */
      PetscCall(KSPGetOperatorsSet(mglevels[i]->smoothu, &opsset, NULL));
      if (!opsset) {
        PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat));
        PetscCall(KSPSetOperators(mglevels[i]->smoothu, downmat, downpmat));
      }

      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothu, PETSC_TRUE));
      if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
      PetscCall(KSPSetUp(mglevels[i]->smoothu));
      if (mglevels[i]->smoothu->reason == KSP_DIVERGED_PC_FAILED) { pc->failedreason = PC_SUBPC_ERROR; }
      if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
    }
    if (mglevels[i]->cr) {
      Mat downmat, downpmat;

      /* check if operators have been set for up, if not use down operators to set them */
      PetscCall(KSPGetOperatorsSet(mglevels[i]->cr, &opsset, NULL));
      if (!opsset) {
        PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat));
        PetscCall(KSPSetOperators(mglevels[i]->cr, downmat, downpmat));
      }

      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
      if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
      PetscCall(KSPSetUp(mglevels[i]->cr));
      if (mglevels[i]->cr->reason == KSP_DIVERGED_PC_FAILED) { pc->failedreason = PC_SUBPC_ERROR; }
      if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
    }
  }

  /* finally, set up the coarse-grid solver */
  if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0));
  PetscCall(KSPSetUp(mglevels[0]->smoothd));
  if (mglevels[0]->smoothd->reason == KSP_DIVERGED_PC_FAILED) { pc->failedreason = PC_SUBPC_ERROR; }
  if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0));

  /*
     Dump the interpolation/restriction matrices plus the
     Jacobian/stiffness on each level. This allows MATLAB users to
     easily check if the Galerkin condition A_c = R A_f R^T is satisfied.

     Only support one or the other at the same time.
  */
#if defined(PETSC_USE_SOCKET_VIEWER)
  PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_matlab", &dump, NULL));
  if (dump) viewer = PETSC_VIEWER_SOCKET_(PetscObjectComm((PetscObject)pc));
  dump = PETSC_FALSE;
#endif
  PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_binary", &dump, NULL));
  if (dump) viewer = PETSC_VIEWER_BINARY_(PetscObjectComm((PetscObject)pc));

  if (viewer) {
    /* dump every restriction operator, then every level's system matrix */
    for (i = 1; i < n; i++) { PetscCall(MatView(mglevels[i]->restrct, viewer)); }
    for (i = 0; i < n; i++) {
      PetscCall(KSPGetPC(mglevels[i]->smoothd, &pc));
      PetscCall(MatView(pc->mat, viewer));
    }
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/

/* Type-specific implementation behind PCMGGetLevels(); simply reports the stored level count */
PetscErrorCode PCMGGetLevels_MG(PC pc, PetscInt *levels) {
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  *levels = mg->nlevels;
  PetscFunctionReturn(0);
}

/*@
   PCMGGetLevels - Gets the number of levels to use with MG.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output parameter:
.  levels - the number of levels

   Level: advanced

.seealso: `PCMGSetLevels()`
@*/
PetscErrorCode PCMGGetLevels(PC pc, PetscInt *levels) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidIntPointer(levels, 2);
  /* default to 0 so a non-PCMG preconditioner (PetscTryMethod no-op) still yields a defined result */
  *levels = 0;
  PetscTryMethod(pc, "PCMGGetLevels_C", (PC, PetscInt *), (pc, levels));
  PetscFunctionReturn(0);
}

/*@
   PCMGGetGridComplexity - compute operator and grid complexity of MG hierarchy

   Input Parameter:
.
pc - the preconditioner context 1249 1250 Output Parameters: 1251 + gc - grid complexity = sum_i(n_i) / n_0 1252 - oc - operator complexity = sum_i(nnz_i) / nnz_0 1253 1254 Level: advanced 1255 1256 .seealso: `PCMGGetLevels()` 1257 @*/ 1258 PetscErrorCode PCMGGetGridComplexity(PC pc, PetscReal *gc, PetscReal *oc) { 1259 PC_MG *mg = (PC_MG *)pc->data; 1260 PC_MG_Levels **mglevels = mg->levels; 1261 PetscInt lev, N; 1262 PetscLogDouble nnz0 = 0, sgc = 0, soc = 0, n0 = 0; 1263 MatInfo info; 1264 1265 PetscFunctionBegin; 1266 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1267 if (gc) PetscValidRealPointer(gc, 2); 1268 if (oc) PetscValidRealPointer(oc, 3); 1269 if (!pc->setupcalled) { 1270 if (gc) *gc = 0; 1271 if (oc) *oc = 0; 1272 PetscFunctionReturn(0); 1273 } 1274 PetscCheck(mg->nlevels > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MG has no levels"); 1275 for (lev = 0; lev < mg->nlevels; lev++) { 1276 Mat dB; 1277 PetscCall(KSPGetOperators(mglevels[lev]->smoothd, NULL, &dB)); 1278 PetscCall(MatGetInfo(dB, MAT_GLOBAL_SUM, &info)); /* global reduction */ 1279 PetscCall(MatGetSize(dB, &N, NULL)); 1280 sgc += N; 1281 soc += info.nz_used; 1282 if (lev == mg->nlevels - 1) { 1283 nnz0 = info.nz_used; 1284 n0 = N; 1285 } 1286 } 1287 if (n0 > 0 && gc) *gc = (PetscReal)(sgc / n0); 1288 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number for grid points on finest level is not available"); 1289 if (nnz0 > 0 && oc) *oc = (PetscReal)(soc / nnz0); 1290 PetscFunctionReturn(0); 1291 } 1292 1293 /*@ 1294 PCMGSetType - Determines the form of multigrid to use: 1295 multiplicative, additive, full, or the Kaskade algorithm. 1296 1297 Logically Collective on PC 1298 1299 Input Parameters: 1300 + pc - the preconditioner context 1301 - form - multigrid form, one of PC_MG_MULTIPLICATIVE, PC_MG_ADDITIVE, 1302 PC_MG_FULL, PC_MG_KASKADE 1303 1304 Options Database Key: 1305 . 
-pc_mg_type <form> - Sets <form>, one of multiplicative, 1306 additive, full, kaskade 1307 1308 Level: advanced 1309 1310 .seealso: `PCMGSetLevels()` 1311 @*/ 1312 PetscErrorCode PCMGSetType(PC pc, PCMGType form) { 1313 PC_MG *mg = (PC_MG *)pc->data; 1314 1315 PetscFunctionBegin; 1316 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1317 PetscValidLogicalCollectiveEnum(pc, form, 2); 1318 mg->am = form; 1319 if (form == PC_MG_MULTIPLICATIVE) pc->ops->applyrichardson = PCApplyRichardson_MG; 1320 else pc->ops->applyrichardson = NULL; 1321 PetscFunctionReturn(0); 1322 } 1323 1324 /*@ 1325 PCMGGetType - Determines the form of multigrid to use: 1326 multiplicative, additive, full, or the Kaskade algorithm. 1327 1328 Logically Collective on PC 1329 1330 Input Parameter: 1331 . pc - the preconditioner context 1332 1333 Output Parameter: 1334 . type - one of PC_MG_MULTIPLICATIVE, PC_MG_ADDITIVE,PC_MG_FULL, PC_MG_KASKADE 1335 1336 Level: advanced 1337 1338 .seealso: `PCMGSetLevels()` 1339 @*/ 1340 PetscErrorCode PCMGGetType(PC pc, PCMGType *type) { 1341 PC_MG *mg = (PC_MG *)pc->data; 1342 1343 PetscFunctionBegin; 1344 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1345 *type = mg->am; 1346 PetscFunctionReturn(0); 1347 } 1348 1349 /*@ 1350 PCMGSetCycleType - Sets the type cycles to use. Use PCMGSetCycleTypeOnLevel() for more 1351 complicated cycling. 1352 1353 Logically Collective on PC 1354 1355 Input Parameters: 1356 + pc - the multigrid context 1357 - n - either PC_MG_CYCLE_V or PC_MG_CYCLE_W 1358 1359 Options Database Key: 1360 . 
-pc_mg_cycle_type <v,w> - provide the cycle desired

   Level: advanced

.seealso: `PCMGSetCycleTypeOnLevel()`
@*/
PetscErrorCode PCMGSetCycleType(PC pc, PCMGCycleType n) {
  PC_MG *mg = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PetscInt lev, nlevels;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, n, 2);
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
  /* apply the same cycle type uniformly across the whole hierarchy */
  nlevels = mglevels[0]->levels;
  for (lev = 0; lev < nlevels; lev++) { mglevels[lev]->cycles = n; }
  PetscFunctionReturn(0);
}

/*@
   PCMGMultiplicativeSetCycles - Sets the number of cycles to use for each preconditioner step
   of multigrid when PCMGType of PC_MG_MULTIPLICATIVE is used

   Logically Collective on PC

   Input Parameters:
+  pc - the multigrid context
-  n - number of cycles (default is 1)

   Options Database Key:
.
-pc_mg_multiplicative_cycles n - set the number of cycles 1392 1393 Level: advanced 1394 1395 Notes: 1396 This is not associated with setting a v or w cycle, that is set with PCMGSetCycleType() 1397 1398 .seealso: `PCMGSetCycleTypeOnLevel()`, `PCMGSetCycleType()` 1399 @*/ 1400 PetscErrorCode PCMGMultiplicativeSetCycles(PC pc, PetscInt n) { 1401 PC_MG *mg = (PC_MG *)pc->data; 1402 1403 PetscFunctionBegin; 1404 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1405 PetscValidLogicalCollectiveInt(pc, n, 2); 1406 mg->cyclesperpcapply = n; 1407 PetscFunctionReturn(0); 1408 } 1409 1410 PetscErrorCode PCMGSetGalerkin_MG(PC pc, PCMGGalerkinType use) { 1411 PC_MG *mg = (PC_MG *)pc->data; 1412 1413 PetscFunctionBegin; 1414 mg->galerkin = use; 1415 PetscFunctionReturn(0); 1416 } 1417 1418 /*@ 1419 PCMGSetGalerkin - Causes the coarser grid matrices to be computed from the 1420 finest grid via the Galerkin process: A_i-1 = r_i * A_i * p_i 1421 1422 Logically Collective on PC 1423 1424 Input Parameters: 1425 + pc - the multigrid context 1426 - use - one of PC_MG_GALERKIN_BOTH,PC_MG_GALERKIN_PMAT,PC_MG_GALERKIN_MAT, or PC_MG_GALERKIN_NONE 1427 1428 Options Database Key: 1429 . -pc_mg_galerkin <both,pmat,mat,none> - set the matrices to form via the Galerkin process 1430 1431 Level: intermediate 1432 1433 Notes: 1434 Some codes that use PCMG such as PCGAMG use Galerkin internally while constructing the hierarchy and thus do not 1435 use the PCMG construction of the coarser grids. 1436 1437 .seealso: `PCMGGetGalerkin()`, `PCMGGalerkinType` 1438 1439 @*/ 1440 PetscErrorCode PCMGSetGalerkin(PC pc, PCMGGalerkinType use) { 1441 PetscFunctionBegin; 1442 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1443 PetscTryMethod(pc, "PCMGSetGalerkin_C", (PC, PCMGGalerkinType), (pc, use)); 1444 PetscFunctionReturn(0); 1445 } 1446 1447 /*@ 1448 PCMGGetGalerkin - Checks if Galerkin multigrid is being used, i.e. 1449 A_i-1 = r_i * A_i * p_i 1450 1451 Not Collective 1452 1453 Input Parameter: 1454 . 
pc - the multigrid context

   Output Parameter:
.  galerkin - one of PC_MG_GALERKIN_BOTH,PC_MG_GALERKIN_PMAT,PC_MG_GALERKIN_MAT, PC_MG_GALERKIN_NONE, or PC_MG_GALERKIN_EXTERNAL

   Level: intermediate

.seealso: `PCMGSetGalerkin()`, `PCMGGalerkinType`

@*/
PetscErrorCode PCMGGetGalerkin(PC pc, PCMGGalerkinType *galerkin) {
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  *galerkin = mg->galerkin;
  PetscFunctionReturn(0);
}

/* Type-specific implementation behind PCMGSetAdaptInterpolation(); records the adaptation flag */
PetscErrorCode PCMGSetAdaptInterpolation_MG(PC pc, PetscBool adapt) {
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  mg->adaptInterpolation = adapt;
  PetscFunctionReturn(0);
}

/* Type-specific implementation behind PCMGGetAdaptInterpolation() */
PetscErrorCode PCMGGetAdaptInterpolation_MG(PC pc, PetscBool *adapt) {
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  *adapt = mg->adaptInterpolation;
  PetscFunctionReturn(0);
}

/* Type-specific implementation behind PCMGSetAdaptCoarseSpaceType(); any type other than
   PCMG_ADAPT_NONE also turns interpolator adaptation on */
PetscErrorCode PCMGSetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType ctype) {
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  mg->adaptInterpolation = ctype != PCMG_ADAPT_NONE ?
PETSC_TRUE : PETSC_FALSE;
  mg->coarseSpaceType = ctype;
  PetscFunctionReturn(0);
}

/* Type-specific implementation behind PCMGGetAdaptCoarseSpaceType() */
PetscErrorCode PCMGGetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType *ctype) {
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  *ctype = mg->coarseSpaceType;
  PetscFunctionReturn(0);
}

/* Type-specific implementation behind PCMGSetAdaptCR(); records the compatible-relaxation flag */
PetscErrorCode PCMGSetAdaptCR_MG(PC pc, PetscBool cr) {
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  mg->compatibleRelaxation = cr;
  PetscFunctionReturn(0);
}

/* Type-specific implementation behind PCMGGetAdaptCR() */
PetscErrorCode PCMGGetAdaptCR_MG(PC pc, PetscBool *cr) {
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  *cr = mg->compatibleRelaxation;
  PetscFunctionReturn(0);
}

/*@C
  PCMGSetAdaptCoarseSpaceType - Set the type of adaptive coarse space.

  Adapts or creates the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated.

  Logically Collective on PC

  Input Parameters:
+ pc    - the multigrid context
- ctype - the type of coarse space

  Options Database Keys:
+ -pc_mg_adapt_interp_n <int>             - The number of modes to use
- -pc_mg_adapt_interp_coarse_space <type> - The type of coarse space: none, polynomial, harmonic, eigenvector, generalized_eigenvector, gdsw

  Level: intermediate

.keywords: MG, set, Galerkin
.seealso: `PCMGCoarseSpaceType`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`
@*/
PetscErrorCode PCMGSetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType ctype) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, ctype, 2);
  PetscTryMethod(pc, "PCMGSetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType), (pc, ctype));
  PetscFunctionReturn(0);
}

/*@C
  PCMGGetAdaptCoarseSpaceType - Get the type of adaptive coarse space.

  Not Collective

  Input Parameter:
. pc - the multigrid context

  Output Parameter:
. ctype - the type of coarse space

  Level: intermediate

.keywords: MG, Get, Galerkin
.seealso: `PCMGCoarseSpaceType`, `PCMGSetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`
@*/
PetscErrorCode PCMGGetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType *ctype) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(ctype, 2);
  /* PetscUseMethod (not Try): the query must be answered, so a missing implementation is an error */
  PetscUseMethod(pc, "PCMGGetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType *), (pc, ctype));
  PetscFunctionReturn(0);
}

/* MATT: REMOVE? */
/*@
  PCMGSetAdaptInterpolation - Adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated.

  Logically Collective on PC

  Input Parameters:
+ pc - the multigrid context
- adapt - flag for adaptation of the interpolator

  Options Database Keys:
+ -pc_mg_adapt_interp - Turn on adaptation
. -pc_mg_adapt_interp_n <int> - The number of modes to use, should be divisible by dimension
- -pc_mg_adapt_interp_coarse_space <type> - The type of coarse space: polynomial, harmonic, eigenvector, generalized_eigenvector

  Level: intermediate

.keywords: MG, set, Galerkin
.seealso: `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`
@*/
PetscErrorCode PCMGSetAdaptInterpolation(PC pc, PetscBool adapt) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCMGSetAdaptInterpolation_C", (PC, PetscBool), (pc, adapt));
  PetscFunctionReturn(0);
}

/*@
  PCMGGetAdaptInterpolation - Get the flag to adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated.

  Not collective

  Input Parameter:
. pc - the multigrid context

  Output Parameter:
. adapt - flag for adaptation of the interpolator

  Level: intermediate

.keywords: MG, set, Galerkin
.seealso: `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`
@*/
PetscErrorCode PCMGGetAdaptInterpolation(PC pc, PetscBool *adapt) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidBoolPointer(adapt, 2);
  PetscUseMethod(pc, "PCMGGetAdaptInterpolation_C", (PC, PetscBool *), (pc, adapt));
  PetscFunctionReturn(0);
}

/*@
  PCMGSetAdaptCR - Monitor the coarse space quality using an auxiliary solve with compatible relaxation.

  Logically Collective on PC

  Input Parameters:
+ pc - the multigrid context
- cr - flag for compatible relaxation

  Options Database Keys:
. -pc_mg_adapt_cr - Turn on compatible relaxation

  Level: intermediate

.keywords: MG, set, Galerkin
.seealso: `PCMGGetAdaptCR()`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`
@*/
PetscErrorCode PCMGSetAdaptCR(PC pc, PetscBool cr) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCMGSetAdaptCR_C", (PC, PetscBool), (pc, cr));
  PetscFunctionReturn(0);
}

/*@
  PCMGGetAdaptCR - Get the flag to monitor coarse space quality using an auxiliary solve with compatible relaxation.

  Not collective

  Input Parameter:
. pc - the multigrid context

  Output Parameter:
.
cr - flag for compatible relaxaion 1659 1660 Level: intermediate 1661 1662 .keywords: MG, set, Galerkin 1663 .seealso: `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()` 1664 @*/ 1665 PetscErrorCode PCMGGetAdaptCR(PC pc, PetscBool *cr) { 1666 PetscFunctionBegin; 1667 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1668 PetscValidBoolPointer(cr, 2); 1669 PetscUseMethod(pc, "PCMGGetAdaptCR_C", (PC, PetscBool *), (pc, cr)); 1670 PetscFunctionReturn(0); 1671 } 1672 1673 /*@ 1674 PCMGSetNumberSmooth - Sets the number of pre and post-smoothing steps to use 1675 on all levels. Use PCMGDistinctSmoothUp() to create separate up and down smoothers if you want different numbers of 1676 pre- and post-smoothing steps. 1677 1678 Logically Collective on PC 1679 1680 Input Parameters: 1681 + mg - the multigrid context 1682 - n - the number of smoothing steps 1683 1684 Options Database Key: 1685 . -mg_levels_ksp_max_it <n> - Sets number of pre and post-smoothing steps 1686 1687 Level: advanced 1688 1689 Notes: 1690 this does not set a value on the coarsest grid, since we assume that 1691 there is no separate smooth up on the coarsest grid. 
1692 1693 .seealso: `PCMGSetDistinctSmoothUp()` 1694 @*/ 1695 PetscErrorCode PCMGSetNumberSmooth(PC pc, PetscInt n) { 1696 PC_MG *mg = (PC_MG *)pc->data; 1697 PC_MG_Levels **mglevels = mg->levels; 1698 PetscInt i, levels; 1699 1700 PetscFunctionBegin; 1701 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1702 PetscValidLogicalCollectiveInt(pc, n, 2); 1703 PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling"); 1704 levels = mglevels[0]->levels; 1705 1706 for (i = 1; i < levels; i++) { 1707 PetscCall(KSPSetTolerances(mglevels[i]->smoothu, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, n)); 1708 PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, n)); 1709 mg->default_smoothu = n; 1710 mg->default_smoothd = n; 1711 } 1712 PetscFunctionReturn(0); 1713 } 1714 1715 /*@ 1716 PCMGSetDistinctSmoothUp - sets the up (post) smoother to be a separate KSP from the down (pre) smoother on all levels 1717 and adds the suffix _up to the options name 1718 1719 Logically Collective on PC 1720 1721 Input Parameters: 1722 . pc - the preconditioner context 1723 1724 Options Database Key: 1725 . -pc_mg_distinct_smoothup <bool> - use distinct smoothing objects 1726 1727 Level: advanced 1728 1729 Notes: 1730 this does not set a value on the coarsest grid, since we assume that 1731 there is no separate smooth up on the coarsest grid. 
.seealso: `PCMGSetNumberSmooth()`
@*/
PetscErrorCode PCMGSetDistinctSmoothUp(PC pc) {
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PetscInt       i, levels;
  KSP            subksp;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
  levels = mglevels[0]->levels;

  for (i = 1; i < levels; i++) {
    const char *prefix = NULL;
    /* make sure smoother up and down are different; PCMGGetSmootherUp() creates the separate up KSP */
    PetscCall(PCMGGetSmootherUp(pc, i, &subksp));
    /* give the up smoother the down smoother's prefix plus "up_" so it is configurable separately */
    PetscCall(KSPGetOptionsPrefix(mglevels[i]->smoothd, &prefix));
    PetscCall(KSPSetOptionsPrefix(subksp, prefix));
    PetscCall(KSPAppendOptionsPrefix(subksp, "up_"));
  }
  PetscFunctionReturn(0);
}

/* No new matrices are created, and the coarse operator matrices are the references to the original ones.
   Caller receives one new reference per returned Mat and owns the returned array (PetscFree). */
PetscErrorCode PCGetInterpolations_MG(PC pc, PetscInt *num_levels, Mat *interpolations[]) {
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  Mat           *mat;
  PetscInt       l;

  PetscFunctionBegin;
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
  /* NOTE(review): nlevels slots are allocated but only nlevels-1 interpolations exist
     (level l interpolates from level l-1), so the last slot is never written — confirm
     callers only read the first nlevels-1 entries */
  PetscCall(PetscMalloc1(mg->nlevels, &mat));
  for (l = 1; l < mg->nlevels; l++) {
    mat[l - 1] = mglevels[l]->interpolate;
    PetscCall(PetscObjectReference((PetscObject)mat[l - 1]));
  }
  *num_levels     = mg->nlevels;
  *interpolations = mat;
  PetscFunctionReturn(0);
}

/* No new matrices are created, and the coarse operator matrices are the references to the original ones.
   Caller receives one new reference per returned Mat and owns the returned array (PetscFree). */
PetscErrorCode PCGetCoarseOperators_MG(PC pc, PetscInt *num_levels, Mat *coarseOperators[]) {
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PetscInt       l;
  Mat           *mat;

  PetscFunctionBegin;
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
  PetscCall(PetscMalloc1(mg->nlevels, &mat));
  /* gather the system matrix of each level's down smoother; the finest level (nlevels-1) is skipped,
     so only the first nlevels-1 slots are written */
  for (l = 0; l < mg->nlevels - 1; l++) {
    PetscCall(KSPGetOperators(mglevels[l]->smoothd, NULL, &(mat[l])));
    PetscCall(PetscObjectReference((PetscObject)mat[l]));
  }
  *num_levels      = mg->nlevels;
  *coarseOperators = mat;
  PetscFunctionReturn(0);
}

/*@C
  PCMGRegisterCoarseSpaceConstructor - Adds a method to the PCMG package for coarse space construction.

  Not collective

  Input Parameters:
+ name - name of the constructor
- function - constructor routine

  Notes:
  Calling sequence for the routine:
$ my_csp(PC pc, PetscInt l, DM dm, KSP smooth, PetscInt Nc, Mat initGuess, Mat *coarseSp)
$ pc - The PC object
$ l - The multigrid level, 0 is the coarse level
$ dm - The DM for this level
$ smooth - The level smoother
$ Nc - The size of the coarse space
$ initGuess - Basis for an initial guess for the space
$ coarseSp - A basis for the computed coarse space

  Level: advanced

.seealso: `PCMGGetCoarseSpaceConstructor()`, `PCRegister()`
@*/
PetscErrorCode PCMGRegisterCoarseSpaceConstructor(const char name[], PetscErrorCode (*function)(PC, PetscInt, DM, KSP, PetscInt, Mat, Mat *)) {
  PetscFunctionBegin;
  PetscCall(PCInitializePackage());
  PetscCall(PetscFunctionListAdd(&PCMGCoarseList, name, function));
  PetscFunctionReturn(0);
}

/*@C
  PCMGGetCoarseSpaceConstructor - Returns the given coarse space construction method.

  Not collective

  Input Parameter:
. name - name of the constructor

  Output Parameter:
.
function - constructor routine

  Notes:
  Calling sequence for the routine:
$ my_csp(PC pc, PetscInt l, DM dm, KSP smooth, PetscInt Nc, Mat initGuess, Mat *coarseSp)
$ pc - The PC object
$ l - The multigrid level, 0 is the coarse level
$ dm - The DM for this level
$ smooth - The level smoother
$ Nc - The size of the coarse space
$ initGuess - Basis for an initial guess for the space
$ coarseSp - A basis for the computed coarse space

  Level: advanced

.seealso: `PCMGRegisterCoarseSpaceConstructor()`, `PCRegister()`
@*/
PetscErrorCode PCMGGetCoarseSpaceConstructor(const char name[], PetscErrorCode (**function)(PC, PetscInt, DM, KSP, PetscInt, Mat, Mat *)) {
  PetscFunctionBegin;
  /* look up name in the registered constructor list; see PetscFunctionListFind() for not-found semantics */
  PetscCall(PetscFunctionListFind(PCMGCoarseList, name, function));
  PetscFunctionReturn(0);
}

/* ----------------------------------------------------------------------------------------*/

/*MC
   PCMG - Use multigrid preconditioning. This preconditioner requires you provide additional
          information about the coarser grid matrices and restriction/interpolation operators.

   Options Database Keys:
+  -pc_mg_levels <nlevels> - number of levels including finest
.  -pc_mg_cycle_type <v,w> - provide the cycle desired
.  -pc_mg_type <additive,multiplicative,full,kaskade> - multiplicative is the default
.  -pc_mg_log - log information about time spent on each level of the solver
.  -pc_mg_distinct_smoothup - configure up (after interpolation) and down (before restriction) smoothers separately (with different options prefixes)
.  -pc_mg_galerkin <both,pmat,mat,none> - use Galerkin process to compute coarser operators, i.e. Acoarse = R A R'
.  -pc_mg_multiplicative_cycles - number of cycles to use as the preconditioner (defaults to 1)
.  -pc_mg_dump_matlab - dumps the matrices for each level and the restriction/interpolation matrices
                        to the Socket viewer for reading from MATLAB.
-  -pc_mg_dump_binary - dumps the matrices for each level and the restriction/interpolation matrices
                        to the binary output file called binaryoutput

   Notes:
   If one uses a Krylov method such as GMRES or CG as the smoother then one must use KSPFGMRES, KSPGCR, or KSPRICHARDSON as the outer Krylov method

   When run with a single level the smoother options are used on that level NOT the coarse grid solver options

   When run with KSPRICHARDSON the convergence test changes slightly if monitor is turned on. The iteration count may change slightly. This
   is because without monitoring the residual norm is computed WITHIN each multigrid cycle on the finest level after the pre-smoothing
   (because the residual has just been computed for the multigrid algorithm and is hence available for free) while with monitoring the
   residual is computed at the end of each cycle.

   Level: intermediate

.seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCMGType`, `PCEXOTIC`, `PCGAMG`, `PCML`, `PCHYPRE`
          `PCMGSetLevels()`, `PCMGGetLevels()`, `PCMGSetType()`, `PCMGSetCycleType()`,
          `PCMGSetDistinctSmoothUp()`, `PCMGGetCoarseSolve()`, `PCMGSetResidual()`, `PCMGSetInterpolation()`,
          `PCMGSetRestriction()`, `PCMGGetSmoother()`, `PCMGGetSmootherUp()`, `PCMGGetSmootherDown()`,
          `PCMGSetCycleTypeOnLevel()`, `PCMGSetRhs()`, `PCMGSetX()`, `PCMGSetR()`
M*/

/* Creates the PC_MG context, installs the PCMG function table, and composes the _C method
   implementations that the public PCMGSet/Get wrappers dispatch to. */
PETSC_EXTERN PetscErrorCode PCCreate_MG(PC pc) {
  PC_MG *mg;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(pc, &mg));
  pc->data               = mg;
  mg->nlevels            = -1; /* levels not yet set; PCMGSetLevels() must be called before use */
  mg->am                 = PC_MG_MULTIPLICATIVE;
  mg->galerkin           = PC_MG_GALERKIN_NONE;
  mg->adaptInterpolation = PETSC_FALSE;
  mg->Nc                 = -1;
  mg->eigenvalue         = -1;

  pc->useAmat = PETSC_TRUE;

  /* install the PCMG implementations of the generic PC operations */
  pc->ops->apply          = PCApply_MG;
  pc->ops->applytranspose = PCApplyTranspose_MG;
  pc->ops->matapply       = PCMatApply_MG;
  pc->ops->setup          = PCSetUp_MG;
  pc->ops->reset          = PCReset_MG;
  pc->ops->destroy        = PCDestroy_MG;
  pc->ops->setfromoptions = PCSetFromOptions_MG;
  pc->ops->view           = PCView_MG;

  /* NOTE(review): mg->eigenvalue is set to -1 above and then passed to
     PetscObjectComposedDataRegister(); presumably it receives a composed-data id here — confirm */
  PetscCall(PetscObjectComposedDataRegister(&mg->eigenvalue));
  /* compose the _C methods so the public PCMGSet/Get wrappers can dispatch to this implementation */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", PCMGSetGalerkin_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", PCMGGetLevels_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", PCMGSetLevels_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", PCGetInterpolations_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", PCGetCoarseOperators_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", PCMGSetAdaptInterpolation_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", PCMGGetAdaptInterpolation_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", PCMGSetAdaptCR_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", PCMGGetAdaptCR_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", PCMGSetAdaptCoarseSpaceType_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", PCMGGetAdaptCoarseSpaceType_MG));
  PetscFunctionReturn(0);
}