xref: /petsc/src/ksp/pc/impls/mg/mg.c (revision bf0c7fc2d7fb4e90d42e412b25194b878e6e442d)
1 /*
2     Defines the multigrid preconditioner interface.
3 */
4 #include <petsc/private/pcmgimpl.h> /*I "petscksp.h" I*/
5 #include <petsc/private/kspimpl.h>
6 #include <petscdm.h>
7 PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC, PetscBool *);
8 
9 /*
10    Contains the list of registered coarse space construction routines
11 */
12 PetscFunctionList PCMGCoarseList = NULL;
13 
/*
  PCMGMCycle_Private - Applies one multiplicative multigrid cycle, recursively descending
  from the level referenced by *mglevelsin down to the coarsest level (level 0).

  Input Parameters:
+ pc         - the PCMG preconditioner context
. mglevelsin - pointer into the array of level structures, positioned at the current level
. transpose  - if true, apply the transpose cycle (smoothers transposed, with the roles of
               the up/down smoothers and of restriction/interpolation exchanged)
. matapp     - if true, use the multiple-right-hand-side Mat interfaces (B/X/R) instead of
               the single Vec interfaces (b/x/r)
- reason     - optional; when running under PCApplyRichardson_MG, set to the convergence
               reason if the residual test on the finest level is satisfied

  Note: on level 0 only the "down" smoother is applied; it thus acts as the coarse solver.
*/
PetscErrorCode PCMGMCycle_Private(PC pc, PC_MG_Levels **mglevelsin, PetscBool transpose, PetscBool matapp, PCRichardsonConvergedReason *reason)
{
  PC_MG        *mg = (PC_MG *)pc->data;
  PC_MG_Levels *mgc, *mglevels = *mglevelsin;
  /* level 1 always recurses exactly once into the coarse solve; otherwise V (1) or W (2) */
  PetscInt      cycles = (mglevels->level == 1) ? 1 : mglevels->cycles;

  PetscFunctionBegin;
  if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0));
  if (!transpose) {
    if (matapp) {
      PetscCall(KSPMatSolve(mglevels->smoothd, mglevels->B, mglevels->X)); /* pre-smooth */
      PetscCall(KSPCheckSolve(mglevels->smoothd, pc, NULL));
    } else {
      PetscCall(KSPSolve(mglevels->smoothd, mglevels->b, mglevels->x)); /* pre-smooth */
      PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x));
    }
  } else {
    /* the transpose cycle starts with the transpose of the post-smoother */
    PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
    PetscCall(KSPSolveTranspose(mglevels->smoothu, mglevels->b, mglevels->x)); /* transpose of post-smooth */
    PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x));
  }
  if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0));
  if (mglevels->level) { /* not the coarsest grid */
    if (mglevels->eventresidual) PetscCall(PetscLogEventBegin(mglevels->eventresidual, 0, 0, 0, 0));
    /* lazily create the residual Mat for the multiple-RHS path */
    if (matapp && !mglevels->R) PetscCall(MatDuplicate(mglevels->B, MAT_DO_NOT_COPY_VALUES, &mglevels->R));
    if (!transpose) {
      if (matapp) PetscCall((*mglevels->matresidual)(mglevels->A, mglevels->B, mglevels->X, mglevels->R));
      else PetscCall((*mglevels->residual)(mglevels->A, mglevels->b, mglevels->x, mglevels->r));
    } else {
      if (matapp) PetscCall((*mglevels->matresidualtranspose)(mglevels->A, mglevels->B, mglevels->X, mglevels->R));
      else PetscCall((*mglevels->residualtranspose)(mglevels->A, mglevels->b, mglevels->x, mglevels->r));
    }
    if (mglevels->eventresidual) PetscCall(PetscLogEventEnd(mglevels->eventresidual, 0, 0, 0, 0));

    /* if on finest level and have convergence criteria set */
    if (mglevels->level == mglevels->levels - 1 && mg->ttol && reason) {
      PetscReal rnorm;

      PetscCall(VecNorm(mglevels->r, NORM_2, &rnorm));
      if (rnorm <= mg->ttol) {
        /* ttol = max(rtol*rnorm0, abstol); distinguish which criterion triggered */
        if (rnorm < mg->abstol) {
          *reason = PCRICHARDSON_CONVERGED_ATOL;
          PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than absolute tolerance %g\n", (double)rnorm, (double)mg->abstol));
        } else {
          *reason = PCRICHARDSON_CONVERGED_RTOL;
          PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than relative tolerance times initial residual norm %g\n", (double)rnorm, (double)mg->ttol));
        }
        PetscFunctionReturn(PETSC_SUCCESS);
      }
    }

    mgc = *(mglevelsin - 1); /* next-coarser level */
    if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0));
    if (!transpose) {
      if (matapp) PetscCall(MatMatRestrict(mglevels->restrct, mglevels->R, &mgc->B));
      else PetscCall(MatRestrict(mglevels->restrct, mglevels->r, mgc->b));
    } else {
      /* transpose cycle restricts with the (transposed) interpolation operator */
      if (matapp) PetscCall(MatMatRestrict(mglevels->interpolate, mglevels->R, &mgc->B));
      else PetscCall(MatRestrict(mglevels->interpolate, mglevels->r, mgc->b));
    }
    if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0));
    /* coarse correction starts from a zero initial guess */
    if (matapp) {
      if (!mgc->X) {
        PetscCall(MatDuplicate(mgc->B, MAT_DO_NOT_COPY_VALUES, &mgc->X));
      } else {
        PetscCall(MatZeroEntries(mgc->X));
      }
    } else {
      PetscCall(VecZeroEntries(mgc->x));
    }
    while (cycles--) PetscCall(PCMGMCycle_Private(pc, mglevelsin - 1, transpose, matapp, reason));
    if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0));
    if (!transpose) {
      if (matapp) PetscCall(MatMatInterpolateAdd(mglevels->interpolate, mgc->X, mglevels->X, &mglevels->X));
      else PetscCall(MatInterpolateAdd(mglevels->interpolate, mgc->x, mglevels->x, mglevels->x));
    } else {
      /* transpose cycle interpolates with the (transposed) restriction operator */
      PetscCall(MatInterpolateAdd(mglevels->restrct, mgc->x, mglevels->x, mglevels->x));
    }
    if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0));
    if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0));
    if (!transpose) {
      if (matapp) {
        PetscCall(KSPMatSolve(mglevels->smoothu, mglevels->B, mglevels->X)); /* post smooth */
        PetscCall(KSPCheckSolve(mglevels->smoothu, pc, NULL));
      } else {
        PetscCall(KSPSolve(mglevels->smoothu, mglevels->b, mglevels->x)); /* post smooth */
        PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x));
      }
    } else {
      PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
      PetscCall(KSPSolveTranspose(mglevels->smoothd, mglevels->b, mglevels->x)); /* post smooth */
      PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x));
    }
    if (mglevels->cr) {
      /* compatible relaxation monitoring; see the CR discussion further down this file */
      Mat crA;

      PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
      /* TODO Turn on copy and turn off noisy if we have an exact solution
      PetscCall(VecCopy(mglevels->x, mglevels->crx));
      PetscCall(VecCopy(mglevels->b, mglevels->crb)); */
      PetscCall(KSPGetOperators(mglevels->cr, &crA, NULL));
      PetscCall(KSPSetNoisy_Private(crA, mglevels->crx));
      PetscCall(KSPSolve(mglevels->cr, mglevels->crb, mglevels->crx)); /* compatible relaxation */
      PetscCall(KSPCheckSolve(mglevels->cr, pc, mglevels->crx));
    }
    if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
123 
/*
  PCApplyRichardson_MG - Runs PCMG as a Richardson iteration: each iteration performs one
  (or more, per cycle type) multiplicative cycle on the original system, with a residual
  based convergence test on the finest level.

  The finest-level rhs/solution slots are aliased to the caller's b/x when the smoother's
  PC does not modify the rhs in its PreSolve; otherwise a private copy of b is used.
*/
static PetscErrorCode PCApplyRichardson_MG(PC pc, Vec b, Vec x, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason)
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PC             tpc;
  PetscBool      changeu, changed;
  PetscInt       levels = mglevels[0]->levels, i;

  PetscFunctionBegin;
  /* When the DM is supplying the matrix then it will not exist until here */
  for (i = 0; i < levels; i++) {
    if (!mglevels[i]->A) {
      PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL));
      PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A));
    }
  }

  /* query both fine-level smoothers: does either one's PC change the rhs during PreSolve? */
  PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc));
  PetscCall(PCPreSolveChangeRHS(tpc, &changed));
  PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc));
  PetscCall(PCPreSolveChangeRHS(tpc, &changeu));
  if (!changed && !changeu) {
    /* safe to alias the caller's rhs; unaliased again before returning */
    PetscCall(VecDestroy(&mglevels[levels - 1]->b));
    mglevels[levels - 1]->b = b;
  } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */
    if (!mglevels[levels - 1]->b) {
      Vec *vec;

      PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL));
      mglevels[levels - 1]->b = *vec;
      PetscCall(PetscFree(vec));
    }
    PetscCall(VecCopy(b, mglevels[levels - 1]->b));
  }
  mglevels[levels - 1]->x = x;

  mg->rtol   = rtol;
  mg->abstol = abstol;
  mg->dtol   = dtol;
  if (rtol) {
    /* compute initial residual norm for relative convergence test */
    PetscReal rnorm;

    if (zeroguess) {
      PetscCall(VecNorm(b, NORM_2, &rnorm));
    } else {
      PetscCall((*mglevels[levels - 1]->residual)(mglevels[levels - 1]->A, b, x, w));
      PetscCall(VecNorm(w, NORM_2, &rnorm));
    }
    mg->ttol = PetscMax(rtol * rnorm, abstol);
  } else if (abstol) mg->ttol = abstol;
  else mg->ttol = 0.0;

  /* since smoother is applied to full system, not just residual we need to make sure that smoothers don't
     stop prematurely due to small residual */
  for (i = 1; i < levels; i++) {
    PetscCall(KSPSetTolerances(mglevels[i]->smoothu, 0, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT));
    if (mglevels[i]->smoothu != mglevels[i]->smoothd) {
      /* For Richardson the initial guess is nonzero since it is solving in each cycle the original system not just applying as a preconditioner */
      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE));
      PetscCall(KSPSetTolerances(mglevels[i]->smoothd, 0, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT));
    }
  }

  *reason = PCRICHARDSON_NOT_SET;
  for (i = 0; i < its; i++) {
    PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, PETSC_FALSE, PETSC_FALSE, reason));
    if (*reason) break;
  }
  if (*reason == PCRICHARDSON_NOT_SET) *reason = PCRICHARDSON_CONVERGED_ITS;
  /* NOTE(review): on early convergence the break happens before i is incremented, so
     *outits reports the index of the converging iteration, not the count — confirm
     callers expect this convention */
  *outits = i;
  /* drop the alias so the caller's b is not destroyed by PCReset_MG later */
  if (!changed && !changeu) mglevels[levels - 1]->b = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}
198 
/*
  PCReset_MG - Releases all level work objects (vectors, matrices, transfer operators)
  and resets the per-level smoothers, without destroying the level structures themselves.
  The level array and the KSP objects survive so the PC can be set up again.
*/
PetscErrorCode PCReset_MG(PC pc)
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PetscInt       i, n;

  PetscFunctionBegin;
  if (mglevels) {
    n = mglevels[0]->levels;
    /* residuals/transfer operators live on levels 1..n-1; rhs/solution on levels 0..n-2
       (the finest-level b/x normally alias the caller's vectors and are handled below) */
    for (i = 0; i < n - 1; i++) {
      PetscCall(VecDestroy(&mglevels[i + 1]->r));
      PetscCall(VecDestroy(&mglevels[i]->b));
      PetscCall(VecDestroy(&mglevels[i]->x));
      PetscCall(MatDestroy(&mglevels[i + 1]->R));
      PetscCall(MatDestroy(&mglevels[i]->B));
      PetscCall(MatDestroy(&mglevels[i]->X));
      PetscCall(VecDestroy(&mglevels[i]->crx));
      PetscCall(VecDestroy(&mglevels[i]->crb));
      PetscCall(MatDestroy(&mglevels[i + 1]->restrct));
      PetscCall(MatDestroy(&mglevels[i + 1]->interpolate));
      PetscCall(MatDestroy(&mglevels[i + 1]->inject));
      PetscCall(VecDestroy(&mglevels[i + 1]->rscale));
    }
    PetscCall(VecDestroy(&mglevels[n - 1]->crx));
    PetscCall(VecDestroy(&mglevels[n - 1]->crb));
    /* this is not null only if the smoother on the finest level
       changes the rhs during PreSolve */
    PetscCall(VecDestroy(&mglevels[n - 1]->b));
    PetscCall(MatDestroy(&mglevels[n - 1]->B));

    for (i = 0; i < n; i++) {
      PetscCall(MatDestroy(&mglevels[i]->coarseSpace));
      PetscCall(MatDestroy(&mglevels[i]->A));
      /* smoothu may alias smoothd; reset smoothd separately only when distinct */
      if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPReset(mglevels[i]->smoothd));
      PetscCall(KSPReset(mglevels[i]->smoothu));
      if (mglevels[i]->cr) PetscCall(KSPReset(mglevels[i]->cr));
    }
    mg->Nc = 0;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
240 
241 /* Implementing CR
242 
243 We only want to make corrections that ``do not change'' the coarse solution. What we mean by not changing is that if I prolong my coarse solution to the fine grid and then inject that fine solution back to the coarse grid, I get the same answer. Injection is what Brannick calls R. We want the complementary projector to Inj, which we will call S, after Brannick, so that Inj S = 0. Now the orthogonal projector onto the range of Inj^T is
244 
245   Inj^T (Inj Inj^T)^{-1} Inj
246 
247 and if Inj is a VecScatter, as it is now in PETSc, we have
248 
249   Inj^T Inj
250 
251 and
252 
253   S = I - Inj^T Inj
254 
255 since
256 
257   Inj S = Inj - (Inj Inj^T) Inj = 0.
258 
259 Brannick suggests
260 
261   A \to S^T A S  \qquad\mathrm{and}\qquad M \to S^T M S
262 
263 but I do not think his :math:`S^T S = I` is correct. Our S is an orthogonal projector, so :math:`S^T S = S^2 = S`. We will use
264 
265   M^{-1} A \to S M^{-1} A S
266 
267 In fact, since it is somewhat hard in PETSc to do the symmetric application, we will just apply S on the left.
268 
269   Check: || Inj P - I ||_F < tol
270   Check: In general, Inj Inj^T = I
271 */
272 
/* Context for the compatible-relaxation (CR) PCSHELL; see the discussion above.
   Owned by the shell PC created in CreateCR_Private() and freed in CRDestroy_Private(). */
typedef struct {
  PC       mg;  /* The PCMG object */
  PetscInt l;   /* The multigrid level for this solver */
  Mat      Inj; /* The injection matrix */
  Mat      S;   /* I - Inj^T Inj */
} CRContext;
279 
/*
  CRSetup_Private - Builds the complementary projector S = I - Inj^T Inj for the CR shell PC.

  Inj is taken as the transpose of the injection operator stored in the PCMG for this level;
  S is formed implicitly as a scaled and shifted normal matrix (never assembled explicitly).
*/
static PetscErrorCode CRSetup_Private(PC pc)
{
  CRContext *ctx;
  Mat        It;

  PetscFunctionBeginUser;
  PetscCall(PCShellGetContext(pc, &ctx));
  PetscCall(PCMGGetInjection(ctx->mg, ctx->l, &It));
  PetscCheck(It, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "CR requires that injection be defined for this PCMG");
  PetscCall(MatCreateTranspose(It, &ctx->Inj));
  PetscCall(MatCreateNormal(ctx->Inj, &ctx->S)); /* S = Inj^T Inj */
  PetscCall(MatScale(ctx->S, -1.0));             /* S = -Inj^T Inj */
  PetscCall(MatShift(ctx->S, 1.0));              /* S = I - Inj^T Inj */
  PetscFunctionReturn(PETSC_SUCCESS);
}
295 
296 static PetscErrorCode CRApply_Private(PC pc, Vec x, Vec y)
297 {
298   CRContext *ctx;
299 
300   PetscFunctionBeginUser;
301   PetscCall(PCShellGetContext(pc, &ctx));
302   PetscCall(MatMult(ctx->S, x, y));
303   PetscFunctionReturn(PETSC_SUCCESS);
304 }
305 
306 static PetscErrorCode CRDestroy_Private(PC pc)
307 {
308   CRContext *ctx;
309 
310   PetscFunctionBeginUser;
311   PetscCall(PCShellGetContext(pc, &ctx));
312   PetscCall(MatDestroy(&ctx->Inj));
313   PetscCall(MatDestroy(&ctx->S));
314   PetscCall(PetscFree(ctx));
315   PetscCall(PCShellSetContext(pc, NULL));
316   PetscFunctionReturn(PETSC_SUCCESS);
317 }
318 
319 static PetscErrorCode CreateCR_Private(PC pc, PetscInt l, PC *cr)
320 {
321   CRContext *ctx;
322 
323   PetscFunctionBeginUser;
324   PetscCall(PCCreate(PetscObjectComm((PetscObject)pc), cr));
325   PetscCall(PetscObjectSetName((PetscObject)*cr, "S (complementary projector to injection)"));
326   PetscCall(PetscCalloc1(1, &ctx));
327   ctx->mg = pc;
328   ctx->l  = l;
329   PetscCall(PCSetType(*cr, PCSHELL));
330   PetscCall(PCShellSetContext(*cr, ctx));
331   PetscCall(PCShellSetApply(*cr, CRApply_Private));
332   PetscCall(PCShellSetSetUp(*cr, CRSetup_Private));
333   PetscCall(PCShellSetDestroy(*cr, CRDestroy_Private));
334   PetscFunctionReturn(PETSC_SUCCESS);
335 }
336 
337 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char[], const char[], const char *[], const char *[], PetscBool *);
338 
/*
  PCMGSetLevels_MG - Implementation of PCMGSetLevels() for PCMG: (re)allocates the array of
  level structures and creates one smoother KSP per level with default types and option
  prefixes (mg_coarse_, mg_levels_<i>_, optionally mg_fine_ on the finest level).

  If the number of levels changes, all previous level data is destroyed first. comms, when
  non-NULL, supplies one communicator per level (MPI_COMM_NULL for non-participating ranks).
*/
PetscErrorCode PCMGSetLevels_MG(PC pc, PetscInt levels, MPI_Comm *comms)
{
  PC_MG         *mg = (PC_MG *)pc->data;
  MPI_Comm       comm;
  PC_MG_Levels **mglevels = mg->levels;
  PCMGType       mgtype   = mg->am;
  PetscInt       mgctype  = (PetscInt)PC_MG_CYCLE_V;
  PetscInt       i;
  PetscMPIInt    size;
  const char    *prefix;
  PC             ipc;
  PetscInt       n;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, levels, 2);
  if (mg->nlevels == levels) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  if (mglevels) {
    mgctype = mglevels[0]->cycles; /* preserve the cycle type across the rebuild */
    /* changing the number of levels so free up the previous stuff */
    PetscCall(PCReset_MG(pc));
    n = mglevels[0]->levels;
    for (i = 0; i < n; i++) {
      if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd));
      PetscCall(KSPDestroy(&mglevels[i]->smoothu));
      PetscCall(KSPDestroy(&mglevels[i]->cr));
      PetscCall(PetscFree(mglevels[i]));
    }
    PetscCall(PetscFree(mg->levels));
  }

  mg->nlevels = levels;

  PetscCall(PetscMalloc1(levels, &mglevels));

  PetscCall(PCGetOptionsPrefix(pc, &prefix));

  mg->stageApply = 0;
  for (i = 0; i < levels; i++) {
    PetscCall(PetscNew(&mglevels[i]));

    mglevels[i]->level               = i;
    mglevels[i]->levels              = levels;
    mglevels[i]->cycles              = mgctype;
    mg->default_smoothu              = 2;
    mg->default_smoothd              = 2;
    mglevels[i]->eventsmoothsetup    = 0;
    mglevels[i]->eventsmoothsolve    = 0;
    mglevels[i]->eventresidual       = 0;
    mglevels[i]->eventinterprestrict = 0;

    if (comms) comm = comms[i];
    /* MPI_COMM_NULL means this rank does not participate on this level: no KSP is created */
    if (comm != MPI_COMM_NULL) {
      PetscCall(KSPCreate(comm, &mglevels[i]->smoothd));
      PetscCall(KSPSetNestLevel(mglevels[i]->smoothd, pc->kspnestlevel));
      PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->smoothd, pc->erroriffailure));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->smoothd, (PetscObject)pc, levels - i));
      PetscCall(KSPSetOptionsPrefix(mglevels[i]->smoothd, prefix));
      PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->smoothd, PetscMGLevelId, mglevels[i]->level));
      if (i == 0 && levels > 1) { // coarse grid
        PetscCall(KSPAppendOptionsPrefix(mglevels[0]->smoothd, "mg_coarse_"));

        /* coarse solve is (redundant) LU by default; set shifttype NONZERO to avoid annoying zero-pivot in LU preconditioner */
        PetscCall(KSPSetType(mglevels[0]->smoothd, KSPPREONLY));
        PetscCall(KSPGetPC(mglevels[0]->smoothd, &ipc));
        PetscCallMPI(MPI_Comm_size(comm, &size));
        if (size > 1) {
          PetscCall(PCSetType(ipc, PCREDUNDANT));
        } else {
          PetscCall(PCSetType(ipc, PCLU));
        }
        PetscCall(PCFactorSetShiftType(ipc, MAT_SHIFT_INBLOCKS));
      } else {
        char tprefix[128];

        /* default smoother: Chebyshev/SOR, fixed iteration count, no convergence test */
        PetscCall(KSPSetType(mglevels[i]->smoothd, KSPCHEBYSHEV));
        PetscCall(KSPSetConvergenceTest(mglevels[i]->smoothd, KSPConvergedSkip, NULL, NULL));
        PetscCall(KSPSetNormType(mglevels[i]->smoothd, KSP_NORM_NONE));
        PetscCall(KSPGetPC(mglevels[i]->smoothd, &ipc));
        PetscCall(PCSetType(ipc, PCSOR));
        PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, mg->default_smoothd));

        if (i == levels - 1 && levels > 1) { // replace 'mg_finegrid_' with 'mg_levels_X_'
          PetscBool set;

          /* only honor the mg_fine_ prefix when the user actually supplied such an option */
          PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)mglevels[i]->smoothd)->options, ((PetscObject)mglevels[i]->smoothd)->prefix, "-mg_fine_", NULL, NULL, &set));
          if (set) {
            if (prefix) PetscCall(PetscSNPrintf(tprefix, 128, "%smg_fine_", prefix));
            else PetscCall(PetscSNPrintf(tprefix, 128, "mg_fine_"));
            PetscCall(KSPSetOptionsPrefix(mglevels[i]->smoothd, tprefix));
          } else {
            PetscCall(PetscSNPrintf(tprefix, 128, "mg_levels_%" PetscInt_FMT "_", i));
            PetscCall(KSPAppendOptionsPrefix(mglevels[i]->smoothd, tprefix));
          }
        } else {
          PetscCall(PetscSNPrintf(tprefix, 128, "mg_levels_%" PetscInt_FMT "_", i));
          PetscCall(KSPAppendOptionsPrefix(mglevels[i]->smoothd, tprefix));
        }
      }
    }
    /* up-smoother aliases the down-smoother until PCMGSetDistinctSmoothUp() is used */
    mglevels[i]->smoothu = mglevels[i]->smoothd;
    mg->rtol             = 0.0;
    mg->abstol           = 0.0;
    mg->dtol             = 0.0;
    mg->ttol             = 0.0;
    mg->cyclesperpcapply = 1;
  }
  mg->levels = mglevels;
  PetscCall(PCMGSetType(pc, mgtype));
  PetscFunctionReturn(PETSC_SUCCESS);
}
451 
452 /*@C
453   PCMGSetLevels - Sets the number of levels to use with `PCMG`.
454   Must be called before any other `PCMG` routine.
455 
456   Logically Collective
457 
458   Input Parameters:
459 + pc     - the preconditioner context
460 . levels - the number of levels
461 - comms  - optional communicators for each level; this is to allow solving the coarser problems
462            on smaller sets of processes. For processes that are not included in the computation
463            you must pass `MPI_COMM_NULL`. Use comms = `NULL` to specify that all processes
464            should participate in each level of problem.
465 
466   Options Database Key:
467 . -pc_mg_levels <levels> - set the number of levels to use
468 
469   Level: intermediate
470 
471   Notes:
472   If the number of levels is one then the multigrid uses the `-mg_levels` prefix
473   for setting the level options rather than the `-mg_coarse` or `-mg_fine` prefix.
474 
475   You can free the information in comms after this routine is called.
476 
477   The array of MPI communicators must contain `MPI_COMM_NULL` for those ranks that at each level
478   are not participating in the coarser solve. For example, with 2 levels and 1 and 2 ranks on
479   the two levels, rank 0 in the original communicator will pass in an array of 2 communicators
480   of size 2 and 1, while rank 1 in the original communicator will pass in an array of 2 communicators
481   the first of size 2 and the second of value `MPI_COMM_NULL` since rank 1 does not participate
482   in the coarse grid solve.
483 
484   Since each coarser level may have a new `MPI_Comm` with fewer ranks than the previous, one
485   must take special care in providing the restriction and interpolation operation. We recommend
486   providing these as two step operations; first perform a standard restriction or interpolation on
487   the full number of ranks for that level and then use an MPI call to copy the resulting vector
488   array entries (after calls to VecGetArray()) to the smaller or larger number of ranks, note in both
489   cases the MPI calls must be made on the larger of the two communicators. Traditional MPI send and
490   receives or `MPI_AlltoAllv()` could be used to do the reshuffling of the vector entries.
491 
492   Fortran Notes:
493   Use comms = `PETSC_NULL_MPI_COMM` as the equivalent of `NULL` in the C interface. Note `PETSC_NULL_MPI_COMM`
494   is not `MPI_COMM_NULL`. It is more like `PETSC_NULL_INTEGER`, `PETSC_NULL_REAL` etc.
495 
496 .seealso: [](ch_ksp), `PCMGSetType()`, `PCMGGetLevels()`
497 @*/
PetscErrorCode PCMGSetLevels(PC pc, PetscInt levels, MPI_Comm *comms)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (comms) PetscAssertPointer(comms, 3);
  /* dispatches to the implementation composed as "PCMGSetLevels_C" (PCMGSetLevels_MG for
     PCMG); a no-op for PC types that do not compose it */
  PetscTryMethod(pc, "PCMGSetLevels_C", (PC, PetscInt, MPI_Comm *), (pc, levels, comms));
  PetscFunctionReturn(PETSC_SUCCESS);
}
506 
507 PetscErrorCode PCDestroy_MG(PC pc)
508 {
509   PC_MG         *mg       = (PC_MG *)pc->data;
510   PC_MG_Levels **mglevels = mg->levels;
511   PetscInt       i, n;
512 
513   PetscFunctionBegin;
514   PetscCall(PCReset_MG(pc));
515   if (mglevels) {
516     n = mglevels[0]->levels;
517     for (i = 0; i < n; i++) {
518       if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd));
519       PetscCall(KSPDestroy(&mglevels[i]->smoothu));
520       PetscCall(KSPDestroy(&mglevels[i]->cr));
521       PetscCall(PetscFree(mglevels[i]));
522     }
523     PetscCall(PetscFree(mg->levels));
524   }
525   PetscCall(PetscFree(pc->data));
526   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL));
527   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL));
528   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", NULL));
529   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetReusePreconditioner_C", NULL));
530   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", NULL));
531   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", NULL));
532   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL));
533   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL));
534   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", NULL));
535   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", NULL));
536   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", NULL));
537   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", NULL));
538   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", NULL));
539   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", NULL));
540   PetscFunctionReturn(PETSC_SUCCESS);
541 }
542 
543 /*
544    PCApply_MG - Runs either an additive, multiplicative, Kaskadic
545              or full cycle of multigrid.
546 
547   Note:
548   A simple wrapper which calls PCMGMCycle(),PCMGACycle(), or PCMGFCycle().
549 */
/*
  PCApply_MG_Internal - Common driver for all PCMG apply variants: selects the additive,
  multiplicative, Kaskade or full cycle, for either a single Vec rhs (b/x) or a multiple
  right-hand-side Mat (B/X), optionally transposed.

  The finest-level rhs slot aliases the caller's b (or B) when the fine smoother's PC does
  not modify the rhs in its PreSolve; the alias is cleared again before returning so that
  PCReset_MG() never destroys caller-owned objects.
*/
static PetscErrorCode PCApply_MG_Internal(PC pc, Vec b, Vec x, Mat B, Mat X, PetscBool transpose)
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PC             tpc;
  PetscInt       levels = mglevels[0]->levels, i;
  PetscBool      changeu, changed, matapp;

  PetscFunctionBegin;
  matapp = (PetscBool)(B && X); /* Mat (multiple-RHS) path iff both B and X given */
  if (mg->stageApply) PetscCall(PetscLogStagePush(mg->stageApply));
  /* When the DM is supplying the matrix then it will not exist until here */
  for (i = 0; i < levels; i++) {
    if (!mglevels[i]->A) {
      PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL));
      PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A));
    }
  }

  /* does either fine-level smoother's PC change the rhs during PreSolve? */
  PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc));
  PetscCall(PCPreSolveChangeRHS(tpc, &changed));
  PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc));
  PetscCall(PCPreSolveChangeRHS(tpc, &changeu));
  if (!changeu && !changed) {
    /* safe to alias the caller's rhs; unaliased again at the end of this function */
    if (matapp) {
      PetscCall(MatDestroy(&mglevels[levels - 1]->B));
      mglevels[levels - 1]->B = B;
    } else {
      PetscCall(VecDestroy(&mglevels[levels - 1]->b));
      mglevels[levels - 1]->b = b;
    }
  } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */
    if (matapp) {
      /* discard any cached copy whose size or type no longer matches the incoming B */
      if (mglevels[levels - 1]->B) {
        PetscInt  N1, N2;
        PetscBool flg;

        PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &N1));
        PetscCall(MatGetSize(B, NULL, &N2));
        PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 1]->B, ((PetscObject)B)->type_name, &flg));
        if (N1 != N2 || !flg) PetscCall(MatDestroy(&mglevels[levels - 1]->B));
      }
      if (!mglevels[levels - 1]->B) {
        PetscCall(MatDuplicate(B, MAT_COPY_VALUES, &mglevels[levels - 1]->B));
      } else {
        PetscCall(MatCopy(B, mglevels[levels - 1]->B, SAME_NONZERO_PATTERN));
      }
    } else {
      if (!mglevels[levels - 1]->b) {
        Vec *vec;

        PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL));
        mglevels[levels - 1]->b = *vec;
        PetscCall(PetscFree(vec));
      }
      PetscCall(VecCopy(b, mglevels[levels - 1]->b));
    }
  }
  if (matapp) {
    mglevels[levels - 1]->X = X;
  } else {
    mglevels[levels - 1]->x = x;
  }

  /* If coarser Xs are present, it means we have already block applied the PC at least once
     Reset operators if sizes/type do no match */
  if (matapp && levels > 1 && mglevels[levels - 2]->X) {
    PetscInt  Xc, Bc;
    PetscBool flg;

    PetscCall(MatGetSize(mglevels[levels - 2]->X, NULL, &Xc));
    PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &Bc));
    PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 2]->X, ((PetscObject)mglevels[levels - 1]->X)->type_name, &flg));
    if (Xc != Bc || !flg) {
      PetscCall(MatDestroy(&mglevels[levels - 1]->R));
      for (i = 0; i < levels - 1; i++) {
        PetscCall(MatDestroy(&mglevels[i]->R));
        PetscCall(MatDestroy(&mglevels[i]->B));
        PetscCall(MatDestroy(&mglevels[i]->X));
      }
    }
  }

  /* dispatch on the multigrid type set by PCMGSetType() */
  if (mg->am == PC_MG_MULTIPLICATIVE) {
    if (matapp) PetscCall(MatZeroEntries(X));
    else PetscCall(VecZeroEntries(x));
    for (i = 0; i < mg->cyclesperpcapply; i++) PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, transpose, matapp, NULL));
  } else if (mg->am == PC_MG_ADDITIVE) {
    PetscCall(PCMGACycle_Private(pc, mglevels, transpose, matapp));
  } else if (mg->am == PC_MG_KASKADE) {
    PetscCall(PCMGKCycle_Private(pc, mglevels, transpose, matapp));
  } else {
    PetscCall(PCMGFCycle_Private(pc, mglevels, transpose, matapp));
  }
  if (mg->stageApply) PetscCall(PetscLogStagePop());
  /* drop the rhs alias so caller-owned objects are never destroyed by PCReset_MG */
  if (!changeu && !changed) {
    if (matapp) {
      mglevels[levels - 1]->B = NULL;
    } else {
      mglevels[levels - 1]->b = NULL;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
654 
/* PCApply_MG - PC apply callback for PCMG: single-rhs, non-transposed cycle. */
static PetscErrorCode PCApply_MG(PC pc, Vec b, Vec x)
{
  PetscFunctionBegin;
  PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
661 
/* PCApplyTranspose_MG - PC apply-transpose callback for PCMG: single-rhs, transposed cycle. */
static PetscErrorCode PCApplyTranspose_MG(PC pc, Vec b, Vec x)
{
  PetscFunctionBegin;
  PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
668 
/* PCMatApply_MG - PC mat-apply callback for PCMG: multiple-rhs (Mat), non-transposed cycle. */
static PetscErrorCode PCMatApply_MG(PC pc, Mat b, Mat x)
{
  PetscFunctionBegin;
  PetscCall(PCApply_MG_Internal(pc, NULL, NULL, b, x, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
675 
/* PCMatApplyTranspose_MG - PC mat-apply-transpose callback for PCMG: multiple-rhs (Mat), transposed cycle. */
static PetscErrorCode PCMatApplyTranspose_MG(PC pc, Mat b, Mat x)
{
  PetscFunctionBegin;
  PetscCall(PCApply_MG_Internal(pc, NULL, NULL, b, x, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
682 
/*
   PCSetFromOptions_MG - Processes the -pc_mg_* options database entries.

   Note: PCMGSetLevels() is always called here (even without -pc_mg_levels), so the
   level hierarchy exists before any per-level option is applied. When no level
   count is given and a DM is attached, the DM's refinement level determines it.
*/
PetscErrorCode PCSetFromOptions_MG(PC pc, PetscOptionItems PetscOptionsObject)
{
  PetscInt            levels, cycles;
  PetscBool           flg, flg2;
  PC_MG              *mg = (PC_MG *)pc->data;
  PC_MG_Levels      **mglevels;
  PCMGType            mgtype;
  PCMGCycleType       mgctype;
  PCMGGalerkinType    gtype;
  PCMGCoarseSpaceType coarseSpaceType;

  PetscFunctionBegin;
  levels = PetscMax(mg->nlevels, 1);
  PetscOptionsHeadBegin(PetscOptionsObject, "Multigrid options");
  PetscCall(PetscOptionsInt("-pc_mg_levels", "Number of Levels", "PCMGSetLevels", levels, &levels, &flg));
  /* no explicit level count and no hierarchy yet: derive the count from the DM refinement level */
  if (!flg && !mg->levels && pc->dm) {
    PetscCall(DMGetRefineLevel(pc->dm, &levels));
    levels++;
    mg->usedmfornumberoflevels = PETSC_TRUE;
  }
  PetscCall(PCMGSetLevels(pc, levels, NULL));
  mglevels = mg->levels;

  mgctype = (PCMGCycleType)mglevels[0]->cycles;
  PetscCall(PetscOptionsEnum("-pc_mg_cycle_type", "V cycle or for W-cycle", "PCMGSetCycleType", PCMGCycleTypes, (PetscEnum)mgctype, (PetscEnum *)&mgctype, &flg));
  if (flg) PetscCall(PCMGSetCycleType(pc, mgctype));
  /* adaptive-interpolation (coarse space) options */
  coarseSpaceType = mg->coarseSpaceType;
  PetscCall(PetscOptionsEnum("-pc_mg_adapt_interp_coarse_space", "Type of adaptive coarse space: none, polynomial, harmonic, eigenvector, generalized_eigenvector, gdsw", "PCMGSetAdaptCoarseSpaceType", PCMGCoarseSpaceTypes, (PetscEnum)coarseSpaceType, (PetscEnum *)&coarseSpaceType, &flg));
  if (flg) PetscCall(PCMGSetAdaptCoarseSpaceType(pc, coarseSpaceType));
  PetscCall(PetscOptionsInt("-pc_mg_adapt_interp_n", "Size of the coarse space for adaptive interpolation", "PCMGSetCoarseSpace", mg->Nc, &mg->Nc, &flg));
  PetscCall(PetscOptionsBool("-pc_mg_mesp_monitor", "Monitor the multilevel eigensolver", "PCMGSetAdaptInterpolation", PETSC_FALSE, &mg->mespMonitor, &flg));
  flg2 = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-pc_mg_adapt_cr", "Monitor coarse space quality using Compatible Relaxation (CR)", "PCMGSetAdaptCR", PETSC_FALSE, &flg2, &flg));
  if (flg) PetscCall(PCMGSetAdaptCR(pc, flg2));
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-pc_mg_distinct_smoothup", "Create separate smoothup KSP and append the prefix _up", "PCMGSetDistinctSmoothUp", PETSC_FALSE, &flg, NULL));
  if (flg) PetscCall(PCMGSetDistinctSmoothUp(pc));
  PetscCall(PetscOptionsEnum("-pc_mg_galerkin", "Use Galerkin process to compute coarser operators", "PCMGSetGalerkin", PCMGGalerkinTypes, (PetscEnum)mg->galerkin, (PetscEnum *)&gtype, &flg));
  if (flg) PetscCall(PCMGSetGalerkin(pc, gtype));
  mgtype = mg->am;
  PetscCall(PetscOptionsEnum("-pc_mg_type", "Multigrid type", "PCMGSetType", PCMGTypes, (PetscEnum)mgtype, (PetscEnum *)&mgtype, &flg));
  if (flg) PetscCall(PCMGSetType(pc, mgtype));
  if (mg->am == PC_MG_MULTIPLICATIVE) {
    PetscCall(PetscOptionsInt("-pc_mg_multiplicative_cycles", "Number of cycles for each preconditioner step", "PCMGMultiplicativeSetCycles", mg->cyclesperpcapply, &cycles, &flg));
    if (flg) PetscCall(PCMGMultiplicativeSetCycles(pc, cycles));
  }
  /* -pc_mg_log: register per-level setup/smooth/residual/interp events plus an "MG Apply" stage */
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-pc_mg_log", "Log times for each multigrid level", "None", flg, &flg, NULL));
  if (flg) {
    PetscInt i;
    char     eventname[128];

    levels = mglevels[0]->levels;
    for (i = 0; i < levels; i++) {
      PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGSetup Level %" PetscInt_FMT, i));
      PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsetup));
      PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGSmooth Level %" PetscInt_FMT, i));
      PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsolve));
      /* residual and interpolation/restriction events only exist above the coarsest level */
      if (i) {
        PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGResid Level %" PetscInt_FMT, i));
        PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventresidual));
        PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGInterp Level %" PetscInt_FMT, i));
        PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventinterprestrict));
      }
    }

    if (PetscDefined(USE_LOG)) {
      const char sname[] = "MG Apply";

      /* reuse the stage if a previous PCMG instance already registered it */
      PetscCall(PetscLogStageGetId(sname, &mg->stageApply));
      if (mg->stageApply < 0) PetscCall(PetscLogStageRegister(sname, &mg->stageApply));
    }
  }
  PetscOptionsHeadEnd();
  /* Check option consistency: adapting the interpolator requires Galerkin coarse operators
     (gtype < PC_MG_GALERKIN_NONE, i.e. both/pmat/mat in the enum ordering) */
  PetscCall(PCMGGetGalerkin(pc, &gtype));
  PetscCall(PCMGGetAdaptInterpolation(pc, &flg));
  PetscCheck(!flg || !(gtype >= PC_MG_GALERKIN_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use Galerkin coarse operators when adapting the interpolator");
  PetscFunctionReturn(PETSC_SUCCESS);
}
763 
/* String tables for the PCMG enums, in the standard PetscEnum layout expected by
   PetscOptionsEnum()/PetscEnumFind(): the value names, then the enum type name,
   then the common value prefix, then a NULL sentinel. The entries must stay in
   the same order as the corresponding enum definitions. */
const char *const PCMGTypes[]            = {"MULTIPLICATIVE", "ADDITIVE", "FULL", "KASKADE", "PCMGType", "PC_MG", NULL};
const char *const PCMGCycleTypes[]       = {"invalid", "v", "w", "PCMGCycleType", "PC_MG_CYCLE", NULL};
const char *const PCMGGalerkinTypes[]    = {"both", "pmat", "mat", "none", "external", "PCMGGalerkinType", "PC_MG_GALERKIN", NULL};
const char *const PCMGCoarseSpaceTypes[] = {"none", "polynomial", "harmonic", "eigenvector", "generalized_eigenvector", "gdsw", "PCMGCoarseSpaceType", "PCMG_ADAPT_NONE", NULL};
768 
769 #include <petscdraw.h>
770 PetscErrorCode PCView_MG(PC pc, PetscViewer viewer)
771 {
772   PC_MG         *mg       = (PC_MG *)pc->data;
773   PC_MG_Levels **mglevels = mg->levels;
774   PetscInt       levels   = mglevels ? mglevels[0]->levels : 0, i;
775   PetscBool      isascii, isbinary, isdraw;
776 
777   PetscFunctionBegin;
778   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
779   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
780   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
781   if (isascii) {
782     const char *cyclename = levels ? (mglevels[0]->cycles == PC_MG_CYCLE_V ? "v" : "w") : "unknown";
783 
784     if (levels == 1) PetscCall(PetscViewerASCIIPrintf(viewer, "  WARNING: Multigrid is being run with only a single level!\n"));
785     PetscCall(PetscViewerASCIIPrintf(viewer, "  type is %s, levels=%" PetscInt_FMT " cycles=%s\n", PCMGTypes[mg->am], levels, cyclename));
786     if (mg->am == PC_MG_MULTIPLICATIVE) PetscCall(PetscViewerASCIIPrintf(viewer, "    Cycles per PCApply=%" PetscInt_FMT "\n", mg->cyclesperpcapply));
787     if (mg->galerkin == PC_MG_GALERKIN_BOTH) {
788       PetscCall(PetscViewerASCIIPrintf(viewer, "    Using Galerkin computed coarse grid matrices\n"));
789     } else if (mg->galerkin == PC_MG_GALERKIN_PMAT) {
790       PetscCall(PetscViewerASCIIPrintf(viewer, "    Using Galerkin computed coarse grid matrices for pmat\n"));
791     } else if (mg->galerkin == PC_MG_GALERKIN_MAT) {
792       PetscCall(PetscViewerASCIIPrintf(viewer, "    Using Galerkin computed coarse grid matrices for mat\n"));
793     } else if (mg->galerkin == PC_MG_GALERKIN_EXTERNAL) {
794       PetscCall(PetscViewerASCIIPrintf(viewer, "    Using externally compute Galerkin coarse grid matrices\n"));
795     } else {
796       PetscCall(PetscViewerASCIIPrintf(viewer, "    Not using Galerkin computed coarse grid matrices\n"));
797     }
798     if (mg->view) PetscCall((*mg->view)(pc, viewer));
799     for (i = 0; i < levels; i++) {
800       if (i) {
801         PetscCall(PetscViewerASCIIPrintf(viewer, "Down solver (pre-smoother) on level %" PetscInt_FMT " -------------------------------\n", i));
802       } else {
803         PetscCall(PetscViewerASCIIPrintf(viewer, "Coarse grid solver -- level %" PetscInt_FMT " -------------------------------\n", i));
804       }
805       PetscCall(PetscViewerASCIIPushTab(viewer));
806       PetscCall(KSPView(mglevels[i]->smoothd, viewer));
807       PetscCall(PetscViewerASCIIPopTab(viewer));
808       if (i && mglevels[i]->smoothd == mglevels[i]->smoothu) {
809         PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) same as down solver (pre-smoother)\n"));
810       } else if (i) {
811         PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) on level %" PetscInt_FMT " -------------------------------\n", i));
812         PetscCall(PetscViewerASCIIPushTab(viewer));
813         PetscCall(KSPView(mglevels[i]->smoothu, viewer));
814         PetscCall(PetscViewerASCIIPopTab(viewer));
815       }
816       if (i && mglevels[i]->cr) {
817         PetscCall(PetscViewerASCIIPrintf(viewer, "CR solver on level %" PetscInt_FMT " -------------------------------\n", i));
818         PetscCall(PetscViewerASCIIPushTab(viewer));
819         PetscCall(KSPView(mglevels[i]->cr, viewer));
820         PetscCall(PetscViewerASCIIPopTab(viewer));
821       }
822     }
823   } else if (isbinary) {
824     for (i = levels - 1; i >= 0; i--) {
825       PetscCall(KSPView(mglevels[i]->smoothd, viewer));
826       if (i && mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPView(mglevels[i]->smoothu, viewer));
827     }
828   } else if (isdraw) {
829     PetscDraw draw;
830     PetscReal x, w, y, bottom, th;
831     PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
832     PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
833     PetscCall(PetscDrawStringGetSize(draw, NULL, &th));
834     bottom = y - th;
835     for (i = levels - 1; i >= 0; i--) {
836       if (!mglevels[i]->smoothu || (mglevels[i]->smoothu == mglevels[i]->smoothd)) {
837         PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom));
838         PetscCall(KSPView(mglevels[i]->smoothd, viewer));
839         PetscCall(PetscDrawPopCurrentPoint(draw));
840       } else {
841         w = 0.5 * PetscMin(1.0 - x, x);
842         PetscCall(PetscDrawPushCurrentPoint(draw, x + w, bottom));
843         PetscCall(KSPView(mglevels[i]->smoothd, viewer));
844         PetscCall(PetscDrawPopCurrentPoint(draw));
845         PetscCall(PetscDrawPushCurrentPoint(draw, x - w, bottom));
846         PetscCall(KSPView(mglevels[i]->smoothu, viewer));
847         PetscCall(PetscDrawPopCurrentPoint(draw));
848       }
849       PetscCall(PetscDrawGetBoundingBox(draw, NULL, &bottom, NULL, NULL));
850       bottom -= th;
851     }
852   }
853   PetscFunctionReturn(PETSC_SUCCESS);
854 }
855 
856 #include <petsc/private/kspimpl.h>
857 
858 /*
859     Calls setup for the KSP on each level
860 */
PetscErrorCode PCSetUp_MG(PC pc)
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PetscInt       i, n;
  PC             cpc;
  PetscBool      dump = PETSC_FALSE, opsset, use_amat, missinginterpolate = PETSC_FALSE;
  Mat            dA, dB;
  Vec            tvec;
  DM            *dms;
  PetscViewer    viewer = NULL;
  PetscBool      dAeqdB = PETSC_FALSE, needRestricts = PETSC_FALSE, doCR = PETSC_FALSE;
  PetscBool      adaptInterpolation = mg->adaptInterpolation;

  PetscFunctionBegin;
  PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels with PCMGSetLevels() before setting up");
  n = mglevels[0]->levels;
  /* Phase 1: possibly grow the hierarchy when the DM has been refined since the last setup. */
  /* FIX: Move this to PCSetFromOptions_MG? */
  if (mg->usedmfornumberoflevels) {
    PetscInt levels;

    PetscCall(DMGetRefineLevel(pc->dm, &levels));
    levels++;
    if (levels > n) { /* the problem is now being solved on a finer grid */
      PetscCall(PCMGSetLevels(pc, levels, NULL));
      n = levels;
      PetscCall(PCSetFromOptions(pc)); /* it is bad to call this here, but otherwise will never be called for the new hierarchy */
      mglevels = mg->levels;
    }
  }

  /* If user did not provide fine grid operators OR operator was not updated since last global KSPSetOperators() */
  /* so use those from global PC */
  /* Is this what we always want? What if user wants to keep old one? */
  PetscCall(KSPGetOperatorsSet(mglevels[n - 1]->smoothd, NULL, &opsset));
  if (opsset) {
    Mat mmat;
    PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, NULL, &mmat));
    if (mmat == pc->pmat) opsset = PETSC_FALSE;
  }
  /* fine grid smoother inherits the reuse-pc flag */
  PetscCall(KSPGetPC(mglevels[n - 1]->smoothd, &cpc));
  cpc->reusepreconditioner = pc->reusepreconditioner;
  PetscCall(KSPGetPC(mglevels[n - 1]->smoothu, &cpc));
  cpc->reusepreconditioner = pc->reusepreconditioner;

  /* Create CR solvers: Chebyshev KSP with a multiplicative SOR+CR composite PC on each
     non-coarsest level, prefixed mg_levels_<i>_cr_ */
  PetscCall(PCMGGetAdaptCR(pc, &doCR));
  if (doCR) {
    const char *prefix;

    PetscCall(PCGetOptionsPrefix(pc, &prefix));
    for (i = 1; i < n; ++i) {
      PC   ipc, cr;
      char crprefix[128];

      PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &mglevels[i]->cr));
      PetscCall(KSPSetNestLevel(mglevels[i]->cr, pc->kspnestlevel));
      PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->cr, PETSC_FALSE));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->cr, (PetscObject)pc, n - i));
      PetscCall(KSPSetOptionsPrefix(mglevels[i]->cr, prefix));
      PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->cr, PetscMGLevelId, mglevels[i]->level));
      PetscCall(KSPSetType(mglevels[i]->cr, KSPCHEBYSHEV));
      PetscCall(KSPSetConvergenceTest(mglevels[i]->cr, KSPConvergedSkip, NULL, NULL));
      PetscCall(KSPSetNormType(mglevels[i]->cr, KSP_NORM_PRECONDITIONED));
      PetscCall(KSPGetPC(mglevels[i]->cr, &ipc));

      PetscCall(PCSetType(ipc, PCCOMPOSITE));
      PetscCall(PCCompositeSetType(ipc, PC_COMPOSITE_MULTIPLICATIVE));
      PetscCall(PCCompositeAddPCType(ipc, PCSOR));
      PetscCall(CreateCR_Private(pc, i, &cr));
      PetscCall(PCCompositeAddPC(ipc, cr));
      PetscCall(PCDestroy(&cr));

      /* mg->default_smoothd supplies the CR iteration count (max_it) */
      PetscCall(KSPSetTolerances(mglevels[i]->cr, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, mg->default_smoothd));
      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
      PetscCall(PetscSNPrintf(crprefix, 128, "mg_levels_%" PetscInt_FMT "_cr_", i));
      PetscCall(KSPAppendOptionsPrefix(mglevels[i]->cr, crprefix));
    }
  }

  if (!opsset) {
    PetscCall(PCGetUseAmat(pc, &use_amat));
    if (use_amat) {
      PetscCall(PetscInfo(pc, "Using outer operators to define finest grid operator \n  because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n"));
      PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->mat, pc->pmat));
    } else {
      PetscCall(PetscInfo(pc, "Using matrix (pmat) operators to define finest grid operator \n  because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n"));
      PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->pmat, pc->pmat));
    }
  }

  /* Detect whether some level is missing BOTH interpolation and restriction; if so they
     must be built below (from a coarse space or from the DMs). */
  for (i = n - 1; i > 0; i--) {
    if (!(mglevels[i]->interpolate || mglevels[i]->restrct)) {
      missinginterpolate = PETSC_TRUE;
      break;
    }
  }

  PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, &dA, &dB));
  if (dA == dB) dAeqdB = PETSC_TRUE;
  if (mg->galerkin == PC_MG_GALERKIN_NONE || ((mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_MAT) && !dAeqdB)) needRestricts = PETSC_TRUE; /* user must compute either mat, pmat, or both so must restrict x to coarser levels */

  if (pc->dm && !pc->setupcalled) {
    /* finest smoother also gets DM but it is not active, independent of whether galerkin==PC_MG_GALERKIN_EXTERNAL */
    PetscCall(KSPSetDM(mglevels[n - 1]->smoothd, pc->dm));
    PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothd, KSP_DMACTIVE_ALL, PETSC_FALSE));
    if (mglevels[n - 1]->smoothd != mglevels[n - 1]->smoothu) {
      PetscCall(KSPSetDM(mglevels[n - 1]->smoothu, pc->dm));
      PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothu, KSP_DMACTIVE_ALL, PETSC_FALSE));
    }
    if (mglevels[n - 1]->cr) {
      PetscCall(KSPSetDM(mglevels[n - 1]->cr, pc->dm));
      PetscCall(KSPSetDMActive(mglevels[n - 1]->cr, KSP_DMACTIVE_ALL, PETSC_FALSE));
    }
  }

  /*
   Skipping if user has provided all interpolation/restriction needed (since DM might not be able to produce them (when coming from SNES/TS)).
   Skipping for externally managed hierarchy (such as ML and GAMG). Cleaner logic here would be great. Wrap ML/GAMG as DMs?
  */
  if (missinginterpolate && mg->galerkin != PC_MG_GALERKIN_EXTERNAL && !pc->setupcalled) {
    /* first see if we can compute a coarse space */
    if (mg->coarseSpaceType == PCMG_ADAPT_GDSW) {
      for (i = n - 2; i > -1; i--) {
        if (!mglevels[i + 1]->restrct && !mglevels[i + 1]->interpolate) {
          PetscCall(PCMGComputeCoarseSpace_Internal(pc, i + 1, mg->coarseSpaceType, mg->Nc, NULL, &mglevels[i + 1]->coarseSpace));
          PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->coarseSpace));
        }
      }
    } else { /* construct the interpolation from the DMs */
      Mat p;
      Vec rscale;

      PetscCheck(n == 1 || pc->dm, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PC lacks a DM so cannot automatically construct a multigrid hierarchy. Number of levels requested %" PetscInt_FMT, n);
      PetscCall(PetscMalloc1(n, &dms));
      dms[n - 1] = pc->dm;
      /* Separately create them so we do not get DMKSP interference between levels */
      for (i = n - 2; i > -1; i--) PetscCall(DMCoarsen(dms[i + 1], MPI_COMM_NULL, &dms[i]));
      for (i = n - 2; i > -1; i--) {
        PetscBool dmhasrestrict, dmhasinject;

        PetscCall(KSPSetDM(mglevels[i]->smoothd, dms[i]));
        if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothd, KSP_DMACTIVE_ALL, PETSC_FALSE));
        PetscCall(KSPSetDMActive(mglevels[i]->smoothd, KSP_DMACTIVE_RHS, PETSC_FALSE));
        if (mglevels[i]->smoothd != mglevels[i]->smoothu) {
          PetscCall(KSPSetDM(mglevels[i]->smoothu, dms[i]));
          if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothu, KSP_DMACTIVE_ALL, PETSC_FALSE));
          PetscCall(KSPSetDMActive(mglevels[i]->smoothu, KSP_DMACTIVE_RHS, PETSC_FALSE));
        }
        if (mglevels[i]->cr) {
          PetscCall(KSPSetDM(mglevels[i]->cr, dms[i]));
          if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->cr, KSP_DMACTIVE_ALL, PETSC_FALSE));
          PetscCall(KSPSetDMActive(mglevels[i]->cr, KSP_DMACTIVE_RHS, PETSC_FALSE));
        }
        if (!mglevels[i + 1]->interpolate) {
          PetscCall(DMCreateInterpolation(dms[i], dms[i + 1], &p, &rscale));
          PetscCall(PCMGSetInterpolation(pc, i + 1, p));
          if (rscale) PetscCall(PCMGSetRScale(pc, i + 1, rscale));
          PetscCall(VecDestroy(&rscale));
          PetscCall(MatDestroy(&p));
        }
        PetscCall(DMHasCreateRestriction(dms[i], &dmhasrestrict));
        if (dmhasrestrict && !mglevels[i + 1]->restrct) {
          PetscCall(DMCreateRestriction(dms[i], dms[i + 1], &p));
          PetscCall(PCMGSetRestriction(pc, i + 1, p));
          PetscCall(MatDestroy(&p));
        }
        PetscCall(DMHasCreateInjection(dms[i], &dmhasinject));
        if (dmhasinject && !mglevels[i + 1]->inject) {
          PetscCall(DMCreateInjection(dms[i], dms[i + 1], &p));
          PetscCall(PCMGSetInjection(pc, i + 1, p));
          PetscCall(MatDestroy(&p));
        }
      }

      for (i = n - 2; i > -1; i--) PetscCall(DMDestroy(&dms[i]));
      PetscCall(PetscFree(dms));
    }
  }

  /* Phase 2: build the coarse-level operators with the Galerkin triple product R A P
     (enum values below PC_MG_GALERKIN_NONE are both/pmat/mat). */
  if (mg->galerkin < PC_MG_GALERKIN_NONE) {
    Mat       A, B;
    PetscBool doA = PETSC_FALSE, doB = PETSC_FALSE;
    MatReuse  reuse = MAT_INITIAL_MATRIX;

    if (mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_BOTH) doB = PETSC_TRUE;
    if (mg->galerkin == PC_MG_GALERKIN_MAT || (mg->galerkin == PC_MG_GALERKIN_BOTH && dA != dB)) doA = PETSC_TRUE;
    if (pc->setupcalled) reuse = MAT_REUSE_MATRIX;
    for (i = n - 2; i > -1; i--) {
      PetscCheck(mglevels[i + 1]->restrct || mglevels[i + 1]->interpolate, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must provide interpolation or restriction for each MG level except level 0");
      if (!mglevels[i + 1]->interpolate) PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->restrct));
      if (!mglevels[i + 1]->restrct) PetscCall(PCMGSetRestriction(pc, i + 1, mglevels[i + 1]->interpolate));
      if (reuse == MAT_REUSE_MATRIX) PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, &B));
      if (doA) PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dA, mglevels[i + 1]->interpolate, reuse, 1.0, &A));
      if (doB) PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dB, mglevels[i + 1]->interpolate, reuse, 1.0, &B));
      /* the management of the PetscObjectReference() and PetscObjectDereference() below is rather delicate */
      if (!doA && dAeqdB) {
        if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)B));
        A = B;
      } else if (!doA && reuse == MAT_INITIAL_MATRIX) {
        PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, NULL));
        PetscCall(PetscObjectReference((PetscObject)A));
      }
      if (!doB && dAeqdB) {
        if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)A));
        B = A;
      } else if (!doB && reuse == MAT_INITIAL_MATRIX) {
        PetscCall(KSPGetOperators(mglevels[i]->smoothd, NULL, &B));
        PetscCall(PetscObjectReference((PetscObject)B));
      }
      if (reuse == MAT_INITIAL_MATRIX) {
        PetscCall(KSPSetOperators(mglevels[i]->smoothd, A, B));
        PetscCall(PetscObjectDereference((PetscObject)A));
        PetscCall(PetscObjectDereference((PetscObject)B));
      }
      /* the newly computed operators become the "fine" pair for the next (coarser) iteration */
      dA = A;
      dB = B;
    }
  }

  /* Adapt interpolation matrices */
  if (adaptInterpolation) {
    for (i = 0; i < n; ++i) {
      if (!mglevels[i]->coarseSpace) PetscCall(PCMGComputeCoarseSpace_Internal(pc, i, mg->coarseSpaceType, mg->Nc, !i ? NULL : mglevels[i - 1]->coarseSpace, &mglevels[i]->coarseSpace));
      if (i) PetscCall(PCMGAdaptInterpolator_Internal(pc, i, mglevels[i - 1]->smoothu, mglevels[i]->smoothu, mglevels[i - 1]->coarseSpace, mglevels[i]->coarseSpace));
    }
    for (i = n - 2; i > -1; --i) PetscCall(PCMGRecomputeLevelOperators_Internal(pc, i));
  }

  /* Hand the restriction/injection operators to the DMs so they can restrict the state
     (needed when the user computes the coarse operators themselves). */
  if (needRestricts && pc->dm) {
    for (i = n - 2; i >= 0; i--) {
      DM  dmfine, dmcoarse;
      Mat Restrict, Inject;
      Vec rscale;

      PetscCall(KSPGetDM(mglevels[i + 1]->smoothd, &dmfine));
      PetscCall(KSPGetDM(mglevels[i]->smoothd, &dmcoarse));
      PetscCall(PCMGGetRestriction(pc, i + 1, &Restrict));
      PetscCall(PCMGGetRScale(pc, i + 1, &rscale));
      PetscCall(PCMGGetInjection(pc, i + 1, &Inject));
      PetscCall(DMRestrict(dmfine, Restrict, rscale, Inject, dmcoarse));
    }
  }

  /* Phase 3 (first setup only): apply options to the level solvers and create work vectors. */
  if (!pc->setupcalled) {
    for (i = 0; i < n; i++) PetscCall(KSPSetFromOptions(mglevels[i]->smoothd));
    for (i = 1; i < n; i++) {
      if (mglevels[i]->smoothu && (mglevels[i]->smoothu != mglevels[i]->smoothd)) PetscCall(KSPSetFromOptions(mglevels[i]->smoothu));
      if (mglevels[i]->cr) PetscCall(KSPSetFromOptions(mglevels[i]->cr));
    }
    /* ensure that if either interpolation or restriction is set the other one is set */
    for (i = 1; i < n; i++) {
      PetscCall(PCMGGetInterpolation(pc, i, NULL));
      PetscCall(PCMGGetRestriction(pc, i, NULL));
    }
    /* rhs (b), solution (x), and residual (r) work vectors for each non-finest level */
    for (i = 0; i < n - 1; i++) {
      if (!mglevels[i]->b) {
        Vec *vec;
        PetscCall(KSPCreateVecs(mglevels[i]->smoothd, 1, &vec, 0, NULL));
        PetscCall(PCMGSetRhs(pc, i, *vec));
        PetscCall(VecDestroy(vec));
        PetscCall(PetscFree(vec));
      }
      if (!mglevels[i]->r && i) {
        PetscCall(VecDuplicate(mglevels[i]->b, &tvec));
        PetscCall(PCMGSetR(pc, i, tvec));
        PetscCall(VecDestroy(&tvec));
      }
      if (!mglevels[i]->x) {
        PetscCall(VecDuplicate(mglevels[i]->b, &tvec));
        PetscCall(PCMGSetX(pc, i, tvec));
        PetscCall(VecDestroy(&tvec));
      }
      if (doCR) {
        PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crx));
        PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crb));
        PetscCall(VecZeroEntries(mglevels[i]->crb));
      }
    }
    if (n != 1 && !mglevels[n - 1]->r) {
      /* PCMGSetR() on the finest level if user did not supply it */
      Vec *vec;

      PetscCall(KSPCreateVecs(mglevels[n - 1]->smoothd, 1, &vec, 0, NULL));
      PetscCall(PCMGSetR(pc, n - 1, *vec));
      PetscCall(VecDestroy(vec));
      PetscCall(PetscFree(vec));
    }
    if (doCR) {
      PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crx));
      PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crb));
      PetscCall(VecZeroEntries(mglevels[n - 1]->crb));
    }
  }

  if (pc->dm) {
    /* need to tell all the coarser levels to rebuild the matrix using the DM for that level */
    for (i = 0; i < n - 1; i++) {
      if (mglevels[i]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[i]->smoothd->setupstage = KSP_SETUP_NEWMATRIX;
    }
  }
  // We got here (PCSetUp_MG) because the matrix has changed, which means the smoother needs to be set up again (e.g.,
  // new diagonal for Jacobi). Setting it here allows it to be logged under PCSetUp rather than deep inside a PCApply.
  if (mglevels[n - 1]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[n - 1]->smoothd->setupstage = KSP_SETUP_NEWMATRIX;

  /* Phase 4: set up the down smoothers and default residual routines on every non-coarsest level. */
  for (i = 1; i < n; i++) {
    if (mglevels[i]->smoothu == mglevels[i]->smoothd || mg->am == PC_MG_FULL || mg->am == PC_MG_KASKADE || mg->cyclesperpcapply > 1) {
      /* if doing only down then initial guess is zero */
      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE));
    }
    if (mglevels[i]->cr) PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
    if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
    PetscCall(KSPSetUp(mglevels[i]->smoothd));
    if (mglevels[i]->smoothd->reason) pc->failedreason = PC_SUBPC_ERROR;
    if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
    if (!mglevels[i]->residual) {
      Mat mat;

      PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL));
      PetscCall(PCMGSetResidual(pc, i, PCMGResidualDefault, mat));
    }
    if (!mglevels[i]->residualtranspose) {
      Mat mat;

      PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL));
      PetscCall(PCMGSetResidualTranspose(pc, i, PCMGResidualTransposeDefault, mat));
    }
  }
  /* Phase 5: set up the distinct up smoothers and CR solvers (operators default to the down smoother's). */
  for (i = 1; i < n; i++) {
    if (mglevels[i]->smoothu && mglevels[i]->smoothu != mglevels[i]->smoothd) {
      Mat downmat, downpmat;

      /* check if operators have been set for up, if not use down operators to set them */
      PetscCall(KSPGetOperatorsSet(mglevels[i]->smoothu, &opsset, NULL));
      if (!opsset) {
        PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat));
        PetscCall(KSPSetOperators(mglevels[i]->smoothu, downmat, downpmat));
      }

      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothu, PETSC_TRUE));
      if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
      PetscCall(KSPSetUp(mglevels[i]->smoothu));
      if (mglevels[i]->smoothu->reason) pc->failedreason = PC_SUBPC_ERROR;
      if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
    }
    if (mglevels[i]->cr) {
      Mat downmat, downpmat;

      /* check if operators have been set for cr, if not use down operators to set them */
      PetscCall(KSPGetOperatorsSet(mglevels[i]->cr, &opsset, NULL));
      if (!opsset) {
        PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat));
        PetscCall(KSPSetOperators(mglevels[i]->cr, downmat, downpmat));
      }

      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
      if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
      PetscCall(KSPSetUp(mglevels[i]->cr));
      if (mglevels[i]->cr->reason) pc->failedreason = PC_SUBPC_ERROR;
      if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
    }
  }

  /* coarse grid solver (level 0) */
  if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0));
  PetscCall(KSPSetUp(mglevels[0]->smoothd));
  if (mglevels[0]->smoothd->reason) pc->failedreason = PC_SUBPC_ERROR;
  if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0));

  /*
     Dump the interpolation/restriction matrices plus the
   Jacobian/stiffness on each level. This allows MATLAB users to
   easily check if the Galerkin condition A_c = R A_f R^T is satisfied.

   Only support one or the other at the same time.
  */
#if defined(PETSC_USE_SOCKET_VIEWER)
  PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_matlab", &dump, NULL));
  if (dump) viewer = PETSC_VIEWER_SOCKET_(PetscObjectComm((PetscObject)pc));
  dump = PETSC_FALSE;
#endif
  PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_binary", &dump, NULL));
  if (dump) viewer = PETSC_VIEWER_BINARY_(PetscObjectComm((PetscObject)pc));

  if (viewer) {
    for (i = 1; i < n; i++) PetscCall(MatView(mglevels[i]->restrct, viewer));
    for (i = 0; i < n; i++) {
      PetscCall(KSPGetPC(mglevels[i]->smoothd, &pc));
      PetscCall(MatView(pc->mat, viewer));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1254 
1255 PetscErrorCode PCMGGetLevels_MG(PC pc, PetscInt *levels)
1256 {
1257   PC_MG *mg = (PC_MG *)pc->data;
1258 
1259   PetscFunctionBegin;
1260   *levels = mg->nlevels;
1261   PetscFunctionReturn(PETSC_SUCCESS);
1262 }
1263 
1264 /*@
1265   PCMGGetLevels - Gets the number of levels to use with `PCMG`.
1266 
1267   Not Collective
1268 
1269   Input Parameter:
1270 . pc - the preconditioner context
1271 
1272   Output Parameter:
1273 . levels - the number of levels
1274 
1275   Level: advanced
1276 
1277 .seealso: [](ch_ksp), `PCMG`, `PCMGSetLevels()`
1278 @*/
PetscErrorCode PCMGGetLevels(PC pc, PetscInt *levels)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(levels, 2);
  /* default to 0 so a well-defined value is returned when the PC is not of a
     type that implements PCMGGetLevels_C (PetscTryMethod is a no-op then) */
  *levels = 0;
  PetscTryMethod(pc, "PCMGGetLevels_C", (PC, PetscInt *), (pc, levels));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1288 
1289 /*@
1290   PCMGGetGridComplexity - compute operator and grid complexity of the `PCMG` hierarchy
1291 
1292   Input Parameter:
1293 . pc - the preconditioner context
1294 
1295   Output Parameters:
1296 + gc - grid complexity = sum_i(n_i) / n_0
1297 - oc - operator complexity = sum_i(nnz_i) / nnz_0
1298 
1299   Level: advanced
1300 
1301   Note:
  This is often called the operator complexity in multigrid literature
1303 
1304 .seealso: [](ch_ksp), `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`
1305 @*/
1306 PetscErrorCode PCMGGetGridComplexity(PC pc, PetscReal *gc, PetscReal *oc)
1307 {
1308   PC_MG         *mg       = (PC_MG *)pc->data;
1309   PC_MG_Levels **mglevels = mg->levels;
1310   PetscInt       lev, N;
1311   PetscLogDouble nnz0 = 0, sgc = 0, soc = 0, n0 = 0;
1312   MatInfo        info;
1313 
1314   PetscFunctionBegin;
1315   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1316   if (gc) PetscAssertPointer(gc, 2);
1317   if (oc) PetscAssertPointer(oc, 3);
1318   if (!pc->setupcalled) {
1319     if (gc) *gc = 0;
1320     if (oc) *oc = 0;
1321     PetscFunctionReturn(PETSC_SUCCESS);
1322   }
1323   PetscCheck(mg->nlevels > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MG has no levels");
1324   for (lev = 0; lev < mg->nlevels; lev++) {
1325     Mat dB;
1326     PetscCall(KSPGetOperators(mglevels[lev]->smoothd, NULL, &dB));
1327     PetscCall(MatGetInfo(dB, MAT_GLOBAL_SUM, &info)); /* global reduction */
1328     PetscCall(MatGetSize(dB, &N, NULL));
1329     sgc += N;
1330     soc += info.nz_used;
1331     if (lev == mg->nlevels - 1) {
1332       nnz0 = info.nz_used;
1333       n0   = N;
1334     }
1335   }
1336   PetscCheck(n0 > 0 && gc, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number for grid points on finest level is not available");
1337   *gc = (PetscReal)(sgc / n0);
1338   if (nnz0 > 0 && oc) *oc = (PetscReal)(soc / nnz0);
1339   PetscFunctionReturn(PETSC_SUCCESS);
1340 }
1341 
1342 /*@
1343   PCMGSetType - Determines the form of multigrid to use, either
1344   multiplicative, additive, full, or the Kaskade algorithm.
1345 
1346   Logically Collective
1347 
1348   Input Parameters:
1349 + pc   - the preconditioner context
1350 - form - multigrid form, one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, `PC_MG_KASKADE`
1351 
1352   Options Database Key:
1353 . -pc_mg_type <form> - Sets <form>, one of multiplicative, additive, full, kaskade
1354 
1355   Level: advanced
1356 
1357 .seealso: [](ch_ksp), `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGGetType()`, `PCMGCycleType`
1358 @*/
1359 PetscErrorCode PCMGSetType(PC pc, PCMGType form)
1360 {
1361   PC_MG *mg = (PC_MG *)pc->data;
1362 
1363   PetscFunctionBegin;
1364   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1365   PetscValidLogicalCollectiveEnum(pc, form, 2);
1366   mg->am = form;
1367   if (form == PC_MG_MULTIPLICATIVE) pc->ops->applyrichardson = PCApplyRichardson_MG;
1368   else pc->ops->applyrichardson = NULL;
1369   PetscFunctionReturn(PETSC_SUCCESS);
1370 }
1371 
1372 /*@
1373   PCMGGetType - Finds the form of multigrid the `PCMG` is using  multiplicative, additive, full, or the Kaskade algorithm.
1374 
1375   Logically Collective
1376 
1377   Input Parameter:
1378 . pc - the preconditioner context
1379 
1380   Output Parameter:
1381 . type - one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, `PC_MG_KASKADE`, `PCMGCycleType`
1382 
1383   Level: advanced
1384 
1385 .seealso: [](ch_ksp), `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGSetType()`
1386 @*/
1387 PetscErrorCode PCMGGetType(PC pc, PCMGType *type)
1388 {
1389   PC_MG *mg = (PC_MG *)pc->data;
1390 
1391   PetscFunctionBegin;
1392   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1393   *type = mg->am;
1394   PetscFunctionReturn(PETSC_SUCCESS);
1395 }
1396 
1397 /*@
1398   PCMGSetCycleType - Sets the type cycles to use.  Use `PCMGSetCycleTypeOnLevel()` for more
1399   complicated cycling.
1400 
1401   Logically Collective
1402 
1403   Input Parameters:
1404 + pc - the multigrid context
1405 - n  - either `PC_MG_CYCLE_V` or `PC_MG_CYCLE_W`
1406 
1407   Options Database Key:
1408 . -pc_mg_cycle_type <v,w> - provide the cycle desired
1409 
1410   Level: advanced
1411 
1412 .seealso: [](ch_ksp), `PCMG`, `PCMGSetCycleTypeOnLevel()`, `PCMGType`, `PCMGCycleType`
1413 @*/
1414 PetscErrorCode PCMGSetCycleType(PC pc, PCMGCycleType n)
1415 {
1416   PC_MG         *mg       = (PC_MG *)pc->data;
1417   PC_MG_Levels **mglevels = mg->levels;
1418   PetscInt       i, levels;
1419 
1420   PetscFunctionBegin;
1421   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1422   PetscValidLogicalCollectiveEnum(pc, n, 2);
1423   PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
1424   levels = mglevels[0]->levels;
1425   for (i = 0; i < levels; i++) mglevels[i]->cycles = n;
1426   PetscFunctionReturn(PETSC_SUCCESS);
1427 }
1428 
1429 /*@
1430   PCMGMultiplicativeSetCycles - Sets the number of cycles to use for each preconditioner step
1431   of multigrid when `PCMGType` is `PC_MG_MULTIPLICATIVE`
1432 
1433   Logically Collective
1434 
1435   Input Parameters:
1436 + pc - the multigrid context
1437 - n  - number of cycles (default is 1)
1438 
1439   Options Database Key:
1440 . -pc_mg_multiplicative_cycles n - set the number of cycles
1441 
1442   Level: advanced
1443 
1444   Note:
1445   This is not associated with setting a v or w cycle, that is set with `PCMGSetCycleType()`
1446 
1447 .seealso: [](ch_ksp), `PCMGSetCycleTypeOnLevel()`, `PCMGSetCycleType()`, `PCMGCycleType`, `PCMGType`
1448 @*/
1449 PetscErrorCode PCMGMultiplicativeSetCycles(PC pc, PetscInt n)
1450 {
1451   PC_MG *mg = (PC_MG *)pc->data;
1452 
1453   PetscFunctionBegin;
1454   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1455   PetscValidLogicalCollectiveInt(pc, n, 2);
1456   mg->cyclesperpcapply = n;
1457   PetscFunctionReturn(PETSC_SUCCESS);
1458 }
1459 
1460 /*
1461    Since the finest level KSP shares the original matrix (of the entire system), it's preconditioner
1462    should not be updated if the whole PC is supposed to reuse the preconditioner
1463 */
1464 static PetscErrorCode PCSetReusePreconditioner_MG(PC pc, PetscBool flag)
1465 {
1466   PC_MG         *mg       = (PC_MG *)pc->data;
1467   PC_MG_Levels **mglevels = mg->levels;
1468   PetscInt       levels;
1469   PC             tpc;
1470 
1471   PetscFunctionBegin;
1472   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1473   PetscValidLogicalCollectiveBool(pc, flag, 2);
1474   if (mglevels) {
1475     levels = mglevels[0]->levels;
1476     PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc));
1477     tpc->reusepreconditioner = flag;
1478     PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc));
1479     tpc->reusepreconditioner = flag;
1480   }
1481   PetscFunctionReturn(PETSC_SUCCESS);
1482 }
1483 
1484 static PetscErrorCode PCMGSetGalerkin_MG(PC pc, PCMGGalerkinType use)
1485 {
1486   PC_MG *mg = (PC_MG *)pc->data;
1487 
1488   PetscFunctionBegin;
1489   mg->galerkin = use;
1490   PetscFunctionReturn(PETSC_SUCCESS);
1491 }
1492 
1493 /*@
1494   PCMGSetGalerkin - Causes the coarser grid matrices to be computed from the
1495   finest grid via the Galerkin process: A_i-1 = r_i * A_i * p_i
1496 
1497   Logically Collective
1498 
1499   Input Parameters:
1500 + pc  - the multigrid context
1501 - use - one of `PC_MG_GALERKIN_BOTH`, `PC_MG_GALERKIN_PMAT`, `PC_MG_GALERKIN_MAT`, or `PC_MG_GALERKIN_NONE`
1502 
1503   Options Database Key:
1504 . -pc_mg_galerkin <both,pmat,mat,none> - set the matrices to form via the Galerkin process
1505 
1506   Level: intermediate
1507 
1508   Note:
1509   Some codes that use `PCMG` such as `PCGAMG` use Galerkin internally while constructing the hierarchy and thus do not
1510   use the `PCMG` construction of the coarser grids.
1511 
1512 .seealso: [](ch_ksp), `PCMG`, `PCMGGetGalerkin()`, `PCMGGalerkinType`
1513 @*/
1514 PetscErrorCode PCMGSetGalerkin(PC pc, PCMGGalerkinType use)
1515 {
1516   PetscFunctionBegin;
1517   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1518   PetscTryMethod(pc, "PCMGSetGalerkin_C", (PC, PCMGGalerkinType), (pc, use));
1519   PetscFunctionReturn(PETSC_SUCCESS);
1520 }
1521 
1522 /*@
1523   PCMGGetGalerkin - Checks if Galerkin multigrid is being used, i.e. A_i-1 = r_i * A_i * p_i
1524 
1525   Not Collective
1526 
1527   Input Parameter:
1528 . pc - the multigrid context
1529 
1530   Output Parameter:
1531 . galerkin - one of `PC_MG_GALERKIN_BOTH`,`PC_MG_GALERKIN_PMAT`,`PC_MG_GALERKIN_MAT`, `PC_MG_GALERKIN_NONE`, or `PC_MG_GALERKIN_EXTERNAL`
1532 
1533   Level: intermediate
1534 
1535 .seealso: [](ch_ksp), `PCMG`, `PCMGSetGalerkin()`, `PCMGGalerkinType`
1536 @*/
1537 PetscErrorCode PCMGGetGalerkin(PC pc, PCMGGalerkinType *galerkin)
1538 {
1539   PC_MG *mg = (PC_MG *)pc->data;
1540 
1541   PetscFunctionBegin;
1542   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1543   *galerkin = mg->galerkin;
1544   PetscFunctionReturn(PETSC_SUCCESS);
1545 }
1546 
1547 static PetscErrorCode PCMGSetAdaptInterpolation_MG(PC pc, PetscBool adapt)
1548 {
1549   PC_MG *mg = (PC_MG *)pc->data;
1550 
1551   PetscFunctionBegin;
1552   mg->adaptInterpolation = adapt;
1553   PetscFunctionReturn(PETSC_SUCCESS);
1554 }
1555 
1556 static PetscErrorCode PCMGGetAdaptInterpolation_MG(PC pc, PetscBool *adapt)
1557 {
1558   PC_MG *mg = (PC_MG *)pc->data;
1559 
1560   PetscFunctionBegin;
1561   *adapt = mg->adaptInterpolation;
1562   PetscFunctionReturn(PETSC_SUCCESS);
1563 }
1564 
1565 static PetscErrorCode PCMGSetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType ctype)
1566 {
1567   PC_MG *mg = (PC_MG *)pc->data;
1568 
1569   PetscFunctionBegin;
1570   mg->adaptInterpolation = ctype != PCMG_ADAPT_NONE ? PETSC_TRUE : PETSC_FALSE;
1571   mg->coarseSpaceType    = ctype;
1572   PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_BOTH));
1573   PetscFunctionReturn(PETSC_SUCCESS);
1574 }
1575 
1576 static PetscErrorCode PCMGGetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType *ctype)
1577 {
1578   PC_MG *mg = (PC_MG *)pc->data;
1579 
1580   PetscFunctionBegin;
1581   *ctype = mg->coarseSpaceType;
1582   PetscFunctionReturn(PETSC_SUCCESS);
1583 }
1584 
1585 static PetscErrorCode PCMGSetAdaptCR_MG(PC pc, PetscBool cr)
1586 {
1587   PC_MG *mg = (PC_MG *)pc->data;
1588 
1589   PetscFunctionBegin;
1590   mg->compatibleRelaxation = cr;
1591   PetscFunctionReturn(PETSC_SUCCESS);
1592 }
1593 
1594 static PetscErrorCode PCMGGetAdaptCR_MG(PC pc, PetscBool *cr)
1595 {
1596   PC_MG *mg = (PC_MG *)pc->data;
1597 
1598   PetscFunctionBegin;
1599   *cr = mg->compatibleRelaxation;
1600   PetscFunctionReturn(PETSC_SUCCESS);
1601 }
1602 
1603 /*@
1604   PCMGSetAdaptCoarseSpaceType - Set the type of adaptive coarse space.
1605 
1606   Adapts or creates the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated.
1607 
1608   Logically Collective
1609 
1610   Input Parameters:
1611 + pc    - the multigrid context
1612 - ctype - the type of coarse space
1613 
1614   Options Database Keys:
1615 + -pc_mg_adapt_interp_n <int>             - The number of modes to use
1616 - -pc_mg_adapt_interp_coarse_space <type> - The type of coarse space: none, `polynomial`, `harmonic`, `eigenvector`, `generalized_eigenvector`, `gdsw`
1617 
1618   Level: intermediate
1619 
1620   Note:
1621   Requires a `DM` with specific functionality be attached to the `PC`.
1622 
1623 .seealso: [](ch_ksp), `PCMG`, `PCMGCoarseSpaceType`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()`, `DM`
1624 @*/
1625 PetscErrorCode PCMGSetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType ctype)
1626 {
1627   PetscFunctionBegin;
1628   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1629   PetscValidLogicalCollectiveEnum(pc, ctype, 2);
1630   PetscTryMethod(pc, "PCMGSetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType), (pc, ctype));
1631   PetscFunctionReturn(PETSC_SUCCESS);
1632 }
1633 
1634 /*@
1635   PCMGGetAdaptCoarseSpaceType - Get the type of adaptive coarse space.
1636 
1637   Not Collective
1638 
1639   Input Parameter:
1640 . pc - the multigrid context
1641 
1642   Output Parameter:
1643 . ctype - the type of coarse space
1644 
1645   Level: intermediate
1646 
1647 .seealso: [](ch_ksp), `PCMG`, `PCMGCoarseSpaceType`, `PCMGSetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()`
1648 @*/
1649 PetscErrorCode PCMGGetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType *ctype)
1650 {
1651   PetscFunctionBegin;
1652   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1653   PetscAssertPointer(ctype, 2);
1654   PetscUseMethod(pc, "PCMGGetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType *), (pc, ctype));
1655   PetscFunctionReturn(PETSC_SUCCESS);
1656 }
1657 
1658 /* MATT: REMOVE? */
1659 /*@
1660   PCMGSetAdaptInterpolation - Adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated.
1661 
1662   Logically Collective
1663 
1664   Input Parameters:
1665 + pc    - the multigrid context
1666 - adapt - flag for adaptation of the interpolator
1667 
1668   Options Database Keys:
1669 + -pc_mg_adapt_interp                     - Turn on adaptation
1670 . -pc_mg_adapt_interp_n <int>             - The number of modes to use, should be divisible by dimension
1671 - -pc_mg_adapt_interp_coarse_space <type> - The type of coarse space: polynomial, harmonic, eigenvector, generalized_eigenvector
1672 
1673   Level: intermediate
1674 
1675 .seealso: [](ch_ksp), `PCMG`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1676 @*/
1677 PetscErrorCode PCMGSetAdaptInterpolation(PC pc, PetscBool adapt)
1678 {
1679   PetscFunctionBegin;
1680   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1681   PetscTryMethod(pc, "PCMGSetAdaptInterpolation_C", (PC, PetscBool), (pc, adapt));
1682   PetscFunctionReturn(PETSC_SUCCESS);
1683 }
1684 
1685 /*@
1686   PCMGGetAdaptInterpolation - Get the flag to adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh,
1687   and thus accurately interpolated.
1688 
1689   Not Collective
1690 
1691   Input Parameter:
1692 . pc - the multigrid context
1693 
1694   Output Parameter:
1695 . adapt - flag for adaptation of the interpolator
1696 
1697   Level: intermediate
1698 
1699 .seealso: [](ch_ksp), `PCMG`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1700 @*/
1701 PetscErrorCode PCMGGetAdaptInterpolation(PC pc, PetscBool *adapt)
1702 {
1703   PetscFunctionBegin;
1704   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1705   PetscAssertPointer(adapt, 2);
1706   PetscUseMethod(pc, "PCMGGetAdaptInterpolation_C", (PC, PetscBool *), (pc, adapt));
1707   PetscFunctionReturn(PETSC_SUCCESS);
1708 }
1709 
1710 /*@
1711   PCMGSetAdaptCR - Monitor the coarse space quality using an auxiliary solve with compatible relaxation.
1712 
1713   Logically Collective
1714 
1715   Input Parameters:
1716 + pc - the multigrid context
1717 - cr - flag for compatible relaxation
1718 
1719   Options Database Key:
1720 . -pc_mg_adapt_cr - Turn on compatible relaxation
1721 
1722   Level: intermediate
1723 
1724 .seealso: [](ch_ksp), `PCMG`, `PCMGGetAdaptCR()`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1725 @*/
1726 PetscErrorCode PCMGSetAdaptCR(PC pc, PetscBool cr)
1727 {
1728   PetscFunctionBegin;
1729   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1730   PetscTryMethod(pc, "PCMGSetAdaptCR_C", (PC, PetscBool), (pc, cr));
1731   PetscFunctionReturn(PETSC_SUCCESS);
1732 }
1733 
1734 /*@
1735   PCMGGetAdaptCR - Get the flag to monitor coarse space quality using an auxiliary solve with compatible relaxation.
1736 
1737   Not Collective
1738 
1739   Input Parameter:
1740 . pc - the multigrid context
1741 
1742   Output Parameter:
1743 . cr - flag for compatible relaxaion
1744 
1745   Level: intermediate
1746 
1747 .seealso: [](ch_ksp), `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1748 @*/
1749 PetscErrorCode PCMGGetAdaptCR(PC pc, PetscBool *cr)
1750 {
1751   PetscFunctionBegin;
1752   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1753   PetscAssertPointer(cr, 2);
1754   PetscUseMethod(pc, "PCMGGetAdaptCR_C", (PC, PetscBool *), (pc, cr));
1755   PetscFunctionReturn(PETSC_SUCCESS);
1756 }
1757 
1758 /*@
1759   PCMGSetNumberSmooth - Sets the number of pre and post-smoothing steps to use
1760   on all levels.  Use `PCMGDistinctSmoothUp()` to create separate up and down smoothers if you want different numbers of
1761   pre- and post-smoothing steps.
1762 
1763   Logically Collective
1764 
1765   Input Parameters:
1766 + pc - the multigrid context
1767 - n  - the number of smoothing steps
1768 
1769   Options Database Key:
1770 . -mg_levels_ksp_max_it <n> - Sets number of pre and post-smoothing steps
1771 
1772   Level: advanced
1773 
1774   Note:
1775   This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid.
1776 
1777 .seealso: [](ch_ksp), `PCMG`, `PCMGSetDistinctSmoothUp()`
1778 @*/
1779 PetscErrorCode PCMGSetNumberSmooth(PC pc, PetscInt n)
1780 {
1781   PC_MG         *mg       = (PC_MG *)pc->data;
1782   PC_MG_Levels **mglevels = mg->levels;
1783   PetscInt       i, levels;
1784 
1785   PetscFunctionBegin;
1786   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1787   PetscValidLogicalCollectiveInt(pc, n, 2);
1788   PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
1789   levels = mglevels[0]->levels;
1790 
1791   for (i = 1; i < levels; i++) {
1792     PetscCall(KSPSetTolerances(mglevels[i]->smoothu, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, n));
1793     PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, n));
1794     mg->default_smoothu = n;
1795     mg->default_smoothd = n;
1796   }
1797   PetscFunctionReturn(PETSC_SUCCESS);
1798 }
1799 
1800 /*@
1801   PCMGSetDistinctSmoothUp - sets the up (post) smoother to be a separate `KSP` from the down (pre) smoother on all levels
1802   and adds the suffix _up to the options name
1803 
1804   Logically Collective
1805 
1806   Input Parameter:
1807 . pc - the preconditioner context
1808 
1809   Options Database Key:
1810 . -pc_mg_distinct_smoothup <bool> - use distinct smoothing objects
1811 
1812   Level: advanced
1813 
1814   Note:
1815   This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid.
1816 
1817 .seealso: [](ch_ksp), `PCMG`, `PCMGSetNumberSmooth()`
1818 @*/
1819 PetscErrorCode PCMGSetDistinctSmoothUp(PC pc)
1820 {
1821   PC_MG         *mg       = (PC_MG *)pc->data;
1822   PC_MG_Levels **mglevels = mg->levels;
1823   PetscInt       i, levels;
1824   KSP            subksp;
1825 
1826   PetscFunctionBegin;
1827   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1828   PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
1829   levels = mglevels[0]->levels;
1830 
1831   for (i = 1; i < levels; i++) {
1832     const char *prefix = NULL;
1833     /* make sure smoother up and down are different */
1834     PetscCall(PCMGGetSmootherUp(pc, i, &subksp));
1835     PetscCall(KSPGetOptionsPrefix(mglevels[i]->smoothd, &prefix));
1836     PetscCall(KSPSetOptionsPrefix(subksp, prefix));
1837     PetscCall(KSPAppendOptionsPrefix(subksp, "up_"));
1838   }
1839   PetscFunctionReturn(PETSC_SUCCESS);
1840 }
1841 
1842 /* No new matrices are created, and the coarse operator matrices are the references to the original ones */
1843 static PetscErrorCode PCGetInterpolations_MG(PC pc, PetscInt *num_levels, Mat *interpolations[])
1844 {
1845   PC_MG         *mg       = (PC_MG *)pc->data;
1846   PC_MG_Levels **mglevels = mg->levels;
1847   Mat           *mat;
1848   PetscInt       l;
1849 
1850   PetscFunctionBegin;
1851   PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
1852   PetscCall(PetscMalloc1(mg->nlevels, &mat));
1853   for (l = 1; l < mg->nlevels; l++) {
1854     mat[l - 1] = mglevels[l]->interpolate;
1855     PetscCall(PetscObjectReference((PetscObject)mat[l - 1]));
1856   }
1857   *num_levels     = mg->nlevels;
1858   *interpolations = mat;
1859   PetscFunctionReturn(PETSC_SUCCESS);
1860 }
1861 
1862 /* No new matrices are created, and the coarse operator matrices are the references to the original ones */
1863 static PetscErrorCode PCGetCoarseOperators_MG(PC pc, PetscInt *num_levels, Mat *coarseOperators[])
1864 {
1865   PC_MG         *mg       = (PC_MG *)pc->data;
1866   PC_MG_Levels **mglevels = mg->levels;
1867   PetscInt       l;
1868   Mat           *mat;
1869 
1870   PetscFunctionBegin;
1871   PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
1872   PetscCall(PetscMalloc1(mg->nlevels, &mat));
1873   for (l = 0; l < mg->nlevels - 1; l++) {
1874     PetscCall(KSPGetOperators(mglevels[l]->smoothd, NULL, &mat[l]));
1875     PetscCall(PetscObjectReference((PetscObject)mat[l]));
1876   }
1877   *num_levels      = mg->nlevels;
1878   *coarseOperators = mat;
1879   PetscFunctionReturn(PETSC_SUCCESS);
1880 }
1881 
1882 /*@C
1883   PCMGRegisterCoarseSpaceConstructor -  Adds a method to the `PCMG` package for coarse space construction.
1884 
1885   Not Collective, No Fortran Support
1886 
1887   Input Parameters:
1888 + name     - name of the constructor
1889 - function - constructor routine, see `PCMGCoarseSpaceConstructorFn`
1890 
1891   Level: advanced
1892 
1893   Developer Notes:
1894   This does not appear to be used anywhere
1895 
1896 .seealso: [](ch_ksp), `PCMGCoarseSpaceConstructorFn`, `PCMG`, `PCMGGetCoarseSpaceConstructor()`, `PCRegister()`
1897 @*/
1898 PetscErrorCode PCMGRegisterCoarseSpaceConstructor(const char name[], PCMGCoarseSpaceConstructorFn *function)
1899 {
1900   PetscFunctionBegin;
1901   PetscCall(PCInitializePackage());
1902   PetscCall(PetscFunctionListAdd(&PCMGCoarseList, name, function));
1903   PetscFunctionReturn(PETSC_SUCCESS);
1904 }
1905 
1906 /*@C
1907   PCMGGetCoarseSpaceConstructor -  Returns the given coarse space construction method.
1908 
1909   Not Collective, No Fortran Support
1910 
1911   Input Parameter:
1912 . name - name of the constructor
1913 
1914   Output Parameter:
1915 . function - constructor routine
1916 
1917   Level: advanced
1918 
1919 .seealso: [](ch_ksp), `PCMGCoarseSpaceConstructorFn`, `PCMG`, `PCMGRegisterCoarseSpaceConstructor()`, `PCRegister()`
1920 @*/
1921 PetscErrorCode PCMGGetCoarseSpaceConstructor(const char name[], PCMGCoarseSpaceConstructorFn **function)
1922 {
1923   PetscFunctionBegin;
1924   PetscCall(PetscFunctionListFind(PCMGCoarseList, name, function));
1925   PetscFunctionReturn(PETSC_SUCCESS);
1926 }
1927 
1928 /*MC
1929    PCMG - Use multigrid preconditioning. This preconditioner requires you provide additional information about the restriction/interpolation
1930    operators using `PCMGSetInterpolation()` and/or `PCMGSetRestriction()`(and possibly the coarser grid matrices) or a `DM` that can provide such information.
1931 
1932    Options Database Keys:
1933 +  -pc_mg_levels <nlevels>                            - number of levels including finest
1934 .  -pc_mg_cycle_type <v,w>                            - provide the cycle desired
1935 .  -pc_mg_type <additive,multiplicative,full,kaskade> - multiplicative is the default
1936 .  -pc_mg_log                                         - log information about time spent on each level of the solver
1937 .  -pc_mg_distinct_smoothup                           - configure up (after interpolation) and down (before restriction) smoothers separately (with different options prefixes)
1938 .  -pc_mg_galerkin <both,pmat,mat,none>               - use the Galerkin process to compute coarser operators, i.e., $A_{coarse} = R A_{fine} R^T$
1939 .  -pc_mg_multiplicative_cycles                        - number of cycles to use as the preconditioner (defaults to 1)
1940 .  -pc_mg_dump_matlab                                  - dumps the matrices for each level and the restriction/interpolation matrices
1941                                                          to a `PETSCVIEWERSOCKET` for reading from MATLAB.
-  -pc_mg_dump_binary                                  - dumps the matrices for each level and the restriction/interpolation matrices
                                                         to the binary output file called binaryoutput
1944 
1945    Level: intermediate
1946 
1947    Notes:
1948    `PCMG` provides a general framework for implementing multigrid methods. Use `PCGAMG` for PETSc's algebraic multigrid preconditioner, `PCHYPRE` for hypre's
1949    BoomerAMG algebraic multigrid, and `PCML` for Trilinos's ML preconditioner. `PCAMGX` provides access to NVIDIA's AmgX algebraic multigrid.
1950 
1951    If you use `KSPSetDM()` (or `SNESSetDM()` or `TSSetDM()`) with an appropriate `DM`, such as `DMDA`, then `PCMG` will use the geometric information
1952    from the `DM` to generate appropriate restriction and interpolation information and construct a geometric multigrid.
1953 
1954    If you do not provide an appropriate `DM` and do not provide restriction or interpolation operators with `PCMGSetInterpolation()` and/or `PCMGSetRestriction()`,
1955    then `PCMG` will run multigrid with only a single level (so not really multigrid).
1956 
1957    The Krylov solver (if any) and preconditioner (smoother) and their parameters are controlled from the options database with the standard
1958    options database keywords prefixed with `-mg_levels_` to affect all the levels but the coarsest, which is controlled with `-mg_coarse_`,
1959    and the finest where `-mg_fine_` can override `-mg_levels_`.  One can set different preconditioners etc on specific levels with the prefix
   `-mg_levels_n_` where `n` is the level number (zero being the coarse level). For example
1961 .vb
1962    -mg_levels_ksp_type gmres -mg_levels_pc_type bjacobi -mg_coarse_pc_type svd -mg_levels_2_pc_type sor
1963 .ve
1964    These options also work for controlling the smoothers etc inside `PCGAMG`
1965 
   If one uses a Krylov method such as `KSPGMRES` or `KSPCG` as the smoother then one must use `KSPFGMRES`, `KSPGCR`, or `KSPRICHARDSON` as the outer Krylov method
1967 
1968    When run with a single level the smoother options are used on that level NOT the coarse grid solver options
1969 
1970    When run with `KSPRICHARDSON` the convergence test changes slightly if monitor is turned on. The iteration count may change slightly. This
1971    is because without monitoring the residual norm is computed WITHIN each multigrid cycle on the finest level after the pre-smoothing
1972    (because the residual has just been computed for the multigrid algorithm and is hence available for free) while with monitoring the
1973    residual is computed at the end of each cycle.
1974 
1975 .seealso: [](sec_mg), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCMGType`, `PCEXOTIC`, `PCGAMG`, `PCML`, `PCHYPRE`
1976           `PCMGSetLevels()`, `PCMGGetLevels()`, `PCMGSetType()`, `PCMGSetCycleType()`,
1977           `PCMGSetDistinctSmoothUp()`, `PCMGGetCoarseSolve()`, `PCMGSetResidual()`, `PCMGSetInterpolation()`,
1978           `PCMGSetRestriction()`, `PCMGGetSmoother()`, `PCMGGetSmootherUp()`, `PCMGGetSmootherDown()`,
1979           `PCMGSetCycleTypeOnLevel()`, `PCMGSetRhs()`, `PCMGSetX()`, `PCMGSetR()`,
1980           `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1981 M*/
1982 
PETSC_EXTERN PetscErrorCode PCCreate_MG(PC pc)
{
  PC_MG *mg;

  PetscFunctionBegin;
  /* Allocate and attach the zero-initialized PC_MG context */
  PetscCall(PetscNew(&mg));
  pc->data               = mg;
  mg->nlevels            = -1; /* -1: number of levels not yet set (see PCMGGetLevels_MG) */
  mg->am                 = PC_MG_MULTIPLICATIVE;
  mg->galerkin           = PC_MG_GALERKIN_NONE;
  mg->adaptInterpolation = PETSC_FALSE;
  mg->Nc                 = -1;
  mg->eigenvalue         = -1; /* reused below as the composed-data id; -1 means unregistered */

  pc->useAmat = PETSC_TRUE;

  /* Install the PCMG implementations of the generic PC operations */
  pc->ops->apply             = PCApply_MG;
  pc->ops->applytranspose    = PCApplyTranspose_MG;
  pc->ops->matapply          = PCMatApply_MG;
  pc->ops->matapplytranspose = PCMatApplyTranspose_MG;
  pc->ops->setup             = PCSetUp_MG;
  pc->ops->reset             = PCReset_MG;
  pc->ops->destroy           = PCDestroy_MG;
  pc->ops->setfromoptions    = PCSetFromOptions_MG;
  pc->ops->view              = PCView_MG;

  /* Obtain an id for stashing eigenvalue estimates as composed data on objects */
  PetscCall(PetscObjectComposedDataRegister(&mg->eigenvalue));
  /* Compose the type-specific methods dispatched by the PCMG* wrapper functions */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", PCMGSetGalerkin_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetReusePreconditioner_C", PCSetReusePreconditioner_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", PCMGGetLevels_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", PCMGSetLevels_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", PCGetInterpolations_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", PCGetCoarseOperators_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", PCMGSetAdaptInterpolation_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", PCMGGetAdaptInterpolation_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", PCMGSetAdaptCR_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", PCMGGetAdaptCR_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", PCMGSetAdaptCoarseSpaceType_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", PCMGGetAdaptCoarseSpaceType_MG));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2024