xref: /petsc/src/ksp/pc/interface/precon.c (revision a69119a591a03a9d906b29c0a4e9802e4d7c9795)
1 
2 /*
3     The PC (preconditioner) interface routines, callable by users.
4 */
5 #include <petsc/private/pcimpl.h> /*I "petscksp.h" I*/
6 #include <petscdm.h>
7 
8 /* Logging support */
9 PetscClassId  PC_CLASSID;
10 PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_MatApply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
11 PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks;
12 PetscInt      PetscMGLevelId;
13 
/* Selects a default PC type string appropriate for the attached Pmat (if any) and the communicator size */
PetscErrorCode PCGetDefaultType_Private(PC pc, const char *type[]) {
  PetscMPIInt size;
  PetscBool   hasop, flg1, flg2, set, flg3, isnormal;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
  if (pc->pmat) {
    /* hasop: can we extract diagonal blocks (needed for block Jacobi)? */
    PetscCall(MatHasOperation(pc->pmat, MATOP_GET_DIAGONAL_BLOCK, &hasop));
    if (size == 1) {
      /* flg1/flg2: does PETSc provide an ICC/ILU factorization for this matrix type? */
      PetscCall(MatGetFactorAvailable(pc->pmat, "petsc", MAT_FACTOR_ICC, &flg1));
      PetscCall(MatGetFactorAvailable(pc->pmat, "petsc", MAT_FACTOR_ILU, &flg2));
      /* set: has the symmetry flag been determined; flg3: is the matrix symmetric */
      PetscCall(MatIsSymmetricKnown(pc->pmat, &set, &flg3));
      PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &isnormal, MATNORMAL, MATNORMALHERMITIAN, NULL));
      if (flg1 && (!flg2 || (set && flg3))) {
        /* prefer ICC when ILU is unavailable or the matrix is known symmetric */
        *type = PCICC;
      } else if (flg2) {
        *type = PCILU;
      } else if (isnormal) {
        /* implicit normal-equation matrices support no factorization here */
        *type = PCNONE;
      } else if (hasop) { /* likely is a parallel matrix run on one processor */
        *type = PCBJACOBI;
      } else {
        *type = PCNONE;
      }
    } else {
      /* in parallel, default to block Jacobi when diagonal blocks are available */
      if (hasop) {
        *type = PCBJACOBI;
      } else {
        *type = PCNONE;
      }
    }
  } else {
    /* no matrix attached yet: fall back to generic sequential/parallel defaults */
    if (size == 1) {
      *type = PCILU;
    } else {
      *type = PCBJACOBI;
    }
  }
  PetscFunctionReturn(0);
}
54 
55 /*@
56    PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats
57 
58    Collective on PC
59 
60    Input Parameter:
61 .  pc - the preconditioner context
62 
63    Level: developer
64 
65    Notes:
66     This allows a PC to be reused for a different sized linear system but using the same options that have been previously set in the PC
67 
68 .seealso: `PCCreate()`, `PCSetUp()`
69 @*/
PetscErrorCode PCReset(PC pc) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryTypeMethod(pc, reset); /* type-specific reset; silently skipped if the implementation provides none */
  /* release the scaling vectors and the operators held by this PC */
  PetscCall(VecDestroy(&pc->diagonalscaleright));
  PetscCall(VecDestroy(&pc->diagonalscaleleft));
  PetscCall(MatDestroy(&pc->pmat));
  PetscCall(MatDestroy(&pc->mat));

  pc->setupcalled = 0; /* force a full setup on the next PCSetUp() */
  PetscFunctionReturn(0);
}
82 
83 /*@C
84    PCDestroy - Destroys PC context that was created with PCCreate().
85 
86    Collective on PC
87 
88    Input Parameter:
89 .  pc - the preconditioner context
90 
91    Level: developer
92 
93 .seealso: `PCCreate()`, `PCSetUp()`
94 @*/
PetscErrorCode PCDestroy(PC *pc) {
  PetscFunctionBegin;
  if (!*pc) PetscFunctionReturn(0); /* destroying a NULL PC is a no-op */
  PetscValidHeaderSpecific((*pc), PC_CLASSID, 1);
  /* only the last reference actually tears the object down */
  if (--((PetscObject)(*pc))->refct > 0) {
    *pc = NULL;
    PetscFunctionReturn(0);
  }

  /* release vectors/matrices before destroying the header */
  PetscCall(PCReset(*pc));

  /* if memory was published with SAWs then destroy it */
  PetscCall(PetscObjectSAWsViewOff((PetscObject)*pc));
  PetscTryTypeMethod((*pc), destroy); /* type-specific destroy; skipped if not provided */
  PetscCall(DMDestroy(&(*pc)->dm));
  PetscCall(PetscHeaderDestroy(pc)); /* frees the object and nulls *pc */
  PetscFunctionReturn(0);
}
113 
114 /*@C
115    PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right
116       scaling as needed by certain time-stepping codes.
117 
118    Logically Collective on PC
119 
120    Input Parameter:
121 .  pc - the preconditioner context
122 
123    Output Parameter:
124 .  flag - PETSC_TRUE if it applies the scaling
125 
126    Level: developer
127 
128    Notes:
129     If this returns PETSC_TRUE then the system solved via the Krylov method is
130 $           D M A D^{-1} y = D M b  for left preconditioning or
131 $           D A M D^{-1} z = D b for right preconditioning
132 
133 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCSetDiagonalScale()`
134 @*/
PetscErrorCode PCGetDiagonalScale(PC pc, PetscBool *flag) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidBoolPointer(flag, 2);
  /* true once PCSetDiagonalScale() has been called on this PC */
  *flag = pc->diagonalscale;
  PetscFunctionReturn(0);
}
142 
143 /*@
144    PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right
145       scaling as needed by certain time-stepping codes.
146 
147    Logically Collective on PC
148 
149    Input Parameters:
150 +  pc - the preconditioner context
151 -  s - scaling vector
152 
153    Level: intermediate
154 
155    Notes:
156     The system solved via the Krylov method is
157 $           D M A D^{-1} y = D M b  for left preconditioning or
158 $           D A M D^{-1} z = D b for right preconditioning
159 
160    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
161 
162 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCGetDiagonalScale()`
163 @*/
164 PetscErrorCode PCSetDiagonalScale(PC pc, Vec s) {
165   PetscFunctionBegin;
166   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
167   PetscValidHeaderSpecific(s, VEC_CLASSID, 2);
168   pc->diagonalscale = PETSC_TRUE;
169 
170   PetscCall(PetscObjectReference((PetscObject)s));
171   PetscCall(VecDestroy(&pc->diagonalscaleleft));
172 
173   pc->diagonalscaleleft = s;
174 
175   PetscCall(VecDuplicate(s, &pc->diagonalscaleright));
176   PetscCall(VecCopy(s, pc->diagonalscaleright));
177   PetscCall(VecReciprocal(pc->diagonalscaleright));
178   PetscFunctionReturn(0);
179 }
180 
181 /*@
182    PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes.
183 
184    Logically Collective on PC
185 
186    Input Parameters:
187 +  pc - the preconditioner context
188 .  in - input vector
189 -  out - scaled vector (may be the same as in)
190 
191    Level: intermediate
192 
193    Notes:
194     The system solved via the Krylov method is
195 $           D M A D^{-1} y = D M b  for left preconditioning or
196 $           D A M D^{-1} z = D b for right preconditioning
197 
198    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
199 
200    If diagonal scaling is turned off and in is not out then in is copied to out
201 
202 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleSet()`, `PCDiagonalScaleRight()`, `PCDiagonalScale()`
203 @*/
204 PetscErrorCode PCDiagonalScaleLeft(PC pc, Vec in, Vec out) {
205   PetscFunctionBegin;
206   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
207   PetscValidHeaderSpecific(in, VEC_CLASSID, 2);
208   PetscValidHeaderSpecific(out, VEC_CLASSID, 3);
209   if (pc->diagonalscale) {
210     PetscCall(VecPointwiseMult(out, pc->diagonalscaleleft, in));
211   } else if (in != out) {
212     PetscCall(VecCopy(in, out));
213   }
214   PetscFunctionReturn(0);
215 }
216 
217 /*@
218    PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes.
219 
220    Logically Collective on PC
221 
222    Input Parameters:
223 +  pc - the preconditioner context
224 .  in - input vector
225 -  out - scaled vector (may be the same as in)
226 
227    Level: intermediate
228 
229    Notes:
230     The system solved via the Krylov method is
231 $           D M A D^{-1} y = D M b  for left preconditioning or
232 $           D A M D^{-1} z = D b for right preconditioning
233 
234    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
235 
236    If diagonal scaling is turned off and in is not out then in is copied to out
237 
238 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleSet()`, `PCDiagonalScale()`
239 @*/
240 PetscErrorCode PCDiagonalScaleRight(PC pc, Vec in, Vec out) {
241   PetscFunctionBegin;
242   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
243   PetscValidHeaderSpecific(in, VEC_CLASSID, 2);
244   PetscValidHeaderSpecific(out, VEC_CLASSID, 3);
245   if (pc->diagonalscale) {
246     PetscCall(VecPointwiseMult(out, pc->diagonalscaleright, in));
247   } else if (in != out) {
248     PetscCall(VecCopy(in, out));
249   }
250   PetscFunctionReturn(0);
251 }
252 
253 /*@
254    PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the
255    operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
256    TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators() not the Pmat.
257 
258    Logically Collective on PC
259 
260    Input Parameters:
261 +  pc - the preconditioner context
262 -  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)
263 
264    Options Database Key:
265 .  -pc_use_amat <true,false> - use the amat to apply the operator
266 
267    Notes:
268    For the common case in which the linear system matrix and the matrix used to construct the
269    preconditioner are identical, this routine does nothing.
270 
271    Level: intermediate
272 
273 .seealso: `PCGetUseAmat()`, `PCBJACOBI`, `PCMG`, `PCFIELDSPLIT`, `PCCOMPOSITE`
274 @*/
275 PetscErrorCode PCSetUseAmat(PC pc, PetscBool flg) {
276   PetscFunctionBegin;
277   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
278   pc->useAmat = flg;
279   PetscFunctionReturn(0);
280 }
281 
282 /*@
283    PCSetErrorIfFailure - Causes PC to generate an error if a FPE, for example a zero pivot, is detected.
284 
285    Logically Collective on PC
286 
287    Input Parameters:
288 +  pc - iterative context obtained from PCCreate()
289 -  flg - PETSC_TRUE indicates you want the error generated
290 
291    Level: advanced
292 
293    Notes:
294     Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner, you can call KSPGetConvergedReason() after a KSPSolve()
295     to determine if it has converged or failed. Or use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is
296     detected.
297 
298     This is propagated into KSPs used by this PC, which then propagate it into PCs used by those KSPs
299 
300 .seealso: `PCGetInitialGuessNonzero()`, `PCSetInitialGuessKnoll()`, `PCGetInitialGuessKnoll()`
301 @*/
PetscErrorCode PCSetErrorIfFailure(PC pc, PetscBool flg) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flg, 2); /* debug-only check that all ranks pass the same flag */
  /* when true, numerical failures (e.g. zero pivot) generate an error instead of being recorded */
  pc->erroriffailure = flg;
  PetscFunctionReturn(0);
}
309 
310 /*@
311    PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the
312    operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
313    TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators() not the Pmat.
314 
315    Logically Collective on PC
316 
317    Input Parameter:
318 .  pc - the preconditioner context
319 
320    Output Parameter:
321 .  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)
322 
323    Notes:
324    For the common case in which the linear system matrix and the matrix used to construct the
325    preconditioner are identical, this routine does nothing.
326 
327    Level: intermediate
328 
329 .seealso: `PCSetUseAmat()`, `PCBJACOBI`, `PCMG`, `PCFIELDSPLIT`, `PCCOMPOSITE`
330 @*/
331 PetscErrorCode PCGetUseAmat(PC pc, PetscBool *flg) {
332   PetscFunctionBegin;
333   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
334   *flg = pc->useAmat;
335   PetscFunctionReturn(0);
336 }
337 
338 /*@
339    PCCreate - Creates a preconditioner context.
340 
341    Collective
342 
343    Input Parameter:
344 .  comm - MPI communicator
345 
346    Output Parameter:
347 .  pc - location to put the preconditioner context
348 
349    Notes:
350    The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi with PCILU or PCICC
351    in parallel. For dense matrices it is always PCNONE.
352 
353    Level: developer
354 
355 .seealso: `PCSetUp()`, `PCApply()`, `PCDestroy()`
356 @*/
PetscErrorCode PCCreate(MPI_Comm comm, PC *newpc) {
  PC pc;

  PetscFunctionBegin;
  PetscValidPointer(newpc, 2);
  *newpc = NULL;
  /* ensure PC class ids and logging events are registered before creating the header */
  PetscCall(PCInitializePackage());

  PetscCall(PetscHeaderCreate(pc, PC_CLASSID, "PC", "Preconditioner", "PC", comm, PCDestroy, PCView));

  /* start with no operators attached, setup not yet called, and no diagonal scaling */
  pc->mat                  = NULL;
  pc->pmat                 = NULL;
  pc->setupcalled          = 0;
  pc->setfromoptionscalled = 0;
  pc->data                 = NULL;
  pc->diagonalscale        = PETSC_FALSE;
  pc->diagonalscaleleft    = NULL;
  pc->diagonalscaleright   = NULL;

  /* user hooks for modifying submatrices (see PCSetModifySubMatrices-style APIs) */
  pc->modifysubmatrices  = NULL;
  pc->modifysubmatricesP = NULL;

  *newpc = pc;
  PetscFunctionReturn(0);
}
382 
383 /* -------------------------------------------------------------------------------*/
384 
385 /*@
386    PCApply - Applies the preconditioner to a vector.
387 
388    Collective on PC
389 
390    Input Parameters:
391 +  pc - the preconditioner context
392 -  x - input vector
393 
394    Output Parameter:
395 .  y - output vector
396 
397    Level: developer
398 
399 .seealso: `PCApplyTranspose()`, `PCApplyBAorAB()`
400 @*/
PetscErrorCode PCApply(PC pc, Vec x, Vec y) {
  PetscInt m, n, mv, nv;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  /* optionally catch NaN/Inf in the input before applying the preconditioner */
  if (pc->erroriffailure) PetscCall(VecValidValues(x, 2, PETSC_TRUE));
  /* use pmat to check vector sizes since for KSPLSQR the pmat may be of a different size than mat */
  PetscCall(MatGetLocalSize(pc->pmat, &m, &n));
  PetscCall(VecGetLocalSize(x, &mv));
  PetscCall(VecGetLocalSize(y, &nv));
  /* check pmat * y = x is feasible */
  PetscCheck(mv == m, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Preconditioner number of local rows %" PetscInt_FMT " does not equal input vector size %" PetscInt_FMT, m, mv);
  PetscCheck(nv == n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Preconditioner number of local columns %" PetscInt_FMT " does not equal output vector size %" PetscInt_FMT, n, nv);
  PetscCall(VecSetErrorIfLocked(y, 3)); /* y must be writable */

  PetscCall(PCSetUp(pc)); /* no-op if already set up */
  /* guard x against modification while the type-specific apply runs */
  PetscCall(VecLockReadPush(x));
  PetscCall(PetscLogEventBegin(PC_Apply, pc, x, y, 0));
  PetscUseTypeMethod(pc, apply, x, y); /* errors if the PC type provides no apply */
  PetscCall(PetscLogEventEnd(PC_Apply, pc, x, y, 0));
  if (pc->erroriffailure) PetscCall(VecValidValues(y, 3, PETSC_FALSE));
  PetscCall(VecLockReadPop(x));
  PetscFunctionReturn(0);
}
428 
429 /*@
430    PCMatApply - Applies the preconditioner to multiple vectors stored as a MATDENSE. Like PCApply(), Y and X must be different matrices.
431 
432    Collective on PC
433 
434    Input Parameters:
435 +  pc - the preconditioner context
436 -  X - block of input vectors
437 
438    Output Parameter:
439 .  Y - block of output vectors
440 
441    Level: developer
442 
443 .seealso: `PCApply()`, `KSPMatSolve()`
444 @*/
PetscErrorCode PCMatApply(PC pc, Mat X, Mat Y) {
  Mat       A;
  Vec       cy, cx;
  /* suffix convention for sizes below: 1 = Y (output), 2 = X (input), 3 = Pmat */
  PetscInt  m1, M1, m2, M2, n1, N1, n2, N2, m3, M3, n3, N3;
  PetscBool match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(X, MAT_CLASSID, 2);
  PetscValidHeaderSpecific(Y, MAT_CLASSID, 3);
  PetscCheckSameComm(pc, 1, X, 2);
  PetscCheckSameComm(pc, 1, Y, 3);
  PetscCheck(Y != X, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "Y and X must be different matrices");
  /* gather local and global layouts of the Pmat and both vector blocks */
  PetscCall(PCGetOperators(pc, NULL, &A));
  PetscCall(MatGetLocalSize(A, &m3, &n3));
  PetscCall(MatGetLocalSize(X, &m2, &n2));
  PetscCall(MatGetLocalSize(Y, &m1, &n1));
  PetscCall(MatGetSize(A, &M3, &N3));
  PetscCall(MatGetSize(X, &M2, &N2));
  PetscCall(MatGetSize(Y, &M1, &N1));
  /* X and Y must hold the same number of columns (vectors) */
  PetscCheck(n1 == n2 && N1 == N2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of input vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and block of output vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")", n2, N2, n1, N1);
  /* rows of X must match rows of Pmat; rows of Y must match columns of Pmat */
  PetscCheck(m2 == m3 && M2 == M3, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of input vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m2, M2, m3, M3, n3, N3);
  PetscCheck(m1 == n3 && M1 == N3, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of output vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m1, M1, m3, M3, n3, N3);
  /* both blocks must be stored as dense matrices */
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)Y, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of output vectors not stored in a dense Mat");
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of input vectors not stored in a dense Mat");
  PetscCall(PCSetUp(pc));
  if (pc->ops->matapply) {
    /* the implementation supports applying to a whole block at once */
    PetscCall(PetscLogEventBegin(PC_MatApply, pc, X, Y, 0));
    PetscUseTypeMethod(pc, matapply, X, Y);
    PetscCall(PetscLogEventEnd(PC_MatApply, pc, X, Y, 0));
  } else {
    /* fallback: apply the preconditioner one column at a time via PCApply() */
    PetscCall(PetscInfo(pc, "PC type %s applying column by column\n", ((PetscObject)pc)->type_name));
    for (n1 = 0; n1 < N1; ++n1) {
      PetscCall(MatDenseGetColumnVecRead(X, n1, &cx));
      PetscCall(MatDenseGetColumnVecWrite(Y, n1, &cy));
      PetscCall(PCApply(pc, cx, cy));
      PetscCall(MatDenseRestoreColumnVecWrite(Y, n1, &cy));
      PetscCall(MatDenseRestoreColumnVecRead(X, n1, &cx));
    }
  }
  PetscFunctionReturn(0);
}
489 
490 /*@
491    PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.
492 
493    Collective on PC
494 
495    Input Parameters:
496 +  pc - the preconditioner context
497 -  x - input vector
498 
499    Output Parameter:
500 .  y - output vector
501 
502    Notes:
503    Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.
504 
505    Level: developer
506 
507 .seealso: `PCApply()`, `PCApplySymmetricRight()`
508 @*/
PetscErrorCode PCApplySymmetricLeft(PC pc, Vec x, Vec y) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  /* optionally catch NaN/Inf in the input */
  if (pc->erroriffailure) PetscCall(VecValidValues(x, 2, PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  PetscCall(VecLockReadPush(x)); /* protect x while the type-specific routine runs */
  PetscCall(PetscLogEventBegin(PC_ApplySymmetricLeft, pc, x, y, 0));
  PetscUseTypeMethod(pc, applysymmetricleft, x, y); /* errors if the PC type provides no symmetric-left apply */
  PetscCall(PetscLogEventEnd(PC_ApplySymmetricLeft, pc, x, y, 0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues(y, 3, PETSC_FALSE));
  PetscFunctionReturn(0);
}
525 
526 /*@
527    PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.
528 
529    Collective on PC
530 
531    Input Parameters:
532 +  pc - the preconditioner context
533 -  x - input vector
534 
535    Output Parameter:
536 .  y - output vector
537 
538    Level: developer
539 
540    Notes:
541    Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.
542 
543 .seealso: `PCApply()`, `PCApplySymmetricLeft()`
544 @*/
PetscErrorCode PCApplySymmetricRight(PC pc, Vec x, Vec y) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  /* optionally catch NaN/Inf in the input */
  if (pc->erroriffailure) PetscCall(VecValidValues(x, 2, PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  PetscCall(VecLockReadPush(x)); /* protect x while the type-specific routine runs */
  PetscCall(PetscLogEventBegin(PC_ApplySymmetricRight, pc, x, y, 0));
  PetscUseTypeMethod(pc, applysymmetricright, x, y); /* errors if the PC type provides no symmetric-right apply */
  PetscCall(PetscLogEventEnd(PC_ApplySymmetricRight, pc, x, y, 0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues(y, 3, PETSC_FALSE));
  PetscFunctionReturn(0);
}
561 
562 /*@
563    PCApplyTranspose - Applies the transpose of preconditioner to a vector.
564 
565    Collective on PC
566 
567    Input Parameters:
568 +  pc - the preconditioner context
569 -  x - input vector
570 
571    Output Parameter:
572 .  y - output vector
573 
574    Notes:
575     For complex numbers this applies the non-Hermitian transpose.
576 
577    Developer Notes:
578     We need to implement a PCApplyHermitianTranspose()
579 
580    Level: developer
581 
582 .seealso: `PCApply()`, `PCApplyBAorAB()`, `PCApplyBAorABTranspose()`, `PCApplyTransposeExists()`
583 @*/
PetscErrorCode PCApplyTranspose(PC pc, Vec x, Vec y) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  /* optionally catch NaN/Inf in the input */
  if (pc->erroriffailure) PetscCall(VecValidValues(x, 2, PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  PetscCall(VecLockReadPush(x)); /* protect x while the type-specific routine runs */
  /* logged under PC_Apply: transpose application shares the forward-apply event */
  PetscCall(PetscLogEventBegin(PC_Apply, pc, x, y, 0));
  PetscUseTypeMethod(pc, applytranspose, x, y); /* errors if unsupported; see PCApplyTransposeExists() */
  PetscCall(PetscLogEventEnd(PC_Apply, pc, x, y, 0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues(y, 3, PETSC_FALSE));
  PetscFunctionReturn(0);
}
600 
601 /*@
602    PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation
603 
604    Collective on PC
605 
606    Input Parameters:
607 .  pc - the preconditioner context
608 
609    Output Parameter:
610 .  flg - PETSC_TRUE if a transpose operation is defined
611 
612    Level: developer
613 
614 .seealso: `PCApplyTranspose()`
615 @*/
616 PetscErrorCode PCApplyTransposeExists(PC pc, PetscBool *flg) {
617   PetscFunctionBegin;
618   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
619   PetscValidBoolPointer(flg, 2);
620   if (pc->ops->applytranspose) *flg = PETSC_TRUE;
621   else *flg = PETSC_FALSE;
622   PetscFunctionReturn(0);
623 }
624 
625 /*@
626    PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x.
627 
628    Collective on PC
629 
630    Input Parameters:
631 +  pc - the preconditioner context
632 .  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
633 .  x - input vector
634 -  work - work vector
635 
636    Output Parameter:
637 .  y - output vector
638 
639    Level: developer
640 
641    Notes:
642     If the PC has had PCSetDiagonalScale() set then D M A D^{-1} for left preconditioning or  D A M D^{-1} is actually applied. Note that the
643    specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling.
644 
645 .seealso: `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorABTranspose()`
646 @*/
PetscErrorCode PCApplyBAorAB(PC pc, PCSide side, Vec x, Vec y, Vec work) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, side, 2);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 3);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 4);
  PetscValidHeaderSpecific(work, VEC_CLASSID, 5);
  PetscCheckSameComm(pc, 1, x, 3);
  PetscCheckSameComm(pc, 1, y, 4);
  PetscCheckSameComm(pc, 1, work, 5);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  PetscCheck(side == PC_LEFT || side == PC_SYMMETRIC || side == PC_RIGHT, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Side must be right, left, or symmetric");
  PetscCheck(!pc->diagonalscale || side != PC_SYMMETRIC, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot include diagonal scaling with symmetric preconditioner application");
  if (pc->erroriffailure) PetscCall(VecValidValues(x, 3, PETSC_TRUE));

  PetscCall(PCSetUp(pc));
  if (pc->diagonalscale) {
    /* scaled path: compute D M A D^{-1} x (left) or D A M D^{-1} x (right) */
    if (pc->ops->applyBA) {
      Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
      PetscCall(VecDuplicate(x, &work2));
      PetscCall(PCDiagonalScaleRight(pc, x, work2));
      PetscUseTypeMethod(pc, applyBA, side, work2, y, work);
      PetscCall(PCDiagonalScaleLeft(pc, y, y));
      PetscCall(VecDestroy(&work2));
    } else if (side == PC_RIGHT) {
      /* y = D A M D^{-1} x, built step by step using y and work as scratch */
      PetscCall(PCDiagonalScaleRight(pc, x, y));
      PetscCall(PCApply(pc, y, work));
      PetscCall(MatMult(pc->mat, work, y));
      PetscCall(PCDiagonalScaleLeft(pc, y, y));
    } else if (side == PC_LEFT) {
      /* y = D M A D^{-1} x */
      PetscCall(PCDiagonalScaleRight(pc, x, y));
      PetscCall(MatMult(pc->mat, y, work));
      PetscCall(PCApply(pc, work, y));
      PetscCall(PCDiagonalScaleLeft(pc, y, y));
    } else PetscCheck(side != PC_SYMMETRIC, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot provide diagonal scaling with symmetric application of preconditioner");
  } else {
    /* unscaled path: prefer the implementation's fused applyBA when available */
    if (pc->ops->applyBA) {
      PetscUseTypeMethod(pc, applyBA, side, x, y, work);
    } else if (side == PC_RIGHT) {
      /* y = A B x */
      PetscCall(PCApply(pc, x, work));
      PetscCall(MatMult(pc->mat, work, y));
    } else if (side == PC_LEFT) {
      /* y = B A x */
      PetscCall(MatMult(pc->mat, x, work));
      PetscCall(PCApply(pc, work, y));
    } else if (side == PC_SYMMETRIC) {
      /* There's an extra copy here; maybe should provide 2 work vectors instead? */
      PetscCall(PCApplySymmetricRight(pc, x, work));
      PetscCall(MatMult(pc->mat, work, y));
      PetscCall(VecCopy(y, work));
      PetscCall(PCApplySymmetricLeft(pc, work, y));
    }
  }
  if (pc->erroriffailure) PetscCall(VecValidValues(y, 4, PETSC_FALSE));
  PetscFunctionReturn(0);
}
702 
703 /*@
704    PCApplyBAorABTranspose - Applies the transpose of the preconditioner
705    and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning,
706    NOT tr(B*A) = tr(A)*tr(B).
707 
708    Collective on PC
709 
710    Input Parameters:
711 +  pc - the preconditioner context
712 .  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
713 .  x - input vector
714 -  work - work vector
715 
716    Output Parameter:
717 .  y - output vector
718 
719    Notes:
720     this routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner
721       defined by B'. This is why this has the funny form that it computes tr(B) * tr(A)
722 
723     Level: developer
724 
725 .seealso: `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorAB()`
726 @*/
727 PetscErrorCode PCApplyBAorABTranspose(PC pc, PCSide side, Vec x, Vec y, Vec work) {
728   PetscFunctionBegin;
729   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
730   PetscValidHeaderSpecific(x, VEC_CLASSID, 3);
731   PetscValidHeaderSpecific(y, VEC_CLASSID, 4);
732   PetscValidHeaderSpecific(work, VEC_CLASSID, 5);
733   PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
734   if (pc->erroriffailure) PetscCall(VecValidValues(x, 3, PETSC_TRUE));
735   if (pc->ops->applyBAtranspose) {
736     PetscUseTypeMethod(pc, applyBAtranspose, side, x, y, work);
737     if (pc->erroriffailure) PetscCall(VecValidValues(y, 4, PETSC_FALSE));
738     PetscFunctionReturn(0);
739   }
740   PetscCheck(side == PC_LEFT || side == PC_RIGHT, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Side must be right or left");
741 
742   PetscCall(PCSetUp(pc));
743   if (side == PC_RIGHT) {
744     PetscCall(PCApplyTranspose(pc, x, work));
745     PetscCall(MatMultTranspose(pc->mat, work, y));
746   } else if (side == PC_LEFT) {
747     PetscCall(MatMultTranspose(pc->mat, x, work));
748     PetscCall(PCApplyTranspose(pc, work, y));
749   }
750   /* add support for PC_SYMMETRIC */
751   if (pc->erroriffailure) PetscCall(VecValidValues(y, 4, PETSC_FALSE));
752   PetscFunctionReturn(0);
753 }
754 
755 /* -------------------------------------------------------------------------------*/
756 
757 /*@
758    PCApplyRichardsonExists - Determines whether a particular preconditioner has a
759    built-in fast application of Richardson's method.
760 
761    Not Collective
762 
763    Input Parameter:
764 .  pc - the preconditioner
765 
766    Output Parameter:
767 .  exists - PETSC_TRUE or PETSC_FALSE
768 
769    Level: developer
770 
771 .seealso: `PCApplyRichardson()`
772 @*/
773 PetscErrorCode PCApplyRichardsonExists(PC pc, PetscBool *exists) {
774   PetscFunctionBegin;
775   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
776   PetscValidBoolPointer(exists, 2);
777   if (pc->ops->applyrichardson) *exists = PETSC_TRUE;
778   else *exists = PETSC_FALSE;
779   PetscFunctionReturn(0);
780 }
781 
782 /*@
783    PCApplyRichardson - Applies several steps of Richardson iteration with
784    the particular preconditioner. This routine is usually used by the
785    Krylov solvers and not the application code directly.
786 
787    Collective on PC
788 
789    Input Parameters:
790 +  pc  - the preconditioner context
791 .  b   - the right hand side
792 .  w   - one work vector
793 .  rtol - relative decrease in residual norm convergence criteria
794 .  abstol - absolute residual norm convergence criteria
795 .  dtol - divergence residual norm increase criteria
796 .  its - the number of iterations to apply.
797 -  guesszero - if the input x contains nonzero initial guess
798 
799    Output Parameters:
800 +  outits - number of iterations actually used (for SOR this always equals its)
801 .  reason - the reason the apply terminated
802 -  y - the solution (also contains initial guess if guesszero is PETSC_FALSE)
803 
804    Notes:
805    Most preconditioners do not support this function. Use the command
806    PCApplyRichardsonExists() to determine if one does.
807 
808    Except for the multigrid PC this routine ignores the convergence tolerances
809    and always runs for the number of iterations
810 
811    Level: developer
812 
813 .seealso: `PCApplyRichardsonExists()`
814 @*/
PetscErrorCode PCApplyRichardson(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(b, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscValidHeaderSpecific(w, VEC_CLASSID, 4);
  PetscCheck(b != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "b and y must be different vectors");
  PetscCall(PCSetUp(pc));
  /* errors if the PC type has no built-in Richardson; query with PCApplyRichardsonExists() first */
  PetscUseTypeMethod(pc, applyrichardson, b, y, w, rtol, abstol, dtol, its, guesszero, outits, reason);
  PetscFunctionReturn(0);
}
826 
827 /*@
828    PCSetFailedReason - Sets the reason a PCSetUp() failed or PC_NOERROR if it did not fail
829 
830    Logically Collective on PC
831 
832    Input Parameters:
833 +  pc - the preconditioner context
-  reason - the reason it failed
835 
836    Level: advanced
837 
838 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCFailedReason`
839 @*/
840 PetscErrorCode PCSetFailedReason(PC pc, PCFailedReason reason) {
841   PetscFunctionBegin;
842   pc->failedreason = reason;
843   PetscFunctionReturn(0);
844 }
845 
846 /*@
847    PCGetFailedReason - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail
848 
849    Logically Collective on PC
850 
851    Input Parameter:
852 .  pc - the preconditioner context
853 
854    Output Parameter:
855 .  reason - the reason it failed
856 
857    Level: advanced
858 
   Notes: This is the maximum of the reason over all ranks in the PC communicator. It is only valid after
   a call to KSPCheckDot() or KSPCheckNorm() inside a KSPSolve(). It is not valid immediately after a PCSetUp()
   or PCApply(); in that case use PCGetFailedReasonRank()
862 
863 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReasonRank()`, `PCSetFailedReason()`
864 @*/
865 PetscErrorCode PCGetFailedReason(PC pc, PCFailedReason *reason) {
866   PetscFunctionBegin;
867   if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
868   else *reason = pc->failedreason;
869   PetscFunctionReturn(0);
870 }
871 
872 /*@
873    PCGetFailedReasonRank - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail on this MPI rank
874 
875   Not Collective on PC
876 
877    Input Parameter:
878 .  pc - the preconditioner context
879 
880    Output Parameter:
881 .  reason - the reason it failed
882 
883    Notes:
884      Different ranks may have different reasons or no reason, see PCGetFailedReason()
885 
886    Level: advanced
887 
888 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReason()`, `PCSetFailedReason()`
889 @*/
890 PetscErrorCode PCGetFailedReasonRank(PC pc, PCFailedReason *reason) {
891   PetscFunctionBegin;
892   if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
893   else *reason = pc->failedreason;
894   PetscFunctionReturn(0);
895 }
896 
897 /*  Next line needed to deactivate KSP_Solve logging */
898 #include <petsc/private/kspimpl.h>
899 
900 /*
901       a setupcall of 0 indicates never setup,
902                      1 indicates has been previously setup
903                     -1 indicates a PCSetUp() was attempted and failed
904 */
905 /*@
906    PCSetUp - Prepares for the use of a preconditioner.
907 
908    Collective on PC
909 
910    Input Parameter:
911 .  pc - the preconditioner context
912 
913    Level: developer
914 
915 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`
916 @*/
PetscErrorCode PCSetUp(PC pc) {
  const char      *def;
  PetscObjectState matstate, matnonzerostate;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCheck(pc->mat, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be set first");

  /* honor an explicit user request (PCSetReusePreconditioner) to keep the current setup */
  if (pc->setupcalled && pc->reusepreconditioner) {
    PetscCall(PetscInfo(pc, "Leaving PC with identical preconditioner since reuse preconditioner is set\n"));
    PetscFunctionReturn(0);
  }

  /* compare the current state of pmat against the state recorded at the previous
     setup to decide whether, and how much of, the setup must be redone */
  PetscCall(PetscObjectStateGet((PetscObject)pc->pmat, &matstate));
  PetscCall(MatGetNonzeroState(pc->pmat, &matnonzerostate));
  if (!pc->setupcalled) {
    PetscCall(PetscInfo(pc, "Setting up PC for first time\n"));
    pc->flag = DIFFERENT_NONZERO_PATTERN;
  } else if (matstate == pc->matstate) {
    /* operator untouched since last setup: nothing to do */
    PetscCall(PetscInfo(pc, "Leaving PC with identical preconditioner since operator is unchanged\n"));
    PetscFunctionReturn(0);
  } else {
    if (matnonzerostate > pc->matnonzerostate) {
      PetscCall(PetscInfo(pc, "Setting up PC with different nonzero pattern\n"));
      pc->flag = DIFFERENT_NONZERO_PATTERN;
    } else {
      /* values changed but sparsity did not; type implementations may exploit this flag */
      PetscCall(PetscInfo(pc, "Setting up PC with same nonzero pattern\n"));
      pc->flag = SAME_NONZERO_PATTERN;
    }
  }
  pc->matstate        = matstate;
  pc->matnonzerostate = matnonzerostate;

  /* lazily pick a default PC type if the user never chose one */
  if (!((PetscObject)pc)->type_name) {
    PetscCall(PCGetDefaultType_Private(pc, &def));
    PetscCall(PCSetType(pc, def));
  }

  PetscCall(MatSetErrorIfFailure(pc->pmat, pc->erroriffailure));
  PetscCall(MatSetErrorIfFailure(pc->mat, pc->erroriffailure));
  PetscCall(PetscLogEventBegin(PC_SetUp, pc, 0, 0, 0));
  if (pc->ops->setup) {
    /* do not log solves and applications of preconditioners while constructing preconditioners; perhaps they should be logged separately from the regular solves */
    PetscCall(KSPInitializePackage());
    PetscCall(PetscLogEventDeactivatePush(KSP_Solve));
    PetscCall(PetscLogEventDeactivatePush(PC_Apply));
    PetscUseTypeMethod(pc, setup);
    /* NOTE(review): pops are not in reverse order of the pushes; this looks harmless
       if deactivation is tracked per event -- confirm against the PetscLogEventDeactivatePush() docs */
    PetscCall(PetscLogEventDeactivatePop(KSP_Solve));
    PetscCall(PetscLogEventDeactivatePop(PC_Apply));
  }
  PetscCall(PetscLogEventEnd(PC_SetUp, pc, 0, 0, 0));
  /* a negative setupcalled (failed setup, see legend above) is deliberately preserved */
  if (!pc->setupcalled) pc->setupcalled = 1;
  PetscFunctionReturn(0);
}
971 
972 /*@
973    PCSetUpOnBlocks - Sets up the preconditioner for each block in
974    the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
975    methods.
976 
977    Collective on PC
978 
979    Input Parameters:
980 .  pc - the preconditioner context
981 
982    Level: developer
983 
984 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCSetUp()`
985 @*/
986 PetscErrorCode PCSetUpOnBlocks(PC pc) {
987   PetscFunctionBegin;
988   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
989   if (!pc->ops->setuponblocks) PetscFunctionReturn(0);
990   PetscCall(PetscLogEventBegin(PC_SetUpOnBlocks, pc, 0, 0, 0));
991   PetscUseTypeMethod(pc, setuponblocks);
992   PetscCall(PetscLogEventEnd(PC_SetUpOnBlocks, pc, 0, 0, 0));
993   PetscFunctionReturn(0);
994 }
995 
996 /*@C
997    PCSetModifySubMatrices - Sets a user-defined routine for modifying the
998    submatrices that arise within certain subdomain-based preconditioners.
999    The basic submatrices are extracted from the preconditioner matrix as
1000    usual; the user can then alter these (for example, to set different boundary
1001    conditions for each submatrix) before they are used for the local solves.
1002 
1003    Logically Collective on PC
1004 
1005    Input Parameters:
1006 +  pc - the preconditioner context
1007 .  func - routine for modifying the submatrices
1008 -  ctx - optional user-defined context (may be null)
1009 
1010    Calling sequence of func:
1011 $     func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx);
1012 
1013 +  row - an array of index sets that contain the global row numbers
1014          that comprise each local submatrix
1015 .  col - an array of index sets that contain the global column numbers
1016          that comprise each local submatrix
1017 .  submat - array of local submatrices
1018 -  ctx - optional user-defined context for private data for the
1019          user-defined func routine (may be null)
1020 
1021    Notes:
1022    PCSetModifySubMatrices() MUST be called before KSPSetUp() and
1023    KSPSolve().
1024 
1025    A routine set by PCSetModifySubMatrices() is currently called within
1026    the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM)
1027    preconditioners.  All other preconditioners ignore this routine.
1028 
1029    Level: advanced
1030 
1031 .seealso: `PCModifySubMatrices()`
1032 @*/
1033 PetscErrorCode PCSetModifySubMatrices(PC pc, PetscErrorCode (*func)(PC, PetscInt, const IS[], const IS[], Mat[], void *), void *ctx) {
1034   PetscFunctionBegin;
1035   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1036   pc->modifysubmatrices  = func;
1037   pc->modifysubmatricesP = ctx;
1038   PetscFunctionReturn(0);
1039 }
1040 
1041 /*@C
1042    PCModifySubMatrices - Calls an optional user-defined routine within
1043    certain preconditioners if one has been set with PCSetModifySubMatrices().
1044 
1045    Collective on PC
1046 
1047    Input Parameters:
1048 +  pc - the preconditioner context
1049 .  nsub - the number of local submatrices
1050 .  row - an array of index sets that contain the global row numbers
1051          that comprise each local submatrix
1052 .  col - an array of index sets that contain the global column numbers
1053          that comprise each local submatrix
1054 .  submat - array of local submatrices
1055 -  ctx - optional user-defined context for private data for the
1056          user-defined routine (may be null)
1057 
1058    Output Parameter:
1059 .  submat - array of local submatrices (the entries of which may
1060             have been modified)
1061 
1062    Notes:
1063    The user should NOT generally call this routine, as it will
1064    automatically be called within certain preconditioners (currently
1065    block Jacobi, additive Schwarz) if set.
1066 
1067    The basic submatrices are extracted from the preconditioner matrix
1068    as usual; the user can then alter these (for example, to set different
1069    boundary conditions for each submatrix) before they are used for the
1070    local solves.
1071 
1072    Level: developer
1073 
1074 .seealso: `PCSetModifySubMatrices()`
1075 @*/
1076 PetscErrorCode PCModifySubMatrices(PC pc, PetscInt nsub, const IS row[], const IS col[], Mat submat[], void *ctx) {
1077   PetscFunctionBegin;
1078   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1079   if (!pc->modifysubmatrices) PetscFunctionReturn(0);
1080   PetscCall(PetscLogEventBegin(PC_ModifySubMatrices, pc, 0, 0, 0));
1081   PetscCall((*pc->modifysubmatrices)(pc, nsub, row, col, submat, ctx));
1082   PetscCall(PetscLogEventEnd(PC_ModifySubMatrices, pc, 0, 0, 0));
1083   PetscFunctionReturn(0);
1084 }
1085 
1086 /*@
1087    PCSetOperators - Sets the matrix associated with the linear system and
1088    a (possibly) different one associated with the preconditioner.
1089 
1090    Logically Collective on PC
1091 
1092    Input Parameters:
1093 +  pc - the preconditioner context
1094 .  Amat - the matrix that defines the linear system
1095 -  Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat.
1096 
1097    Notes:
1098     Passing a NULL for Amat or Pmat removes the matrix that is currently used.
1099 
1100     If you wish to replace either Amat or Pmat but leave the other one untouched then
1101     first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
1102     on it and then pass it back in in your call to KSPSetOperators().
1103 
1104    More Notes about Repeated Solution of Linear Systems:
1105    PETSc does NOT reset the matrix entries of either Amat or Pmat
1106    to zero after a linear solve; the user is completely responsible for
1107    matrix assembly.  See the routine MatZeroEntries() if desiring to
1108    zero all elements of a matrix.
1109 
1110    Level: intermediate
1111 
1112 .seealso: `PCGetOperators()`, `MatZeroEntries()`
1113  @*/
PetscErrorCode PCSetOperators(PC pc, Mat Amat, Mat Pmat) {
  PetscInt m1, n1, m2, n2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* both matrices are optional; NULL removes the currently attached one */
  if (Amat) PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2);
  if (Pmat) PetscValidHeaderSpecific(Pmat, MAT_CLASSID, 3);
  if (Amat) PetscCheckSameComm(pc, 1, Amat, 2);
  if (Pmat) PetscCheckSameComm(pc, 1, Pmat, 3);
  /* after a setup, replacement operators must keep the same local sizes */
  if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) {
    PetscCall(MatGetLocalSize(Amat, &m1, &n1));
    PetscCall(MatGetLocalSize(pc->mat, &m2, &n2));
    PetscCheck(m1 == m2 && n1 == n2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot change local size of Amat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT, m2, n2, m1, n1);
    PetscCall(MatGetLocalSize(Pmat, &m1, &n1));
    PetscCall(MatGetLocalSize(pc->pmat, &m2, &n2));
    PetscCheck(m1 == m2 && n1 == n2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot change local size of Pmat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT, m2, n2, m1, n1);
  }

  if (Pmat != pc->pmat) {
    /* changing the operator that defines the preconditioner, so clear the recorded
       states so PCSetUp() rebuilds the preconditioner from scratch */
    pc->matnonzerostate = -1;
    pc->matstate        = -1;
  }

  /* reference first in case the matrices are the same (destroying first could free them) */
  if (Amat) PetscCall(PetscObjectReference((PetscObject)Amat));
  PetscCall(MatDestroy(&pc->mat));
  if (Pmat) PetscCall(PetscObjectReference((PetscObject)Pmat));
  PetscCall(MatDestroy(&pc->pmat));
  pc->mat  = Amat;
  pc->pmat = Pmat;
  PetscFunctionReturn(0);
}
1147 
1148 /*@
1149    PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner has changed.
1150 
1151    Logically Collective on PC
1152 
1153    Input Parameters:
1154 +  pc - the preconditioner context
1155 -  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner
1156 
1157     Level: intermediate
1158 
1159 .seealso: `PCGetOperators()`, `MatZeroEntries()`, `PCGetReusePreconditioner()`, `KSPSetReusePreconditioner()`
1160  @*/
PetscErrorCode PCSetReusePreconditioner(PC pc, PetscBool flag) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flag, 2);
  /* consulted by PCSetUp(): when true, an existing setup is kept even if the operator changed */
  pc->reusepreconditioner = flag;
  PetscFunctionReturn(0);
}
1168 
1169 /*@
1170    PCGetReusePreconditioner - Determines if the PC reuses the current preconditioner even if the operator in the preconditioner has changed.
1171 
1172    Not Collective
1173 
1174    Input Parameter:
1175 .  pc - the preconditioner context
1176 
1177    Output Parameter:
1178 .  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner
1179 
1180    Level: intermediate
1181 
1182 .seealso: `PCGetOperators()`, `MatZeroEntries()`, `PCSetReusePreconditioner()`
1183  @*/
PetscErrorCode PCGetReusePreconditioner(PC pc, PetscBool *flag) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidBoolPointer(flag, 2);
  /* report the flag last set by PCSetReusePreconditioner() */
  *flag = pc->reusepreconditioner;
  PetscFunctionReturn(0);
}
1191 
1192 /*@
1193    PCGetOperators - Gets the matrix associated with the linear system and
1194    possibly a different one associated with the preconditioner.
1195 
1196    Not collective, though parallel Mats are returned if the PC is parallel
1197 
1198    Input Parameter:
1199 .  pc - the preconditioner context
1200 
1201    Output Parameters:
1202 +  Amat - the matrix defining the linear system
1203 -  Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat.
1204 
1205    Level: intermediate
1206 
1207    Notes:
1208     Does not increase the reference count of the matrices, so you should not destroy them
1209 
1210    Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators
1211       are created in PC and returned to the user. In this case, if both operators
1212       mat and pmat are requested, two DIFFERENT operators will be returned. If
1213       only one is requested both operators in the PC will be the same (i.e. as
1214       if one had called KSP/PCSetOperators() with the same argument for both Mats).
1215       The user must set the sizes of the returned matrices and their type etc just
1216       as if the user created them with MatCreate(). For example,
1217 
1218 $         KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to
1219 $           set size, type, etc of Amat
1220 
1221 $         MatCreate(comm,&mat);
1222 $         KSP/PCSetOperators(ksp/pc,Amat,Amat);
1223 $         PetscObjectDereference((PetscObject)mat);
1224 $           set size, type, etc of Amat
1225 
1226      and
1227 
1228 $         KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to
1229 $           set size, type, etc of Amat and Pmat
1230 
1231 $         MatCreate(comm,&Amat);
1232 $         MatCreate(comm,&Pmat);
1233 $         KSP/PCSetOperators(ksp/pc,Amat,Pmat);
1234 $         PetscObjectDereference((PetscObject)Amat);
1235 $         PetscObjectDereference((PetscObject)Pmat);
1236 $           set size, type, etc of Amat and Pmat
1237 
    The rationale for this support is so that when creating a TS, SNES, or KSP the hierarchy
    of underlying objects (i.e. SNES, KSP, PC, Mat) and their lifespans can be completely
1240     managed by the top most level object (i.e. the TS, SNES, or KSP). Another way to look
1241     at this is when you create a SNES you do not NEED to create a KSP and attach it to
1242     the SNES object (the SNES object manages it for you). Similarly when you create a KSP
1243     you do not need to attach a PC to it (the KSP object manages the PC object for you).
1244     Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when
1245     it can be created for you?
1246 
1247 .seealso: `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperatorsSet()`
1248 @*/
PetscErrorCode PCGetOperators(PC pc, Mat *Amat, Mat *Pmat) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* returned Mats are borrowed references: the reference count is not bumped for
     the caller, so the caller must not destroy them.  Missing operators are
     lazily created empty (see the "Alternative usage" note above). */
  if (Amat) {
    if (!pc->mat) {
      if (pc->pmat && !Pmat) { /* Apmat has been set, but user did not request it, so use for Amat */
        pc->mat = pc->pmat;
        PetscCall(PetscObjectReference((PetscObject)pc->mat));
      } else { /* both Amat and Pmat are empty */
        PetscCall(MatCreate(PetscObjectComm((PetscObject)pc), &pc->mat));
        if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */
          pc->pmat = pc->mat;
          PetscCall(PetscObjectReference((PetscObject)pc->pmat));
        }
      }
    }
    *Amat = pc->mat;
  }
  /* mirror image of the Amat logic above */
  if (Pmat) {
    if (!pc->pmat) {
      if (pc->mat && !Amat) { /* Amat has been set but was not requested, so use for pmat */
        pc->pmat = pc->mat;
        PetscCall(PetscObjectReference((PetscObject)pc->pmat));
      } else {
        PetscCall(MatCreate(PetscObjectComm((PetscObject)pc), &pc->pmat));
        if (!Amat) { /* user did NOT request Amat, so make same as Pmat */
          pc->mat = pc->pmat;
          PetscCall(PetscObjectReference((PetscObject)pc->mat));
        }
      }
    }
    *Pmat = pc->pmat;
  }
  PetscFunctionReturn(0);
}
1284 
1285 /*@C
1286    PCGetOperatorsSet - Determines if the matrix associated with the linear system and
1287    possibly a different one associated with the preconditioner have been set in the PC.
1288 
1289    Not collective, though the results on all processes should be the same
1290 
1291    Input Parameter:
1292 .  pc - the preconditioner context
1293 
1294    Output Parameters:
1295 +  mat - the matrix associated with the linear system was set
1296 -  pmat - matrix associated with the preconditioner was set, usually the same
1297 
1298    Level: intermediate
1299 
1300 .seealso: `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperators()`
1301 @*/
1302 PetscErrorCode PCGetOperatorsSet(PC pc, PetscBool *mat, PetscBool *pmat) {
1303   PetscFunctionBegin;
1304   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1305   if (mat) *mat = (pc->mat) ? PETSC_TRUE : PETSC_FALSE;
1306   if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE;
1307   PetscFunctionReturn(0);
1308 }
1309 
1310 /*@
1311    PCFactorGetMatrix - Gets the factored matrix from the
1312    preconditioner context.  This routine is valid only for the LU,
1313    incomplete LU, Cholesky, and incomplete Cholesky methods.
1314 
1315    Not Collective on PC though Mat is parallel if PC is parallel
1316 
1317    Input Parameters:
1318 .  pc - the preconditioner context
1319 
1320    Output parameters:
1321 .  mat - the factored matrix
1322 
1323    Level: advanced
1324 
1325    Notes:
1326     Does not increase the reference count for the matrix so DO NOT destroy it
1327 
1328 @*/
PetscErrorCode PCFactorGetMatrix(PC pc, Mat *mat) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(mat, 2);
  /* errors if the PC type does not implement getfactoredmatrix (only factorization-based types do) */
  PetscUseTypeMethod(pc, getfactoredmatrix, mat);
  PetscFunctionReturn(0);
}
1336 
1337 /*@C
1338    PCSetOptionsPrefix - Sets the prefix used for searching for all
1339    PC options in the database.
1340 
1341    Logically Collective on PC
1342 
1343    Input Parameters:
1344 +  pc - the preconditioner context
1345 -  prefix - the prefix string to prepend to all PC option requests
1346 
1347    Notes:
1348    A hyphen (-) must NOT be given at the beginning of the prefix name.
1349    The first character of all runtime options is AUTOMATICALLY the
1350    hyphen.
1351 
1352    Level: advanced
1353 
1354 .seealso: `PCAppendOptionsPrefix()`, `PCGetOptionsPrefix()`
1355 @*/
PetscErrorCode PCSetOptionsPrefix(PC pc, const char prefix[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* thin wrapper over the generic PetscObject options-prefix mechanism */
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)pc, prefix));
  PetscFunctionReturn(0);
}
1362 
1363 /*@C
1364    PCAppendOptionsPrefix - Appends to the prefix used for searching for all
1365    PC options in the database.
1366 
1367    Logically Collective on PC
1368 
1369    Input Parameters:
1370 +  pc - the preconditioner context
1371 -  prefix - the prefix string to prepend to all PC option requests
1372 
1373    Notes:
1374    A hyphen (-) must NOT be given at the beginning of the prefix name.
1375    The first character of all runtime options is AUTOMATICALLY the
1376    hyphen.
1377 
1378    Level: advanced
1379 
1380 .seealso: `PCSetOptionsPrefix()`, `PCGetOptionsPrefix()`
1381 @*/
PetscErrorCode PCAppendOptionsPrefix(PC pc, const char prefix[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* thin wrapper over the generic PetscObject options-prefix mechanism */
  PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)pc, prefix));
  PetscFunctionReturn(0);
}
1388 
1389 /*@C
1390    PCGetOptionsPrefix - Gets the prefix used for searching for all
1391    PC options in the database.
1392 
1393    Not Collective
1394 
1395    Input Parameters:
1396 .  pc - the preconditioner context
1397 
1398    Output Parameters:
1399 .  prefix - pointer to the prefix string used, is returned
1400 
1401    Notes:
1402     On the fortran side, the user should pass in a string 'prefix' of
1403    sufficient length to hold the prefix.
1404 
1405    Level: advanced
1406 
1407 .seealso: `PCSetOptionsPrefix()`, `PCAppendOptionsPrefix()`
1408 @*/
PetscErrorCode PCGetOptionsPrefix(PC pc, const char *prefix[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(prefix, 2);
  /* returns a pointer to the object's own prefix string; the caller does not free it */
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc, prefix));
  PetscFunctionReturn(0);
}
1416 
1417 /*
1418    Indicates the right hand side will be changed by KSPSolve(), this occurs for a few
  preconditioners including BDDC and Eisenstat that transform the equations before applying
1420   the Krylov methods
1421 */
PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC pc, PetscBool *change) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(change, 2);
  /* default answer is "no change"; PC types that transform the right hand side
     override it via the composed "PCPreSolveChangeRHS_C" function */
  *change = PETSC_FALSE;
  PetscTryMethod(pc, "PCPreSolveChangeRHS_C", (PC, PetscBool *), (pc, change));
  PetscFunctionReturn(0);
}
1430 
1431 /*@
1432    PCPreSolve - Optional pre-solve phase, intended for any
1433    preconditioner-specific actions that must be performed before
1434    the iterative solve itself.
1435 
1436    Collective on PC
1437 
1438    Input Parameters:
1439 +  pc - the preconditioner context
1440 -  ksp - the Krylov subspace context
1441 
1442    Level: developer
1443 
1444    Sample of Usage:
1445 .vb
1446     PCPreSolve(pc,ksp);
1447     KSPSolve(ksp,b,x);
1448     PCPostSolve(pc,ksp);
1449 .ve
1450 
1451    Notes:
1452    The pre-solve phase is distinct from the PCSetUp() phase.
1453 
1454    KSPSolve() calls this directly, so is rarely called by the user.
1455 
1456 .seealso: `PCPostSolve()`
1457 @*/
PetscErrorCode PCPreSolve(PC pc, KSP ksp) {
  Vec x, rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(ksp, KSP_CLASSID, 2);
  /* track nesting depth; decremented by the matching PCPostSolve() */
  pc->presolvedone++;
  PetscCheck(pc->presolvedone <= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot embed PCPreSolve() more than twice");
  PetscCall(KSPGetSolution(ksp, &x));
  PetscCall(KSPGetRhs(ksp, &rhs));

  /* a type-provided presolve takes precedence over a user callback registered
     with PCSetPreSolve(); at most one of the two is invoked */
  if (pc->ops->presolve) PetscUseTypeMethod(pc, presolve, ksp, rhs, x);
  else if (pc->presolve) PetscCall((pc->presolve)(pc, ksp));
  PetscFunctionReturn(0);
}
1473 
1474 /*@C
1475    PCSetPreSolve - Sets function PCPreSolve() which is intended for any
1476    preconditioner-specific actions that must be performed before
1477    the iterative solve itself.
1478 
1479    Logically Collective on pc
1480 
1481    Input Parameters:
1482 +   pc - the preconditioner object
1483 -   presolve - the function to call before the solve
1484 
1485    Calling sequence of presolve:
1486 $  func(PC pc,KSP ksp)
1487 
1488 +  pc - the PC context
1489 -  ksp - the KSP context
1490 
1491    Level: developer
1492 
1493 .seealso: `PC`, `PCSetUp()`, `PCPreSolve()`
1494 @*/
PetscErrorCode PCSetPreSolve(PC pc, PetscErrorCode (*presolve)(PC, KSP)) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* stash the user callback; it is invoked by PCPreSolve() only when the PC
     type itself provides no presolve method */
  pc->presolve = presolve;
  PetscFunctionReturn(0);
}
1501 
1502 /*@
1503    PCPostSolve - Optional post-solve phase, intended for any
1504    preconditioner-specific actions that must be performed after
1505    the iterative solve itself.
1506 
1507    Collective on PC
1508 
1509    Input Parameters:
1510 +  pc - the preconditioner context
1511 -  ksp - the Krylov subspace context
1512 
1513    Sample of Usage:
1514 .vb
1515     PCPreSolve(pc,ksp);
1516     KSPSolve(ksp,b,x);
1517     PCPostSolve(pc,ksp);
1518 .ve
1519 
1520    Note:
1521    KSPSolve() calls this routine directly, so it is rarely called by the user.
1522 
1523    Level: developer
1524 
1525 .seealso: `PCPreSolve()`, `KSPSolve()`
1526 @*/
PetscErrorCode PCPostSolve(PC pc, KSP ksp) {
  Vec x, rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(ksp, KSP_CLASSID, 2);
  /* unwind the nesting counter incremented by PCPreSolve() */
  pc->presolvedone--;
  PetscCall(KSPGetSolution(ksp, &x));
  PetscCall(KSPGetRhs(ksp, &rhs));
  /* postsolve is optional; silently skipped if the PC type does not implement it */
  PetscTryTypeMethod(pc, postsolve, ksp, rhs, x);
  PetscFunctionReturn(0);
}
1539 
1540 /*@C
1541   PCLoad - Loads a PC that has been stored in binary  with PCView().
1542 
1543   Collective on PetscViewer
1544 
1545   Input Parameters:
1546 + newdm - the newly loaded PC, this needs to have been created with PCCreate() or
1547            some related function before a call to PCLoad().
1548 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen()
1549 
1550    Level: intermediate
1551 
1552   Notes:
1553    The type is determined by the data in the file, any type set into the PC before this call is ignored.
1554 
1555   Notes for advanced users:
1556   Most users should not need to know the details of the binary storage
1557   format, since PCLoad() and PCView() completely hide these details.
1558   But for anyone who's interested, the standard binary matrix storage
1559   format is
1560 .vb
1561      has not yet been determined
1562 .ve
1563 
1564 .seealso: `PetscViewerBinaryOpen()`, `PCView()`, `MatLoad()`, `VecLoad()`
1565 @*/
PetscErrorCode PCLoad(PC newdm, PetscViewer viewer) {
  PetscBool isbinary;
  PetscInt  classid;
  char      type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm, PC_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  /* only binary viewers are supported */
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
  PetscCheck(isbinary, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* header layout: class id then a fixed 256-char type name, matching the
     binary branch of the writer (PCView()) */
  PetscCall(PetscViewerBinaryRead(viewer, &classid, 1, NULL, PETSC_INT));
  PetscCheck(classid == PC_FILE_CLASSID, PetscObjectComm((PetscObject)newdm), PETSC_ERR_ARG_WRONG, "Not PC next in file");
  PetscCall(PetscViewerBinaryRead(viewer, type, 256, NULL, PETSC_CHAR));
  /* the stored type overrides anything previously set on newdm */
  PetscCall(PCSetType(newdm, type));
  /* let the concrete PC type load its own data, if it implements load */
  PetscTryTypeMethod(newdm, load, viewer);
  PetscFunctionReturn(0);
}
1584 
1585 #include <petscdraw.h>
1586 #if defined(PETSC_HAVE_SAWS)
1587 #include <petscviewersaws.h>
1588 #endif
1589 
1590 /*@C
1591    PCViewFromOptions - View from Options
1592 
1593    Collective on PC
1594 
1595    Input Parameters:
1596 +  A - the PC context
1597 .  obj - Optional object
1598 -  name - command line option
1599 
1600    Level: intermediate
1601 .seealso: `PC`, `PCView`, `PetscObjectViewFromOptions()`, `PCCreate()`
1602 @*/
PetscErrorCode PCViewFromOptions(PC A, PetscObject obj, const char name[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, PC_CLASSID, 1);
  /* thin wrapper: the generic routine parses the option and dispatches to PCView() */
  PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
  PetscFunctionReturn(0);
}
1609 
1610 /*@C
1611    PCView - Prints the PC data structure.
1612 
1613    Collective on PC
1614 
1615    Input Parameters:
1616 +  PC - the PC context
1617 -  viewer - optional visualization context
1618 
1619    Note:
1620    The available visualization contexts include
1621 +     PETSC_VIEWER_STDOUT_SELF - standard output (default)
1622 -     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
1623          output where only the first processor opens
1624          the file.  All other processors send their
1625          data to the first processor to print.
1626 
1627    The user can open an alternative visualization contexts with
1628    PetscViewerASCIIOpen() (output to a specified file).
1629 
1630    Level: developer
1631 
1632 .seealso: `KSPView()`, `PetscViewerASCIIOpen()`
1633 @*/
PetscErrorCode PCView(PC pc, PetscViewer viewer) {
  PCType    cstr;
  PetscBool iascii, isstring, isbinary, isdraw;
#if defined(PETSC_HAVE_SAWS)
  PetscBool issaws;
#endif

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* A NULL viewer means "stdout on the PC's communicator" */
  if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc), &viewer));
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCheckSameComm(pc, 1, viewer, 2);

  /* Determine which of the supported viewer backends was supplied; exactly
     one (or none) of these flags will be set */
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
#if defined(PETSC_HAVE_SAWS)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
#endif

  if (iascii) {
    /* Human-readable text output: header line, then the type-specific view
       routine indented one tab level beneath it */
    PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)pc, viewer));
    if (!pc->setupcalled) PetscCall(PetscViewerASCIIPrintf(viewer, "  PC has not been set up so information may be incomplete\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    PetscTryTypeMethod(pc, view, viewer);
    PetscCall(PetscViewerASCIIPopTab(viewer));
    if (pc->mat) {
      /* Matrices are shown in summary (ASCII_INFO) format, not full entries */
      PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_INFO));
      if (pc->pmat == pc->mat) {
        /* Same Mat serves as both operator and preconditioning matrix */
        PetscCall(PetscViewerASCIIPrintf(viewer, "  linear system matrix = precond matrix:\n"));
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(MatView(pc->mat, viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      } else {
        if (pc->pmat) {
          PetscCall(PetscViewerASCIIPrintf(viewer, "  linear system matrix followed by preconditioner matrix:\n"));
        } else {
          PetscCall(PetscViewerASCIIPrintf(viewer, "  linear system matrix:\n"));
        }
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(MatView(pc->mat, viewer));
        if (pc->pmat) PetscCall(MatView(pc->pmat, viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      }
      PetscCall(PetscViewerPopFormat(viewer));
    }
  } else if (isstring) {
    /* Compact single-line summary appended to the string viewer */
    PetscCall(PCGetType(pc, &cstr));
    PetscCall(PetscViewerStringSPrintf(viewer, " PCType: %-7.7s", cstr));
    PetscTryTypeMethod(pc, view, viewer);
    if (pc->mat) PetscCall(MatView(pc->mat, viewer));
    if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat, viewer));
  } else if (isbinary) {
    PetscInt    classid = PC_FILE_CLASSID;
    MPI_Comm    comm;
    PetscMPIInt rank;
    char        type[256];

    /* Binary format: class id followed by a fixed 256-byte type name,
       written by rank 0 only; the type-specific view may append more */
    PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
    PetscCallMPI(MPI_Comm_rank(comm, &rank));
    if (rank == 0) {
      PetscCall(PetscViewerBinaryWrite(viewer, &classid, 1, PETSC_INT));
      PetscCall(PetscStrncpy(type, ((PetscObject)pc)->type_name, 256));
      PetscCall(PetscViewerBinaryWrite(viewer, type, 256, PETSC_CHAR));
    }
    PetscTryTypeMethod(pc, view, viewer);
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x, y, bottom, h;
    PetscInt  n;

    /* Graphical output: draw a boxed label with the PC type (and the global
       row count when a matrix is set), then let the type-specific view draw
       beneath the label */
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (pc->mat) {
      PetscCall(MatGetSize(pc->mat, &n, NULL));
      PetscCall(PetscSNPrintf(str, 25, "PC: %s (%" PetscInt_FMT ")", ((PetscObject)pc)->type_name, n));
    } else {
      PetscCall(PetscSNPrintf(str, 25, "PC: %s", ((PetscObject)pc)->type_name));
    }
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    bottom = y - h;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom));
    PetscTryTypeMethod(pc, view, viewer);
    PetscCall(PetscDrawPopCurrentPoint(draw));
#if defined(PETSC_HAVE_SAWS)
  } else if (issaws) {
    PetscMPIInt rank;

    /* Publish the PC (and its matrices) to SAWs; the object is registered
       at most once, by rank 0 */
    PetscCall(PetscObjectName((PetscObject)pc));
    PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
    if (!((PetscObject)pc)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)pc, viewer));
    if (pc->mat) PetscCall(MatView(pc->mat, viewer));
    if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat, viewer));
#endif
  }
  PetscFunctionReturn(0);
}
1733 
1734 /*@C
1735   PCRegister -  Adds a method to the preconditioner package.
1736 
1737    Not collective
1738 
1739    Input Parameters:
+  sname - name of a new user-defined solver
-  function - routine to create method context
1742 
1743    Notes:
1744    PCRegister() may be called multiple times to add several user-defined preconditioners.
1745 
1746    Sample usage:
1747 .vb
1748    PCRegister("my_solver", MySolverCreate);
1749 .ve
1750 
1751    Then, your solver can be chosen with the procedural interface via
1752 $     PCSetType(pc,"my_solver")
1753    or at runtime via the option
1754 $     -pc_type my_solver
1755 
1756    Level: advanced
1757 
1758 .seealso: `PCRegisterAll()`
1759 @*/
PetscErrorCode PCRegister(const char sname[], PetscErrorCode (*function)(PC)) {
  PetscFunctionBegin;
  /* Make sure the PC package (and hence the PCList registry) exists before
     adding an entry to it */
  PetscCall(PCInitializePackage());
  /* Associate the name with the creation routine in the global function list */
  PetscCall(PetscFunctionListAdd(&PCList, sname, function));
  PetscFunctionReturn(0);
}
1766 
1767 static PetscErrorCode MatMult_PC(Mat A, Vec X, Vec Y) {
1768   PC pc;
1769 
1770   PetscFunctionBegin;
1771   PetscCall(MatShellGetContext(A, &pc));
1772   PetscCall(PCApply(pc, X, Y));
1773   PetscFunctionReturn(0);
1774 }
1775 
1776 /*@
1777     PCComputeOperator - Computes the explicit preconditioned operator.
1778 
1779     Collective on PC
1780 
1781     Input Parameters:
1782 +   pc - the preconditioner object
1783 -   mattype - the matrix type to be used for the operator
1784 
1785     Output Parameter:
1786 .   mat - the explicit preconditioned operator
1787 
1788     Notes:
1789     This computation is done by applying the operators to columns of the identity matrix.
1790     This routine is costly in general, and is recommended for use only with relatively small systems.
1791     Currently, this routine uses a dense matrix format when mattype == NULL
1792 
1793     Level: advanced
1794 
1795 .seealso: `KSPComputeOperator()`, `MatType`
1796 
1797 @*/
1798 PetscErrorCode PCComputeOperator(PC pc, MatType mattype, Mat *mat) {
1799   PetscInt N, M, m, n;
1800   Mat      A, Apc;
1801 
1802   PetscFunctionBegin;
1803   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1804   PetscValidPointer(mat, 3);
1805   PetscCall(PCGetOperators(pc, &A, NULL));
1806   PetscCall(MatGetLocalSize(A, &m, &n));
1807   PetscCall(MatGetSize(A, &M, &N));
1808   PetscCall(MatCreateShell(PetscObjectComm((PetscObject)pc), m, n, M, N, pc, &Apc));
1809   PetscCall(MatShellSetOperation(Apc, MATOP_MULT, (void (*)(void))MatMult_PC));
1810   PetscCall(MatComputeOperator(Apc, mattype, mat));
1811   PetscCall(MatDestroy(&Apc));
1812   PetscFunctionReturn(0);
1813 }
1814 
1815 /*@
1816    PCSetCoordinates - sets the coordinates of all the nodes on the local process
1817 
1818    Collective on PC
1819 
1820    Input Parameters:
1821 +  pc - the solver context
1822 .  dim - the dimension of the coordinates 1, 2, or 3
1823 .  nloc - the blocked size of the coordinates array
1824 -  coords - the coordinates array
1825 
1826    Level: intermediate
1827 
1828    Notes:
1829    coords is an array of the dim coordinates for the nodes on
1830    the local processor, of size dim*nloc.
   If there are 108 equations on a processor
1832    for a displacement finite element discretization of elasticity (so
1833    that there are nloc = 36 = 108/3 nodes) then the array must have 108
1834    double precision values (ie, 3 * 36).  These x y z coordinates
1835    should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x,
1836    ... , N-1.z ].
1837 
1838 .seealso: `MatSetNearNullSpace()`
1839 @*/
PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* dim must be the same on all ranks of the PC's communicator */
  PetscValidLogicalCollectiveInt(pc, dim, 2);
  /* Forward to the implementation's PCSetCoordinates_C method if it has one;
     PC types that do not use coordinates silently ignore this call */
  PetscTryMethod(pc, "PCSetCoordinates_C", (PC, PetscInt, PetscInt, PetscReal *), (pc, dim, nloc, coords));
  PetscFunctionReturn(0);
}
1847 
1848 /*@
1849    PCGetInterpolations - Gets interpolation matrices for all levels (except level 0)
1850 
1851    Logically Collective on PC
1852 
1853    Input Parameter:
.  pc - the preconditioner context
1855 
1856    Output Parameters:
1857 +  num_levels - the number of levels
1858 -  interpolations - the interpolation matrices (size of num_levels-1)
1859 
1860    Level: advanced
1861 
1862 .keywords: MG, GAMG, BoomerAMG, multigrid, interpolation, level
1863 
1864 .seealso: `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetInterpolation()`, `PCGetCoarseOperators()`
1865 @*/
PetscErrorCode PCGetInterpolations(PC pc, PetscInt *num_levels, Mat *interpolations[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidIntPointer(num_levels, 2);
  PetscValidPointer(interpolations, 3);
  /* PetscUseMethod (not Try): raises an error if the PC type does not
     provide PCGetInterpolations_C (i.e. is not a multilevel method) */
  PetscUseMethod(pc, "PCGetInterpolations_C", (PC, PetscInt *, Mat *[]), (pc, num_levels, interpolations));
  PetscFunctionReturn(0);
}
1874 
1875 /*@
1876    PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level)
1877 
1878    Logically Collective on PC
1879 
1880    Input Parameter:
.  pc - the preconditioner context
1882 
1883    Output Parameters:
1884 +  num_levels - the number of levels
1885 -  coarseOperators - the coarse operator matrices (size of num_levels-1)
1886 
1887    Level: advanced
1888 
1889 .keywords: MG, GAMG, BoomerAMG, get, multigrid, interpolation, level
1890 
1891 .seealso: `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetRScale()`, `PCMGGetInterpolation()`, `PCGetInterpolations()`
1892 @*/
PetscErrorCode PCGetCoarseOperators(PC pc, PetscInt *num_levels, Mat *coarseOperators[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidIntPointer(num_levels, 2);
  PetscValidPointer(coarseOperators, 3);
  /* PetscUseMethod (not Try): raises an error if the PC type does not
     provide PCGetCoarseOperators_C (i.e. is not a multilevel method) */
  PetscUseMethod(pc, "PCGetCoarseOperators_C", (PC, PetscInt *, Mat *[]), (pc, num_levels, coarseOperators));
  PetscFunctionReturn(0);
}
1901