xref: /petsc/src/ksp/pc/interface/precon.c (revision d71ae5a4db6382e7f06317b8d368875286fe9008)
1 
2 /*
3     The PC (preconditioner) interface routines, callable by users.
4 */
5 #include <petsc/private/pcimpl.h> /*I "petscksp.h" I*/
6 #include <petscdm.h>
7 
8 /* Logging support */
9 PetscClassId  PC_CLASSID;
10 PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_MatApply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
11 PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks;
12 PetscInt      PetscMGLevelId;
13 
14 PetscErrorCode PCGetDefaultType_Private(PC pc, const char *type[])
15 {
16   PetscMPIInt size;
17   PetscBool   hasop, flg1, flg2, set, flg3, isnormal;
18 
19   PetscFunctionBegin;
20   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
21   if (pc->pmat) {
22     PetscCall(MatHasOperation(pc->pmat, MATOP_GET_DIAGONAL_BLOCK, &hasop));
23     if (size == 1) {
24       PetscCall(MatGetFactorAvailable(pc->pmat, "petsc", MAT_FACTOR_ICC, &flg1));
25       PetscCall(MatGetFactorAvailable(pc->pmat, "petsc", MAT_FACTOR_ILU, &flg2));
26       PetscCall(MatIsSymmetricKnown(pc->pmat, &set, &flg3));
27       PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &isnormal, MATNORMAL, MATNORMALHERMITIAN, NULL));
28       if (flg1 && (!flg2 || (set && flg3))) {
29         *type = PCICC;
30       } else if (flg2) {
31         *type = PCILU;
32       } else if (isnormal) {
33         *type = PCNONE;
34       } else if (hasop) { /* likely is a parallel matrix run on one processor */
35         *type = PCBJACOBI;
36       } else {
37         *type = PCNONE;
38       }
39     } else {
40       if (hasop) {
41         *type = PCBJACOBI;
42       } else {
43         *type = PCNONE;
44       }
45     }
46   } else {
47     if (size == 1) {
48       *type = PCILU;
49     } else {
50       *type = PCBJACOBI;
51     }
52   }
53   PetscFunctionReturn(0);
54 }
55 
/*@
   PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated `Vec`s and `Mat`s

   Collective on pc

   Input Parameter:
.  pc - the preconditioner context

   Level: developer

   Note:
    This allows a `PC` to be reused for a different sized linear system but using the same options that have been previously set in the PC

.seealso: `PC`, `PCCreate()`, `PCSetUp()`
@*/
PetscErrorCode PCReset(PC pc)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* let the implementation release its own data before the common fields are destroyed */
  PetscTryTypeMethod(pc, reset);
  PetscCall(VecDestroy(&pc->diagonalscaleright));
  PetscCall(VecDestroy(&pc->diagonalscaleleft));
  PetscCall(MatDestroy(&pc->pmat));
  PetscCall(MatDestroy(&pc->mat));

  pc->setupcalled = 0; /* force a full PCSetUp() on the next use */
  PetscFunctionReturn(0);
}
84 
/*@C
   PCDestroy - Destroys `PC` context that was created with `PCCreate()`.

   Collective on pc

   Input Parameter:
.  pc - the preconditioner context

   Level: developer

.seealso: `PC`, `PCCreate()`, `PCSetUp()`
@*/
PetscErrorCode PCDestroy(PC *pc)
{
  PetscFunctionBegin;
  if (!*pc) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*pc), PC_CLASSID, 1);
  /* other references still exist: only drop the caller's handle */
  if (--((PetscObject)(*pc))->refct > 0) {
    *pc = NULL;
    PetscFunctionReturn(0);
  }

  /* last reference: release all data held by the PC before freeing the header */
  PetscCall(PCReset(*pc));

  /* if memory was published with SAWs then destroy it */
  PetscCall(PetscObjectSAWsViewOff((PetscObject)*pc));
  PetscTryTypeMethod((*pc), destroy);
  PetscCall(DMDestroy(&(*pc)->dm));
  PetscCall(PetscHeaderDestroy(pc)); /* also sets *pc to NULL */
  PetscFunctionReturn(0);
}
116 
/*@C
   PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right
      scaling as needed by certain time-stepping codes.

   Logically Collective on pc

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  flag - `PETSC_TRUE` if it applies the scaling

   Level: developer

   Note:
    If this returns `PETSC_TRUE` then the system solved via the Krylov method is
.vb
      D M A D^{-1} y = D M b  for left preconditioning or
      D A M D^{-1} z = D b for right preconditioning
.ve

.seealso: `PC`, `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCSetDiagonalScale()`
@*/
PetscErrorCode PCGetDiagonalScale(PC pc, PetscBool *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidBoolPointer(flag, 2);
  /* simply report the flag recorded by PCSetDiagonalScale() */
  *flag = pc->diagonalscale;
  PetscFunctionReturn(0);
}
148 
149 /*@
150    PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right
151       scaling as needed by certain time-stepping codes.
152 
153    Logically Collective on pc
154 
155    Input Parameters:
156 +  pc - the preconditioner context
157 -  s - scaling vector
158 
159    Level: intermediate
160 
161    Notes:
162     The system solved via the Krylov method is
163 .vb
164            D M A D^{-1} y = D M b  for left preconditioning or
165            D A M D^{-1} z = D b for right preconditioning
166 .ve
167 
168    `PCDiagonalScaleLeft()` scales a vector by D. `PCDiagonalScaleRight()` scales a vector by D^{-1}.
169 
170 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCGetDiagonalScale()`
171 @*/
172 PetscErrorCode PCSetDiagonalScale(PC pc, Vec s)
173 {
174   PetscFunctionBegin;
175   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
176   PetscValidHeaderSpecific(s, VEC_CLASSID, 2);
177   pc->diagonalscale = PETSC_TRUE;
178 
179   PetscCall(PetscObjectReference((PetscObject)s));
180   PetscCall(VecDestroy(&pc->diagonalscaleleft));
181 
182   pc->diagonalscaleleft = s;
183 
184   PetscCall(VecDuplicate(s, &pc->diagonalscaleright));
185   PetscCall(VecCopy(s, pc->diagonalscaleright));
186   PetscCall(VecReciprocal(pc->diagonalscaleright));
187   PetscFunctionReturn(0);
188 }
189 
190 /*@
191    PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes.
192 
193    Logically Collective on pc
194 
195    Input Parameters:
196 +  pc - the preconditioner context
197 .  in - input vector
198 -  out - scaled vector (maybe the same as in)
199 
200    Level: intermediate
201 
202    Notes:
203     The system solved via the Krylov method is
204 .vb
205         D M A D^{-1} y = D M b  for left preconditioning or
206         D A M D^{-1} z = D b for right preconditioning
207 .ve
208 
209    `PCDiagonalScaleLeft()` scales a vector by D. `PCDiagonalScaleRight()` scales a vector by D^{-1}.
210 
211    If diagonal scaling is turned off and in is not out then in is copied to out
212 
213 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleSet()`, `PCDiagonalScaleRight()`, `PCDiagonalScale()`
214 @*/
215 PetscErrorCode PCDiagonalScaleLeft(PC pc, Vec in, Vec out)
216 {
217   PetscFunctionBegin;
218   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
219   PetscValidHeaderSpecific(in, VEC_CLASSID, 2);
220   PetscValidHeaderSpecific(out, VEC_CLASSID, 3);
221   if (pc->diagonalscale) {
222     PetscCall(VecPointwiseMult(out, pc->diagonalscaleleft, in));
223   } else if (in != out) {
224     PetscCall(VecCopy(in, out));
225   }
226   PetscFunctionReturn(0);
227 }
228 
229 /*@
230    PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes.
231 
232    Logically Collective on pc
233 
234    Input Parameters:
235 +  pc - the preconditioner context
236 .  in - input vector
237 -  out - scaled vector (maybe the same as in)
238 
239    Level: intermediate
240 
241    Notes:
242     The system solved via the Krylov method is
243 .vb
244         D M A D^{-1} y = D M b  for left preconditioning or
245         D A M D^{-1} z = D b for right preconditioning
246 .ve
247 
248    `PCDiagonalScaleLeft()` scales a vector by D. `PCDiagonalScaleRight()` scales a vector by D^{-1}.
249 
250    If diagonal scaling is turned off and in is not out then in is copied to out
251 
252 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleSet()`, `PCDiagonalScale()`
253 @*/
254 PetscErrorCode PCDiagonalScaleRight(PC pc, Vec in, Vec out)
255 {
256   PetscFunctionBegin;
257   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
258   PetscValidHeaderSpecific(in, VEC_CLASSID, 2);
259   PetscValidHeaderSpecific(out, VEC_CLASSID, 3);
260   if (pc->diagonalscale) {
261     PetscCall(VecPointwiseMult(out, pc->diagonalscaleright, in));
262   } else if (in != out) {
263     PetscCall(VecCopy(in, out));
264   }
265   PetscFunctionReturn(0);
266 }
267 
268 /*@
269    PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the
270    operator during the preconditioning process it applies the Amat provided to `TSSetRHSJacobian()`,
271    `TSSetIJacobian()`, `SNESSetJacobian()`, `KSPSetOperators()` or `PCSetOperators()` not the Pmat.
272 
273    Logically Collective on pc
274 
275    Input Parameters:
276 +  pc - the preconditioner context
277 -  flg - `PETSC_TRUE` to use the Amat, `PETSC_FALSE` to use the Pmat (default is false)
278 
279    Options Database Key:
280 .  -pc_use_amat <true,false> - use the amat to apply the operator
281 
282    Note:
283    For the common case in which the linear system matrix and the matrix used to construct the
284    preconditioner are identical, this routine is does nothing.
285 
286    Level: intermediate
287 
288 .seealso: `PC`, `PCGetUseAmat()`, `PCBJACOBI`, `PGMG`, `PCFIELDSPLIT`, `PCCOMPOSITE`
289 @*/
290 PetscErrorCode PCSetUseAmat(PC pc, PetscBool flg)
291 {
292   PetscFunctionBegin;
293   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
294   pc->useAmat = flg;
295   PetscFunctionReturn(0);
296 }
297 
/*@
   PCSetErrorIfFailure - Causes `PC` to generate an error if a FPE, for example a zero pivot, is detected.

   Logically Collective on pc

   Input Parameters:
+  pc - the preconditioner context obtained from `PCCreate()`
-  flg - `PETSC_TRUE` indicates you want the error generated

   Level: advanced

   Notes:
    Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner, you can call `KSPGetConvergedReason()` after a `KSPSolve()`
    to determine if it has converged or failed. Or use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is
    detected.

    This is propagated into KSPs used by this PC, which then propagate it into PCs used by those KSPs

.seealso: `PC`, `KSPSetErrorIfNotConverged()`, `PCGetInitialGuessNonzero()`, `PCSetInitialGuessKnoll()`, `PCGetInitialGuessKnoll()`
@*/
PetscErrorCode PCSetErrorIfFailure(PC pc, PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* flg must agree on all ranks since it changes collective error behavior */
  PetscValidLogicalCollectiveBool(pc, flg, 2);
  pc->erroriffailure = flg;
  PetscFunctionReturn(0);
}
326 
327 /*@
328    PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the
329    operator during the preconditioning process it applies the Amat provided to `TSSetRHSJacobian()`,
330    `TSSetIJacobian()`, `SNESSetJacobian()`, `KSPSetOperators()` or `PCSetOperators()` not the Pmat.
331 
332    Logically Collective on pc
333 
334    Input Parameter:
335 .  pc - the preconditioner context
336 
337    Output Parameter:
338 .  flg - `PETSC_TRUE` to use the Amat, `PETSC_FALSE` to use the Pmat (default is false)
339 
340    Note:
341    For the common case in which the linear system matrix and the matrix used to construct the
342    preconditioner are identical, this routine is does nothing.
343 
344    Level: intermediate
345 
346 .seealso: `PC`, `PCSetUseAmat()`, `PCBJACOBI`, `PGMG`, `PCFIELDSPLIT`, `PCCOMPOSITE`
347 @*/
348 PetscErrorCode PCGetUseAmat(PC pc, PetscBool *flg)
349 {
350   PetscFunctionBegin;
351   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
352   *flg = pc->useAmat;
353   PetscFunctionReturn(0);
354 }
355 
/*@
   PCCreate - Creates a preconditioner context, `PC`

   Collective

   Input Parameter:
.  comm - MPI communicator

   Output Parameter:
.  newpc - location to put the preconditioner context

   Note:
   The default preconditioner for sparse matrices is `PCILU` or `PCICC` with 0 fill on one process and block Jacobi (`PCBJACOBI`) with `PCILU` or `PCICC`
   in parallel. For dense matrices it is always `PCNONE`.

   Level: developer

.seealso: `PC`, `PCSetUp()`, `PCApply()`, `PCDestroy()`
@*/
PetscErrorCode PCCreate(MPI_Comm comm, PC *newpc)
{
  PC pc;

  PetscFunctionBegin;
  PetscValidPointer(newpc, 2);
  *newpc = NULL;
  /* make sure the PC package (class id, events, registered types) is initialized */
  PetscCall(PCInitializePackage());

  PetscCall(PetscHeaderCreate(pc, PC_CLASSID, "PC", "Preconditioner", "PC", comm, PCDestroy, PCView));

  /* explicitly put the context in its empty, not-yet-set-up state */
  pc->mat                  = NULL;
  pc->pmat                 = NULL;
  pc->setupcalled          = 0;
  pc->setfromoptionscalled = 0;
  pc->data                 = NULL;
  pc->diagonalscale        = PETSC_FALSE;
  pc->diagonalscaleleft    = NULL;
  pc->diagonalscaleright   = NULL;

  pc->modifysubmatrices  = NULL;
  pc->modifysubmatricesP = NULL;

  *newpc = pc;
  PetscFunctionReturn(0);
}
401 
/*@
   PCApply - Applies the preconditioner to a vector.

   Collective on pc

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector; must be a different vector than x

   Level: developer

.seealso: `PC`, `PCApplyTranspose()`, `PCApplyBAorAB()`
@*/
PetscErrorCode PCApply(PC pc, Vec x, Vec y)
{
  PetscInt m, n, mv, nv;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  /* use pmat to check vector sizes since for KSPLSQR the pmat may be of a different size than mat */
  PetscCall(MatGetLocalSize(pc->pmat, &m, &n));
  PetscCall(VecGetLocalSize(x, &mv));
  PetscCall(VecGetLocalSize(y, &nv));
  /* check pmat * y = x is feasible: x must match the row layout of pmat, y its column layout */
  PetscCheck(mv == m, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Preconditioner number of local rows %" PetscInt_FMT " does not equal input vector size %" PetscInt_FMT, m, mv);
  PetscCheck(nv == n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Preconditioner number of local columns %" PetscInt_FMT " does not equal output vector size %" PetscInt_FMT, n, nv);
  PetscCall(VecSetErrorIfLocked(y, 3));

  PetscCall(PCSetUp(pc));
  /* lock x read-only while the implementation runs, so it cannot be modified by the apply */
  PetscCall(VecLockReadPush(x));
  PetscCall(PetscLogEventBegin(PC_Apply, pc, x, y, 0));
  PetscUseTypeMethod(pc, apply, x, y);
  PetscCall(PetscLogEventEnd(PC_Apply, pc, x, y, 0));
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscCall(VecLockReadPop(x));
  PetscFunctionReturn(0);
}
446 
/*@
   PCMatApply - Applies the preconditioner to multiple vectors stored as a `MATDENSE`. Like `PCApply()`, Y and X must be different matrices.

   Collective on pc

   Input Parameters:
+  pc - the preconditioner context
-  X - block of input vectors

   Output Parameter:
.  Y - block of output vectors

   Level: developer

.seealso: `PC`, `PCApply()`, `KSPMatSolve()`
@*/
PetscErrorCode PCMatApply(PC pc, Mat X, Mat Y)
{
  Mat       A;
  Vec       cy, cx;
  PetscInt  m1, M1, m2, M2, n1, N1, n2, N2, m3, M3, n3, N3;
  PetscBool match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(X, MAT_CLASSID, 2);
  PetscValidHeaderSpecific(Y, MAT_CLASSID, 3);
  PetscCheckSameComm(pc, 1, X, 2);
  PetscCheckSameComm(pc, 1, Y, 3);
  PetscCheck(Y != X, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "Y and X must be different matrices");
  /* sizes are checked against the Pmat (second operator) used to build the preconditioner */
  PetscCall(PCGetOperators(pc, NULL, &A));
  PetscCall(MatGetLocalSize(A, &m3, &n3));
  PetscCall(MatGetLocalSize(X, &m2, &n2));
  PetscCall(MatGetLocalSize(Y, &m1, &n1));
  PetscCall(MatGetSize(A, &M3, &N3));
  PetscCall(MatGetSize(X, &M2, &N2));
  PetscCall(MatGetSize(Y, &M1, &N1));
  PetscCheck(n1 == n2 && N1 == N2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of input vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and block of output vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")", n2, N2, n1, N1);
  PetscCheck(m2 == m3 && M2 == M3, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of input vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m2, M2, m3, M3, n3, N3);
  PetscCheck(m1 == n3 && M1 == N3, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of output vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m1, M1, m3, M3, n3, N3);
  /* both blocks must be dense (sequential or parallel) so columns can be addressed as vectors */
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)Y, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of output vectors not stored in a dense Mat");
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of input vectors not stored in a dense Mat");
  PetscCall(PCSetUp(pc));
  if (pc->ops->matapply) {
    /* the implementation supports applying to a whole block at once */
    PetscCall(PetscLogEventBegin(PC_MatApply, pc, X, Y, 0));
    PetscUseTypeMethod(pc, matapply, X, Y);
    PetscCall(PetscLogEventEnd(PC_MatApply, pc, X, Y, 0));
  } else {
    /* fallback: apply the preconditioner one column at a time via PCApply() */
    PetscCall(PetscInfo(pc, "PC type %s applying column by column\n", ((PetscObject)pc)->type_name));
    for (n1 = 0; n1 < N1; ++n1) {
      PetscCall(MatDenseGetColumnVecRead(X, n1, &cx));
      PetscCall(MatDenseGetColumnVecWrite(Y, n1, &cy));
      PetscCall(PCApply(pc, cx, cy));
      PetscCall(MatDenseRestoreColumnVecWrite(Y, n1, &cy));
      PetscCall(MatDenseRestoreColumnVecRead(X, n1, &cx));
    }
  }
  PetscFunctionReturn(0);
}
508 
/*@
   PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.

   Collective on pc

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector; must be a different vector than x

   Note:
   Currently, this routine is implemented only for `PCICC` and `PCJACOBI` preconditioners.

   Level: developer

.seealso: `PC`, `PCApply()`, `PCApplySymmetricRight()`
@*/
PetscErrorCode PCApplySymmetricLeft(PC pc, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  /* x is locked read-only for the duration of the type-specific apply */
  PetscCall(VecLockReadPush(x));
  PetscCall(PetscLogEventBegin(PC_ApplySymmetricLeft, pc, x, y, 0));
  PetscUseTypeMethod(pc, applysymmetricleft, x, y);
  PetscCall(PetscLogEventEnd(PC_ApplySymmetricLeft, pc, x, y, 0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscFunctionReturn(0);
}
545 
/*@
   PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.

   Collective on pc

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector; must be a different vector than x

   Level: developer

   Note:
   Currently, this routine is implemented only for `PCICC` and `PCJACOBI` preconditioners.

.seealso: `PC`, `PCApply()`, `PCApplySymmetricLeft()`
@*/
PetscErrorCode PCApplySymmetricRight(PC pc, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  /* x is locked read-only for the duration of the type-specific apply */
  PetscCall(VecLockReadPush(x));
  PetscCall(PetscLogEventBegin(PC_ApplySymmetricRight, pc, x, y, 0));
  PetscUseTypeMethod(pc, applysymmetricright, x, y);
  PetscCall(PetscLogEventEnd(PC_ApplySymmetricRight, pc, x, y, 0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscFunctionReturn(0);
}
582 
/*@
   PCApplyTranspose - Applies the transpose of preconditioner to a vector.

   Collective on pc

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector; must be a different vector than x

   Note:
    For complex numbers this applies the non-Hermitian transpose.

   Developer Note:
    We need to implement a `PCApplyHermitianTranspose()`

   Level: developer

.seealso: `PC`, `PCApply()`, `PCApplyBAorAB()`, `PCApplyBAorABTranspose()`, `PCApplyTransposeExists()`
@*/
PetscErrorCode PCApplyTranspose(PC pc, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  PetscCall(VecLockReadPush(x));
  /* logged under the same PC_Apply event as PCApply(); there is no separate transpose event */
  PetscCall(PetscLogEventBegin(PC_Apply, pc, x, y, 0));
  PetscUseTypeMethod(pc, applytranspose, x, y);
  PetscCall(PetscLogEventEnd(PC_Apply, pc, x, y, 0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscFunctionReturn(0);
}
622 
623 /*@
624    PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation
625 
626    Collective on pc
627 
628    Input Parameter:
629 .  pc - the preconditioner context
630 
631    Output Parameter:
632 .  flg - `PETSC_TRUE` if a transpose operation is defined
633 
634    Level: developer
635 
636 .seealso: `PC`, `PCApplyTranspose()`
637 @*/
638 PetscErrorCode PCApplyTransposeExists(PC pc, PetscBool *flg)
639 {
640   PetscFunctionBegin;
641   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
642   PetscValidBoolPointer(flg, 2);
643   if (pc->ops->applytranspose) *flg = PETSC_TRUE;
644   else *flg = PETSC_FALSE;
645   PetscFunctionReturn(0);
646 }
647 
/*@
   PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x.

   Collective on pc

   Input Parameters:
+  pc - the preconditioner context
.  side - indicates the preconditioner side, one of `PC_LEFT`, `PC_RIGHT`, or `PC_SYMMETRIC`
.  x - input vector
-  work - work vector

   Output Parameter:
.  y - output vector; must be a different vector than x

   Level: developer

   Note:
    If the `PC` has had `PCSetDiagonalScale()` set then D M A D^{-1} for left preconditioning or  D A M D^{-1} is actually applied. Note that the
   specific `KSPSolve()` method must also be written to handle the post-solve "correction" for the diagonal scaling.

.seealso: `PC`, `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorABTranspose()`
@*/
PetscErrorCode PCApplyBAorAB(PC pc, PCSide side, Vec x, Vec y, Vec work)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, side, 2);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 3);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 4);
  PetscValidHeaderSpecific(work, VEC_CLASSID, 5);
  PetscCheckSameComm(pc, 1, x, 3);
  PetscCheckSameComm(pc, 1, y, 4);
  PetscCheckSameComm(pc, 1, work, 5);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  PetscCheck(side == PC_LEFT || side == PC_SYMMETRIC || side == PC_RIGHT, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Side must be right, left, or symmetric");
  PetscCheck(!pc->diagonalscale || side != PC_SYMMETRIC, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot include diagonal scaling with symmetric preconditioner application");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 3, PETSC_TRUE));

  PetscCall(PCSetUp(pc));
  if (pc->diagonalscale) {
    /* with diagonal scaling D, apply D (M A) D^{-1} or D (A M) D^{-1} instead of B A or A B */
    if (pc->ops->applyBA) {
      Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
      PetscCall(VecDuplicate(x, &work2));
      PetscCall(PCDiagonalScaleRight(pc, x, work2));
      PetscUseTypeMethod(pc, applyBA, side, work2, y, work);
      PetscCall(PCDiagonalScaleLeft(pc, y, y));
      PetscCall(VecDestroy(&work2));
    } else if (side == PC_RIGHT) {
      PetscCall(PCDiagonalScaleRight(pc, x, y));
      PetscCall(PCApply(pc, y, work));
      PetscCall(MatMult(pc->mat, work, y));
      PetscCall(PCDiagonalScaleLeft(pc, y, y));
    } else if (side == PC_LEFT) {
      PetscCall(PCDiagonalScaleRight(pc, x, y));
      PetscCall(MatMult(pc->mat, y, work));
      PetscCall(PCApply(pc, work, y));
      PetscCall(PCDiagonalScaleLeft(pc, y, y));
    } else PetscCheck(side != PC_SYMMETRIC, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot provide diagonal scaling with symmetric application of preconditioner"); /* defensive; already rejected above */
  } else {
    /* no diagonal scaling: use the fused applyBA if available, otherwise compose PCApply and MatMult */
    if (pc->ops->applyBA) {
      PetscUseTypeMethod(pc, applyBA, side, x, y, work);
    } else if (side == PC_RIGHT) {
      PetscCall(PCApply(pc, x, work));
      PetscCall(MatMult(pc->mat, work, y));
    } else if (side == PC_LEFT) {
      PetscCall(MatMult(pc->mat, x, work));
      PetscCall(PCApply(pc, work, y));
    } else if (side == PC_SYMMETRIC) {
      /* There's an extra copy here; maybe should provide 2 work vectors instead? */
      PetscCall(PCApplySymmetricRight(pc, x, work));
      PetscCall(MatMult(pc->mat, work, y));
      PetscCall(VecCopy(y, work));
      PetscCall(PCApplySymmetricLeft(pc, work, y));
    }
  }
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 4, PETSC_FALSE));
  PetscFunctionReturn(0);
}
726 
/*@
   PCApplyBAorABTranspose - Applies the transpose of the preconditioner
   and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning,
   NOT tr(B*A) = tr(A)*tr(B).

   Collective on pc

   Input Parameters:
+  pc - the preconditioner context
.  side - indicates the preconditioner side, one of `PC_LEFT` or `PC_RIGHT` (`PC_SYMMETRIC` is not supported)
.  x - input vector
-  work - work vector

   Output Parameter:
.  y - output vector; must be a different vector than x

   Note:
    This routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner
      defined by B'. This is why this has the funny form that it computes tr(B) * tr(A)

    Level: developer

.seealso: `PC`, `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorAB()`
@*/
PetscErrorCode PCApplyBAorABTranspose(PC pc, PCSide side, Vec x, Vec y, Vec work)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 3);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 4);
  PetscValidHeaderSpecific(work, VEC_CLASSID, 5);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 3, PETSC_TRUE));
  /* use the fused implementation when the PC type provides one */
  if (pc->ops->applyBAtranspose) {
    PetscUseTypeMethod(pc, applyBAtranspose, side, x, y, work);
    if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 4, PETSC_FALSE));
    PetscFunctionReturn(0);
  }
  PetscCheck(side == PC_LEFT || side == PC_RIGHT, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Side must be right or left");

  PetscCall(PCSetUp(pc));
  if (side == PC_RIGHT) {
    PetscCall(PCApplyTranspose(pc, x, work));
    PetscCall(MatMultTranspose(pc->mat, work, y));
  } else if (side == PC_LEFT) {
    PetscCall(MatMultTranspose(pc->mat, x, work));
    PetscCall(PCApplyTranspose(pc, work, y));
  }
  /* add support for PC_SYMMETRIC */
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 4, PETSC_FALSE));
  PetscFunctionReturn(0);
}
779 
780 /*@
781    PCApplyRichardsonExists - Determines whether a particular preconditioner has a
782    built-in fast application of Richardson's method.
783 
784    Not Collective
785 
786    Input Parameter:
787 .  pc - the preconditioner
788 
789    Output Parameter:
790 .  exists - `PETSC_TRUE` or `PETSC_FALSE`
791 
792    Level: developer
793 
794 .seealso: `PC`, `PCRICHARDSON`, `PCApplyRichardson()`
795 @*/
796 PetscErrorCode PCApplyRichardsonExists(PC pc, PetscBool *exists)
797 {
798   PetscFunctionBegin;
799   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
800   PetscValidBoolPointer(exists, 2);
801   if (pc->ops->applyrichardson) *exists = PETSC_TRUE;
802   else *exists = PETSC_FALSE;
803   PetscFunctionReturn(0);
804 }
805 
/*@
   PCApplyRichardson - Applies several steps of Richardson iteration with
   the particular preconditioner. This routine is usually used by the
   Krylov solvers and not the application code directly.

   Collective on pc

   Input Parameters:
+  pc  - the preconditioner context
.  b   - the right hand side
.  w   - one work vector
.  rtol - relative decrease in residual norm convergence criteria
.  abstol - absolute residual norm convergence criteria
.  dtol - divergence residual norm increase criteria
.  its - the number of iterations to apply.
-  guesszero - if the input x contains nonzero initial guess

   Output Parameters:
+  outits - number of iterations actually used (for SOR this always equals its)
.  reason - the reason the apply terminated
-  y - the solution (also contains initial guess if guesszero is `PETSC_FALSE`)

   Notes:
   Most preconditioners do not support this function. Use the command
   `PCApplyRichardsonExists()` to determine if one does.

   Except for the `PCMG` this routine ignores the convergence tolerances
   and always runs for the number of iterations

   Level: developer

.seealso: `PC`, `PCApplyRichardsonExists()`
@*/
PetscErrorCode PCApplyRichardson(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(b, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscValidHeaderSpecific(w, VEC_CLASSID, 4);
  PetscCheck(b != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "b and y must be different vectors");
  PetscCall(PCSetUp(pc));
  /* errors if the PC type does not implement applyrichardson; see PCApplyRichardsonExists() */
  PetscUseTypeMethod(pc, applyrichardson, b, y, w, rtol, abstol, dtol, its, guesszero, outits, reason);
  PetscFunctionReturn(0);
}
851 
852 /*@
853    PCSetFailedReason - Sets the reason a `PCSetUp()` failed or `PC_NOERROR` if it did not fail
854 
855    Logically Collective on pc
856 
857    Input Parameters:
858 +  pc - the preconditioner context
-  reason - the reason it failed
860 
861    Level: advanced
862 
863 .seealso: `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCFailedReason`
864 @*/
865 PetscErrorCode PCSetFailedReason(PC pc, PCFailedReason reason)
866 {
867   PetscFunctionBegin;
868   pc->failedreason = reason;
869   PetscFunctionReturn(0);
870 }
871 
872 /*@
873    PCGetFailedReason - Gets the reason a `PCSetUp()` failed or `PC_NOERROR` if it did not fail
874 
875    Logically Collective on pc
876 
877    Input Parameter:
878 .  pc - the preconditioner context
879 
880    Output Parameter:
881 .  reason - the reason it failed
882 
883    Level: advanced
884 
   Note:
   This is the maximum of the failure reason over all ranks in the `PC` communicator. It is only valid after
   a call to `KSPCheckDot()` or `KSPCheckNorm()` inside a `KSPSolve()`. It is not valid immediately after a `PCSetUp()`
   or `PCApply()`; in that case use `PCGetFailedReasonRank()`
889 
.seealso: `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReasonRank()`, `PCSetFailedReason()`
891 @*/
892 PetscErrorCode PCGetFailedReason(PC pc, PCFailedReason *reason)
893 {
894   PetscFunctionBegin;
895   if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
896   else *reason = pc->failedreason;
897   PetscFunctionReturn(0);
898 }
899 
900 /*@
901    PCGetFailedReasonRank - Gets the reason a `PCSetUp()` failed or `PC_NOERROR` if it did not fail on this MPI rank
902 
903    Not Collective
904 
905    Input Parameter:
906 .  pc - the preconditioner context
907 
908    Output Parameter:
909 .  reason - the reason it failed
910 
911    Note:
912      Different ranks may have different reasons or no reason, see `PCGetFailedReason()`
913 
914    Level: advanced
915 
916 .seealso: `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReason()`, `PCSetFailedReason()`
917 @*/
918 PetscErrorCode PCGetFailedReasonRank(PC pc, PCFailedReason *reason)
919 {
920   PetscFunctionBegin;
921   if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
922   else *reason = pc->failedreason;
923   PetscFunctionReturn(0);
924 }
925 
926 /*  Next line needed to deactivate KSP_Solve logging */
927 #include <petsc/private/kspimpl.h>
928 
929 /*
930       a setupcall of 0 indicates never setup,
931                      1 indicates has been previously setup
932                     -1 indicates a PCSetUp() was attempted and failed
933 */
934 /*@
935    PCSetUp - Prepares for the use of a preconditioner.
936 
937    Collective on pc
938 
939    Input Parameter:
940 .  pc - the preconditioner context
941 
942    Level: developer
943 
944 .seealso: `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`
945 @*/
PetscErrorCode PCSetUp(PC pc)
{
  const char      *def;
  PetscObjectState matstate, matnonzerostate;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCheck(pc->mat, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be set first");

  /* honor PCSetReusePreconditioner(): leave the existing preconditioner untouched */
  if (pc->setupcalled && pc->reusepreconditioner) {
    PetscCall(PetscInfo(pc, "Leaving PC with identical preconditioner since reuse preconditioner is set\n"));
    PetscFunctionReturn(0);
  }

  /* compare the matrix state against the state cached at the last setup to decide how much rebuilding is needed */
  PetscCall(PetscObjectStateGet((PetscObject)pc->pmat, &matstate));
  PetscCall(MatGetNonzeroState(pc->pmat, &matnonzerostate));
  if (!pc->setupcalled) {
    PetscCall(PetscInfo(pc, "Setting up PC for first time\n"));
    pc->flag = DIFFERENT_NONZERO_PATTERN;
  } else if (matstate == pc->matstate) {
    /* operator values unchanged since the last setup: nothing to rebuild */
    PetscCall(PetscInfo(pc, "Leaving PC with identical preconditioner since operator is unchanged\n"));
    PetscFunctionReturn(0);
  } else {
    if (matnonzerostate > pc->matnonzerostate) {
      PetscCall(PetscInfo(pc, "Setting up PC with different nonzero pattern\n"));
      pc->flag = DIFFERENT_NONZERO_PATTERN;
    } else {
      PetscCall(PetscInfo(pc, "Setting up PC with same nonzero pattern\n"));
      pc->flag = SAME_NONZERO_PATTERN;
    }
  }
  /* remember the operator state so the next PCSetUp() can skip or shortcut the rebuild */
  pc->matstate        = matstate;
  pc->matnonzerostate = matnonzerostate;

  /* pick a default type (depends on the matrix and communicator size) if the user never chose one */
  if (!((PetscObject)pc)->type_name) {
    PetscCall(PCGetDefaultType_Private(pc, &def));
    PetscCall(PCSetType(pc, def));
  }

  PetscCall(MatSetErrorIfFailure(pc->pmat, pc->erroriffailure));
  PetscCall(MatSetErrorIfFailure(pc->mat, pc->erroriffailure));
  PetscCall(PetscLogEventBegin(PC_SetUp, pc, 0, 0, 0));
  if (pc->ops->setup) {
    /* do not log solves and applications of preconditioners while constructing preconditioners; perhaps they should be logged separately from the regular solves */
    PetscCall(KSPInitializePackage());
    PetscCall(PetscLogEventDeactivatePush(KSP_Solve));
    PetscCall(PetscLogEventDeactivatePush(PC_Apply));
    PetscUseTypeMethod(pc, setup);
    PetscCall(PetscLogEventDeactivatePop(KSP_Solve));
    PetscCall(PetscLogEventDeactivatePop(PC_Apply));
  }
  PetscCall(PetscLogEventEnd(PC_SetUp, pc, 0, 0, 0));
  /* only promote 0 -> 1; a negative value records a failed setup (see the comment above) and must be preserved */
  if (!pc->setupcalled) pc->setupcalled = 1;
  PetscFunctionReturn(0);
}
1001 
1002 /*@
1003    PCSetUpOnBlocks - Sets up the preconditioner for each block in
1004    the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
1005    methods.
1006 
1007    Collective on pc
1008 
1009    Input Parameter:
1010 .  pc - the preconditioner context
1011 
1012    Level: developer
1013 
1014    Note:
1015    For nested preconditioners such as `PCBJACOBI` `PCSetUp()` is not called on each sub-`KSP` when `PCSetUp()` is
1016    called on the outer `PC`, this routine ensures it is called.
1017 
.seealso: `PC`, `PCSetUp()`, `PCCreate()`, `PCApply()`, `PCDestroy()`
1019 @*/
1020 PetscErrorCode PCSetUpOnBlocks(PC pc)
1021 {
1022   PetscFunctionBegin;
1023   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1024   if (!pc->ops->setuponblocks) PetscFunctionReturn(0);
1025   PetscCall(PetscLogEventBegin(PC_SetUpOnBlocks, pc, 0, 0, 0));
1026   PetscUseTypeMethod(pc, setuponblocks);
1027   PetscCall(PetscLogEventEnd(PC_SetUpOnBlocks, pc, 0, 0, 0));
1028   PetscFunctionReturn(0);
1029 }
1030 
1031 /*@C
1032    PCSetModifySubMatrices - Sets a user-defined routine for modifying the
1033    submatrices that arise within certain subdomain-based preconditioners.
1034    The basic submatrices are extracted from the preconditioner matrix as
1035    usual; the user can then alter these (for example, to set different boundary
1036    conditions for each submatrix) before they are used for the local solves.
1037 
1038    Logically Collective on pc
1039 
1040    Input Parameters:
1041 +  pc - the preconditioner context
1042 .  func - routine for modifying the submatrices
1043 -  ctx - optional user-defined context (may be null)
1044 
1045    Calling sequence of func:
1046 $     func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx);
1047 
1048 +  row - an array of index sets that contain the global row numbers
1049          that comprise each local submatrix
1050 .  col - an array of index sets that contain the global column numbers
1051          that comprise each local submatrix
1052 .  submat - array of local submatrices
1053 -  ctx - optional user-defined context for private data for the
1054          user-defined func routine (may be null)
1055 
1056    Notes:
1057    `PCSetModifySubMatrices()` MUST be called before `KSPSetUp()` and
1058    `KSPSolve()`.
1059 
1060    A routine set by `PCSetModifySubMatrices()` is currently called within
1061    the block Jacobi (`PCBJACOBI`) and additive Schwarz (`PCASM`)
1062    preconditioners.  All other preconditioners ignore this routine.
1063 
1064    Level: advanced
1065 
1066 .seealso: `PC`, `PCBJACOBI`, `PCASM`, `PCModifySubMatrices()`
1067 @*/
1068 PetscErrorCode PCSetModifySubMatrices(PC pc, PetscErrorCode (*func)(PC, PetscInt, const IS[], const IS[], Mat[], void *), void *ctx)
1069 {
1070   PetscFunctionBegin;
1071   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1072   pc->modifysubmatrices  = func;
1073   pc->modifysubmatricesP = ctx;
1074   PetscFunctionReturn(0);
1075 }
1076 
1077 /*@C
1078    PCModifySubMatrices - Calls an optional user-defined routine within
1079    certain preconditioners if one has been set with `PCSetModifySubMatrices()`.
1080 
1081    Collective on pc
1082 
1083    Input Parameters:
1084 +  pc - the preconditioner context
1085 .  nsub - the number of local submatrices
1086 .  row - an array of index sets that contain the global row numbers
1087          that comprise each local submatrix
1088 .  col - an array of index sets that contain the global column numbers
1089          that comprise each local submatrix
1090 .  submat - array of local submatrices
1091 -  ctx - optional user-defined context for private data for the
1092          user-defined routine (may be null)
1093 
1094    Output Parameter:
1095 .  submat - array of local submatrices (the entries of which may
1096             have been modified)
1097 
1098    Notes:
1099    The user should NOT generally call this routine, as it will
1100    automatically be called within certain preconditioners (currently
1101    block Jacobi, additive Schwarz) if set.
1102 
1103    The basic submatrices are extracted from the preconditioner matrix
1104    as usual; the user can then alter these (for example, to set different
1105    boundary conditions for each submatrix) before they are used for the
1106    local solves.
1107 
1108    Level: developer
1109 
1110 .seealso: `PC`, `PCSetModifySubMatrices()`
1111 @*/
1112 PetscErrorCode PCModifySubMatrices(PC pc, PetscInt nsub, const IS row[], const IS col[], Mat submat[], void *ctx)
1113 {
1114   PetscFunctionBegin;
1115   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1116   if (!pc->modifysubmatrices) PetscFunctionReturn(0);
1117   PetscCall(PetscLogEventBegin(PC_ModifySubMatrices, pc, 0, 0, 0));
1118   PetscCall((*pc->modifysubmatrices)(pc, nsub, row, col, submat, ctx));
1119   PetscCall(PetscLogEventEnd(PC_ModifySubMatrices, pc, 0, 0, 0));
1120   PetscFunctionReturn(0);
1121 }
1122 
1123 /*@
1124    PCSetOperators - Sets the matrix associated with the linear system and
1125    a (possibly) different one associated with the preconditioner.
1126 
1127    Logically Collective on pc
1128 
1129    Input Parameters:
1130 +  pc - the preconditioner context
1131 .  Amat - the matrix that defines the linear system
1132 -  Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat.
1133 
1134    Notes:
1135     Passing a NULL for Amat or Pmat removes the matrix that is currently used.
1136 
1137     If you wish to replace either Amat or Pmat but leave the other one untouched then
1138     first call `KSPGetOperators()` to get the one you wish to keep, call `PetscObjectReference()`
1139     on it and then pass it back in in your call to `KSPSetOperators()`.
1140 
1141    More Notes about Repeated Solution of Linear Systems:
1142    PETSc does NOT reset the matrix entries of either Amat or Pmat
1143    to zero after a linear solve; the user is completely responsible for
1144    matrix assembly.  See the routine `MatZeroEntries()` if desiring to
1145    zero all elements of a matrix.
1146 
1147    Level: intermediate
1148 
1149 .seealso: `PC`, `PCGetOperators()`, `MatZeroEntries()`
1150  @*/
PetscErrorCode PCSetOperators(PC pc, Mat Amat, Mat Pmat)
{
  PetscInt m1, n1, m2, n2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (Amat) PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2);
  if (Pmat) PetscValidHeaderSpecific(Pmat, MAT_CLASSID, 3);
  if (Amat) PetscCheckSameComm(pc, 1, Amat, 2);
  if (Pmat) PetscCheckSameComm(pc, 1, Pmat, 3);
  /* once the PC has been set up, replacement matrices must keep the same local sizes */
  if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) {
    PetscCall(MatGetLocalSize(Amat, &m1, &n1));
    PetscCall(MatGetLocalSize(pc->mat, &m2, &n2));
    PetscCheck(m1 == m2 && n1 == n2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot change local size of Amat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT, m2, n2, m1, n1);
    PetscCall(MatGetLocalSize(Pmat, &m1, &n1));
    PetscCall(MatGetLocalSize(pc->pmat, &m2, &n2));
    PetscCheck(m1 == m2 && n1 == n2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot change local size of Pmat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT, m2, n2, m1, n1);
  }

  if (Pmat != pc->pmat) {
    /* the operator that defines the preconditioner changed, so clear the cached states to force a rebuild in PCSetUp() */
    pc->matnonzerostate = -1;
    pc->matstate        = -1;
  }

  /* reference first in case the matrices are the same */
  if (Amat) PetscCall(PetscObjectReference((PetscObject)Amat));
  PetscCall(MatDestroy(&pc->mat));
  if (Pmat) PetscCall(PetscObjectReference((PetscObject)Pmat));
  PetscCall(MatDestroy(&pc->pmat));
  pc->mat  = Amat;
  pc->pmat = Pmat;
  PetscFunctionReturn(0);
}
1185 
1186 /*@
1187    PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner has changed.
1188 
1189    Logically Collective on pc
1190 
1191    Input Parameters:
1192 +  pc - the preconditioner context
1193 -  flag - `PETSC_TRUE` do not compute a new preconditioner, `PETSC_FALSE` do compute a new preconditioner
1194 
1195     Level: intermediate
1196 
1197    Note:
1198    Normally if a matrix inside a `PC` changes the `PC` automatically updates itself using information from the changed matrix. This option
1199    prevents this.
1200 
1201 .seealso: `PC`, `PCGetOperators()`, `MatZeroEntries()`, `PCGetReusePreconditioner()`, `KSPSetReusePreconditioner()`
1202  @*/
PetscErrorCode PCSetReusePreconditioner(PC pc, PetscBool flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flag, 2);
  /* checked by PCSetUp(): when set, an already-built preconditioner is kept even if the operator changes */
  pc->reusepreconditioner = flag;
  PetscFunctionReturn(0);
}
1211 
1212 /*@
1213    PCGetReusePreconditioner - Determines if the `PC` reuses the current preconditioner even if the operator in the preconditioner has changed.
1214 
1215    Not Collective
1216 
1217    Input Parameter:
1218 .  pc - the preconditioner context
1219 
1220    Output Parameter:
1221 .  flag - `PETSC_TRUE` do not compute a new preconditioner, `PETSC_FALSE` do compute a new preconditioner
1222 
1223    Level: intermediate
1224 
1225 .seealso: `PC`, `PCGetOperators()`, `MatZeroEntries()`, `PCSetReusePreconditioner()`
1226  @*/
1227 PetscErrorCode PCGetReusePreconditioner(PC pc, PetscBool *flag)
1228 {
1229   PetscFunctionBegin;
1230   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1231   PetscValidBoolPointer(flag, 2);
1232   *flag = pc->reusepreconditioner;
1233   PetscFunctionReturn(0);
1234 }
1235 
1236 /*@
1237    PCGetOperators - Gets the matrix associated with the linear system and
1238    possibly a different one associated with the preconditioner.
1239 
1240    Not collective, though parallel mats are returned if pc is parallel
1241 
1242    Input Parameter:
1243 .  pc - the preconditioner context
1244 
1245    Output Parameters:
1246 +  Amat - the matrix defining the linear system
1247 -  Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat.
1248 
1249    Level: intermediate
1250 
1251    Note:
1252     Does not increase the reference count of the matrices, so you should not destroy them
1253 
1254    Alternative usage: If the operators have NOT been set with `KSPSetOperators()`/`PCSetOperators()` then the operators
1255       are created in `PC` and returned to the user. In this case, if both operators
1256       mat and pmat are requested, two DIFFERENT operators will be returned. If
1257       only one is requested both operators in the PC will be the same (i.e. as
1258       if one had called `KSPSetOperators()`/`PCSetOperators()` with the same argument for both Mats).
1259       The user must set the sizes of the returned matrices and their type etc just
1260       as if the user created them with `MatCreate()`. For example,
1261 
1262 .vb
1263          KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to
1264            set size, type, etc of Amat
1265 
1266          MatCreate(comm,&mat);
1267          KSP/PCSetOperators(ksp/pc,Amat,Amat);
1268          PetscObjectDereference((PetscObject)mat);
1269            set size, type, etc of Amat
1270 .ve
1271 
1272      and
1273 
1274 .vb
1275          KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to
1276            set size, type, etc of Amat and Pmat
1277 
1278          MatCreate(comm,&Amat);
1279          MatCreate(comm,&Pmat);
1280          KSP/PCSetOperators(ksp/pc,Amat,Pmat);
1281          PetscObjectDereference((PetscObject)Amat);
1282          PetscObjectDereference((PetscObject)Pmat);
1283            set size, type, etc of Amat and Pmat
1284 .ve
1285 
1286     The rationale for this support is so that when creating a `TS`, `SNES`, or `KSP` the hierarchy
    of underlying objects (i.e. `SNES`, `KSP`, `PC`, `Mat`) and their lifespans can be completely
1288     managed by the top most level object (i.e. the `TS`, `SNES`, or `KSP`). Another way to look
1289     at this is when you create a `SNES` you do not NEED to create a `KSP` and attach it to
1290     the `SNES` object (the `SNES` object manages it for you). Similarly when you create a KSP
1291     you do not need to attach a `PC` to it (the `KSP` object manages the `PC` object for you).
1292     Thus, why should YOU have to create the `Mat` and attach it to the `SNES`/`KSP`/`PC`, when
1293     it can be created for you?
1294 
1295 .seealso: `PC`, `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperatorsSet()`
1296 @*/
PetscErrorCode PCGetOperators(PC pc, Mat *Amat, Mat *Pmat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* lazily create missing operators; see the "Alternative usage" note in the manual page above */
  if (Amat) {
    if (!pc->mat) {
      if (pc->pmat && !Pmat) { /* Pmat has been set, but user did not request it, so use for Amat */
        pc->mat = pc->pmat;
        PetscCall(PetscObjectReference((PetscObject)pc->mat));
      } else { /* both Amat and Pmat are empty */
        PetscCall(MatCreate(PetscObjectComm((PetscObject)pc), &pc->mat));
        if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */
          pc->pmat = pc->mat;
          PetscCall(PetscObjectReference((PetscObject)pc->pmat));
        }
      }
    }
    *Amat = pc->mat;
  }
  if (Pmat) {
    if (!pc->pmat) {
      if (pc->mat && !Amat) { /* Amat has been set but was not requested, so use for pmat */
        pc->pmat = pc->mat;
        PetscCall(PetscObjectReference((PetscObject)pc->pmat));
      } else {
        PetscCall(MatCreate(PetscObjectComm((PetscObject)pc), &pc->pmat));
        if (!Amat) { /* user did NOT request Amat, so make same as Pmat */
          pc->mat = pc->pmat;
          PetscCall(PetscObjectReference((PetscObject)pc->mat));
        }
      }
    }
    *Pmat = pc->pmat;
  }
  PetscFunctionReturn(0);
}
1333 
1334 /*@C
1335    PCGetOperatorsSet - Determines if the matrix associated with the linear system and
1336    possibly a different one associated with the preconditioner have been set in the `PC`.
1337 
1338    Not collective, though the results on all processes should be the same
1339 
1340    Input Parameter:
1341 .  pc - the preconditioner context
1342 
1343    Output Parameters:
1344 +  mat - the matrix associated with the linear system was set
1345 -  pmat - matrix associated with the preconditioner was set, usually the same
1346 
1347    Level: intermediate
1348 
1349 .seealso: `PC`, `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperators()`
1350 @*/
1351 PetscErrorCode PCGetOperatorsSet(PC pc, PetscBool *mat, PetscBool *pmat)
1352 {
1353   PetscFunctionBegin;
1354   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1355   if (mat) *mat = (pc->mat) ? PETSC_TRUE : PETSC_FALSE;
1356   if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE;
1357   PetscFunctionReturn(0);
1358 }
1359 
1360 /*@
1361    PCFactorGetMatrix - Gets the factored matrix from the
1362    preconditioner context.  This routine is valid only for the `PCLU`,
1363    `PCILU`, `PCCHOLESKY`, and `PCICC` methods.
1364 
1365    Not Collective on pc though mat is parallel if pc is parallel
1366 
1367    Input Parameter:
1368 .  pc - the preconditioner context
1369 
1370    Output parameters:
1371 .  mat - the factored matrix
1372 
1373    Level: advanced
1374 
1375    Note:
1376     Does not increase the reference count for the matrix so DO NOT destroy it
1377 
1378 .seealso: `PC`, `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC`
1379 @*/
PetscErrorCode PCFactorGetMatrix(PC pc, Mat *mat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(mat, 2);
  /* errors if the PC type does not provide the getfactoredmatrix method (only factorization-based types do) */
  PetscUseTypeMethod(pc, getfactoredmatrix, mat);
  PetscFunctionReturn(0);
}
1388 
1389 /*@C
1390    PCSetOptionsPrefix - Sets the prefix used for searching for all
1391    `PC` options in the database.
1392 
1393    Logically Collective on pc
1394 
1395    Input Parameters:
1396 +  pc - the preconditioner context
1397 -  prefix - the prefix string to prepend to all `PC` option requests
1398 
1399    Note:
1400    A hyphen (-) must NOT be given at the beginning of the prefix name.
1401    The first character of all runtime options is AUTOMATICALLY the
1402    hyphen.
1403 
1404    Level: advanced
1405 
1406 .seealso: `PC`, `PCSetFromOptions`, `PCAppendOptionsPrefix()`, `PCGetOptionsPrefix()`
1407 @*/
PetscErrorCode PCSetOptionsPrefix(PC pc, const char prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* delegate to the generic PetscObject prefix mechanism shared by all PETSc classes */
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)pc, prefix));
  PetscFunctionReturn(0);
}
1415 
1416 /*@C
1417    PCAppendOptionsPrefix - Appends to the prefix used for searching for all
1418    `PC` options in the database.
1419 
1420    Logically Collective on pc
1421 
1422    Input Parameters:
1423 +  pc - the preconditioner context
1424 -  prefix - the prefix string to prepend to all `PC` option requests
1425 
1426    Note:
1427    A hyphen (-) must NOT be given at the beginning of the prefix name.
1428    The first character of all runtime options is AUTOMATICALLY the
1429    hyphen.
1430 
1431    Level: advanced
1432 
1433 .seealso: `PC`, `PCSetFromOptions`, `PCSetOptionsPrefix()`, `PCGetOptionsPrefix()`
1434 @*/
PetscErrorCode PCAppendOptionsPrefix(PC pc, const char prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* delegate to the generic PetscObject prefix mechanism shared by all PETSc classes */
  PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)pc, prefix));
  PetscFunctionReturn(0);
}
1442 
1443 /*@C
1444    PCGetOptionsPrefix - Gets the prefix used for searching for all
1445    PC options in the database.
1446 
1447    Not Collective
1448 
1449    Input Parameter:
1450 .  pc - the preconditioner context
1451 
1452    Output Parameter:
1453 .  prefix - pointer to the prefix string used, is returned
1454 
1455    Fortran Note:
1456    The user should pass in a string 'prefix' of
1457    sufficient length to hold the prefix.
1458 
1459    Level: advanced
1460 
1461 .seealso: `PC`, `PCSetFromOptions`, `PCSetOptionsPrefix()`, `PCAppendOptionsPrefix()`
1462 @*/
PetscErrorCode PCGetOptionsPrefix(PC pc, const char *prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(prefix, 2);
  /* returns a pointer into the object's own prefix storage; the caller must not free it */
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc, prefix));
  PetscFunctionReturn(0);
}
1471 
1472 /*
1473    Indicates the right hand side will be changed by KSPSolve(), this occurs for a few
1474   preconditioners including BDDC and Eisentat that transform the equations before applying
1475   the Krylov methods
1476 */
PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC pc, PetscBool *change)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(change, 2);
  /* default answer: the right-hand side is not changed unless the PC type registered PCPreSolveChangeRHS_C */
  *change = PETSC_FALSE;
  PetscTryMethod(pc, "PCPreSolveChangeRHS_C", (PC, PetscBool *), (pc, change));
  PetscFunctionReturn(0);
}
1486 
1487 /*@
1488    PCPreSolve - Optional pre-solve phase, intended for any
1489    preconditioner-specific actions that must be performed before
1490    the iterative solve itself.
1491 
1492    Collective on pc
1493 
1494    Input Parameters:
1495 +  pc - the preconditioner context
1496 -  ksp - the Krylov subspace context
1497 
1498    Level: developer
1499 
1500    Sample of Usage:
1501 .vb
1502     PCPreSolve(pc,ksp);
1503     KSPSolve(ksp,b,x);
1504     PCPostSolve(pc,ksp);
1505 .ve
1506 
1507    Notes:
1508    The pre-solve phase is distinct from the `PCSetUp()` phase.
1509 
1510    `KSPSolve()` calls this directly, so is rarely called by the user.
1511 
1512 .seealso: `PC`, `PCPostSolve()`
1513 @*/
PetscErrorCode PCPreSolve(PC pc, KSP ksp)
{
  Vec x, rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(ksp, KSP_CLASSID, 2);
  /* presolvedone is decremented by the matching PCPostSolve(); nesting deeper than two levels is unsupported */
  pc->presolvedone++;
  PetscCheck(pc->presolvedone <= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot embed PCPreSolve() more than twice");
  PetscCall(KSPGetSolution(ksp, &x));
  PetscCall(KSPGetRhs(ksp, &rhs));

  /* a type-provided presolve takes precedence over a user callback installed with PCSetPreSolve() */
  if (pc->ops->presolve) PetscUseTypeMethod(pc, presolve, ksp, rhs, x);
  else if (pc->presolve) PetscCall((pc->presolve)(pc, ksp));
  PetscFunctionReturn(0);
}
1530 
1531 /*@C
1532    PCSetPreSolve - Sets function used by `PCPreSolve()` which is intended for any
1533    preconditioner-specific actions that must be performed before
1534    the iterative solve itself.
1535 
1536    Logically Collective on pc
1537 
1538    Input Parameters:
1539 +   pc - the preconditioner object
1540 -   presolve - the function to call before the solve
1541 
1542    Calling sequence of presolve:
1543 $  func(PC pc,KSP ksp)
1544 
1545 +  pc - the PC context
1546 -  ksp - the KSP context
1547 
1548    Level: developer
1549 
1550 .seealso: `PC`, `PCSetUp()`, `PCPreSolve()`
1551 @*/
PetscErrorCode PCSetPreSolve(PC pc, PetscErrorCode (*presolve)(PC, KSP))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* user callback invoked by PCPreSolve() only when the PC type has no presolve of its own */
  pc->presolve = presolve;
  PetscFunctionReturn(0);
}
1559 
1560 /*@
1561    PCPostSolve - Optional post-solve phase, intended for any
1562    preconditioner-specific actions that must be performed after
1563    the iterative solve itself.
1564 
1565    Collective on pc
1566 
1567    Input Parameters:
1568 +  pc - the preconditioner context
1569 -  ksp - the Krylov subspace context
1570 
1571    Sample of Usage:
1572 .vb
1573     PCPreSolve(pc,ksp);
1574     KSPSolve(ksp,b,x);
1575     PCPostSolve(pc,ksp);
1576 .ve
1577 
1578    Note:
1579    `KSPSolve()` calls this routine directly, so it is rarely called by the user.
1580 
1581    Level: developer
1582 
.seealso: `PC`, `PCSetPostSolve()`, `PCSetPreSolve()`, `PCPreSolve()`, `KSPSolve()`
1584 @*/
PetscErrorCode PCPostSolve(PC pc, KSP ksp)
{
  Vec x, rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(ksp, KSP_CLASSID, 2);
  /* undo the increment made by the matching PCPreSolve() */
  pc->presolvedone--;
  PetscCall(KSPGetSolution(ksp, &x));
  PetscCall(KSPGetRhs(ksp, &rhs));
  /* postsolve is optional; silently skipped when the PC type does not provide it */
  PetscTryTypeMethod(pc, postsolve, ksp, rhs, x);
  PetscFunctionReturn(0);
}
1598 
1599 /*@C
  PCLoad - Loads a `PC` that has been stored in binary format with `PCView()`.
1601 
1602   Collective on newdm
1603 
1604   Input Parameters:
1605 + newdm - the newly loaded `PC`, this needs to have been created with `PCCreate()` or
1606            some related function before a call to `PCLoad()`.
1607 - viewer - binary file viewer, obtained from `PetscViewerBinaryOpen()`
1608 
1609    Level: intermediate
1610 
1611   Note:
1612    The type is determined by the data in the file, any type set into the PC before this call is ignored.
1613 
1614 .seealso: `PC`, `PetscViewerBinaryOpen()`, `PCView()`, `MatLoad()`, `VecLoad()`
1615 @*/
1616 PetscErrorCode PCLoad(PC newdm, PetscViewer viewer)
1617 {
1618   PetscBool isbinary;
1619   PetscInt  classid;
1620   char      type[256];
1621 
1622   PetscFunctionBegin;
1623   PetscValidHeaderSpecific(newdm, PC_CLASSID, 1);
1624   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1625   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
1626   PetscCheck(isbinary, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid viewer; open viewer with PetscViewerBinaryOpen()");
1627 
1628   PetscCall(PetscViewerBinaryRead(viewer, &classid, 1, NULL, PETSC_INT));
1629   PetscCheck(classid == PC_FILE_CLASSID, PetscObjectComm((PetscObject)newdm), PETSC_ERR_ARG_WRONG, "Not PC next in file");
1630   PetscCall(PetscViewerBinaryRead(viewer, type, 256, NULL, PETSC_CHAR));
1631   PetscCall(PCSetType(newdm, type));
1632   PetscTryTypeMethod(newdm, load, viewer);
1633   PetscFunctionReturn(0);
1634 }
1635 
1636 #include <petscdraw.h>
1637 #if defined(PETSC_HAVE_SAWS)
1638   #include <petscviewersaws.h>
1639 #endif
1640 
1641 /*@C
1642    PCViewFromOptions - View from the `PC` based on options in the database
1643 
1644    Collective on pc
1645 
1646    Input Parameters:
1647 +  A - the PC context
1648 .  obj - Optional object that provides the options prefix
1649 -  name - command line option
1650 
1651    Level: intermediate
1652 
1653 .seealso: `PC`, `PCView`, `PetscObjectViewFromOptions()`, `PCCreate()`
1654 @*/
PetscErrorCode PCViewFromOptions(PC A, PetscObject obj, const char name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, PC_CLASSID, 1);
  /* delegate to the generic options-driven viewing shared by all PETSc classes */
  PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
  PetscFunctionReturn(0);
}
1662 
/*@C
   PCView - Prints information about the `PC`

   Collective on pc

   Input Parameters:
+  pc - the `PC` context
-  viewer - optional visualization context

   Notes:
   The available visualization contexts include
+     `PETSC_VIEWER_STDOUT_SELF` - standard output (default)
-     `PETSC_VIEWER_STDOUT_WORLD` - synchronized standard
         output where only the first processor opens
         the file.  All other processors send their
         data to the first processor to print.

   The user can open an alternative visualization contexts with
   `PetscViewerASCIIOpen()` (output to a specified file).

   Level: developer

.seealso: `PC`, `PetscViewer`, `KSPView()`, `PetscViewerASCIIOpen()`
@*/
PetscErrorCode PCView(PC pc, PetscViewer viewer)
{
  PCType    cstr;
  PetscBool iascii, isstring, isbinary, isdraw;
#if defined(PETSC_HAVE_SAWS)
  PetscBool issaws;
#endif

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* default to stdout on the PC's communicator when no viewer is supplied */
  if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc), &viewer));
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCheckSameComm(pc, 1, viewer, 2);

  /* dispatch on the viewer type; each branch below handles one format */
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
#if defined(PETSC_HAVE_SAWS)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
#endif

  if (iascii) {
    /* header, type-specific view, then the operator matrices (if set) */
    PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)pc, viewer));
    if (!pc->setupcalled) PetscCall(PetscViewerASCIIPrintf(viewer, "  PC has not been set up so information may be incomplete\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    PetscTryTypeMethod(pc, view, viewer);
    PetscCall(PetscViewerASCIIPopTab(viewer));
    if (pc->mat) {
      PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_INFO));
      if (pc->pmat == pc->mat) {
        /* operator and preconditioner matrix are the same object: print once */
        PetscCall(PetscViewerASCIIPrintf(viewer, "  linear system matrix = precond matrix:\n"));
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(MatView(pc->mat, viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      } else {
        if (pc->pmat) {
          PetscCall(PetscViewerASCIIPrintf(viewer, "  linear system matrix followed by preconditioner matrix:\n"));
        } else {
          PetscCall(PetscViewerASCIIPrintf(viewer, "  linear system matrix:\n"));
        }
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(MatView(pc->mat, viewer));
        if (pc->pmat) PetscCall(MatView(pc->pmat, viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      }
      PetscCall(PetscViewerPopFormat(viewer));
    }
  } else if (isstring) {
    /* compact one-line summary for string viewers */
    PetscCall(PCGetType(pc, &cstr));
    PetscCall(PetscViewerStringSPrintf(viewer, " PCType: %-7.7s", cstr));
    PetscTryTypeMethod(pc, view, viewer);
    if (pc->mat) PetscCall(MatView(pc->mat, viewer));
    if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat, viewer));
  } else if (isbinary) {
    PetscInt    classid = PC_FILE_CLASSID;
    MPI_Comm    comm;
    PetscMPIInt rank;
    char        type[256];

    PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
    PetscCallMPI(MPI_Comm_rank(comm, &rank));
    /* only rank 0 writes the classid and the (fixed 256-byte) type name;
       this mirrors what PCLoad() reads back */
    if (rank == 0) {
      PetscCall(PetscViewerBinaryWrite(viewer, &classid, 1, PETSC_INT));
      PetscCall(PetscStrncpy(type, ((PetscObject)pc)->type_name, 256));
      PetscCall(PetscViewerBinaryWrite(viewer, type, 256, PETSC_CHAR));
    }
    PetscTryTypeMethod(pc, view, viewer);
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x, y, bottom, h;
    PetscInt  n;

    /* draw a labeled box at the current point, then let the implementation
       draw its own detail below it */
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (pc->mat) {
      PetscCall(MatGetSize(pc->mat, &n, NULL));
      PetscCall(PetscSNPrintf(str, 25, "PC: %s (%" PetscInt_FMT ")", ((PetscObject)pc)->type_name, n));
    } else {
      PetscCall(PetscSNPrintf(str, 25, "PC: %s", ((PetscObject)pc)->type_name));
    }
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    bottom = y - h;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom));
    PetscTryTypeMethod(pc, view, viewer);
    PetscCall(PetscDrawPopCurrentPoint(draw));
#if defined(PETSC_HAVE_SAWS)
  } else if (issaws) {
    PetscMPIInt rank;

    PetscCall(PetscObjectName((PetscObject)pc));
    PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
    /* publish to SAWs only once (amsmem flag) and only from rank 0 */
    if (!((PetscObject)pc)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)pc, viewer));
    if (pc->mat) PetscCall(MatView(pc->mat, viewer));
    if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat, viewer));
#endif
  }
  PetscFunctionReturn(0);
}
1787 
/*@C
  PCRegister -  Adds a method to the preconditioner package.

   Not collective

   Input Parameters:
+  sname - name of a new user-defined solver
-  function - routine to create the method context

   Note:
   `PCRegister()` may be called multiple times to add several user-defined preconditioners.

   Sample usage:
.vb
   PCRegister("my_solver", MySolverCreate);
.ve

   Then, your solver can be chosen with the procedural interface via
$     PCSetType(pc,"my_solver")
   or at runtime via the option
$     -pc_type my_solver

   Level: advanced

.seealso: `PC`, `PCType`, `PCRegisterAll()`
@*/
PetscErrorCode PCRegister(const char sname[], PetscErrorCode (*function)(PC))
{
  PetscFunctionBegin;
  /* ensure the PC package (and its function list) exists before registering */
  PetscCall(PCInitializePackage());
  PetscCall(PetscFunctionListAdd(&PCList, sname, function));
  PetscFunctionReturn(0);
}
1821 
1822 static PetscErrorCode MatMult_PC(Mat A, Vec X, Vec Y)
1823 {
1824   PC pc;
1825 
1826   PetscFunctionBegin;
1827   PetscCall(MatShellGetContext(A, &pc));
1828   PetscCall(PCApply(pc, X, Y));
1829   PetscFunctionReturn(0);
1830 }
1831 
1832 /*@
1833     PCComputeOperator - Computes the explicit preconditioned operator.
1834 
1835     Collective on pc
1836 
1837     Input Parameters:
1838 +   pc - the preconditioner object
1839 -   mattype - the matrix type to be used for the operator
1840 
1841     Output Parameter:
1842 .   mat - the explicit preconditioned operator
1843 
1844     Note:
1845     This computation is done by applying the operators to columns of the identity matrix.
1846     This routine is costly in general, and is recommended for use only with relatively small systems.
1847     Currently, this routine uses a dense matrix format when mattype == NULL
1848 
1849     Level: advanced
1850 
1851 .seealso: `PC`, `KSPComputeOperator()`, `MatType`
1852 
1853 @*/
1854 PetscErrorCode PCComputeOperator(PC pc, MatType mattype, Mat *mat)
1855 {
1856   PetscInt N, M, m, n;
1857   Mat      A, Apc;
1858 
1859   PetscFunctionBegin;
1860   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1861   PetscValidPointer(mat, 3);
1862   PetscCall(PCGetOperators(pc, &A, NULL));
1863   PetscCall(MatGetLocalSize(A, &m, &n));
1864   PetscCall(MatGetSize(A, &M, &N));
1865   PetscCall(MatCreateShell(PetscObjectComm((PetscObject)pc), m, n, M, N, pc, &Apc));
1866   PetscCall(MatShellSetOperation(Apc, MATOP_MULT, (void (*)(void))MatMult_PC));
1867   PetscCall(MatComputeOperator(Apc, mattype, mat));
1868   PetscCall(MatDestroy(&Apc));
1869   PetscFunctionReturn(0);
1870 }
1871 
1872 /*@
1873    PCSetCoordinates - sets the coordinates of all the nodes on the local process
1874 
1875    Collective on pc
1876 
1877    Input Parameters:
1878 +  pc - the solver context
1879 .  dim - the dimension of the coordinates 1, 2, or 3
1880 .  nloc - the blocked size of the coordinates array
1881 -  coords - the coordinates array
1882 
1883    Level: intermediate
1884 
1885    Note:
1886    coords is an array of the dim coordinates for the nodes on
1887    the local processor, of size dim*nloc.
1888    If there are 108 equation on a processor
1889    for a displacement finite element discretization of elasticity (so
1890    that there are nloc = 36 = 108/3 nodes) then the array must have 108
1891    double precision values (ie, 3 * 36).  These x y z coordinates
1892    should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x,
1893    ... , N-1.z ].
1894 
1895 .seealso: `PC`, `MatSetNearNullSpace()`
1896 @*/
1897 PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
1898 {
1899   PetscFunctionBegin;
1900   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1901   PetscValidLogicalCollectiveInt(pc, dim, 2);
1902   PetscTryMethod(pc, "PCSetCoordinates_C", (PC, PetscInt, PetscInt, PetscReal *), (pc, dim, nloc, coords));
1903   PetscFunctionReturn(0);
1904 }
1905 
1906 /*@
1907    PCGetInterpolations - Gets interpolation matrices for all levels (except level 0)
1908 
1909    Logically Collective on pc
1910 
1911    Input Parameter:
1912 .  pc - the precondition context
1913 
1914    Output Parameters:
1915 +  num_levels - the number of levels
1916 -  interpolations - the interpolation matrices (size of num_levels-1)
1917 
1918    Level: advanced
1919 
1920    Developer Note:
1921    Why is this here instead of in `PCMG` etc?
1922 
1923 .seealso: `PC`, `PCMG`, `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetInterpolation()`, `PCGetCoarseOperators()`
1924 @*/
1925 PetscErrorCode PCGetInterpolations(PC pc, PetscInt *num_levels, Mat *interpolations[])
1926 {
1927   PetscFunctionBegin;
1928   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1929   PetscValidIntPointer(num_levels, 2);
1930   PetscValidPointer(interpolations, 3);
1931   PetscUseMethod(pc, "PCGetInterpolations_C", (PC, PetscInt *, Mat *[]), (pc, num_levels, interpolations));
1932   PetscFunctionReturn(0);
1933 }
1934 
1935 /*@
1936    PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level)
1937 
1938    Logically Collective on pc
1939 
1940    Input Parameter:
1941 .  pc - the precondition context
1942 
1943    Output Parameters:
1944 +  num_levels - the number of levels
1945 -  coarseOperators - the coarse operator matrices (size of num_levels-1)
1946 
1947    Level: advanced
1948 
1949    Developer Note:
1950    Why is this here instead of in `PCMG` etc?
1951 
1952 .seealso: `PC`, `PCMG`, `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetRScale()`, `PCMGGetInterpolation()`, `PCGetInterpolations()`
1953 @*/
1954 PetscErrorCode PCGetCoarseOperators(PC pc, PetscInt *num_levels, Mat *coarseOperators[])
1955 {
1956   PetscFunctionBegin;
1957   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1958   PetscValidIntPointer(num_levels, 2);
1959   PetscValidPointer(coarseOperators, 3);
1960   PetscUseMethod(pc, "PCGetCoarseOperators_C", (PC, PetscInt *, Mat *[]), (pc, num_levels, coarseOperators));
1961   PetscFunctionReturn(0);
1962 }
1963