xref: /petsc/src/ksp/pc/interface/precon.c (revision 40cbb1a031ea8f2be4fe2b92dc842b003ad37be3)
1 
2 /*
3     The PC (preconditioner) interface routines, callable by users.
4 */
5 #include <petsc/private/pcimpl.h>            /*I "petscksp.h" I*/
6 #include <petscdm.h>
7 
8 /* Logging support */
9 PetscClassId  PC_CLASSID;
10 PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_MatApply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
11 PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks;
12 PetscInt      PetscMGLevelId;
13 
14 PetscErrorCode PCGetDefaultType_Private(PC pc,const char *type[])
15 {
16   PetscMPIInt    size;
17   PetscBool      hasop,flg1,flg2,set,flg3,isnormal;
18 
19   PetscFunctionBegin;
20   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size));
21   if (pc->pmat) {
22     PetscCall(MatHasOperation(pc->pmat,MATOP_GET_DIAGONAL_BLOCK,&hasop));
23     if (size == 1) {
24       PetscCall(MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1));
25       PetscCall(MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2));
26       PetscCall(MatIsSymmetricKnown(pc->pmat,&set,&flg3));
27       PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat,&isnormal,MATNORMAL,MATNORMALHERMITIAN,NULL));
28       if (flg1 && (!flg2 || (set && flg3))) {
29         *type = PCICC;
30       } else if (flg2) {
31         *type = PCILU;
32       } else if (isnormal) {
33         *type = PCNONE;
34       } else if (hasop) { /* likely is a parallel matrix run on one processor */
35         *type = PCBJACOBI;
36       } else {
37         *type = PCNONE;
38       }
39     } else {
40        if (hasop) {
41         *type = PCBJACOBI;
42       } else {
43         *type = PCNONE;
44       }
45     }
46   } else {
47     if (size == 1) {
48       *type = PCILU;
49     } else {
50       *type = PCBJACOBI;
51     }
52   }
53   PetscFunctionReturn(0);
54 }
55 
56 /*@
57    PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats
58 
59    Collective on PC
60 
61    Input Parameter:
62 .  pc - the preconditioner context
63 
64    Level: developer
65 
66    Notes:
67     This allows a PC to be reused for a different sized linear system but using the same options that have been previously set in the PC
68 
69 .seealso: `PCCreate()`, `PCSetUp()`
70 @*/
71 PetscErrorCode  PCReset(PC pc)
72 {
73   PetscFunctionBegin;
74   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
75   if (pc->ops->reset) {
76     PetscCall((*pc->ops->reset)(pc));
77   }
78   PetscCall(VecDestroy(&pc->diagonalscaleright));
79   PetscCall(VecDestroy(&pc->diagonalscaleleft));
80   PetscCall(MatDestroy(&pc->pmat));
81   PetscCall(MatDestroy(&pc->mat));
82 
83   pc->setupcalled = 0;
84   PetscFunctionReturn(0);
85 }
86 
/*@C
   PCDestroy - Destroys PC context that was created with PCCreate().

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context; set to NULL on return

   Level: developer

.seealso: `PCCreate()`, `PCSetUp()`
@*/
PetscErrorCode  PCDestroy(PC *pc)
{
  PetscFunctionBegin;
  if (!*pc) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*pc),PC_CLASSID,1);
  /* object is shared: just drop this reference and zero the caller's pointer */
  if (--((PetscObject)(*pc))->refct > 0) {*pc = NULL; PetscFunctionReturn(0);}

  /* last reference: release vectors/matrices before tearing the object down */
  PetscCall(PCReset(*pc));

  /* if memory was published with SAWs then destroy it */
  PetscCall(PetscObjectSAWsViewOff((PetscObject)*pc));
  if ((*pc)->ops->destroy) PetscCall((*(*pc)->ops->destroy)((*pc)));
  PetscCall(DMDestroy(&(*pc)->dm));
  PetscCall(PetscHeaderDestroy(pc));
  PetscFunctionReturn(0);
}
115 
116 /*@C
117    PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right
118       scaling as needed by certain time-stepping codes.
119 
120    Logically Collective on PC
121 
122    Input Parameter:
123 .  pc - the preconditioner context
124 
125    Output Parameter:
126 .  flag - PETSC_TRUE if it applies the scaling
127 
128    Level: developer
129 
130    Notes:
131     If this returns PETSC_TRUE then the system solved via the Krylov method is
132 $           D M A D^{-1} y = D M b  for left preconditioning or
133 $           D A M D^{-1} z = D b for right preconditioning
134 
135 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCSetDiagonalScale()`
136 @*/
137 PetscErrorCode  PCGetDiagonalScale(PC pc,PetscBool  *flag)
138 {
139   PetscFunctionBegin;
140   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
141   PetscValidBoolPointer(flag,2);
142   *flag = pc->diagonalscale;
143   PetscFunctionReturn(0);
144 }
145 
146 /*@
147    PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right
148       scaling as needed by certain time-stepping codes.
149 
150    Logically Collective on PC
151 
152    Input Parameters:
153 +  pc - the preconditioner context
154 -  s - scaling vector
155 
156    Level: intermediate
157 
158    Notes:
159     The system solved via the Krylov method is
160 $           D M A D^{-1} y = D M b  for left preconditioning or
161 $           D A M D^{-1} z = D b for right preconditioning
162 
163    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
164 
165 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCGetDiagonalScale()`
166 @*/
167 PetscErrorCode  PCSetDiagonalScale(PC pc,Vec s)
168 {
169   PetscFunctionBegin;
170   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
171   PetscValidHeaderSpecific(s,VEC_CLASSID,2);
172   pc->diagonalscale     = PETSC_TRUE;
173 
174   PetscCall(PetscObjectReference((PetscObject)s));
175   PetscCall(VecDestroy(&pc->diagonalscaleleft));
176 
177   pc->diagonalscaleleft = s;
178 
179   PetscCall(VecDuplicate(s,&pc->diagonalscaleright));
180   PetscCall(VecCopy(s,pc->diagonalscaleright));
181   PetscCall(VecReciprocal(pc->diagonalscaleright));
182   PetscFunctionReturn(0);
183 }
184 
185 /*@
186    PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes.
187 
188    Logically Collective on PC
189 
190    Input Parameters:
191 +  pc - the preconditioner context
192 .  in - input vector
193 -  out - scaled vector (maybe the same as in)
194 
195    Level: intermediate
196 
197    Notes:
198     The system solved via the Krylov method is
199 $           D M A D^{-1} y = D M b  for left preconditioning or
200 $           D A M D^{-1} z = D b for right preconditioning
201 
202    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
203 
204    If diagonal scaling is turned off and in is not out then in is copied to out
205 
206 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleSet()`, `PCDiagonalScaleRight()`, `PCDiagonalScale()`
207 @*/
208 PetscErrorCode  PCDiagonalScaleLeft(PC pc,Vec in,Vec out)
209 {
210   PetscFunctionBegin;
211   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
212   PetscValidHeaderSpecific(in,VEC_CLASSID,2);
213   PetscValidHeaderSpecific(out,VEC_CLASSID,3);
214   if (pc->diagonalscale) {
215     PetscCall(VecPointwiseMult(out,pc->diagonalscaleleft,in));
216   } else if (in != out) {
217     PetscCall(VecCopy(in,out));
218   }
219   PetscFunctionReturn(0);
220 }
221 
222 /*@
223    PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes.
224 
225    Logically Collective on PC
226 
227    Input Parameters:
228 +  pc - the preconditioner context
229 .  in - input vector
230 -  out - scaled vector (maybe the same as in)
231 
232    Level: intermediate
233 
234    Notes:
235     The system solved via the Krylov method is
236 $           D M A D^{-1} y = D M b  for left preconditioning or
237 $           D A M D^{-1} z = D b for right preconditioning
238 
239    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
240 
241    If diagonal scaling is turned off and in is not out then in is copied to out
242 
243 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleSet()`, `PCDiagonalScale()`
244 @*/
245 PetscErrorCode  PCDiagonalScaleRight(PC pc,Vec in,Vec out)
246 {
247   PetscFunctionBegin;
248   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
249   PetscValidHeaderSpecific(in,VEC_CLASSID,2);
250   PetscValidHeaderSpecific(out,VEC_CLASSID,3);
251   if (pc->diagonalscale) {
252     PetscCall(VecPointwiseMult(out,pc->diagonalscaleright,in));
253   } else if (in != out) {
254     PetscCall(VecCopy(in,out));
255   }
256   PetscFunctionReturn(0);
257 }
258 
/*@
   PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the
   operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
   TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators() not the Pmat.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)

   Options Database Key:
.  -pc_use_amat <true,false> - use the amat to apply the operator

   Notes:
   For the common case in which the linear system matrix and the matrix used to construct the
   preconditioner are identical, this routine does nothing.

   Level: intermediate

.seealso: `PCGetUseAmat()`, `PCBJACOBI`, `PCMG`, `PCFIELDSPLIT`, `PCCOMPOSITE`
@*/
PetscErrorCode  PCSetUseAmat(PC pc,PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* flag is read back via PCGetUseAmat() */
  pc->useAmat = flg;
  PetscFunctionReturn(0);
}
288 
/*@
   PCSetErrorIfFailure - Causes PC to generate an error if a FPE, for example a zero pivot, is detected.

   Logically Collective on PC

   Input Parameters:
+  pc - iterative context obtained from PCCreate()
-  flg - PETSC_TRUE indicates you want the error generated

   Level: advanced

   Notes:
    Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner, you can call KSPGetConvergedReason() after a KSPSolve()
    to determine if it has converged or failed. Or use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is
    detected.

    This is propagated into KSPs used by this PC, which then propagate it into PCs used by those KSPs

.seealso: `PCGetInitialGuessNonzero()`, `PCSetInitialGuessKnoll()`, `PCGetInitialGuessKnoll()`
@*/
PetscErrorCode  PCSetErrorIfFailure(PC pc,PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,flg,2);
  /* consulted by the apply routines in this file, which then run
     VecValidValues() on their input/output vectors when it is true */
  pc->erroriffailure = flg;
  PetscFunctionReturn(0);
}
317 
318 /*@
319    PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the
320    operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
321    TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators() not the Pmat.
322 
323    Logically Collective on PC
324 
325    Input Parameter:
326 .  pc - the preconditioner context
327 
328    Output Parameter:
329 .  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)
330 
331    Notes:
332    For the common case in which the linear system matrix and the matrix used to construct the
333    preconditioner are identical, this routine is does nothing.
334 
335    Level: intermediate
336 
337 .seealso: `PCSetUseAmat()`, `PCBJACOBI`, `PGMG`, `PCFIELDSPLIT`, `PCCOMPOSITE`
338 @*/
339 PetscErrorCode  PCGetUseAmat(PC pc,PetscBool *flg)
340 {
341   PetscFunctionBegin;
342   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
343   *flg = pc->useAmat;
344   PetscFunctionReturn(0);
345 }
346 
347 /*@
348    PCCreate - Creates a preconditioner context.
349 
350    Collective
351 
352    Input Parameter:
353 .  comm - MPI communicator
354 
355    Output Parameter:
356 .  pc - location to put the preconditioner context
357 
358    Notes:
359    The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi with PCILU or PCICC
360    in parallel. For dense matrices it is always PCNONE.
361 
362    Level: developer
363 
364 .seealso: `PCSetUp()`, `PCApply()`, `PCDestroy()`
365 @*/
366 PetscErrorCode  PCCreate(MPI_Comm comm,PC *newpc)
367 {
368   PC             pc;
369 
370   PetscFunctionBegin;
371   PetscValidPointer(newpc,2);
372   *newpc = NULL;
373   PetscCall(PCInitializePackage());
374 
375   PetscCall(PetscHeaderCreate(pc,PC_CLASSID,"PC","Preconditioner","PC",comm,PCDestroy,PCView));
376 
377   pc->mat                  = NULL;
378   pc->pmat                 = NULL;
379   pc->setupcalled          = 0;
380   pc->setfromoptionscalled = 0;
381   pc->data                 = NULL;
382   pc->diagonalscale        = PETSC_FALSE;
383   pc->diagonalscaleleft    = NULL;
384   pc->diagonalscaleright   = NULL;
385 
386   pc->modifysubmatrices  = NULL;
387   pc->modifysubmatricesP = NULL;
388 
389   *newpc = pc;
390   PetscFunctionReturn(0);
391 
392 }
393 
394 /* -------------------------------------------------------------------------------*/
395 
/*@
   PCApply - Applies the preconditioner to a vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector; must be a different vector than x

   Level: developer

.seealso: `PCApplyTranspose()`, `PCApplyBAorAB()`
@*/
PetscErrorCode  PCApply(PC pc,Vec x,Vec y)
{
  PetscInt       m,n,mv,nv;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues(x,2,PETSC_TRUE));
  /* use pmat to check vector sizes since for KSPLSQR the pmat may be of a different size than mat */
  PetscCall(MatGetLocalSize(pc->pmat,&m,&n));
  PetscCall(VecGetLocalSize(x,&mv));
  PetscCall(VecGetLocalSize(y,&nv));
  /* check pmat * y = x is feasible */
  PetscCheck(mv == m,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local rows %" PetscInt_FMT " does not equal input vector size %" PetscInt_FMT,m,mv);
  PetscCheck(nv == n,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local columns %" PetscInt_FMT " does not equal output vector size %" PetscInt_FMT,n,nv);
  /* y is about to be overwritten; fail early if the caller holds a read lock on it */
  PetscCall(VecSetErrorIfLocked(y,3));

  PetscCall(PCSetUp(pc));
  PetscCheck(pc->ops->apply,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply");
  /* lock x read-only around the implementation call so it cannot be modified */
  PetscCall(VecLockReadPush(x));
  PetscCall(PetscLogEventBegin(PC_Apply,pc,x,y,0));
  PetscCall((*pc->ops->apply)(pc,x,y));
  PetscCall(PetscLogEventEnd(PC_Apply,pc,x,y,0));
  if (pc->erroriffailure) PetscCall(VecValidValues(y,3,PETSC_FALSE));
  PetscCall(VecLockReadPop(x));
  PetscFunctionReturn(0);
}
441 
/*@
   PCMatApply - Applies the preconditioner to multiple vectors stored as a MATDENSE. Like PCApply(), Y and X must be different matrices.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  X - block of input vectors

   Output Parameter:
.  Y - block of output vectors

   Notes:
   If the PC implementation does not provide a native block apply, each column of X is applied
   one at a time with PCApply().

   Level: developer

.seealso: `PCApply()`, `KSPMatSolve()`
@*/
PetscErrorCode  PCMatApply(PC pc,Mat X,Mat Y)
{
  Mat            A;
  Vec            cy, cx;
  PetscInt       m1, M1, m2, M2, n1, N1, n2, N2, m3, M3, n3, N3;
  PetscBool      match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(X, MAT_CLASSID, 2);
  PetscValidHeaderSpecific(Y, MAT_CLASSID, 3);
  PetscCheckSameComm(pc, 1, X, 2);
  PetscCheckSameComm(pc, 1, Y, 3);
  PetscCheck(Y != X,PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "Y and X must be different matrices");
  /* size checks are done against the Pmat: (1) X and Y have the same number of
     columns, (2) X conforms to the Pmat rows, (3) Y conforms to the Pmat columns */
  PetscCall(PCGetOperators(pc, NULL, &A));
  PetscCall(MatGetLocalSize(A, &m3, &n3));
  PetscCall(MatGetLocalSize(X, &m2, &n2));
  PetscCall(MatGetLocalSize(Y, &m1, &n1));
  PetscCall(MatGetSize(A, &M3, &N3));
  PetscCall(MatGetSize(X, &M2, &N2));
  PetscCall(MatGetSize(Y, &M1, &N1));
  PetscCheck(n1 == n2 && N1 == N2,PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of input vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and block of output vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")", n2, N2, n1, N1);
  PetscCheck(m2 == m3 && M2 == M3,PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of input vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m2, M2, m3, M3, n3, N3);
  PetscCheck(m1 == n3 && M1 == N3,PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of output vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m1, M1, m3, M3, n3, N3);
  /* both blocks must be (sequential or parallel) dense matrices */
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)Y, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match,PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of output vectors not stored in a dense Mat");
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match,PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of input vectors not stored in a dense Mat");
  PetscCall(PCSetUp(pc));
  if (pc->ops->matapply) {
    PetscCall(PetscLogEventBegin(PC_MatApply, pc, X, Y, 0));
    PetscCall((*pc->ops->matapply)(pc, X, Y));
    PetscCall(PetscLogEventEnd(PC_MatApply, pc, X, Y, 0));
  } else {
    /* no native block apply: fall back to applying one column at a time */
    PetscCall(PetscInfo(pc, "PC type %s applying column by column\n", ((PetscObject)pc)->type_name));
    for (n1 = 0; n1 < N1; ++n1) {
      PetscCall(MatDenseGetColumnVecRead(X, n1, &cx));
      PetscCall(MatDenseGetColumnVecWrite(Y, n1, &cy));
      PetscCall(PCApply(pc, cx, cy));
      PetscCall(MatDenseRestoreColumnVecWrite(Y, n1, &cy));
      PetscCall(MatDenseRestoreColumnVecRead(X, n1, &cx));
    }
  }
  PetscFunctionReturn(0);
}
503 
/*@
   PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector; must be a different vector than x

   Notes:
   Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.

   Level: developer

.seealso: `PCApply()`, `PCApplySymmetricRight()`
@*/
PetscErrorCode  PCApplySymmetricLeft(PC pc,Vec x,Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues(x,2,PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  PetscCheck(pc->ops->applysymmetricleft,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply");
  /* lock x read-only around the implementation call so it cannot be modified */
  PetscCall(VecLockReadPush(x));
  PetscCall(PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0));
  PetscCall((*pc->ops->applysymmetricleft)(pc,x,y));
  PetscCall(PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues(y,3,PETSC_FALSE));
  PetscFunctionReturn(0);
}
541 
542 /*@
543    PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.
544 
545    Collective on PC
546 
547    Input Parameters:
548 +  pc - the preconditioner context
549 -  x - input vector
550 
551    Output Parameter:
552 .  y - output vector
553 
554    Level: developer
555 
556    Notes:
557    Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.
558 
559 .seealso: `PCApply()`, `PCApplySymmetricLeft()`
560 @*/
561 PetscErrorCode  PCApplySymmetricRight(PC pc,Vec x,Vec y)
562 {
563   PetscFunctionBegin;
564   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
565   PetscValidHeaderSpecific(x,VEC_CLASSID,2);
566   PetscValidHeaderSpecific(y,VEC_CLASSID,3);
567   PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
568   if (pc->erroriffailure) PetscCall(VecValidValues(x,2,PETSC_TRUE));
569   PetscCall(PCSetUp(pc));
570   PetscCheck(pc->ops->applysymmetricright,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply");
571   PetscCall(VecLockReadPush(x));
572   PetscCall(PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0));
573   PetscCall((*pc->ops->applysymmetricright)(pc,x,y));
574   PetscCall(PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0));
575   PetscCall(VecLockReadPop(x));
576   if (pc->erroriffailure) PetscCall(VecValidValues(y,3,PETSC_FALSE));
577   PetscFunctionReturn(0);
578 }
579 
/*@
   PCApplyTranspose - Applies the transpose of preconditioner to a vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector; must be a different vector than x

   Notes:
    For complex numbers this applies the non-Hermitian transpose.

   Developer Notes:
    We need to implement a PCApplyHermitianTranspose()

   Level: developer

.seealso: `PCApply()`, `PCApplyBAorAB()`, `PCApplyBAorABTranspose()`, `PCApplyTransposeExists()`
@*/
PetscErrorCode  PCApplyTranspose(PC pc,Vec x,Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues(x,2,PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  PetscCheck(pc->ops->applytranspose,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply transpose");
  /* lock x read-only around the implementation call so it cannot be modified */
  PetscCall(VecLockReadPush(x));
  /* logged under the same PC_Apply event as PCApply(); there is no separate transpose event */
  PetscCall(PetscLogEventBegin(PC_Apply,pc,x,y,0));
  PetscCall((*pc->ops->applytranspose)(pc,x,y));
  PetscCall(PetscLogEventEnd(PC_Apply,pc,x,y,0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues(y,3,PETSC_FALSE));
  PetscFunctionReturn(0);
}
620 
621 /*@
622    PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation
623 
624    Collective on PC
625 
626    Input Parameters:
627 .  pc - the preconditioner context
628 
629    Output Parameter:
630 .  flg - PETSC_TRUE if a transpose operation is defined
631 
632    Level: developer
633 
634 .seealso: `PCApplyTranspose()`
635 @*/
636 PetscErrorCode  PCApplyTransposeExists(PC pc,PetscBool  *flg)
637 {
638   PetscFunctionBegin;
639   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
640   PetscValidBoolPointer(flg,2);
641   if (pc->ops->applytranspose) *flg = PETSC_TRUE;
642   else *flg = PETSC_FALSE;
643   PetscFunctionReturn(0);
644 }
645 
/*@
   PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
.  x - input vector
-  work - work vector

   Output Parameter:
.  y - output vector; must be a different vector than x

   Level: developer

   Notes:
    If the PC has had PCSetDiagonalScale() set then D M A D^{-1} for left preconditioning or  D A M D^{-1} is actually applied. Note that the
   specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling.

.seealso: `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorABTranspose()`
@*/
PetscErrorCode  PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,side,2);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscValidHeaderSpecific(y,VEC_CLASSID,4);
  PetscValidHeaderSpecific(work,VEC_CLASSID,5);
  PetscCheckSameComm(pc,1,x,3);
  PetscCheckSameComm(pc,1,y,4);
  PetscCheckSameComm(pc,1,work,5);
  PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  PetscCheck(side == PC_LEFT || side == PC_SYMMETRIC || side == PC_RIGHT,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric");
  PetscCheck(!pc->diagonalscale || side != PC_SYMMETRIC,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application");
  if (pc->erroriffailure) PetscCall(VecValidValues(x,3,PETSC_TRUE));

  PetscCall(PCSetUp(pc));
  if (pc->diagonalscale) {
    /* apply D (M A or A M) D^{-1}: right-scale the input, run the product, left-scale the output */
    if (pc->ops->applyBA) {
      Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
      PetscCall(VecDuplicate(x,&work2));
      PetscCall(PCDiagonalScaleRight(pc,x,work2));
      PetscCall((*pc->ops->applyBA)(pc,side,work2,y,work));
      PetscCall(PCDiagonalScaleLeft(pc,y,y));
      PetscCall(VecDestroy(&work2));
    } else if (side == PC_RIGHT) {
      PetscCall(PCDiagonalScaleRight(pc,x,y));
      PetscCall(PCApply(pc,y,work));
      PetscCall(MatMult(pc->mat,work,y));
      PetscCall(PCDiagonalScaleLeft(pc,y,y));
    } else if (side == PC_LEFT) {
      PetscCall(PCDiagonalScaleRight(pc,x,y));
      PetscCall(MatMult(pc->mat,y,work));
      PetscCall(PCApply(pc,work,y));
      PetscCall(PCDiagonalScaleLeft(pc,y,y));
    /* defensive only: side was range-checked above and PC_SYMMETRIC with diagonal
       scaling was already rejected at entry, so this branch is unreachable */
    } else PetscCheck(side != PC_SYMMETRIC,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner");
  } else {
    if (pc->ops->applyBA) {
      PetscCall((*pc->ops->applyBA)(pc,side,x,y,work));
    } else if (side == PC_RIGHT) {
      PetscCall(PCApply(pc,x,work));
      PetscCall(MatMult(pc->mat,work,y));
    } else if (side == PC_LEFT) {
      PetscCall(MatMult(pc->mat,x,work));
      PetscCall(PCApply(pc,work,y));
    } else if (side == PC_SYMMETRIC) {
      /* There's an extra copy here; maybe should provide 2 work vectors instead? */
      PetscCall(PCApplySymmetricRight(pc,x,work));
      PetscCall(MatMult(pc->mat,work,y));
      PetscCall(VecCopy(y,work));
      PetscCall(PCApplySymmetricLeft(pc,work,y));
    }
  }
  if (pc->erroriffailure) PetscCall(VecValidValues(y,4,PETSC_FALSE));
  PetscFunctionReturn(0);
}
724 
/*@
   PCApplyBAorABTranspose - Applies the transpose of the preconditioner
   and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning,
   NOT tr(B*A) = tr(A)*tr(B).

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
.  x - input vector
-  work - work vector

   Output Parameter:
.  y - output vector; must be a different vector than x

   Notes:
    this routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner
      defined by B'. This is why this has the funny form that it computes tr(B) * tr(A)

    Level: developer

.seealso: `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorAB()`
@*/
PetscErrorCode  PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscValidHeaderSpecific(y,VEC_CLASSID,4);
  PetscValidHeaderSpecific(work,VEC_CLASSID,5);
  PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues(x,3,PETSC_TRUE));
  /* NOTE(review): this fast path runs before PCSetUp() and without the side range
     check below; presumably implementations providing applyBAtranspose handle
     both themselves -- confirm */
  if (pc->ops->applyBAtranspose) {
    PetscCall((*pc->ops->applyBAtranspose)(pc,side,x,y,work));
    if (pc->erroriffailure) PetscCall(VecValidValues(y,4,PETSC_FALSE));
    PetscFunctionReturn(0);
  }
  PetscCheck(side == PC_LEFT || side == PC_RIGHT,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left");

  PetscCall(PCSetUp(pc));
  if (side == PC_RIGHT) {
    PetscCall(PCApplyTranspose(pc,x,work));
    PetscCall(MatMultTranspose(pc->mat,work,y));
  } else if (side == PC_LEFT) {
    PetscCall(MatMultTranspose(pc->mat,x,work));
    PetscCall(PCApplyTranspose(pc,work,y));
  }
  /* add support for PC_SYMMETRIC */
  if (pc->erroriffailure) PetscCall(VecValidValues(y,4,PETSC_FALSE));
  PetscFunctionReturn(0);
}
777 
778 /* -------------------------------------------------------------------------------*/
779 
780 /*@
781    PCApplyRichardsonExists - Determines whether a particular preconditioner has a
782    built-in fast application of Richardson's method.
783 
784    Not Collective
785 
786    Input Parameter:
787 .  pc - the preconditioner
788 
789    Output Parameter:
790 .  exists - PETSC_TRUE or PETSC_FALSE
791 
792    Level: developer
793 
794 .seealso: `PCApplyRichardson()`
795 @*/
796 PetscErrorCode  PCApplyRichardsonExists(PC pc,PetscBool  *exists)
797 {
798   PetscFunctionBegin;
799   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
800   PetscValidBoolPointer(exists,2);
801   if (pc->ops->applyrichardson) *exists = PETSC_TRUE;
802   else *exists = PETSC_FALSE;
803   PetscFunctionReturn(0);
804 }
805 
/*@
   PCApplyRichardson - Applies several steps of Richardson iteration with
   the particular preconditioner. This routine is usually used by the
   Krylov solvers and not the application code directly.

   Collective on PC

   Input Parameters:
+  pc  - the preconditioner context
.  b   - the right hand side
.  w   - one work vector
.  rtol - relative decrease in residual norm convergence criteria
.  abstol - absolute residual norm convergence criteria
.  dtol - divergence residual norm increase criteria
.  its - the number of iterations to apply.
-  guesszero - PETSC_TRUE if y is to be treated as containing a zero (no) initial guess; PETSC_FALSE if y contains an initial guess

   Output Parameters:
+  outits - number of iterations actually used (for SOR this always equals its)
.  reason - the reason the apply terminated
-  y - the solution (also contains initial guess if guesszero is PETSC_FALSE)

   Notes:
   Most preconditioners do not support this function. Use the command
   PCApplyRichardsonExists() to determine if one does.

   Except for the multigrid PC this routine ignores the convergence tolerances
   and always runs for the number of iterations

   Level: developer

.seealso: `PCApplyRichardsonExists()`
@*/
PetscErrorCode  PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(b,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscValidHeaderSpecific(w,VEC_CLASSID,4);
  PetscCheck(b != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"b and y must be different vectors");
  PetscCall(PCSetUp(pc));
  PetscCheck(pc->ops->applyrichardson,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply richardson");
  /* all tolerance/iteration handling is delegated to the implementation */
  PetscCall((*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason));
  PetscFunctionReturn(0);
}
852 
853 /*@
854    PCSetFailedReason - Sets the reason a PCSetUp() failed or PC_NOERROR if it did not fail
855 
856    Logically Collective on PC
857 
858    Input Parameters:
859 +  pc - the preconditioner context
-  reason - the reason it failed
861 
862    Level: advanced
863 
864 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCFailedReason`
865 @*/
866 PetscErrorCode PCSetFailedReason(PC pc,PCFailedReason reason)
867 {
868   PetscFunctionBegin;
869   pc->failedreason = reason;
870   PetscFunctionReturn(0);
871 }
872 
873 /*@
874    PCGetFailedReason - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail
875 
876    Logically Collective on PC
877 
878    Input Parameter:
879 .  pc - the preconditioner context
880 
881    Output Parameter:
882 .  reason - the reason it failed
883 
884    Level: advanced
885 
   Notes: This is the maximum of the failed reason over all ranks in the PC communicator. It is only valid after
   a call to KSPCheckDot() or KSPCheckNorm() inside a KSPSolve(). It is not valid immediately after a PCSetUp()
   or PCApply(); in those cases use PCGetFailedReasonRank()
889 
890 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReasonRank()`, `PCSetFailedReason()`
891 @*/
892 PetscErrorCode PCGetFailedReason(PC pc,PCFailedReason *reason)
893 {
894   PetscFunctionBegin;
895   if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
896   else *reason = pc->failedreason;
897   PetscFunctionReturn(0);
898 }
899 
900 /*@
901    PCGetFailedReasonRank - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail on this MPI rank
902 
903   Not Collective on PC
904 
905    Input Parameter:
906 .  pc - the preconditioner context
907 
908    Output Parameter:
909 .  reason - the reason it failed
910 
911    Notes:
912      Different ranks may have different reasons or no reason, see PCGetFailedReason()
913 
914    Level: advanced
915 
916 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReason()`, `PCSetFailedReason()`
917 @*/
918 PetscErrorCode PCGetFailedReasonRank(PC pc,PCFailedReason *reason)
919 {
920   PetscFunctionBegin;
921   if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
922   else *reason = pc->failedreason;
923   PetscFunctionReturn(0);
924 }
925 
926 /*  Next line needed to deactivate KSP_Solve logging */
927 #include <petsc/private/kspimpl.h>
928 
929 /*
930       a setupcall of 0 indicates never setup,
931                      1 indicates has been previously setup
932                     -1 indicates a PCSetUp() was attempted and failed
933 */
934 /*@
935    PCSetUp - Prepares for the use of a preconditioner.
936 
937    Collective on PC
938 
939    Input Parameter:
940 .  pc - the preconditioner context
941 
942    Level: developer
943 
944 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`
945 @*/
PetscErrorCode  PCSetUp(PC pc)
{
  const char       *def;
  PetscObjectState matstate, matnonzerostate;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscCheck(pc->mat,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first");

  /* user asked to keep the existing preconditioner regardless of operator changes */
  if (pc->setupcalled && pc->reusepreconditioner) {
    PetscCall(PetscInfo(pc,"Leaving PC with identical preconditioner since reuse preconditioner is set\n"));
    PetscFunctionReturn(0);
  }

  /* compare the preconditioning matrix's state against what was used for the last setup */
  PetscCall(PetscObjectStateGet((PetscObject)pc->pmat,&matstate));
  PetscCall(MatGetNonzeroState(pc->pmat,&matnonzerostate));
  if (!pc->setupcalled) {
    PetscCall(PetscInfo(pc,"Setting up PC for first time\n"));
    pc->flag = DIFFERENT_NONZERO_PATTERN;
  } else if (matstate == pc->matstate) {
    /* operator values unchanged since last setup: nothing to do */
    PetscCall(PetscInfo(pc,"Leaving PC with identical preconditioner since operator is unchanged\n"));
    PetscFunctionReturn(0);
  } else {
    /* values changed; record whether the sparsity pattern changed too, so implementations can reuse symbolic work */
    if (matnonzerostate > pc->matnonzerostate) {
      PetscCall(PetscInfo(pc,"Setting up PC with different nonzero pattern\n"));
      pc->flag = DIFFERENT_NONZERO_PATTERN;
    } else {
      PetscCall(PetscInfo(pc,"Setting up PC with same nonzero pattern\n"));
      pc->flag = SAME_NONZERO_PATTERN;
    }
  }
  /* remember the operator state this setup corresponds to */
  pc->matstate        = matstate;
  pc->matnonzerostate = matnonzerostate;

  /* pick a default PC type if the user never set one */
  if (!((PetscObject)pc)->type_name) {
    PetscCall(PCGetDefaultType_Private(pc,&def));
    PetscCall(PCSetType(pc,def));
  }

  PetscCall(MatSetErrorIfFailure(pc->pmat,pc->erroriffailure));
  PetscCall(MatSetErrorIfFailure(pc->mat,pc->erroriffailure));
  PetscCall(PetscLogEventBegin(PC_SetUp,pc,0,0,0));
  if (pc->ops->setup) {
    /* do not log solves and applications of preconditioners while constructing preconditioners; perhaps they should be logged separately from the regular solves */
    PetscCall(KSPInitializePackage());
    PetscCall(PetscLogEventDeactivatePush(KSP_Solve));
    PetscCall(PetscLogEventDeactivatePush(PC_Apply));
    PetscCall((*pc->ops->setup)(pc));
    PetscCall(PetscLogEventDeactivatePop(KSP_Solve));
    PetscCall(PetscLogEventDeactivatePop(PC_Apply));
  }
  PetscCall(PetscLogEventEnd(PC_SetUp,pc,0,0,0));
  /* preserve a negative (failure) marker; only promote 0 -> 1 on success */
  if (!pc->setupcalled) pc->setupcalled = 1;
  PetscFunctionReturn(0);
}
1001 
1002 /*@
1003    PCSetUpOnBlocks - Sets up the preconditioner for each block in
1004    the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
1005    methods.
1006 
1007    Collective on PC
1008 
1009    Input Parameters:
1010 .  pc - the preconditioner context
1011 
1012    Level: developer
1013 
1014 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCSetUp()`
1015 @*/
1016 PetscErrorCode  PCSetUpOnBlocks(PC pc)
1017 {
1018   PetscFunctionBegin;
1019   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1020   if (!pc->ops->setuponblocks) PetscFunctionReturn(0);
1021   PetscCall(PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0));
1022   PetscCall((*pc->ops->setuponblocks)(pc));
1023   PetscCall(PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0));
1024   PetscFunctionReturn(0);
1025 }
1026 
1027 /*@C
1028    PCSetModifySubMatrices - Sets a user-defined routine for modifying the
1029    submatrices that arise within certain subdomain-based preconditioners.
1030    The basic submatrices are extracted from the preconditioner matrix as
1031    usual; the user can then alter these (for example, to set different boundary
1032    conditions for each submatrix) before they are used for the local solves.
1033 
1034    Logically Collective on PC
1035 
1036    Input Parameters:
1037 +  pc - the preconditioner context
1038 .  func - routine for modifying the submatrices
1039 -  ctx - optional user-defined context (may be null)
1040 
1041    Calling sequence of func:
1042 $     func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx);
1043 
1044 +  row - an array of index sets that contain the global row numbers
1045          that comprise each local submatrix
1046 .  col - an array of index sets that contain the global column numbers
1047          that comprise each local submatrix
1048 .  submat - array of local submatrices
1049 -  ctx - optional user-defined context for private data for the
1050          user-defined func routine (may be null)
1051 
1052    Notes:
1053    PCSetModifySubMatrices() MUST be called before KSPSetUp() and
1054    KSPSolve().
1055 
1056    A routine set by PCSetModifySubMatrices() is currently called within
1057    the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM)
1058    preconditioners.  All other preconditioners ignore this routine.
1059 
1060    Level: advanced
1061 
1062 .seealso: `PCModifySubMatrices()`
1063 @*/
1064 PetscErrorCode  PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx)
1065 {
1066   PetscFunctionBegin;
1067   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1068   pc->modifysubmatrices  = func;
1069   pc->modifysubmatricesP = ctx;
1070   PetscFunctionReturn(0);
1071 }
1072 
1073 /*@C
1074    PCModifySubMatrices - Calls an optional user-defined routine within
1075    certain preconditioners if one has been set with PCSetModifySubMatrices().
1076 
1077    Collective on PC
1078 
1079    Input Parameters:
1080 +  pc - the preconditioner context
1081 .  nsub - the number of local submatrices
1082 .  row - an array of index sets that contain the global row numbers
1083          that comprise each local submatrix
1084 .  col - an array of index sets that contain the global column numbers
1085          that comprise each local submatrix
1086 .  submat - array of local submatrices
1087 -  ctx - optional user-defined context for private data for the
1088          user-defined routine (may be null)
1089 
1090    Output Parameter:
1091 .  submat - array of local submatrices (the entries of which may
1092             have been modified)
1093 
1094    Notes:
1095    The user should NOT generally call this routine, as it will
1096    automatically be called within certain preconditioners (currently
1097    block Jacobi, additive Schwarz) if set.
1098 
1099    The basic submatrices are extracted from the preconditioner matrix
1100    as usual; the user can then alter these (for example, to set different
1101    boundary conditions for each submatrix) before they are used for the
1102    local solves.
1103 
1104    Level: developer
1105 
1106 .seealso: `PCSetModifySubMatrices()`
1107 @*/
1108 PetscErrorCode  PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
1109 {
1110   PetscFunctionBegin;
1111   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1112   if (!pc->modifysubmatrices) PetscFunctionReturn(0);
1113   PetscCall(PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0));
1114   PetscCall((*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx));
1115   PetscCall(PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0));
1116   PetscFunctionReturn(0);
1117 }
1118 
1119 /*@
1120    PCSetOperators - Sets the matrix associated with the linear system and
1121    a (possibly) different one associated with the preconditioner.
1122 
1123    Logically Collective on PC
1124 
1125    Input Parameters:
1126 +  pc - the preconditioner context
1127 .  Amat - the matrix that defines the linear system
1128 -  Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat.
1129 
1130    Notes:
1131     Passing a NULL for Amat or Pmat removes the matrix that is currently used.
1132 
1133     If you wish to replace either Amat or Pmat but leave the other one untouched then
1134     first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
1135     on it and then pass it back in in your call to KSPSetOperators().
1136 
1137    More Notes about Repeated Solution of Linear Systems:
1138    PETSc does NOT reset the matrix entries of either Amat or Pmat
1139    to zero after a linear solve; the user is completely responsible for
1140    matrix assembly.  See the routine MatZeroEntries() if desiring to
1141    zero all elements of a matrix.
1142 
1143    Level: intermediate
1144 
1145 .seealso: `PCGetOperators()`, `MatZeroEntries()`
1146  @*/
PetscErrorCode  PCSetOperators(PC pc,Mat Amat,Mat Pmat)
{
  PetscInt         m1,n1,m2,n2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (Amat) PetscValidHeaderSpecific(Amat,MAT_CLASSID,2);
  if (Pmat) PetscValidHeaderSpecific(Pmat,MAT_CLASSID,3);
  if (Amat) PetscCheckSameComm(pc,1,Amat,2);
  if (Pmat) PetscCheckSameComm(pc,1,Pmat,3);
  /* after the PC has been set up, the local sizes of the new operators must match the old ones */
  if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) {
    PetscCall(MatGetLocalSize(Amat,&m1,&n1));
    PetscCall(MatGetLocalSize(pc->mat,&m2,&n2));
    PetscCheck(m1 == m2 && n1 == n2,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT,m2,n2,m1,n1);
    PetscCall(MatGetLocalSize(Pmat,&m1,&n1));
    PetscCall(MatGetLocalSize(pc->pmat,&m2,&n2));
    PetscCheck(m1 == m2 && n1 == n2,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT,m2,n2,m1,n1);
  }

  if (Pmat != pc->pmat) {
    /* the operator that defines the preconditioner changed, so clear the cached states to force a rebuild at the next PCSetUp() */
    pc->matnonzerostate = -1;
    pc->matstate        = -1;
  }

  /* reference first in case the matrices are the same */
  if (Amat) PetscCall(PetscObjectReference((PetscObject)Amat));
  PetscCall(MatDestroy(&pc->mat));
  if (Pmat) PetscCall(PetscObjectReference((PetscObject)Pmat));
  PetscCall(MatDestroy(&pc->pmat));
  pc->mat  = Amat;
  pc->pmat = Pmat;
  PetscFunctionReturn(0);
}
1181 
1182 /*@
1183    PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner has changed.
1184 
1185    Logically Collective on PC
1186 
1187    Input Parameters:
1188 +  pc - the preconditioner context
1189 -  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner
1190 
1191     Level: intermediate
1192 
1193 .seealso: `PCGetOperators()`, `MatZeroEntries()`, `PCGetReusePreconditioner()`, `KSPSetReusePreconditioner()`
1194  @*/
PetscErrorCode  PCSetReusePreconditioner(PC pc,PetscBool flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,flag,2);
  /* when set, PCSetUp() returns early and keeps the current preconditioner even if the operator changed */
  pc->reusepreconditioner = flag;
  PetscFunctionReturn(0);
}
1203 
1204 /*@
1205    PCGetReusePreconditioner - Determines if the PC reuses the current preconditioner even if the operator in the preconditioner has changed.
1206 
1207    Not Collective
1208 
1209    Input Parameter:
1210 .  pc - the preconditioner context
1211 
1212    Output Parameter:
1213 .  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner
1214 
1215    Level: intermediate
1216 
1217 .seealso: `PCGetOperators()`, `MatZeroEntries()`, `PCSetReusePreconditioner()`
1218  @*/
PetscErrorCode  PCGetReusePreconditioner(PC pc,PetscBool *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidBoolPointer(flag,2);
  /* report the flag previously stored by PCSetReusePreconditioner() */
  *flag = pc->reusepreconditioner;
  PetscFunctionReturn(0);
}
1227 
1228 /*@
1229    PCGetOperators - Gets the matrix associated with the linear system and
1230    possibly a different one associated with the preconditioner.
1231 
1232    Not collective, though parallel Mats are returned if the PC is parallel
1233 
1234    Input Parameter:
1235 .  pc - the preconditioner context
1236 
1237    Output Parameters:
1238 +  Amat - the matrix defining the linear system
1239 -  Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat.
1240 
1241    Level: intermediate
1242 
1243    Notes:
1244     Does not increase the reference count of the matrices, so you should not destroy them
1245 
1246    Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators
1247       are created in PC and returned to the user. In this case, if both operators
1248       mat and pmat are requested, two DIFFERENT operators will be returned. If
1249       only one is requested both operators in the PC will be the same (i.e. as
1250       if one had called KSP/PCSetOperators() with the same argument for both Mats).
1251       The user must set the sizes of the returned matrices and their type etc just
1252       as if the user created them with MatCreate(). For example,
1253 
1254 $         KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to
1255 $           set size, type, etc of Amat
1256 
1257 $         MatCreate(comm,&mat);
1258 $         KSP/PCSetOperators(ksp/pc,Amat,Amat);
1259 $         PetscObjectDereference((PetscObject)mat);
1260 $           set size, type, etc of Amat
1261 
1262      and
1263 
1264 $         KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to
1265 $           set size, type, etc of Amat and Pmat
1266 
1267 $         MatCreate(comm,&Amat);
1268 $         MatCreate(comm,&Pmat);
1269 $         KSP/PCSetOperators(ksp/pc,Amat,Pmat);
1270 $         PetscObjectDereference((PetscObject)Amat);
1271 $         PetscObjectDereference((PetscObject)Pmat);
1272 $           set size, type, etc of Amat and Pmat
1273 
1274     The rational for this support is so that when creating a TS, SNES, or KSP the hierarchy
1275     of underlying objects (i.e. SNES, KSP, PC, Mat) and their livespans can be completely
1276     managed by the top most level object (i.e. the TS, SNES, or KSP). Another way to look
1277     at this is when you create a SNES you do not NEED to create a KSP and attach it to
1278     the SNES object (the SNES object manages it for you). Similarly when you create a KSP
1279     you do not need to attach a PC to it (the KSP object manages the PC object for you).
1280     Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when
1281     it can be created for you?
1282 
1283 .seealso: `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperatorsSet()`
1284 @*/
PetscErrorCode  PCGetOperators(PC pc,Mat *Amat,Mat *Pmat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* lazily create operators that were never set; which ones get created (and whether they
     are shared) depends on which outputs the caller requested, see the manual page above */
  if (Amat) {
    if (!pc->mat) {
      if (pc->pmat && !Pmat) {  /* Pmat has been set, but user did not request it, so use for Amat */
        pc->mat = pc->pmat;
        PetscCall(PetscObjectReference((PetscObject)pc->mat));
      } else {                  /* both Amat and Pmat are empty */
        PetscCall(MatCreate(PetscObjectComm((PetscObject)pc),&pc->mat));
        if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */
          pc->pmat = pc->mat;
          PetscCall(PetscObjectReference((PetscObject)pc->pmat));
        }
      }
    }
    *Amat = pc->mat;
  }
  if (Pmat) {
    if (!pc->pmat) {
      if (pc->mat && !Amat) {    /* Amat has been set but was not requested, so use for pmat */
        pc->pmat = pc->mat;
        PetscCall(PetscObjectReference((PetscObject)pc->pmat));
      } else {
        PetscCall(MatCreate(PetscObjectComm((PetscObject)pc),&pc->pmat));
        if (!Amat) { /* user did NOT request Amat, so make same as Pmat */
          pc->mat = pc->pmat;
          PetscCall(PetscObjectReference((PetscObject)pc->mat));
        }
      }
    }
    *Pmat = pc->pmat;
  }
  PetscFunctionReturn(0);
}
1321 
1322 /*@C
1323    PCGetOperatorsSet - Determines if the matrix associated with the linear system and
1324    possibly a different one associated with the preconditioner have been set in the PC.
1325 
1326    Not collective, though the results on all processes should be the same
1327 
1328    Input Parameter:
1329 .  pc - the preconditioner context
1330 
1331    Output Parameters:
1332 +  mat - the matrix associated with the linear system was set
1333 -  pmat - matrix associated with the preconditioner was set, usually the same
1334 
1335    Level: intermediate
1336 
1337 .seealso: `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperators()`
1338 @*/
1339 PetscErrorCode  PCGetOperatorsSet(PC pc,PetscBool  *mat,PetscBool  *pmat)
1340 {
1341   PetscFunctionBegin;
1342   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1343   if (mat) *mat = (pc->mat)  ? PETSC_TRUE : PETSC_FALSE;
1344   if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE;
1345   PetscFunctionReturn(0);
1346 }
1347 
1348 /*@
1349    PCFactorGetMatrix - Gets the factored matrix from the
1350    preconditioner context.  This routine is valid only for the LU,
1351    incomplete LU, Cholesky, and incomplete Cholesky methods.
1352 
1353    Not Collective on PC though Mat is parallel if PC is parallel
1354 
1355    Input Parameters:
1356 .  pc - the preconditioner context
1357 
1358    Output parameters:
1359 .  mat - the factored matrix
1360 
1361    Level: advanced
1362 
1363    Notes:
1364     Does not increase the reference count for the matrix so DO NOT destroy it
1365 
1366 @*/
1367 PetscErrorCode  PCFactorGetMatrix(PC pc,Mat *mat)
1368 {
1369   PetscFunctionBegin;
1370   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1371   PetscValidPointer(mat,2);
1372   if (pc->ops->getfactoredmatrix) {
1373     PetscCall((*pc->ops->getfactoredmatrix)(pc,mat));
1374   } else SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC type does not support getting factor matrix");
1375   PetscFunctionReturn(0);
1376 }
1377 
1378 /*@C
1379    PCSetOptionsPrefix - Sets the prefix used for searching for all
1380    PC options in the database.
1381 
1382    Logically Collective on PC
1383 
1384    Input Parameters:
1385 +  pc - the preconditioner context
1386 -  prefix - the prefix string to prepend to all PC option requests
1387 
1388    Notes:
1389    A hyphen (-) must NOT be given at the beginning of the prefix name.
1390    The first character of all runtime options is AUTOMATICALLY the
1391    hyphen.
1392 
1393    Level: advanced
1394 
1395 .seealso: `PCAppendOptionsPrefix()`, `PCGetOptionsPrefix()`
1396 @*/
PetscErrorCode  PCSetOptionsPrefix(PC pc,const char prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* delegate to the generic PetscObject prefix machinery */
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)pc,prefix));
  PetscFunctionReturn(0);
}
1404 
1405 /*@C
1406    PCAppendOptionsPrefix - Appends to the prefix used for searching for all
1407    PC options in the database.
1408 
1409    Logically Collective on PC
1410 
1411    Input Parameters:
1412 +  pc - the preconditioner context
1413 -  prefix - the prefix string to prepend to all PC option requests
1414 
1415    Notes:
1416    A hyphen (-) must NOT be given at the beginning of the prefix name.
1417    The first character of all runtime options is AUTOMATICALLY the
1418    hyphen.
1419 
1420    Level: advanced
1421 
1422 .seealso: `PCSetOptionsPrefix()`, `PCGetOptionsPrefix()`
1423 @*/
PetscErrorCode  PCAppendOptionsPrefix(PC pc,const char prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* delegate to the generic PetscObject prefix machinery */
  PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix));
  PetscFunctionReturn(0);
}
1431 
1432 /*@C
1433    PCGetOptionsPrefix - Gets the prefix used for searching for all
1434    PC options in the database.
1435 
1436    Not Collective
1437 
1438    Input Parameters:
1439 .  pc - the preconditioner context
1440 
1441    Output Parameters:
1442 .  prefix - pointer to the prefix string used, is returned
1443 
1444    Notes:
    On the Fortran side, the user should pass in a string 'prefix' of
   sufficient length to hold the prefix.
1447 
1448    Level: advanced
1449 
1450 .seealso: `PCSetOptionsPrefix()`, `PCAppendOptionsPrefix()`
1451 @*/
PetscErrorCode  PCGetOptionsPrefix(PC pc,const char *prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(prefix,2);
  /* delegate to the generic PetscObject prefix machinery; returns a borrowed pointer */
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc,prefix));
  PetscFunctionReturn(0);
}
1460 
1461 /*
1462    Indicates the right hand side will be changed by KSPSolve(), this occurs for a few
  preconditioners including BDDC and Eisenstat that transform the equations before applying
1464   the Krylov methods
1465 */
PETSC_INTERN PetscErrorCode  PCPreSolveChangeRHS(PC pc,PetscBool *change)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(change,2);
  /* default to "does not change the RHS"; implementations that do change it override via composed method */
  *change = PETSC_FALSE;
  PetscTryMethod(pc,"PCPreSolveChangeRHS_C",(PC,PetscBool*),(pc,change));
  PetscFunctionReturn(0);
}
1475 
1476 /*@
1477    PCPreSolve - Optional pre-solve phase, intended for any
1478    preconditioner-specific actions that must be performed before
1479    the iterative solve itself.
1480 
1481    Collective on PC
1482 
1483    Input Parameters:
1484 +  pc - the preconditioner context
1485 -  ksp - the Krylov subspace context
1486 
1487    Level: developer
1488 
1489    Sample of Usage:
1490 .vb
1491     PCPreSolve(pc,ksp);
1492     KSPSolve(ksp,b,x);
1493     PCPostSolve(pc,ksp);
1494 .ve
1495 
1496    Notes:
1497    The pre-solve phase is distinct from the PCSetUp() phase.
1498 
1499    KSPSolve() calls this directly, so is rarely called by the user.
1500 
1501 .seealso: `PCPostSolve()`
1502 @*/
1503 PetscErrorCode PCPreSolve(PC pc,KSP ksp)
1504 {
1505   Vec            x,rhs;
1506 
1507   PetscFunctionBegin;
1508   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1509   PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
1510   pc->presolvedone++;
1511   PetscCheck(pc->presolvedone <= 2,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot embed PCPreSolve() more than twice");
1512   PetscCall(KSPGetSolution(ksp,&x));
1513   PetscCall(KSPGetRhs(ksp,&rhs));
1514 
1515   if (pc->ops->presolve) {
1516     PetscCall((*pc->ops->presolve)(pc,ksp,rhs,x));
1517   } else if (pc->presolve) {
1518     PetscCall((pc->presolve)(pc,ksp));
1519   }
1520   PetscFunctionReturn(0);
1521 }
1522 
1523 /*@C
1524    PCSetPreSolve - Sets function PCPreSolve() which is intended for any
1525    preconditioner-specific actions that must be performed before
1526    the iterative solve itself.
1527 
1528    Logically Collective on pc
1529 
1530    Input Parameters:
1531 +   pc - the preconditioner object
1532 -   presolve - the function to call before the solve
1533 
1534    Calling sequence of presolve:
1535 $  func(PC pc,KSP ksp)
1536 
1537 +  pc - the PC context
1538 -  ksp - the KSP context
1539 
1540    Level: developer
1541 
1542 .seealso: `PC`, `PCSetUp()`, `PCPreSolve()`
1543 @*/
PetscErrorCode PCSetPreSolve(PC pc,PetscErrorCode (*presolve)(PC,KSP))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* stash the user callback; PCPreSolve() calls it when no type-specific hook exists */
  pc->presolve = presolve;
  PetscFunctionReturn(0);
}
1551 
1552 /*@
1553    PCPostSolve - Optional post-solve phase, intended for any
1554    preconditioner-specific actions that must be performed after
1555    the iterative solve itself.
1556 
1557    Collective on PC
1558 
1559    Input Parameters:
1560 +  pc - the preconditioner context
1561 -  ksp - the Krylov subspace context
1562 
1563    Sample of Usage:
1564 .vb
1565     PCPreSolve(pc,ksp);
1566     KSPSolve(ksp,b,x);
1567     PCPostSolve(pc,ksp);
1568 .ve
1569 
1570    Note:
1571    KSPSolve() calls this routine directly, so it is rarely called by the user.
1572 
1573    Level: developer
1574 
1575 .seealso: `PCPreSolve()`, `KSPSolve()`
1576 @*/
1577 PetscErrorCode  PCPostSolve(PC pc,KSP ksp)
1578 {
1579   Vec            x,rhs;
1580 
1581   PetscFunctionBegin;
1582   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1583   PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
1584   pc->presolvedone--;
1585   PetscCall(KSPGetSolution(ksp,&x));
1586   PetscCall(KSPGetRhs(ksp,&rhs));
1587   if (pc->ops->postsolve) {
1588     PetscCall((*pc->ops->postsolve)(pc,ksp,rhs,x));
1589   }
1590   PetscFunctionReturn(0);
1591 }
1592 
1593 /*@C
1594   PCLoad - Loads a PC that has been stored in binary  with PCView().
1595 
1596   Collective on PetscViewer
1597 
1598   Input Parameters:
1599 + newdm - the newly loaded PC, this needs to have been created with PCCreate() or
1600            some related function before a call to PCLoad().
1601 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen()
1602 
1603    Level: intermediate
1604 
1605   Notes:
1606    The type is determined by the data in the file, any type set into the PC before this call is ignored.
1607 
1608   Notes for advanced users:
1609   Most users should not need to know the details of the binary storage
1610   format, since PCLoad() and PCView() completely hide these details.
1611   But for anyone who's interested, the standard binary matrix storage
1612   format is
1613 .vb
1614      has not yet been determined
1615 .ve
1616 
1617 .seealso: `PetscViewerBinaryOpen()`, `PCView()`, `MatLoad()`, `VecLoad()`
1618 @*/
1619 PetscErrorCode  PCLoad(PC newdm, PetscViewer viewer)
1620 {
1621   PetscBool      isbinary;
1622   PetscInt       classid;
1623   char           type[256];
1624 
1625   PetscFunctionBegin;
1626   PetscValidHeaderSpecific(newdm,PC_CLASSID,1);
1627   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
1628   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary));
1629   PetscCheck(isbinary,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");
1630 
1631   PetscCall(PetscViewerBinaryRead(viewer,&classid,1,NULL,PETSC_INT));
1632   PetscCheck(classid == PC_FILE_CLASSID,PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not PC next in file");
1633   PetscCall(PetscViewerBinaryRead(viewer,type,256,NULL,PETSC_CHAR));
1634   PetscCall(PCSetType(newdm, type));
1635   if (newdm->ops->load) {
1636     PetscCall((*newdm->ops->load)(newdm,viewer));
1637   }
1638   PetscFunctionReturn(0);
1639 }
1640 
1641 #include <petscdraw.h>
1642 #if defined(PETSC_HAVE_SAWS)
1643 #include <petscviewersaws.h>
1644 #endif
1645 
1646 /*@C
1647    PCViewFromOptions - View from Options
1648 
1649    Collective on PC
1650 
1651    Input Parameters:
1652 +  A - the PC context
1653 .  obj - Optional object
1654 -  name - command line option
1655 
1656    Level: intermediate
1657 .seealso: `PC`, `PCView`, `PetscObjectViewFromOptions()`, `PCCreate()`
1658 @*/
PetscErrorCode  PCViewFromOptions(PC A,PetscObject obj,const char name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,PC_CLASSID,1);
  /* delegate to the generic options-driven viewing machinery */
  PetscCall(PetscObjectViewFromOptions((PetscObject)A,obj,name));
  PetscFunctionReturn(0);
}
1666 
/*@C
   PCView - Prints the PC data structure.

   Collective on PC

   Input Parameters:
+  pc - the PC context
-  viewer - optional visualization context

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
         output where only the first processor opens
         the file.  All other processors send their
         data to the first processor to print.

   The user can open an alternative visualization context with
   PetscViewerASCIIOpen() (output to a specified file).

   Level: developer

.seealso: `KSPView()`, `PetscViewerASCIIOpen()`
@*/
PetscErrorCode  PCView(PC pc,PetscViewer viewer)
{
  PCType         cstr;
  PetscBool      iascii,isstring,isbinary,isdraw;
#if defined(PETSC_HAVE_SAWS)
  PetscBool      issaws;
#endif

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* default to synchronized stdout on the PC's communicator when no viewer is supplied */
  if (!viewer) {
    PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer));
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  PetscCheckSameComm(pc,1,viewer,2);

  /* dispatch on the concrete viewer type */
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw));
#if defined(PETSC_HAVE_SAWS)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSAWS,&issaws));
#endif

  if (iascii) {
    PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer));
    if (!pc->setupcalled) {
      PetscCall(PetscViewerASCIIPrintf(viewer,"  PC has not been set up so information may be incomplete\n"));
    }
    /* let the PC implementation (ILU, GAMG, ...) print its own options, indented one tab */
    if (pc->ops->view) {
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall((*pc->ops->view)(pc,viewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    /* print the system matrix, and the preconditioning matrix when it differs */
    if (pc->mat) {
      PetscCall(PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO));
      if (pc->pmat == pc->mat) {
        PetscCall(PetscViewerASCIIPrintf(viewer,"  linear system matrix = precond matrix:\n"));
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(MatView(pc->mat,viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      } else {
        if (pc->pmat) {
          PetscCall(PetscViewerASCIIPrintf(viewer,"  linear system matrix followed by preconditioner matrix:\n"));
        } else {
          PetscCall(PetscViewerASCIIPrintf(viewer,"  linear system matrix:\n"));
        }
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(MatView(pc->mat,viewer));
        if (pc->pmat) PetscCall(MatView(pc->pmat,viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      }
      PetscCall(PetscViewerPopFormat(viewer));
    }
  } else if (isstring) {
    /* compact one-line summary for string viewers */
    PetscCall(PCGetType(pc,&cstr));
    PetscCall(PetscViewerStringSPrintf(viewer," PCType: %-7.7s",cstr));
    if (pc->ops->view) PetscCall((*pc->ops->view)(pc,viewer));
    if (pc->mat) PetscCall(MatView(pc->mat,viewer));
    if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat,viewer));
  } else if (isbinary) {
    PetscInt    classid = PC_FILE_CLASSID;
    MPI_Comm    comm;
    PetscMPIInt rank;
    char        type[256];

    /* rank 0 writes the class id and the type name (fixed 256 bytes); the
       implementation's view routine may append its own binary data */
    PetscCall(PetscObjectGetComm((PetscObject)pc,&comm));
    PetscCallMPI(MPI_Comm_rank(comm,&rank));
    if (rank == 0) {
      PetscCall(PetscViewerBinaryWrite(viewer,&classid,1,PETSC_INT));
      PetscCall(PetscStrncpy(type,((PetscObject)pc)->type_name,256));
      PetscCall(PetscViewerBinaryWrite(viewer,type,256,PETSC_CHAR));
    }
    if (pc->ops->view) {
      PetscCall((*pc->ops->view)(pc,viewer));
    }
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x,y,bottom,h;
    PetscInt  n;

    /* draw a labeled box for this PC at the current point, then move the
       current point below it so nested viewers draw underneath */
    PetscCall(PetscViewerDrawGetDraw(viewer,0,&draw));
    PetscCall(PetscDrawGetCurrentPoint(draw,&x,&y));
    if (pc->mat) {
      PetscCall(MatGetSize(pc->mat,&n,NULL));
      PetscCall(PetscSNPrintf(str,25,"PC: %s (%" PetscInt_FMT ")",((PetscObject)pc)->type_name,n));
    } else {
      PetscCall(PetscSNPrintf(str,25,"PC: %s",((PetscObject)pc)->type_name));
    }
    PetscCall(PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h));
    bottom = y - h;
    PetscCall(PetscDrawPushCurrentPoint(draw,x,bottom));
    if (pc->ops->view) {
      PetscCall((*pc->ops->view)(pc,viewer));
    }
    PetscCall(PetscDrawPopCurrentPoint(draw));
#if defined(PETSC_HAVE_SAWS)
  } else if (issaws) {
    PetscMPIInt rank;

    PetscCall(PetscObjectName((PetscObject)pc));
    /* NOTE(review): rank is taken from PETSC_COMM_WORLD rather than the PC's
       own communicator -- confirm this is intentional for SAWS publishing */
    PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
    if (!((PetscObject)pc)->amsmem && rank == 0) {
      PetscCall(PetscObjectViewSAWs((PetscObject)pc,viewer));
    }
    if (pc->mat) PetscCall(MatView(pc->mat,viewer));
    if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat,viewer));
#endif
  }
  PetscFunctionReturn(0);
}
1803 
/*@C
  PCRegister -  Adds a method to the preconditioner package.

   Not collective

   Input Parameters:
+  sname - name of a new user-defined solver
-  function - routine to create the method context

   Notes:
   PCRegister() may be called multiple times to add several user-defined preconditioners.

   Sample usage:
.vb
   PCRegister("my_solver", MySolverCreate);
.ve

   Then, your solver can be chosen with the procedural interface via
$     PCSetType(pc,"my_solver")
   or at runtime via the option
$     -pc_type my_solver

   Level: advanced

.seealso: `PCRegisterAll()`
@*/
PetscErrorCode  PCRegister(const char sname[],PetscErrorCode (*function)(PC))
{
  PetscFunctionBegin;
  /* make sure the PC package (and hence PCList) exists before adding to it */
  PetscCall(PCInitializePackage());
  PetscCall(PetscFunctionListAdd(&PCList,sname,function));
  PetscFunctionReturn(0);
}
1837 
1838 static PetscErrorCode MatMult_PC(Mat A,Vec X,Vec Y)
1839 {
1840   PC             pc;
1841 
1842   PetscFunctionBegin;
1843   PetscCall(MatShellGetContext(A,&pc));
1844   PetscCall(PCApply(pc,X,Y));
1845   PetscFunctionReturn(0);
1846 }
1847 
1848 /*@
1849     PCComputeOperator - Computes the explicit preconditioned operator.
1850 
1851     Collective on PC
1852 
1853     Input Parameters:
1854 +   pc - the preconditioner object
1855 -   mattype - the matrix type to be used for the operator
1856 
1857     Output Parameter:
1858 .   mat - the explicit preconditioned operator
1859 
1860     Notes:
1861     This computation is done by applying the operators to columns of the identity matrix.
1862     This routine is costly in general, and is recommended for use only with relatively small systems.
1863     Currently, this routine uses a dense matrix format when mattype == NULL
1864 
1865     Level: advanced
1866 
1867 .seealso: `KSPComputeOperator()`, `MatType`
1868 
1869 @*/
1870 PetscErrorCode  PCComputeOperator(PC pc,MatType mattype,Mat *mat)
1871 {
1872   PetscInt       N,M,m,n;
1873   Mat            A,Apc;
1874 
1875   PetscFunctionBegin;
1876   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1877   PetscValidPointer(mat,3);
1878   PetscCall(PCGetOperators(pc,&A,NULL));
1879   PetscCall(MatGetLocalSize(A,&m,&n));
1880   PetscCall(MatGetSize(A,&M,&N));
1881   PetscCall(MatCreateShell(PetscObjectComm((PetscObject)pc),m,n,M,N,pc,&Apc));
1882   PetscCall(MatShellSetOperation(Apc,MATOP_MULT,(void (*)(void))MatMult_PC));
1883   PetscCall(MatComputeOperator(Apc,mattype,mat));
1884   PetscCall(MatDestroy(&Apc));
1885   PetscFunctionReturn(0);
1886 }
1887 
1888 /*@
1889    PCSetCoordinates - sets the coordinates of all the nodes on the local process
1890 
1891    Collective on PC
1892 
1893    Input Parameters:
1894 +  pc - the solver context
1895 .  dim - the dimension of the coordinates 1, 2, or 3
1896 .  nloc - the blocked size of the coordinates array
1897 -  coords - the coordinates array
1898 
1899    Level: intermediate
1900 
1901    Notes:
1902    coords is an array of the dim coordinates for the nodes on
1903    the local processor, of size dim*nloc.
1904    If there are 108 equation on a processor
1905    for a displacement finite element discretization of elasticity (so
1906    that there are nloc = 36 = 108/3 nodes) then the array must have 108
1907    double precision values (ie, 3 * 36).  These x y z coordinates
1908    should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x,
1909    ... , N-1.z ].
1910 
1911 .seealso: `MatSetNearNullSpace()`
1912 @*/
1913 PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
1914 {
1915   PetscFunctionBegin;
1916   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1917   PetscValidLogicalCollectiveInt(pc,dim,2);
1918   PetscTryMethod(pc,"PCSetCoordinates_C",(PC,PetscInt,PetscInt,PetscReal*),(pc,dim,nloc,coords));
1919   PetscFunctionReturn(0);
1920 }
1921 
1922 /*@
1923    PCGetInterpolations - Gets interpolation matrices for all levels (except level 0)
1924 
1925    Logically Collective on PC
1926 
1927    Input Parameter:
1928 .  pc - the precondition context
1929 
1930    Output Parameters:
1931 +  num_levels - the number of levels
1932 -  interpolations - the interpolation matrices (size of num_levels-1)
1933 
1934    Level: advanced
1935 
1936 .keywords: MG, GAMG, BoomerAMG, multigrid, interpolation, level
1937 
1938 .seealso: `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetInterpolation()`, `PCGetCoarseOperators()`
1939 @*/
1940 PetscErrorCode PCGetInterpolations(PC pc,PetscInt *num_levels,Mat *interpolations[])
1941 {
1942   PetscFunctionBegin;
1943   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1944   PetscValidIntPointer(num_levels,2);
1945   PetscValidPointer(interpolations,3);
1946   PetscUseMethod(pc,"PCGetInterpolations_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,interpolations));
1947   PetscFunctionReturn(0);
1948 }
1949 
1950 /*@
1951    PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level)
1952 
1953    Logically Collective on PC
1954 
1955    Input Parameter:
1956 .  pc - the precondition context
1957 
1958    Output Parameters:
1959 +  num_levels - the number of levels
1960 -  coarseOperators - the coarse operator matrices (size of num_levels-1)
1961 
1962    Level: advanced
1963 
1964 .keywords: MG, GAMG, BoomerAMG, get, multigrid, interpolation, level
1965 
1966 .seealso: `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetRScale()`, `PCMGGetInterpolation()`, `PCGetInterpolations()`
1967 @*/
1968 PetscErrorCode PCGetCoarseOperators(PC pc,PetscInt *num_levels,Mat *coarseOperators[])
1969 {
1970   PetscFunctionBegin;
1971   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1972   PetscValidIntPointer(num_levels,2);
1973   PetscValidPointer(coarseOperators,3);
1974   PetscUseMethod(pc,"PCGetCoarseOperators_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,coarseOperators));
1975   PetscFunctionReturn(0);
1976 }
1977