xref: /petsc/src/ksp/pc/interface/precon.c (revision 71052fdfa316db9eff6713675d06cb2251c7acfb)
1 
2 /*
3     The PC (preconditioner) interface routines, callable by users.
4 */
5 #include <petsc/private/pcimpl.h>            /*I "petscksp.h" I*/
6 #include <petscdm.h>
7 
8 /* Logging support */
9 PetscClassId  PC_CLASSID;
10 PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_MatApply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
11 PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks;
12 PetscInt      PetscMGLevelId;
13 
14 PetscErrorCode PCGetDefaultType_Private(PC pc,const char *type[])
15 {
16   PetscMPIInt    size;
17   PetscBool      hasop,flg1,flg2,set,flg3,isnormal;
18 
19   PetscFunctionBegin;
20   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size));
21   if (pc->pmat) {
22     PetscCall(MatHasOperation(pc->pmat,MATOP_GET_DIAGONAL_BLOCK,&hasop));
23     if (size == 1) {
24       PetscCall(MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1));
25       PetscCall(MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2));
26       PetscCall(MatIsSymmetricKnown(pc->pmat,&set,&flg3));
27       PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat,&isnormal,MATNORMAL,MATNORMALHERMITIAN,NULL));
28       if (flg1 && (!flg2 || (set && flg3))) {
29         *type = PCICC;
30       } else if (flg2) {
31         *type = PCILU;
32       } else if (isnormal) {
33         *type = PCNONE;
34       } else if (hasop) { /* likely is a parallel matrix run on one processor */
35         *type = PCBJACOBI;
36       } else {
37         *type = PCNONE;
38       }
39     } else {
40        if (hasop) {
41         *type = PCBJACOBI;
42       } else {
43         *type = PCNONE;
44       }
45     }
46   } else {
47     if (size == 1) {
48       *type = PCILU;
49     } else {
50       *type = PCBJACOBI;
51     }
52   }
53   PetscFunctionReturn(0);
54 }
55 
56 /*@
57    PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats
58 
59    Collective on PC
60 
61    Input Parameter:
62 .  pc - the preconditioner context
63 
64    Level: developer
65 
66    Notes:
67     This allows a PC to be reused for a different sized linear system but using the same options that have been previously set in the PC
68 
69 .seealso: `PCCreate()`, `PCSetUp()`
70 @*/
PetscErrorCode  PCReset(PC pc)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* let the concrete implementation release its own internal state first */
  if (pc->ops->reset) PetscCall((*pc->ops->reset)(pc));
  /* drop the diagonal-scaling vectors and the operator matrices held by the PC */
  PetscCall(VecDestroy(&pc->diagonalscaleright));
  PetscCall(VecDestroy(&pc->diagonalscaleleft));
  PetscCall(MatDestroy(&pc->pmat));
  PetscCall(MatDestroy(&pc->mat));

  /* force PCSetUp() to rebuild everything on the next apply */
  pc->setupcalled = 0;
  PetscFunctionReturn(0);
}
84 
85 /*@C
86    PCDestroy - Destroys PC context that was created with PCCreate().
87 
88    Collective on PC
89 
90    Input Parameter:
91 .  pc - the preconditioner context
92 
93    Level: developer
94 
95 .seealso: `PCCreate()`, `PCSetUp()`
96 @*/
PetscErrorCode  PCDestroy(PC *pc)
{
  PetscFunctionBegin;
  if (!*pc) PetscFunctionReturn(0);   /* destroying a NULL PC is a no-op */
  PetscValidHeaderSpecific((*pc),PC_CLASSID,1);
  /* other references remain: just clear the caller's pointer and keep the object alive */
  if (--((PetscObject)(*pc))->refct > 0) {*pc = NULL; PetscFunctionReturn(0);}

  /* last reference: release held vectors/matrices before tearing the object down */
  PetscCall(PCReset(*pc));

  /* if memory was published with SAWs then destroy it */
  PetscCall(PetscObjectSAWsViewOff((PetscObject)*pc));
  if ((*pc)->ops->destroy) PetscCall((*(*pc)->ops->destroy)((*pc)));
  PetscCall(DMDestroy(&(*pc)->dm));
  PetscCall(PetscHeaderDestroy(pc));  /* frees the header and sets *pc to NULL */
  PetscFunctionReturn(0);
}
113 
114 /*@C
115    PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right
116       scaling as needed by certain time-stepping codes.
117 
118    Logically Collective on PC
119 
120    Input Parameter:
121 .  pc - the preconditioner context
122 
123    Output Parameter:
124 .  flag - PETSC_TRUE if it applies the scaling
125 
126    Level: developer
127 
128    Notes:
129     If this returns PETSC_TRUE then the system solved via the Krylov method is
130 $           D M A D^{-1} y = D M b  for left preconditioning or
131 $           D A M D^{-1} z = D b for right preconditioning
132 
133 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCSetDiagonalScale()`
134 @*/
PetscErrorCode  PCGetDiagonalScale(PC pc,PetscBool  *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidBoolPointer(flag,2);
  /* diagonalscale is set by PCSetDiagonalScale(); PETSC_FALSE until then */
  *flag = pc->diagonalscale;
  PetscFunctionReturn(0);
}
143 
144 /*@
145    PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right
146       scaling as needed by certain time-stepping codes.
147 
148    Logically Collective on PC
149 
150    Input Parameters:
151 +  pc - the preconditioner context
152 -  s - scaling vector
153 
154    Level: intermediate
155 
156    Notes:
157     The system solved via the Krylov method is
158 $           D M A D^{-1} y = D M b  for left preconditioning or
159 $           D A M D^{-1} z = D b for right preconditioning
160 
161    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
162 
163 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCGetDiagonalScale()`
164 @*/
165 PetscErrorCode  PCSetDiagonalScale(PC pc,Vec s)
166 {
167   PetscFunctionBegin;
168   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
169   PetscValidHeaderSpecific(s,VEC_CLASSID,2);
170   pc->diagonalscale     = PETSC_TRUE;
171 
172   PetscCall(PetscObjectReference((PetscObject)s));
173   PetscCall(VecDestroy(&pc->diagonalscaleleft));
174 
175   pc->diagonalscaleleft = s;
176 
177   PetscCall(VecDuplicate(s,&pc->diagonalscaleright));
178   PetscCall(VecCopy(s,pc->diagonalscaleright));
179   PetscCall(VecReciprocal(pc->diagonalscaleright));
180   PetscFunctionReturn(0);
181 }
182 
183 /*@
184    PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes.
185 
186    Logically Collective on PC
187 
188    Input Parameters:
189 +  pc - the preconditioner context
190 .  in - input vector
191 -  out - scaled vector (may be the same as in)
192 
193    Level: intermediate
194 
195    Notes:
196     The system solved via the Krylov method is
197 $           D M A D^{-1} y = D M b  for left preconditioning or
198 $           D A M D^{-1} z = D b for right preconditioning
199 
200    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
201 
202    If diagonal scaling is turned off and in is not out then in is copied to out
203 
204 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleSet()`, `PCDiagonalScaleRight()`, `PCDiagonalScale()`
205 @*/
206 PetscErrorCode  PCDiagonalScaleLeft(PC pc,Vec in,Vec out)
207 {
208   PetscFunctionBegin;
209   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
210   PetscValidHeaderSpecific(in,VEC_CLASSID,2);
211   PetscValidHeaderSpecific(out,VEC_CLASSID,3);
212   if (pc->diagonalscale) {
213     PetscCall(VecPointwiseMult(out,pc->diagonalscaleleft,in));
214   } else if (in != out) {
215     PetscCall(VecCopy(in,out));
216   }
217   PetscFunctionReturn(0);
218 }
219 
220 /*@
221    PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes.
222 
223    Logically Collective on PC
224 
225    Input Parameters:
226 +  pc - the preconditioner context
227 .  in - input vector
228 -  out - scaled vector (may be the same as in)
229 
230    Level: intermediate
231 
232    Notes:
233     The system solved via the Krylov method is
234 $           D M A D^{-1} y = D M b  for left preconditioning or
235 $           D A M D^{-1} z = D b for right preconditioning
236 
237    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
238 
239    If diagonal scaling is turned off and in is not out then in is copied to out
240 
241 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleSet()`, `PCDiagonalScale()`
242 @*/
243 PetscErrorCode  PCDiagonalScaleRight(PC pc,Vec in,Vec out)
244 {
245   PetscFunctionBegin;
246   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
247   PetscValidHeaderSpecific(in,VEC_CLASSID,2);
248   PetscValidHeaderSpecific(out,VEC_CLASSID,3);
249   if (pc->diagonalscale) {
250     PetscCall(VecPointwiseMult(out,pc->diagonalscaleright,in));
251   } else if (in != out) {
252     PetscCall(VecCopy(in,out));
253   }
254   PetscFunctionReturn(0);
255 }
256 
257 /*@
258    PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the
259    operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
260    TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators() not the Pmat.
261 
262    Logically Collective on PC
263 
264    Input Parameters:
265 +  pc - the preconditioner context
266 -  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)
267 
268    Options Database Key:
269 .  -pc_use_amat <true,false> - use the amat to apply the operator
270 
271    Notes:
272    For the common case in which the linear system matrix and the matrix used to construct the
273    preconditioner are identical, this routine does nothing.
274 
275    Level: intermediate
276 
277 .seealso: `PCGetUseAmat()`, `PCBJACOBI`, `PCMG`, `PCFIELDSPLIT`, `PCCOMPOSITE`
278 @*/
PetscErrorCode  PCSetUseAmat(PC pc,PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* when PETSC_TRUE the PC applies the Amat instead of the Pmat inside preconditioning */
  pc->useAmat = flg;
  PetscFunctionReturn(0);
}
286 
287 /*@
288    PCSetErrorIfFailure - Causes PC to generate an error if a FPE, for example a zero pivot, is detected.
289 
290    Logically Collective on PC
291 
292    Input Parameters:
293 +  pc - iterative context obtained from PCCreate()
294 -  flg - PETSC_TRUE indicates you want the error generated
295 
296    Level: advanced
297 
298    Notes:
299     Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner, you can call KSPGetConvergedReason() after a KSPSolve()
300     to determine if it has converged or failed. Or use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is
301     detected.
302 
303     This is propagated into KSPs used by this PC, which then propagate it into PCs used by those KSPs
304 
305 .seealso: `PCGetInitialGuessNonzero()`, `PCSetInitialGuessKnoll()`, `PCGetInitialGuessKnoll()`
306 @*/
PetscErrorCode  PCSetErrorIfFailure(PC pc,PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,flg,2);
  /* when PETSC_TRUE, failures such as zero pivots generate an error instead of being recorded */
  pc->erroriffailure = flg;
  PetscFunctionReturn(0);
}
315 
316 /*@
317    PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the
318    operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
319    TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators() not the Pmat.
320 
321    Logically Collective on PC
322 
323    Input Parameter:
324 .  pc - the preconditioner context
325 
326    Output Parameter:
327 .  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)
328 
329    Notes:
330    For the common case in which the linear system matrix and the matrix used to construct the
331    preconditioner are identical, this routine does nothing.
332 
333    Level: intermediate
334 
335 .seealso: `PCSetUseAmat()`, `PCBJACOBI`, `PCMG`, `PCFIELDSPLIT`, `PCCOMPOSITE`
336 @*/
337 PetscErrorCode  PCGetUseAmat(PC pc,PetscBool *flg)
338 {
339   PetscFunctionBegin;
340   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
341   *flg = pc->useAmat;
342   PetscFunctionReturn(0);
343 }
344 
345 /*@
346    PCCreate - Creates a preconditioner context.
347 
348    Collective
349 
350    Input Parameter:
351 .  comm - MPI communicator
352 
353    Output Parameter:
354 .  pc - location to put the preconditioner context
355 
356    Notes:
357    The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi with PCILU or PCICC
358    in parallel. For dense matrices it is always PCNONE.
359 
360    Level: developer
361 
362 .seealso: `PCSetUp()`, `PCApply()`, `PCDestroy()`
363 @*/
364 PetscErrorCode  PCCreate(MPI_Comm comm,PC *newpc)
365 {
366   PC             pc;
367 
368   PetscFunctionBegin;
369   PetscValidPointer(newpc,2);
370   *newpc = NULL;
371   PetscCall(PCInitializePackage());
372 
373   PetscCall(PetscHeaderCreate(pc,PC_CLASSID,"PC","Preconditioner","PC",comm,PCDestroy,PCView));
374 
375   pc->mat                  = NULL;
376   pc->pmat                 = NULL;
377   pc->setupcalled          = 0;
378   pc->setfromoptionscalled = 0;
379   pc->data                 = NULL;
380   pc->diagonalscale        = PETSC_FALSE;
381   pc->diagonalscaleleft    = NULL;
382   pc->diagonalscaleright   = NULL;
383 
384   pc->modifysubmatrices  = NULL;
385   pc->modifysubmatricesP = NULL;
386 
387   *newpc = pc;
388   PetscFunctionReturn(0);
389 
390 }
391 
392 /* -------------------------------------------------------------------------------*/
393 
394 /*@
395    PCApply - Applies the preconditioner to a vector.
396 
397    Collective on PC
398 
399    Input Parameters:
400 +  pc - the preconditioner context
401 -  x - input vector
402 
403    Output Parameter:
404 .  y - output vector
405 
406    Level: developer
407 
408 .seealso: `PCApplyTranspose()`, `PCApplyBAorAB()`
409 @*/
PetscErrorCode  PCApply(PC pc,Vec x,Vec y)
{
  PetscInt       m,n,mv,nv;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  /* optional NaN/Inf screening of the input when error-on-failure is enabled */
  if (pc->erroriffailure) PetscCall(VecValidValues(x,2,PETSC_TRUE));
  /* use pmat to check vector sizes since for KSPLSQR the pmat may be of a different size than mat */
  PetscCall(MatGetLocalSize(pc->pmat,&m,&n));
  PetscCall(VecGetLocalSize(x,&mv));
  PetscCall(VecGetLocalSize(y,&nv));
  /* check pmat * y = x is feasible */
  PetscCheck(mv == m,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local rows %" PetscInt_FMT " does not equal input vector size %" PetscInt_FMT,m,mv);
  PetscCheck(nv == n,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local columns %" PetscInt_FMT " does not equal output vector size %" PetscInt_FMT,n,nv);
  /* y is written below; refuse a read-locked output vector up front */
  PetscCall(VecSetErrorIfLocked(y,3));

  PetscCall(PCSetUp(pc));         /* lazily builds the preconditioner on first use */
  PetscCall(VecLockReadPush(x));  /* guard x against modification by the implementation */
  PetscCall(PetscLogEventBegin(PC_Apply,pc,x,y,0));
  PetscCall((*pc->ops->apply)(pc,x,y));
  PetscCall(PetscLogEventEnd(PC_Apply,pc,x,y,0));
  if (pc->erroriffailure) PetscCall(VecValidValues(y,3,PETSC_FALSE));
  PetscCall(VecLockReadPop(x));
  PetscFunctionReturn(0);
}
438 
439 /*@
440    PCMatApply - Applies the preconditioner to multiple vectors stored as a MATDENSE. Like PCApply(), Y and X must be different matrices.
441 
442    Collective on PC
443 
444    Input Parameters:
445 +  pc - the preconditioner context
446 -  X - block of input vectors
447 
448    Output Parameter:
449 .  Y - block of output vectors
450 
451    Level: developer
452 
453 .seealso: `PCApply()`, `KSPMatSolve()`
454 @*/
PetscErrorCode  PCMatApply(PC pc,Mat X,Mat Y)
{
  Mat            A;
  Vec            cy, cx;
  /* (m1,M1,n1,N1): Y sizes; (m2,M2,n2,N2): X sizes; (m3,M3,n3,N3): Pmat sizes (local, global) */
  PetscInt       m1, M1, m2, M2, n1, N1, n2, N2, m3, M3, n3, N3;
  PetscBool      match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(X, MAT_CLASSID, 2);
  PetscValidHeaderSpecific(Y, MAT_CLASSID, 3);
  PetscCheckSameComm(pc, 1, X, 2);
  PetscCheckSameComm(pc, 1, Y, 3);
  PetscCheck(Y != X,PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "Y and X must be different matrices");
  PetscCall(PCGetOperators(pc, NULL, &A));
  PetscCall(MatGetLocalSize(A, &m3, &n3));
  PetscCall(MatGetLocalSize(X, &m2, &n2));
  PetscCall(MatGetLocalSize(Y, &m1, &n1));
  PetscCall(MatGetSize(A, &M3, &N3));
  PetscCall(MatGetSize(X, &M2, &N2));
  PetscCall(MatGetSize(Y, &M1, &N1));
  /* X and Y must carry the same number of column vectors; X must match the Pmat rows, Y its columns */
  PetscCheck(n1 == n2 && N1 == N2,PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of input vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and block of output vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")", n2, N2, n1, N1);
  PetscCheck(m2 == m3 && M2 == M3,PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of input vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m2, M2, m3, M3, n3, N3);
  PetscCheck(m1 == n3 && M1 == N3,PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of output vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m1, M1, m3, M3, n3, N3);
  /* both blocks must be stored as dense matrices (sequential or parallel) */
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)Y, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match,PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of output vectors not stored in a dense Mat");
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match,PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of input vectors not stored in a dense Mat");
  PetscCall(PCSetUp(pc));
  if (pc->ops->matapply) {
    /* fast path: the implementation applies to all columns at once */
    PetscCall(PetscLogEventBegin(PC_MatApply, pc, X, Y, 0));
    PetscCall((*pc->ops->matapply)(pc, X, Y));
    PetscCall(PetscLogEventEnd(PC_MatApply, pc, X, Y, 0));
  } else {
    /* fallback: apply the single-vector PCApply() to each column in turn */
    PetscCall(PetscInfo(pc, "PC type %s applying column by column\n", ((PetscObject)pc)->type_name));
    for (n1 = 0; n1 < N1; ++n1) {
      PetscCall(MatDenseGetColumnVecRead(X, n1, &cx));
      PetscCall(MatDenseGetColumnVecWrite(Y, n1, &cy));
      PetscCall(PCApply(pc, cx, cy));
      PetscCall(MatDenseRestoreColumnVecWrite(Y, n1, &cy));
      PetscCall(MatDenseRestoreColumnVecRead(X, n1, &cx));
    }
  }
  PetscFunctionReturn(0);
}
500 
501 /*@
502    PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.
503 
504    Collective on PC
505 
506    Input Parameters:
507 +  pc - the preconditioner context
508 -  x - input vector
509 
510    Output Parameter:
511 .  y - output vector
512 
513    Notes:
514    Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.
515 
516    Level: developer
517 
518 .seealso: `PCApply()`, `PCApplySymmetricRight()`
519 @*/
520 PetscErrorCode  PCApplySymmetricLeft(PC pc,Vec x,Vec y)
521 {
522   PetscFunctionBegin;
523   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
524   PetscValidHeaderSpecific(x,VEC_CLASSID,2);
525   PetscValidHeaderSpecific(y,VEC_CLASSID,3);
526   PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
527   if (pc->erroriffailure) PetscCall(VecValidValues(x,2,PETSC_TRUE));
528   PetscCall(PCSetUp(pc));
529   PetscCall(VecLockReadPush(x));
530   PetscCall(PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0));
531   PetscCall((*pc->ops->applysymmetricleft)(pc,x,y));
532   PetscCall(PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0));
533   PetscCall(VecLockReadPop(x));
534   if (pc->erroriffailure) PetscCall(VecValidValues(y,3,PETSC_FALSE));
535   PetscFunctionReturn(0);
536 }
537 
538 /*@
539    PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.
540 
541    Collective on PC
542 
543    Input Parameters:
544 +  pc - the preconditioner context
545 -  x - input vector
546 
547    Output Parameter:
548 .  y - output vector
549 
550    Level: developer
551 
552    Notes:
553    Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.
554 
555 .seealso: `PCApply()`, `PCApplySymmetricLeft()`
556 @*/
557 PetscErrorCode  PCApplySymmetricRight(PC pc,Vec x,Vec y)
558 {
559   PetscFunctionBegin;
560   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
561   PetscValidHeaderSpecific(x,VEC_CLASSID,2);
562   PetscValidHeaderSpecific(y,VEC_CLASSID,3);
563   PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
564   if (pc->erroriffailure) PetscCall(VecValidValues(x,2,PETSC_TRUE));
565   PetscCall(PCSetUp(pc));
566   PetscCall(VecLockReadPush(x));
567   PetscCall(PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0));
568   PetscCall((*pc->ops->applysymmetricright)(pc,x,y));
569   PetscCall(PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0));
570   PetscCall(VecLockReadPop(x));
571   if (pc->erroriffailure) PetscCall(VecValidValues(y,3,PETSC_FALSE));
572   PetscFunctionReturn(0);
573 }
574 
575 /*@
576    PCApplyTranspose - Applies the transpose of preconditioner to a vector.
577 
578    Collective on PC
579 
580    Input Parameters:
581 +  pc - the preconditioner context
582 -  x - input vector
583 
584    Output Parameter:
585 .  y - output vector
586 
587    Notes:
588     For complex numbers this applies the non-Hermitian transpose.
589 
590    Developer Notes:
591     We need to implement a PCApplyHermitianTranspose()
592 
593    Level: developer
594 
595 .seealso: `PCApply()`, `PCApplyBAorAB()`, `PCApplyBAorABTranspose()`, `PCApplyTransposeExists()`
596 @*/
597 PetscErrorCode  PCApplyTranspose(PC pc,Vec x,Vec y)
598 {
599   PetscFunctionBegin;
600   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
601   PetscValidHeaderSpecific(x,VEC_CLASSID,2);
602   PetscValidHeaderSpecific(y,VEC_CLASSID,3);
603   PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
604   if (pc->erroriffailure) PetscCall(VecValidValues(x,2,PETSC_TRUE));
605   PetscCall(PCSetUp(pc));
606   PetscCall(VecLockReadPush(x));
607   PetscCall(PetscLogEventBegin(PC_Apply,pc,x,y,0));
608   PetscCall((*pc->ops->applytranspose)(pc,x,y));
609   PetscCall(PetscLogEventEnd(PC_Apply,pc,x,y,0));
610   PetscCall(VecLockReadPop(x));
611   if (pc->erroriffailure) PetscCall(VecValidValues(y,3,PETSC_FALSE));
612   PetscFunctionReturn(0);
613 }
614 
615 /*@
616    PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation
617 
618    Collective on PC
619 
620    Input Parameter:
621 .  pc - the preconditioner context
622 
623    Output Parameter:
624 .  flg - PETSC_TRUE if a transpose operation is defined
625 
626    Level: developer
627 
628 .seealso: `PCApplyTranspose()`
629 @*/
630 PetscErrorCode  PCApplyTransposeExists(PC pc,PetscBool  *flg)
631 {
632   PetscFunctionBegin;
633   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
634   PetscValidBoolPointer(flg,2);
635   if (pc->ops->applytranspose) *flg = PETSC_TRUE;
636   else *flg = PETSC_FALSE;
637   PetscFunctionReturn(0);
638 }
639 
640 /*@
641    PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x.
642 
643    Collective on PC
644 
645    Input Parameters:
646 +  pc - the preconditioner context
647 .  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
648 .  x - input vector
649 -  work - work vector
650 
651    Output Parameter:
652 .  y - output vector
653 
654    Level: developer
655 
656    Notes:
657     If the PC has had PCSetDiagonalScale() set then D M A D^{-1} for left preconditioning or  D A M D^{-1} is actually applied. Note that the
658    specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling.
659 
660 .seealso: `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorABTranspose()`
661 @*/
PetscErrorCode  PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,side,2);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscValidHeaderSpecific(y,VEC_CLASSID,4);
  PetscValidHeaderSpecific(work,VEC_CLASSID,5);
  PetscCheckSameComm(pc,1,x,3);
  PetscCheckSameComm(pc,1,y,4);
  PetscCheckSameComm(pc,1,work,5);
  PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  PetscCheck(side == PC_LEFT || side == PC_SYMMETRIC || side == PC_RIGHT,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric");
  PetscCheck(!pc->diagonalscale || side != PC_SYMMETRIC,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application");
  if (pc->erroriffailure) PetscCall(VecValidValues(x,3,PETSC_TRUE));

  PetscCall(PCSetUp(pc));
  if (pc->diagonalscale) {
    /* diagonal scaling active: computes D (BA or AB) D^{-1} x, see PCSetDiagonalScale() */
    if (pc->ops->applyBA) {
      Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
      PetscCall(VecDuplicate(x,&work2));
      PetscCall(PCDiagonalScaleRight(pc,x,work2));
      PetscCall((*pc->ops->applyBA)(pc,side,work2,y,work));
      PetscCall(PCDiagonalScaleLeft(pc,y,y));
      PetscCall(VecDestroy(&work2));
    } else if (side == PC_RIGHT) {
      /* y = D A B D^{-1} x */
      PetscCall(PCDiagonalScaleRight(pc,x,y));
      PetscCall(PCApply(pc,y,work));
      PetscCall(MatMult(pc->mat,work,y));
      PetscCall(PCDiagonalScaleLeft(pc,y,y));
    } else if (side == PC_LEFT) {
      /* y = D B A D^{-1} x */
      PetscCall(PCDiagonalScaleRight(pc,x,y));
      PetscCall(MatMult(pc->mat,y,work));
      PetscCall(PCApply(pc,work,y));
      PetscCall(PCDiagonalScaleLeft(pc,y,y));
    } else PetscCheck(side != PC_SYMMETRIC,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner");
  } else {
    if (pc->ops->applyBA) {
      /* implementation provides a fused BA/AB apply */
      PetscCall((*pc->ops->applyBA)(pc,side,x,y,work));
    } else if (side == PC_RIGHT) {
      /* y = A B x */
      PetscCall(PCApply(pc,x,work));
      PetscCall(MatMult(pc->mat,work,y));
    } else if (side == PC_LEFT) {
      /* y = B A x */
      PetscCall(MatMult(pc->mat,x,work));
      PetscCall(PCApply(pc,work,y));
    } else if (side == PC_SYMMETRIC) {
      /* There's an extra copy here; maybe should provide 2 work vectors instead? */
      PetscCall(PCApplySymmetricRight(pc,x,work));
      PetscCall(MatMult(pc->mat,work,y));
      PetscCall(VecCopy(y,work));
      PetscCall(PCApplySymmetricLeft(pc,work,y));
    }
  }
  if (pc->erroriffailure) PetscCall(VecValidValues(y,4,PETSC_FALSE));
  PetscFunctionReturn(0);
}
718 
719 /*@
720    PCApplyBAorABTranspose - Applies the transpose of the preconditioner
721    and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning,
722    NOT tr(B*A) = tr(A)*tr(B).
723 
724    Collective on PC
725 
726    Input Parameters:
727 +  pc - the preconditioner context
728 .  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
729 .  x - input vector
730 -  work - work vector
731 
732    Output Parameter:
733 .  y - output vector
734 
735    Notes:
736     this routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner
737       defined by B'. This is why this has the funny form that it computes tr(B) * tr(A)
738 
739     Level: developer
740 
741 .seealso: `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorAB()`
742 @*/
PetscErrorCode  PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscValidHeaderSpecific(y,VEC_CLASSID,4);
  PetscValidHeaderSpecific(work,VEC_CLASSID,5);
  PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues(x,3,PETSC_TRUE));
  /* fused implementation takes precedence when available */
  if (pc->ops->applyBAtranspose) {
    PetscCall((*pc->ops->applyBAtranspose)(pc,side,x,y,work));
    if (pc->erroriffailure) PetscCall(VecValidValues(y,4,PETSC_FALSE));
    PetscFunctionReturn(0);
  }
  PetscCheck(side == PC_LEFT || side == PC_RIGHT,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left");

  PetscCall(PCSetUp(pc));
  if (side == PC_RIGHT) {
    /* y = A' B' x */
    PetscCall(PCApplyTranspose(pc,x,work));
    PetscCall(MatMultTranspose(pc->mat,work,y));
  } else if (side == PC_LEFT) {
    /* y = B' A' x */
    PetscCall(MatMultTranspose(pc->mat,x,work));
    PetscCall(PCApplyTranspose(pc,work,y));
  }
  /* add support for PC_SYMMETRIC */
  if (pc->erroriffailure) PetscCall(VecValidValues(y,4,PETSC_FALSE));
  PetscFunctionReturn(0);
}
771 
772 /* -------------------------------------------------------------------------------*/
773 
774 /*@
775    PCApplyRichardsonExists - Determines whether a particular preconditioner has a
776    built-in fast application of Richardson's method.
777 
778    Not Collective
779 
780    Input Parameter:
781 .  pc - the preconditioner
782 
783    Output Parameter:
784 .  exists - PETSC_TRUE or PETSC_FALSE
785 
786    Level: developer
787 
788 .seealso: `PCApplyRichardson()`
789 @*/
790 PetscErrorCode  PCApplyRichardsonExists(PC pc,PetscBool  *exists)
791 {
792   PetscFunctionBegin;
793   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
794   PetscValidBoolPointer(exists,2);
795   if (pc->ops->applyrichardson) *exists = PETSC_TRUE;
796   else *exists = PETSC_FALSE;
797   PetscFunctionReturn(0);
798 }
799 
800 /*@
801    PCApplyRichardson - Applies several steps of Richardson iteration with
802    the particular preconditioner. This routine is usually used by the
803    Krylov solvers and not the application code directly.
804 
805    Collective on PC
806 
807    Input Parameters:
808 +  pc  - the preconditioner context
809 .  b   - the right hand side
810 .  w   - one work vector
811 .  rtol - relative decrease in residual norm convergence criteria
812 .  abstol - absolute residual norm convergence criteria
813 .  dtol - divergence residual norm increase criteria
814 .  its - the number of iterations to apply.
815 -  guesszero - if the input x contains nonzero initial guess
816 
817    Output Parameters:
818 +  outits - number of iterations actually used (for SOR this always equals its)
819 .  reason - the reason the apply terminated
820 -  y - the solution (also contains initial guess if guesszero is PETSC_FALSE)
821 
822    Notes:
823    Most preconditioners do not support this function. Use the command
824    PCApplyRichardsonExists() to determine if one does.
825 
826    Except for the multigrid PC this routine ignores the convergence tolerances
827    and always runs for the number of iterations
828 
829    Level: developer
830 
831 .seealso: `PCApplyRichardsonExists()`
832 @*/
833 PetscErrorCode  PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason)
834 {
835   PetscFunctionBegin;
836   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
837   PetscValidHeaderSpecific(b,VEC_CLASSID,2);
838   PetscValidHeaderSpecific(y,VEC_CLASSID,3);
839   PetscValidHeaderSpecific(w,VEC_CLASSID,4);
840   PetscCheck(b != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"b and y must be different vectors");
841   PetscCall(PCSetUp(pc));
842   PetscCall((*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason));
843   PetscFunctionReturn(0);
844 }
845 
846 /*@
847    PCSetFailedReason - Sets the reason a PCSetUp() failed or PC_NOERROR if it did not fail
848 
849    Logically Collective on PC
850 
851    Input Parameters:
852 +  pc - the preconditioner context
853 -  reason - the reason it failed
854 
855    Level: advanced
856 
857 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCFailedReason`
858 @*/
859 PetscErrorCode PCSetFailedReason(PC pc,PCFailedReason reason)
860 {
861   PetscFunctionBegin;
862   pc->failedreason = reason;
863   PetscFunctionReturn(0);
864 }
865 
866 /*@
867    PCGetFailedReason - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail
868 
869    Logically Collective on PC
870 
871    Input Parameter:
872 .  pc - the preconditioner context
873 
874    Output Parameter:
875 .  reason - the reason it failed
876 
877    Level: advanced
878 
879    Notes: This is the maximum of the failed reasons over all ranks in the PC communicator. It is only valid after
880    a call to KSPCheckDot() or KSPCheckNorm() inside a KSPSolve(). It is not valid immediately after a PCSetUp()
881    or PCApply(); in that case use PCGetFailedReasonRank()
882 
883 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReasonRank()`, `PCSetFailedReason()`
884 @*/
885 PetscErrorCode PCGetFailedReason(PC pc,PCFailedReason *reason)
886 {
887   PetscFunctionBegin;
888   if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
889   else *reason = pc->failedreason;
890   PetscFunctionReturn(0);
891 }
892 
893 /*@
894    PCGetFailedReasonRank - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail on this MPI rank
895 
896   Not Collective on PC
897 
898    Input Parameter:
899 .  pc - the preconditioner context
900 
901    Output Parameter:
902 .  reason - the reason it failed
903 
904    Notes:
905      Different ranks may have different reasons or no reason, see PCGetFailedReason()
906 
907    Level: advanced
908 
909 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReason()`, `PCSetFailedReason()`
910 @*/
911 PetscErrorCode PCGetFailedReasonRank(PC pc,PCFailedReason *reason)
912 {
913   PetscFunctionBegin;
914   if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
915   else *reason = pc->failedreason;
916   PetscFunctionReturn(0);
917 }
918 
919 /*  Next line needed to deactivate KSP_Solve logging */
920 #include <petsc/private/kspimpl.h>
921 
922 /*
923       a setupcall of 0 indicates never setup,
924                      1 indicates has been previously setup
925                     -1 indicates a PCSetUp() was attempted and failed
926 */
927 /*@
928    PCSetUp - Prepares for the use of a preconditioner.
929 
930    Collective on PC
931 
932    Input Parameter:
933 .  pc - the preconditioner context
934 
935    Level: developer
936 
937 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`
938 @*/
PetscErrorCode  PCSetUp(PC pc)
{
  const char       *def;
  PetscObjectState matstate, matnonzerostate;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscCheck(pc->mat,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first");

  /* honor PCSetReusePreconditioner(): keep the previously built preconditioner untouched */
  if (pc->setupcalled && pc->reusepreconditioner) {
    PetscCall(PetscInfo(pc,"Leaving PC with identical preconditioner since reuse preconditioner is set\n"));
    PetscFunctionReturn(0);
  }

  /* compare the current state of pmat with the state recorded at the previous setup to
     decide whether the preconditioner must be rebuilt, and with which nonzero-pattern flag */
  PetscCall(PetscObjectStateGet((PetscObject)pc->pmat,&matstate));
  PetscCall(MatGetNonzeroState(pc->pmat,&matnonzerostate));
  if (!pc->setupcalled) {
    PetscCall(PetscInfo(pc,"Setting up PC for first time\n"));
    pc->flag = DIFFERENT_NONZERO_PATTERN;
  } else if (matstate == pc->matstate) {
    /* operator values unchanged since the last setup: nothing to rebuild */
    PetscCall(PetscInfo(pc,"Leaving PC with identical preconditioner since operator is unchanged\n"));
    PetscFunctionReturn(0);
  } else {
    if (matnonzerostate > pc->matnonzerostate) {
      PetscCall(PetscInfo(pc,"Setting up PC with different nonzero pattern\n"));
      pc->flag = DIFFERENT_NONZERO_PATTERN;
    } else {
      PetscCall(PetscInfo(pc,"Setting up PC with same nonzero pattern\n"));
      pc->flag = SAME_NONZERO_PATTERN;
    }
  }
  /* remember the states we are building against for the comparison above next time */
  pc->matstate        = matstate;
  pc->matnonzerostate = matnonzerostate;

  /* no type chosen by the user or the options database: pick a default based on the matrix */
  if (!((PetscObject)pc)->type_name) {
    PetscCall(PCGetDefaultType_Private(pc,&def));
    PetscCall(PCSetType(pc,def));
  }

  PetscCall(MatSetErrorIfFailure(pc->pmat,pc->erroriffailure));
  PetscCall(MatSetErrorIfFailure(pc->mat,pc->erroriffailure));
  PetscCall(PetscLogEventBegin(PC_SetUp,pc,0,0,0));
  if (pc->ops->setup) {
    /* do not log solves and applications of preconditioners while constructing preconditioners; perhaps they should be logged separately from the regular solves */
    PetscCall(KSPInitializePackage());
    PetscCall(PetscLogEventDeactivatePush(KSP_Solve));
    PetscCall(PetscLogEventDeactivatePush(PC_Apply));
    PetscCall((*pc->ops->setup)(pc));
    PetscCall(PetscLogEventDeactivatePop(KSP_Solve));
    PetscCall(PetscLogEventDeactivatePop(PC_Apply));
  }
  PetscCall(PetscLogEventEnd(PC_SetUp,pc,0,0,0));
  /* a negative setupcalled means a previous setup attempt failed (see comment above this
     function); being nonzero it is left untouched here rather than overwritten with 1 */
  if (!pc->setupcalled) pc->setupcalled = 1;
  PetscFunctionReturn(0);
}
994 
995 /*@
996    PCSetUpOnBlocks - Sets up the preconditioner for each block in
997    the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
998    methods.
999 
1000    Collective on PC
1001 
1002    Input Parameters:
1003 .  pc - the preconditioner context
1004 
1005    Level: developer
1006 
1007 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCSetUp()`
1008 @*/
1009 PetscErrorCode  PCSetUpOnBlocks(PC pc)
1010 {
1011   PetscFunctionBegin;
1012   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1013   if (!pc->ops->setuponblocks) PetscFunctionReturn(0);
1014   PetscCall(PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0));
1015   PetscCall((*pc->ops->setuponblocks)(pc));
1016   PetscCall(PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0));
1017   PetscFunctionReturn(0);
1018 }
1019 
1020 /*@C
1021    PCSetModifySubMatrices - Sets a user-defined routine for modifying the
1022    submatrices that arise within certain subdomain-based preconditioners.
1023    The basic submatrices are extracted from the preconditioner matrix as
1024    usual; the user can then alter these (for example, to set different boundary
1025    conditions for each submatrix) before they are used for the local solves.
1026 
1027    Logically Collective on PC
1028 
1029    Input Parameters:
1030 +  pc - the preconditioner context
1031 .  func - routine for modifying the submatrices
1032 -  ctx - optional user-defined context (may be null)
1033 
1034    Calling sequence of func:
1035 $     func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx);
1036 
1037 +  row - an array of index sets that contain the global row numbers
1038          that comprise each local submatrix
1039 .  col - an array of index sets that contain the global column numbers
1040          that comprise each local submatrix
1041 .  submat - array of local submatrices
1042 -  ctx - optional user-defined context for private data for the
1043          user-defined func routine (may be null)
1044 
1045    Notes:
1046    PCSetModifySubMatrices() MUST be called before KSPSetUp() and
1047    KSPSolve().
1048 
1049    A routine set by PCSetModifySubMatrices() is currently called within
1050    the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM)
1051    preconditioners.  All other preconditioners ignore this routine.
1052 
1053    Level: advanced
1054 
1055 .seealso: `PCModifySubMatrices()`
1056 @*/
1057 PetscErrorCode  PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx)
1058 {
1059   PetscFunctionBegin;
1060   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1061   pc->modifysubmatrices  = func;
1062   pc->modifysubmatricesP = ctx;
1063   PetscFunctionReturn(0);
1064 }
1065 
1066 /*@C
1067    PCModifySubMatrices - Calls an optional user-defined routine within
1068    certain preconditioners if one has been set with PCSetModifySubMatrices().
1069 
1070    Collective on PC
1071 
1072    Input Parameters:
1073 +  pc - the preconditioner context
1074 .  nsub - the number of local submatrices
1075 .  row - an array of index sets that contain the global row numbers
1076          that comprise each local submatrix
1077 .  col - an array of index sets that contain the global column numbers
1078          that comprise each local submatrix
1079 .  submat - array of local submatrices
1080 -  ctx - optional user-defined context for private data for the
1081          user-defined routine (may be null)
1082 
1083    Output Parameter:
1084 .  submat - array of local submatrices (the entries of which may
1085             have been modified)
1086 
1087    Notes:
1088    The user should NOT generally call this routine, as it will
1089    automatically be called within certain preconditioners (currently
1090    block Jacobi, additive Schwarz) if set.
1091 
1092    The basic submatrices are extracted from the preconditioner matrix
1093    as usual; the user can then alter these (for example, to set different
1094    boundary conditions for each submatrix) before they are used for the
1095    local solves.
1096 
1097    Level: developer
1098 
1099 .seealso: `PCSetModifySubMatrices()`
1100 @*/
1101 PetscErrorCode  PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
1102 {
1103   PetscFunctionBegin;
1104   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1105   if (!pc->modifysubmatrices) PetscFunctionReturn(0);
1106   PetscCall(PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0));
1107   PetscCall((*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx));
1108   PetscCall(PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0));
1109   PetscFunctionReturn(0);
1110 }
1111 
1112 /*@
1113    PCSetOperators - Sets the matrix associated with the linear system and
1114    a (possibly) different one associated with the preconditioner.
1115 
1116    Logically Collective on PC
1117 
1118    Input Parameters:
1119 +  pc - the preconditioner context
1120 .  Amat - the matrix that defines the linear system
1121 -  Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat.
1122 
1123    Notes:
1124     Passing a NULL for Amat or Pmat removes the matrix that is currently used.
1125 
1126     If you wish to replace either Amat or Pmat but leave the other one untouched then
1127     first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
1128     on it and then pass it back in in your call to KSPSetOperators().
1129 
1130    More Notes about Repeated Solution of Linear Systems:
1131    PETSc does NOT reset the matrix entries of either Amat or Pmat
1132    to zero after a linear solve; the user is completely responsible for
1133    matrix assembly.  See the routine MatZeroEntries() if desiring to
1134    zero all elements of a matrix.
1135 
1136    Level: intermediate
1137 
1138 .seealso: `PCGetOperators()`, `MatZeroEntries()`
1139  @*/
PetscErrorCode  PCSetOperators(PC pc,Mat Amat,Mat Pmat)
{
  PetscInt         m1,n1,m2,n2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (Amat) PetscValidHeaderSpecific(Amat,MAT_CLASSID,2);
  if (Pmat) PetscValidHeaderSpecific(Pmat,MAT_CLASSID,3);
  if (Amat) PetscCheckSameComm(pc,1,Amat,2);
  if (Pmat) PetscCheckSameComm(pc,1,Pmat,3);
  /* once the PC has been set up the local sizes of replacement operators may not change */
  if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) {
    PetscCall(MatGetLocalSize(Amat,&m1,&n1));
    PetscCall(MatGetLocalSize(pc->mat,&m2,&n2));
    PetscCheck(m1 == m2 && n1 == n2,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT,m2,n2,m1,n1);
    PetscCall(MatGetLocalSize(Pmat,&m1,&n1));
    PetscCall(MatGetLocalSize(pc->pmat,&m2,&n2));
    PetscCheck(m1 == m2 && n1 == n2,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT,m2,n2,m1,n1);
  }

  if (Pmat != pc->pmat) {
    /* the matrix defining the preconditioner changed, so invalidate the recorded states;
       this forces a fresh build of the preconditioner at the next PCSetUp() */
    pc->matnonzerostate = -1;
    pc->matstate        = -1;
  }

  /* reference first in case the matrices are the same (destroying first could free them) */
  if (Amat) PetscCall(PetscObjectReference((PetscObject)Amat));
  PetscCall(MatDestroy(&pc->mat));
  if (Pmat) PetscCall(PetscObjectReference((PetscObject)Pmat));
  PetscCall(MatDestroy(&pc->pmat));
  pc->mat  = Amat;
  pc->pmat = Pmat;
  PetscFunctionReturn(0);
}
1174 
1175 /*@
1176    PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner has changed.
1177 
1178    Logically Collective on PC
1179 
1180    Input Parameters:
1181 +  pc - the preconditioner context
1182 -  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner
1183 
1184     Level: intermediate
1185 
1186 .seealso: `PCGetOperators()`, `MatZeroEntries()`, `PCGetReusePreconditioner()`, `KSPSetReusePreconditioner()`
1187  @*/
1188 PetscErrorCode  PCSetReusePreconditioner(PC pc,PetscBool flag)
1189 {
1190   PetscFunctionBegin;
1191   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1192   PetscValidLogicalCollectiveBool(pc,flag,2);
1193   pc->reusepreconditioner = flag;
1194   PetscFunctionReturn(0);
1195 }
1196 
1197 /*@
1198    PCGetReusePreconditioner - Determines if the PC reuses the current preconditioner even if the operator in the preconditioner has changed.
1199 
1200    Not Collective
1201 
1202    Input Parameter:
1203 .  pc - the preconditioner context
1204 
1205    Output Parameter:
1206 .  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner
1207 
1208    Level: intermediate
1209 
1210 .seealso: `PCGetOperators()`, `MatZeroEntries()`, `PCSetReusePreconditioner()`
1211  @*/
1212 PetscErrorCode  PCGetReusePreconditioner(PC pc,PetscBool *flag)
1213 {
1214   PetscFunctionBegin;
1215   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1216   PetscValidBoolPointer(flag,2);
1217   *flag = pc->reusepreconditioner;
1218   PetscFunctionReturn(0);
1219 }
1220 
1221 /*@
1222    PCGetOperators - Gets the matrix associated with the linear system and
1223    possibly a different one associated with the preconditioner.
1224 
1225    Not collective, though parallel Mats are returned if the PC is parallel
1226 
1227    Input Parameter:
1228 .  pc - the preconditioner context
1229 
1230    Output Parameters:
1231 +  Amat - the matrix defining the linear system
1232 -  Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat.
1233 
1234    Level: intermediate
1235 
1236    Notes:
1237     Does not increase the reference count of the matrices, so you should not destroy them
1238 
1239    Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators
1240       are created in PC and returned to the user. In this case, if both operators
1241       mat and pmat are requested, two DIFFERENT operators will be returned. If
1242       only one is requested both operators in the PC will be the same (i.e. as
1243       if one had called KSP/PCSetOperators() with the same argument for both Mats).
1244       The user must set the sizes of the returned matrices and their type etc just
1245       as if the user created them with MatCreate(). For example,
1246 
1247 $         KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to
1248 $           set size, type, etc of Amat
1249 
1250 $         MatCreate(comm,&Amat);
1251 $         KSP/PCSetOperators(ksp/pc,Amat,Amat);
1252 $         PetscObjectDereference((PetscObject)Amat);
1253 $           set size, type, etc of Amat
1254 
1255      and
1256 
1257 $         KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to
1258 $           set size, type, etc of Amat and Pmat
1259 
1260 $         MatCreate(comm,&Amat);
1261 $         MatCreate(comm,&Pmat);
1262 $         KSP/PCSetOperators(ksp/pc,Amat,Pmat);
1263 $         PetscObjectDereference((PetscObject)Amat);
1264 $         PetscObjectDereference((PetscObject)Pmat);
1265 $           set size, type, etc of Amat and Pmat
1266 
1267     The rationale for this support is so that when creating a TS, SNES, or KSP the hierarchy
1268     of underlying objects (i.e. SNES, KSP, PC, Mat) and their lifespans can be completely
1269     managed by the top most level object (i.e. the TS, SNES, or KSP). Another way to look
1270     at this is when you create a SNES you do not NEED to create a KSP and attach it to
1271     the SNES object (the SNES object manages it for you). Similarly when you create a KSP
1272     you do not need to attach a PC to it (the KSP object manages the PC object for you).
1273     Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when
1274     it can be created for you?
1275 
1276 .seealso: `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperatorsSet()`
1277 @*/
PetscErrorCode  PCGetOperators(PC pc,Mat *Amat,Mat *Pmat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* if the operators have not been set they are lazily created here (see the manual page
     Notes); each slot (pc->mat, pc->pmat) always ends up holding its own reference */
  if (Amat) {
    if (!pc->mat) {
      if (pc->pmat && !Pmat) {  /* pmat has been set, but user did not request it, so use it for Amat */
        pc->mat = pc->pmat;
        PetscCall(PetscObjectReference((PetscObject)pc->mat));
      } else {                  /* both Amat and Pmat are empty */
        PetscCall(MatCreate(PetscObjectComm((PetscObject)pc),&pc->mat));
        if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */
          pc->pmat = pc->mat;
          PetscCall(PetscObjectReference((PetscObject)pc->pmat));
        }
      }
    }
    *Amat = pc->mat;
  }
  if (Pmat) {
    if (!pc->pmat) {
      if (pc->mat && !Amat) {    /* Amat has been set but was not requested, so use it for pmat */
        pc->pmat = pc->mat;
        PetscCall(PetscObjectReference((PetscObject)pc->pmat));
      } else {                   /* both are empty: create Pmat, aliasing Amat to it when Amat was not requested */
        PetscCall(MatCreate(PetscObjectComm((PetscObject)pc),&pc->pmat));
        if (!Amat) { /* user did NOT request Amat, so make same as Pmat */
          pc->mat = pc->pmat;
          PetscCall(PetscObjectReference((PetscObject)pc->mat));
        }
      }
    }
    *Pmat = pc->pmat;
  }
  PetscFunctionReturn(0);
}
1314 
1315 /*@C
1316    PCGetOperatorsSet - Determines if the matrix associated with the linear system and
1317    possibly a different one associated with the preconditioner have been set in the PC.
1318 
1319    Not collective, though the results on all processes should be the same
1320 
1321    Input Parameter:
1322 .  pc - the preconditioner context
1323 
1324    Output Parameters:
1325 +  mat - the matrix associated with the linear system was set
1326 -  pmat - matrix associated with the preconditioner was set, usually the same
1327 
1328    Level: intermediate
1329 
1330 .seealso: `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperators()`
1331 @*/
1332 PetscErrorCode  PCGetOperatorsSet(PC pc,PetscBool  *mat,PetscBool  *pmat)
1333 {
1334   PetscFunctionBegin;
1335   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1336   if (mat) *mat = (pc->mat)  ? PETSC_TRUE : PETSC_FALSE;
1337   if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE;
1338   PetscFunctionReturn(0);
1339 }
1340 
1341 /*@
1342    PCFactorGetMatrix - Gets the factored matrix from the
1343    preconditioner context.  This routine is valid only for the LU,
1344    incomplete LU, Cholesky, and incomplete Cholesky methods.
1345 
1346    Not Collective on PC though Mat is parallel if PC is parallel
1347 
1348    Input Parameters:
1349 .  pc - the preconditioner context
1350 
1351    Output parameters:
1352 .  mat - the factored matrix
1353 
1354    Level: advanced
1355 
1356    Notes:
1357     Does not increase the reference count for the matrix so DO NOT destroy it
1358 
1359 @*/
1360 PetscErrorCode  PCFactorGetMatrix(PC pc,Mat *mat)
1361 {
1362   PetscFunctionBegin;
1363   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1364   PetscValidPointer(mat,2);
1365   if (pc->ops->getfactoredmatrix) {
1366     PetscCall((*pc->ops->getfactoredmatrix)(pc,mat));
1367   } else SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC type does not support getting factor matrix");
1368   PetscFunctionReturn(0);
1369 }
1370 
1371 /*@C
1372    PCSetOptionsPrefix - Sets the prefix used for searching for all
1373    PC options in the database.
1374 
1375    Logically Collective on PC
1376 
1377    Input Parameters:
1378 +  pc - the preconditioner context
1379 -  prefix - the prefix string to prepend to all PC option requests
1380 
1381    Notes:
1382    A hyphen (-) must NOT be given at the beginning of the prefix name.
1383    The first character of all runtime options is AUTOMATICALLY the
1384    hyphen.
1385 
1386    Level: advanced
1387 
1388 .seealso: `PCAppendOptionsPrefix()`, `PCGetOptionsPrefix()`
1389 @*/
1390 PetscErrorCode  PCSetOptionsPrefix(PC pc,const char prefix[])
1391 {
1392   PetscFunctionBegin;
1393   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1394   PetscCall(PetscObjectSetOptionsPrefix((PetscObject)pc,prefix));
1395   PetscFunctionReturn(0);
1396 }
1397 
1398 /*@C
1399    PCAppendOptionsPrefix - Appends to the prefix used for searching for all
1400    PC options in the database.
1401 
1402    Logically Collective on PC
1403 
1404    Input Parameters:
1405 +  pc - the preconditioner context
1406 -  prefix - the prefix string to prepend to all PC option requests
1407 
1408    Notes:
1409    A hyphen (-) must NOT be given at the beginning of the prefix name.
1410    The first character of all runtime options is AUTOMATICALLY the
1411    hyphen.
1412 
1413    Level: advanced
1414 
1415 .seealso: `PCSetOptionsPrefix()`, `PCGetOptionsPrefix()`
1416 @*/
1417 PetscErrorCode  PCAppendOptionsPrefix(PC pc,const char prefix[])
1418 {
1419   PetscFunctionBegin;
1420   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1421   PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix));
1422   PetscFunctionReturn(0);
1423 }
1424 
1425 /*@C
1426    PCGetOptionsPrefix - Gets the prefix used for searching for all
1427    PC options in the database.
1428 
1429    Not Collective
1430 
1431    Input Parameters:
1432 .  pc - the preconditioner context
1433 
1434    Output Parameters:
1435 .  prefix - pointer to the prefix string used, is returned
1436 
1437    Notes:
1438     On the fortran side, the user should pass in a string 'prefix' of
1439    sufficient length to hold the prefix.
1440 
1441    Level: advanced
1442 
1443 .seealso: `PCSetOptionsPrefix()`, `PCAppendOptionsPrefix()`
1444 @*/
1445 PetscErrorCode  PCGetOptionsPrefix(PC pc,const char *prefix[])
1446 {
1447   PetscFunctionBegin;
1448   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1449   PetscValidPointer(prefix,2);
1450   PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc,prefix));
1451   PetscFunctionReturn(0);
1452 }
1453 
1454 /*
1455    Indicates the right hand side will be changed by KSPSolve(), this occurs for a few
1456   preconditioners including BDDC and Eisenstat that transform the equations before applying
1457   the Krylov methods
1458 */
1459 PETSC_INTERN PetscErrorCode  PCPreSolveChangeRHS(PC pc,PetscBool *change)
1460 {
1461   PetscFunctionBegin;
1462   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1463   PetscValidPointer(change,2);
1464   *change = PETSC_FALSE;
1465   PetscTryMethod(pc,"PCPreSolveChangeRHS_C",(PC,PetscBool*),(pc,change));
1466   PetscFunctionReturn(0);
1467 }
1468 
1469 /*@
1470    PCPreSolve - Optional pre-solve phase, intended for any
1471    preconditioner-specific actions that must be performed before
1472    the iterative solve itself.
1473 
1474    Collective on PC
1475 
1476    Input Parameters:
1477 +  pc - the preconditioner context
1478 -  ksp - the Krylov subspace context
1479 
1480    Level: developer
1481 
1482    Sample of Usage:
1483 .vb
1484     PCPreSolve(pc,ksp);
1485     KSPSolve(ksp,b,x);
1486     PCPostSolve(pc,ksp);
1487 .ve
1488 
1489    Notes:
1490    The pre-solve phase is distinct from the PCSetUp() phase.
1491 
1492    KSPSolve() calls this directly, so is rarely called by the user.
1493 
1494 .seealso: `PCPostSolve()`
1495 @*/
1496 PetscErrorCode PCPreSolve(PC pc,KSP ksp)
1497 {
1498   Vec            x,rhs;
1499 
1500   PetscFunctionBegin;
1501   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1502   PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
1503   pc->presolvedone++;
1504   PetscCheck(pc->presolvedone <= 2,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot embed PCPreSolve() more than twice");
1505   PetscCall(KSPGetSolution(ksp,&x));
1506   PetscCall(KSPGetRhs(ksp,&rhs));
1507 
1508   if (pc->ops->presolve) PetscCall((*pc->ops->presolve)(pc,ksp,rhs,x));
1509   else if (pc->presolve) PetscCall((pc->presolve)(pc,ksp));
1510   PetscFunctionReturn(0);
1511 }
1512 
1513 /*@C
1514    PCSetPreSolve - Sets function PCPreSolve() which is intended for any
1515    preconditioner-specific actions that must be performed before
1516    the iterative solve itself.
1517 
1518    Logically Collective on pc
1519 
1520    Input Parameters:
1521 +   pc - the preconditioner object
1522 -   presolve - the function to call before the solve
1523 
1524    Calling sequence of presolve:
1525 $  func(PC pc,KSP ksp)
1526 
1527 +  pc - the PC context
1528 -  ksp - the KSP context
1529 
1530    Level: developer
1531 
1532 .seealso: `PC`, `PCSetUp()`, `PCPreSolve()`
1533 @*/
1534 PetscErrorCode PCSetPreSolve(PC pc,PetscErrorCode (*presolve)(PC,KSP))
1535 {
1536   PetscFunctionBegin;
1537   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1538   pc->presolve = presolve;
1539   PetscFunctionReturn(0);
1540 }
1541 
1542 /*@
1543    PCPostSolve - Optional post-solve phase, intended for any
1544    preconditioner-specific actions that must be performed after
1545    the iterative solve itself.
1546 
1547    Collective on PC
1548 
1549    Input Parameters:
1550 +  pc - the preconditioner context
1551 -  ksp - the Krylov subspace context
1552 
1553    Sample of Usage:
1554 .vb
1555     PCPreSolve(pc,ksp);
1556     KSPSolve(ksp,b,x);
1557     PCPostSolve(pc,ksp);
1558 .ve
1559 
1560    Note:
1561    KSPSolve() calls this routine directly, so it is rarely called by the user.
1562 
1563    Level: developer
1564 
1565 .seealso: `PCPreSolve()`, `KSPSolve()`
1566 @*/
1567 PetscErrorCode  PCPostSolve(PC pc,KSP ksp)
1568 {
1569   Vec            x,rhs;
1570 
1571   PetscFunctionBegin;
1572   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1573   PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
1574   pc->presolvedone--;
1575   PetscCall(KSPGetSolution(ksp,&x));
1576   PetscCall(KSPGetRhs(ksp,&rhs));
1577   if (pc->ops->postsolve) PetscCall((*pc->ops->postsolve)(pc,ksp,rhs,x));
1578   PetscFunctionReturn(0);
1579 }
1580 
1581 /*@C
1582   PCLoad - Loads a PC that has been stored in binary  with PCView().
1583 
1584   Collective on PetscViewer
1585 
1586   Input Parameters:
1587 + newdm - the newly loaded PC, this needs to have been created with PCCreate() or
1588            some related function before a call to PCLoad().
1589 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen()
1590 
1591    Level: intermediate
1592 
1593   Notes:
1594    The type is determined by the data in the file, any type set into the PC before this call is ignored.
1595 
1596   Notes for advanced users:
1597   Most users should not need to know the details of the binary storage
1598   format, since PCLoad() and PCView() completely hide these details.
1599   But for anyone who's interested, the standard binary matrix storage
1600   format is
1601 .vb
1602      has not yet been determined
1603 .ve
1604 
1605 .seealso: `PetscViewerBinaryOpen()`, `PCView()`, `MatLoad()`, `VecLoad()`
1606 @*/
1607 PetscErrorCode  PCLoad(PC newdm, PetscViewer viewer)
1608 {
1609   PetscBool      isbinary;
1610   PetscInt       classid;
1611   char           type[256];
1612 
1613   PetscFunctionBegin;
1614   PetscValidHeaderSpecific(newdm,PC_CLASSID,1);
1615   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
1616   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary));
1617   PetscCheck(isbinary,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");
1618 
1619   PetscCall(PetscViewerBinaryRead(viewer,&classid,1,NULL,PETSC_INT));
1620   PetscCheck(classid == PC_FILE_CLASSID,PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not PC next in file");
1621   PetscCall(PetscViewerBinaryRead(viewer,type,256,NULL,PETSC_CHAR));
1622   PetscCall(PCSetType(newdm, type));
1623   if (newdm->ops->load) PetscCall((*newdm->ops->load)(newdm,viewer));
1624   PetscFunctionReturn(0);
1625 }
1626 
1627 #include <petscdraw.h>
1628 #if defined(PETSC_HAVE_SAWS)
1629 #include <petscviewersaws.h>
1630 #endif
1631 
1632 /*@C
1633    PCViewFromOptions - View from Options
1634 
1635    Collective on PC
1636 
1637    Input Parameters:
1638 +  A - the PC context
1639 .  obj - Optional object
1640 -  name - command line option
1641 
1642    Level: intermediate
1643 .seealso: `PC`, `PCView`, `PetscObjectViewFromOptions()`, `PCCreate()`
1644 @*/
1645 PetscErrorCode  PCViewFromOptions(PC A,PetscObject obj,const char name[])
1646 {
1647   PetscFunctionBegin;
1648   PetscValidHeaderSpecific(A,PC_CLASSID,1);
1649   PetscCall(PetscObjectViewFromOptions((PetscObject)A,obj,name));
1650   PetscFunctionReturn(0);
1651 }
1652 
1653 /*@C
1654    PCView - Prints the PC data structure.
1655 
1656    Collective on PC
1657 
1658    Input Parameters:
1659 +  PC - the PC context
1660 -  viewer - optional visualization context
1661 
1662    Note:
1663    The available visualization contexts include
1664 +     PETSC_VIEWER_STDOUT_SELF - standard output (default)
1665 -     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
1666          output where only the first processor opens
1667          the file.  All other processors send their
1668          data to the first processor to print.
1669 
1670    The user can open an alternative visualization contexts with
1671    PetscViewerASCIIOpen() (output to a specified file).
1672 
1673    Level: developer
1674 
1675 .seealso: `KSPView()`, `PetscViewerASCIIOpen()`
1676 @*/
PetscErrorCode  PCView(PC pc,PetscViewer viewer)
{
  PCType         cstr;
  PetscBool      iascii,isstring,isbinary,isdraw;
#if defined(PETSC_HAVE_SAWS)
  PetscBool      issaws;
#endif

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Default to the ASCII stdout viewer on the PC's communicator when no viewer is supplied */
  if (!viewer) {
    PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer));
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  PetscCheckSameComm(pc,1,viewer,2);

  /* Determine which of the supported viewer backends we are writing to */
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw));
#if defined(PETSC_HAVE_SAWS)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSAWS,&issaws));
#endif

  if (iascii) {
    /* Human-readable output: header, type-specific details, then the operator(s) in INFO format */
    PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer));
    if (!pc->setupcalled) {
      PetscCall(PetscViewerASCIIPrintf(viewer,"  PC has not been set up so information may be incomplete\n"));
    }
    /* Delegate to the implementation's view routine, indented one tab */
    if (pc->ops->view) {
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall((*pc->ops->view)(pc,viewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    if (pc->mat) {
      PetscCall(PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO));
      if (pc->pmat == pc->mat) {
        PetscCall(PetscViewerASCIIPrintf(viewer,"  linear system matrix = precond matrix:\n"));
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(MatView(pc->mat,viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      } else {
        /* Distinct (or absent) preconditioning matrix: view mat, then pmat if present */
        if (pc->pmat) {
          PetscCall(PetscViewerASCIIPrintf(viewer,"  linear system matrix followed by preconditioner matrix:\n"));
        } else {
          PetscCall(PetscViewerASCIIPrintf(viewer,"  linear system matrix:\n"));
        }
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(MatView(pc->mat,viewer));
        if (pc->pmat) PetscCall(MatView(pc->pmat,viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      }
      PetscCall(PetscViewerPopFormat(viewer));
    }
  } else if (isstring) {
    /* Compact one-line summary, used when the PC is embedded in another object's output */
    PetscCall(PCGetType(pc,&cstr));
    PetscCall(PetscViewerStringSPrintf(viewer," PCType: %-7.7s",cstr));
    if (pc->ops->view) PetscCall((*pc->ops->view)(pc,viewer));
    if (pc->mat) PetscCall(MatView(pc->mat,viewer));
    if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat,viewer));
  } else if (isbinary) {
    /* Binary output: rank 0 writes the class id and a fixed-width 256-byte type name,
       then the implementation writes its own data */
    PetscInt    classid = PC_FILE_CLASSID;
    MPI_Comm    comm;
    PetscMPIInt rank;
    char        type[256];

    PetscCall(PetscObjectGetComm((PetscObject)pc,&comm));
    PetscCallMPI(MPI_Comm_rank(comm,&rank));
    if (rank == 0) {
      PetscCall(PetscViewerBinaryWrite(viewer,&classid,1,PETSC_INT));
      /* NOTE(review): bytes of type[] past the copied NUL are uninitialized when all
         256 bytes are written below — presumably harmless for the file format; confirm */
      PetscCall(PetscStrncpy(type,((PetscObject)pc)->type_name,256));
      PetscCall(PetscViewerBinaryWrite(viewer,type,256,PETSC_CHAR));
    }
    if (pc->ops->view) PetscCall((*pc->ops->view)(pc,viewer));
  } else if (isdraw) {
    /* Graphics output: draw a labeled box for this PC at the current point, then let
       the implementation draw its children below it (current point moved down by the box height) */
    PetscDraw draw;
    char      str[25];
    PetscReal x,y,bottom,h;
    PetscInt  n;

    PetscCall(PetscViewerDrawGetDraw(viewer,0,&draw));
    PetscCall(PetscDrawGetCurrentPoint(draw,&x,&y));
    if (pc->mat) {
      PetscCall(MatGetSize(pc->mat,&n,NULL));
      PetscCall(PetscSNPrintf(str,25,"PC: %s (%" PetscInt_FMT ")",((PetscObject)pc)->type_name,n));
    } else {
      PetscCall(PetscSNPrintf(str,25,"PC: %s",((PetscObject)pc)->type_name));
    }
    PetscCall(PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h));
    bottom = y - h;
    PetscCall(PetscDrawPushCurrentPoint(draw,x,bottom));
    if (pc->ops->view) PetscCall((*pc->ops->view)(pc,viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
#if defined(PETSC_HAVE_SAWS)
  } else if (issaws) {
    /* SAWs web publishing: publish the named object once from rank 0, then the matrices */
    PetscMPIInt rank;

    PetscCall(PetscObjectName((PetscObject)pc));
    PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
    if (!((PetscObject)pc)->amsmem && rank == 0) {
      PetscCall(PetscObjectViewSAWs((PetscObject)pc,viewer));
    }
    if (pc->mat) PetscCall(MatView(pc->mat,viewer));
    if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat,viewer));
#endif
  }
  PetscFunctionReturn(0);
}
1785 
1786 /*@C
1787   PCRegister -  Adds a method to the preconditioner package.
1788 
1789    Not collective
1790 
1791    Input Parameters:
1792 +  name_solver - name of a new user-defined solver
1793 -  routine_create - routine to create method context
1794 
1795    Notes:
1796    PCRegister() may be called multiple times to add several user-defined preconditioners.
1797 
1798    Sample usage:
1799 .vb
1800    PCRegister("my_solver", MySolverCreate);
1801 .ve
1802 
1803    Then, your solver can be chosen with the procedural interface via
1804 $     PCSetType(pc,"my_solver")
1805    or at runtime via the option
1806 $     -pc_type my_solver
1807 
1808    Level: advanced
1809 
1810 .seealso: `PCRegisterAll()`
1811 @*/
PetscErrorCode  PCRegister(const char sname[],PetscErrorCode (*function)(PC))
{
  PetscFunctionBegin;
  /* Make sure the PC package (and hence PCList) exists before adding to it */
  PetscCall(PCInitializePackage());
  /* Register the creation routine under the given type name */
  PetscCall(PetscFunctionListAdd(&PCList,sname,function));
  PetscFunctionReturn(0);
}
1819 
1820 static PetscErrorCode MatMult_PC(Mat A,Vec X,Vec Y)
1821 {
1822   PC             pc;
1823 
1824   PetscFunctionBegin;
1825   PetscCall(MatShellGetContext(A,&pc));
1826   PetscCall(PCApply(pc,X,Y));
1827   PetscFunctionReturn(0);
1828 }
1829 
1830 /*@
1831     PCComputeOperator - Computes the explicit preconditioned operator.
1832 
1833     Collective on PC
1834 
1835     Input Parameters:
1836 +   pc - the preconditioner object
1837 -   mattype - the matrix type to be used for the operator
1838 
1839     Output Parameter:
1840 .   mat - the explicit preconditioned operator
1841 
1842     Notes:
1843     This computation is done by applying the operators to columns of the identity matrix.
1844     This routine is costly in general, and is recommended for use only with relatively small systems.
1845     Currently, this routine uses a dense matrix format when mattype == NULL
1846 
1847     Level: advanced
1848 
1849 .seealso: `KSPComputeOperator()`, `MatType`
1850 
1851 @*/
1852 PetscErrorCode  PCComputeOperator(PC pc,MatType mattype,Mat *mat)
1853 {
1854   PetscInt       N,M,m,n;
1855   Mat            A,Apc;
1856 
1857   PetscFunctionBegin;
1858   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1859   PetscValidPointer(mat,3);
1860   PetscCall(PCGetOperators(pc,&A,NULL));
1861   PetscCall(MatGetLocalSize(A,&m,&n));
1862   PetscCall(MatGetSize(A,&M,&N));
1863   PetscCall(MatCreateShell(PetscObjectComm((PetscObject)pc),m,n,M,N,pc,&Apc));
1864   PetscCall(MatShellSetOperation(Apc,MATOP_MULT,(void (*)(void))MatMult_PC));
1865   PetscCall(MatComputeOperator(Apc,mattype,mat));
1866   PetscCall(MatDestroy(&Apc));
1867   PetscFunctionReturn(0);
1868 }
1869 
1870 /*@
1871    PCSetCoordinates - sets the coordinates of all the nodes on the local process
1872 
1873    Collective on PC
1874 
1875    Input Parameters:
1876 +  pc - the solver context
1877 .  dim - the dimension of the coordinates 1, 2, or 3
1878 .  nloc - the blocked size of the coordinates array
1879 -  coords - the coordinates array
1880 
1881    Level: intermediate
1882 
1883    Notes:
1884    coords is an array of the dim coordinates for the nodes on
1885    the local processor, of size dim*nloc.
   If there are 108 equations on a processor
1887    for a displacement finite element discretization of elasticity (so
1888    that there are nloc = 36 = 108/3 nodes) then the array must have 108
1889    double precision values (ie, 3 * 36).  These x y z coordinates
1890    should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x,
1891    ... , N-1.z ].
1892 
1893 .seealso: `MatSetNearNullSpace()`
1894 @*/
PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,dim,2);
  /* Forward to the type-specific implementation; PetscTryMethod is a no-op
     when the PC type does not provide PCSetCoordinates_C */
  PetscTryMethod(pc,"PCSetCoordinates_C",(PC,PetscInt,PetscInt,PetscReal*),(pc,dim,nloc,coords));
  PetscFunctionReturn(0);
}
1903 
1904 /*@
1905    PCGetInterpolations - Gets interpolation matrices for all levels (except level 0)
1906 
1907    Logically Collective on PC
1908 
1909    Input Parameter:
1910 .  pc - the precondition context
1911 
1912    Output Parameters:
1913 +  num_levels - the number of levels
1914 -  interpolations - the interpolation matrices (size of num_levels-1)
1915 
1916    Level: advanced
1917 
1918 .keywords: MG, GAMG, BoomerAMG, multigrid, interpolation, level
1919 
1920 .seealso: `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetInterpolation()`, `PCGetCoarseOperators()`
1921 @*/
PetscErrorCode PCGetInterpolations(PC pc,PetscInt *num_levels,Mat *interpolations[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidIntPointer(num_levels,2);
  PetscValidPointer(interpolations,3);
  /* Unlike PetscTryMethod, PetscUseMethod raises an error if the PC type
     does not implement PCGetInterpolations_C */
  PetscUseMethod(pc,"PCGetInterpolations_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,interpolations));
  PetscFunctionReturn(0);
}
1931 
1932 /*@
1933    PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level)
1934 
1935    Logically Collective on PC
1936 
1937    Input Parameter:
1938 .  pc - the precondition context
1939 
1940    Output Parameters:
1941 +  num_levels - the number of levels
1942 -  coarseOperators - the coarse operator matrices (size of num_levels-1)
1943 
1944    Level: advanced
1945 
1946 .keywords: MG, GAMG, BoomerAMG, get, multigrid, interpolation, level
1947 
1948 .seealso: `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetRScale()`, `PCMGGetInterpolation()`, `PCGetInterpolations()`
1949 @*/
PetscErrorCode PCGetCoarseOperators(PC pc,PetscInt *num_levels,Mat *coarseOperators[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidIntPointer(num_levels,2);
  PetscValidPointer(coarseOperators,3);
  /* Unlike PetscTryMethod, PetscUseMethod raises an error if the PC type
     does not implement PCGetCoarseOperators_C */
  PetscUseMethod(pc,"PCGetCoarseOperators_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,coarseOperators));
  PetscFunctionReturn(0);
}
1959