xref: /petsc/src/ksp/pc/interface/precon.c (revision dbbe0bcd3f3a8fbab5a45420dc06f8387e5764c6)
1 
2 /*
3     The PC (preconditioner) interface routines, callable by users.
4 */
5 #include <petsc/private/pcimpl.h>            /*I "petscksp.h" I*/
6 #include <petscdm.h>
7 
8 /* Logging support */
9 PetscClassId  PC_CLASSID;
10 PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_MatApply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
11 PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks;
12 PetscInt      PetscMGLevelId;
13 
14 PetscErrorCode PCGetDefaultType_Private(PC pc,const char *type[])
15 {
16   PetscMPIInt    size;
17   PetscBool      hasop,flg1,flg2,set,flg3,isnormal;
18 
19   PetscFunctionBegin;
20   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size));
21   if (pc->pmat) {
22     PetscCall(MatHasOperation(pc->pmat,MATOP_GET_DIAGONAL_BLOCK,&hasop));
23     if (size == 1) {
24       PetscCall(MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1));
25       PetscCall(MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2));
26       PetscCall(MatIsSymmetricKnown(pc->pmat,&set,&flg3));
27       PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat,&isnormal,MATNORMAL,MATNORMALHERMITIAN,NULL));
28       if (flg1 && (!flg2 || (set && flg3))) {
29         *type = PCICC;
30       } else if (flg2) {
31         *type = PCILU;
32       } else if (isnormal) {
33         *type = PCNONE;
34       } else if (hasop) { /* likely is a parallel matrix run on one processor */
35         *type = PCBJACOBI;
36       } else {
37         *type = PCNONE;
38       }
39     } else {
40        if (hasop) {
41         *type = PCBJACOBI;
42       } else {
43         *type = PCNONE;
44       }
45     }
46   } else {
47     if (size == 1) {
48       *type = PCILU;
49     } else {
50       *type = PCBJACOBI;
51     }
52   }
53   PetscFunctionReturn(0);
54 }
55 
56 /*@
57    PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats
58 
59    Collective on PC
60 
61    Input Parameter:
62 .  pc - the preconditioner context
63 
64    Level: developer
65 
66    Notes:
67     This allows a PC to be reused for a different sized linear system but using the same options that have been previously set in the PC
68 
69 .seealso: `PCCreate()`, `PCSetUp()`
70 @*/
71 PetscErrorCode  PCReset(PC pc)
72 {
73   PetscFunctionBegin;
74   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
75   PetscTryTypeMethod(pc,reset);
76   PetscCall(VecDestroy(&pc->diagonalscaleright));
77   PetscCall(VecDestroy(&pc->diagonalscaleleft));
78   PetscCall(MatDestroy(&pc->pmat));
79   PetscCall(MatDestroy(&pc->mat));
80 
81   pc->setupcalled = 0;
82   PetscFunctionReturn(0);
83 }
84 
85 /*@C
86    PCDestroy - Destroys PC context that was created with PCCreate().
87 
88    Collective on PC
89 
90    Input Parameter:
91 .  pc - the preconditioner context
92 
93    Level: developer
94 
95 .seealso: `PCCreate()`, `PCSetUp()`
96 @*/
97 PetscErrorCode  PCDestroy(PC *pc)
98 {
99   PetscFunctionBegin;
100   if (!*pc) PetscFunctionReturn(0);
101   PetscValidHeaderSpecific((*pc),PC_CLASSID,1);
102   if (--((PetscObject)(*pc))->refct > 0) {*pc = NULL; PetscFunctionReturn(0);}
103 
104   PetscCall(PCReset(*pc));
105 
106   /* if memory was published with SAWs then destroy it */
107   PetscCall(PetscObjectSAWsViewOff((PetscObject)*pc));
108   PetscTryTypeMethod((*pc),destroy);
109   PetscCall(DMDestroy(&(*pc)->dm));
110   PetscCall(PetscHeaderDestroy(pc));
111   PetscFunctionReturn(0);
112 }
113 
114 /*@C
115    PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right
116       scaling as needed by certain time-stepping codes.
117 
118    Logically Collective on PC
119 
120    Input Parameter:
121 .  pc - the preconditioner context
122 
123    Output Parameter:
124 .  flag - PETSC_TRUE if it applies the scaling
125 
126    Level: developer
127 
128    Notes:
129     If this returns PETSC_TRUE then the system solved via the Krylov method is
130 $           D M A D^{-1} y = D M b  for left preconditioning or
131 $           D A M D^{-1} z = D b for right preconditioning
132 
133 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCSetDiagonalScale()`
134 @*/
135 PetscErrorCode  PCGetDiagonalScale(PC pc,PetscBool  *flag)
136 {
137   PetscFunctionBegin;
138   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
139   PetscValidBoolPointer(flag,2);
140   *flag = pc->diagonalscale;
141   PetscFunctionReturn(0);
142 }
143 
144 /*@
145    PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right
146       scaling as needed by certain time-stepping codes.
147 
148    Logically Collective on PC
149 
150    Input Parameters:
151 +  pc - the preconditioner context
152 -  s - scaling vector
153 
154    Level: intermediate
155 
156    Notes:
157     The system solved via the Krylov method is
158 $           D M A D^{-1} y = D M b  for left preconditioning or
159 $           D A M D^{-1} z = D b for right preconditioning
160 
161    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
162 
163 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCGetDiagonalScale()`
164 @*/
165 PetscErrorCode  PCSetDiagonalScale(PC pc,Vec s)
166 {
167   PetscFunctionBegin;
168   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
169   PetscValidHeaderSpecific(s,VEC_CLASSID,2);
170   pc->diagonalscale     = PETSC_TRUE;
171 
172   PetscCall(PetscObjectReference((PetscObject)s));
173   PetscCall(VecDestroy(&pc->diagonalscaleleft));
174 
175   pc->diagonalscaleleft = s;
176 
177   PetscCall(VecDuplicate(s,&pc->diagonalscaleright));
178   PetscCall(VecCopy(s,pc->diagonalscaleright));
179   PetscCall(VecReciprocal(pc->diagonalscaleright));
180   PetscFunctionReturn(0);
181 }
182 
183 /*@
184    PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes.
185 
186    Logically Collective on PC
187 
188    Input Parameters:
189 +  pc - the preconditioner context
190 .  in - input vector
191 -  out - scaled vector (maybe the same as in)
192 
193    Level: intermediate
194 
195    Notes:
196     The system solved via the Krylov method is
197 $           D M A D^{-1} y = D M b  for left preconditioning or
198 $           D A M D^{-1} z = D b for right preconditioning
199 
200    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
201 
202    If diagonal scaling is turned off and in is not out then in is copied to out
203 
204 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleSet()`, `PCDiagonalScaleRight()`, `PCDiagonalScale()`
205 @*/
206 PetscErrorCode  PCDiagonalScaleLeft(PC pc,Vec in,Vec out)
207 {
208   PetscFunctionBegin;
209   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
210   PetscValidHeaderSpecific(in,VEC_CLASSID,2);
211   PetscValidHeaderSpecific(out,VEC_CLASSID,3);
212   if (pc->diagonalscale) {
213     PetscCall(VecPointwiseMult(out,pc->diagonalscaleleft,in));
214   } else if (in != out) {
215     PetscCall(VecCopy(in,out));
216   }
217   PetscFunctionReturn(0);
218 }
219 
220 /*@
221    PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes.
222 
223    Logically Collective on PC
224 
225    Input Parameters:
226 +  pc - the preconditioner context
227 .  in - input vector
228 -  out - scaled vector (maybe the same as in)
229 
230    Level: intermediate
231 
232    Notes:
233     The system solved via the Krylov method is
234 $           D M A D^{-1} y = D M b  for left preconditioning or
235 $           D A M D^{-1} z = D b for right preconditioning
236 
237    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
238 
239    If diagonal scaling is turned off and in is not out then in is copied to out
240 
241 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleSet()`, `PCDiagonalScale()`
242 @*/
243 PetscErrorCode  PCDiagonalScaleRight(PC pc,Vec in,Vec out)
244 {
245   PetscFunctionBegin;
246   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
247   PetscValidHeaderSpecific(in,VEC_CLASSID,2);
248   PetscValidHeaderSpecific(out,VEC_CLASSID,3);
249   if (pc->diagonalscale) {
250     PetscCall(VecPointwiseMult(out,pc->diagonalscaleright,in));
251   } else if (in != out) {
252     PetscCall(VecCopy(in,out));
253   }
254   PetscFunctionReturn(0);
255 }
256 
257 /*@
258    PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the
259    operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
260    TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators() not the Pmat.
261 
262    Logically Collective on PC
263 
264    Input Parameters:
265 +  pc - the preconditioner context
266 -  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)
267 
268    Options Database Key:
269 .  -pc_use_amat <true,false> - use the amat to apply the operator
270 
271    Notes:
272    For the common case in which the linear system matrix and the matrix used to construct the
273    preconditioner are identical, this routine is does nothing.
274 
275    Level: intermediate
276 
277 .seealso: `PCGetUseAmat()`, `PCBJACOBI`, `PGMG`, `PCFIELDSPLIT`, `PCCOMPOSITE`
278 @*/
279 PetscErrorCode  PCSetUseAmat(PC pc,PetscBool flg)
280 {
281   PetscFunctionBegin;
282   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
283   pc->useAmat = flg;
284   PetscFunctionReturn(0);
285 }
286 
287 /*@
288    PCSetErrorIfFailure - Causes PC to generate an error if a FPE, for example a zero pivot, is detected.
289 
290    Logically Collective on PC
291 
292    Input Parameters:
293 +  pc - iterative context obtained from PCCreate()
294 -  flg - PETSC_TRUE indicates you want the error generated
295 
296    Level: advanced
297 
298    Notes:
299     Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner, you can call KSPGetConvergedReason() after a KSPSolve()
300     to determine if it has converged or failed. Or use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is
301     detected.
302 
303     This is propagated into KSPs used by this PC, which then propagate it into PCs used by those KSPs
304 
305 .seealso: `PCGetInitialGuessNonzero()`, `PCSetInitialGuessKnoll()`, `PCGetInitialGuessKnoll()`
306 @*/
307 PetscErrorCode  PCSetErrorIfFailure(PC pc,PetscBool flg)
308 {
309   PetscFunctionBegin;
310   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
311   PetscValidLogicalCollectiveBool(pc,flg,2);
312   pc->erroriffailure = flg;
313   PetscFunctionReturn(0);
314 }
315 
316 /*@
317    PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the
318    operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
319    TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators() not the Pmat.
320 
321    Logically Collective on PC
322 
323    Input Parameter:
324 .  pc - the preconditioner context
325 
326    Output Parameter:
327 .  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)
328 
329    Notes:
330    For the common case in which the linear system matrix and the matrix used to construct the
331    preconditioner are identical, this routine is does nothing.
332 
333    Level: intermediate
334 
335 .seealso: `PCSetUseAmat()`, `PCBJACOBI`, `PGMG`, `PCFIELDSPLIT`, `PCCOMPOSITE`
336 @*/
337 PetscErrorCode  PCGetUseAmat(PC pc,PetscBool *flg)
338 {
339   PetscFunctionBegin;
340   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
341   *flg = pc->useAmat;
342   PetscFunctionReturn(0);
343 }
344 
345 /*@
346    PCCreate - Creates a preconditioner context.
347 
348    Collective
349 
350    Input Parameter:
351 .  comm - MPI communicator
352 
353    Output Parameter:
354 .  pc - location to put the preconditioner context
355 
356    Notes:
357    The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi with PCILU or PCICC
358    in parallel. For dense matrices it is always PCNONE.
359 
360    Level: developer
361 
362 .seealso: `PCSetUp()`, `PCApply()`, `PCDestroy()`
363 @*/
364 PetscErrorCode  PCCreate(MPI_Comm comm,PC *newpc)
365 {
366   PC             pc;
367 
368   PetscFunctionBegin;
369   PetscValidPointer(newpc,2);
370   *newpc = NULL;
371   PetscCall(PCInitializePackage());
372 
373   PetscCall(PetscHeaderCreate(pc,PC_CLASSID,"PC","Preconditioner","PC",comm,PCDestroy,PCView));
374 
375   pc->mat                  = NULL;
376   pc->pmat                 = NULL;
377   pc->setupcalled          = 0;
378   pc->setfromoptionscalled = 0;
379   pc->data                 = NULL;
380   pc->diagonalscale        = PETSC_FALSE;
381   pc->diagonalscaleleft    = NULL;
382   pc->diagonalscaleright   = NULL;
383 
384   pc->modifysubmatrices  = NULL;
385   pc->modifysubmatricesP = NULL;
386 
387   *newpc = pc;
388   PetscFunctionReturn(0);
389 
390 }
391 
392 /* -------------------------------------------------------------------------------*/
393 
394 /*@
395    PCApply - Applies the preconditioner to a vector.
396 
397    Collective on PC
398 
399    Input Parameters:
400 +  pc - the preconditioner context
401 -  x - input vector
402 
403    Output Parameter:
404 .  y - output vector
405 
406    Level: developer
407 
408 .seealso: `PCApplyTranspose()`, `PCApplyBAorAB()`
409 @*/
410 PetscErrorCode  PCApply(PC pc,Vec x,Vec y)
411 {
412   PetscInt       m,n,mv,nv;
413 
414   PetscFunctionBegin;
415   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
416   PetscValidHeaderSpecific(x,VEC_CLASSID,2);
417   PetscValidHeaderSpecific(y,VEC_CLASSID,3);
418   PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
419   if (pc->erroriffailure) PetscCall(VecValidValues(x,2,PETSC_TRUE));
420   /* use pmat to check vector sizes since for KSPLSQR the pmat may be of a different size than mat */
421   PetscCall(MatGetLocalSize(pc->pmat,&m,&n));
422   PetscCall(VecGetLocalSize(x,&mv));
423   PetscCall(VecGetLocalSize(y,&nv));
424   /* check pmat * y = x is feasible */
425   PetscCheck(mv == m,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local rows %" PetscInt_FMT " does not equal input vector size %" PetscInt_FMT,m,mv);
426   PetscCheck(nv == n,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local columns %" PetscInt_FMT " does not equal output vector size %" PetscInt_FMT,n,nv);
427   PetscCall(VecSetErrorIfLocked(y,3));
428 
429   PetscCall(PCSetUp(pc));
430   PetscCall(VecLockReadPush(x));
431   PetscCall(PetscLogEventBegin(PC_Apply,pc,x,y,0));
432   PetscUseTypeMethod(pc,apply ,x,y);
433   PetscCall(PetscLogEventEnd(PC_Apply,pc,x,y,0));
434   if (pc->erroriffailure) PetscCall(VecValidValues(y,3,PETSC_FALSE));
435   PetscCall(VecLockReadPop(x));
436   PetscFunctionReturn(0);
437 }
438 
/*@
   PCMatApply - Applies the preconditioner to multiple vectors stored as a MATDENSE. Like PCApply(), Y and X must be different matrices.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  X - block of input vectors

   Output Parameter:
.  Y - block of output vectors

   Level: developer

.seealso: `PCApply()`, `KSPMatSolve()`
@*/
PetscErrorCode  PCMatApply(PC pc,Mat X,Mat Y)
{
  Mat            A;    /* the Pmat, used only for size compatibility checks */
  Vec            cy, cx;
  PetscInt       m1, M1, m2, M2, n1, N1, n2, N2, m3, M3, n3, N3;
  PetscBool      match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(X, MAT_CLASSID, 2);
  PetscValidHeaderSpecific(Y, MAT_CLASSID, 3);
  PetscCheckSameComm(pc, 1, X, 2);
  PetscCheckSameComm(pc, 1, Y, 3);
  PetscCheck(Y != X,PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "Y and X must be different matrices");
  /* (m1,M1,n1,N1) = Y sizes, (m2,M2,n2,N2) = X sizes, (m3,M3,n3,N3) = Pmat sizes */
  PetscCall(PCGetOperators(pc, NULL, &A));
  PetscCall(MatGetLocalSize(A, &m3, &n3));
  PetscCall(MatGetLocalSize(X, &m2, &n2));
  PetscCall(MatGetLocalSize(Y, &m1, &n1));
  PetscCall(MatGetSize(A, &M3, &N3));
  PetscCall(MatGetSize(X, &M2, &N2));
  PetscCall(MatGetSize(Y, &M1, &N1));
  /* X and Y must hold the same number of columns (vectors); X rows match Pmat rows; Y rows match Pmat columns */
  PetscCheck(n1 == n2 && N1 == N2,PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of input vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and block of output vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")", n2, N2, n1, N1);
  PetscCheck(m2 == m3 && M2 == M3,PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of input vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m2, M2, m3, M3, n3, N3);
  PetscCheck(m1 == n3 && M1 == N3,PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of output vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m1, M1, m3, M3, n3, N3);
  /* both blocks must be dense (sequential or parallel) so columns can be extracted as Vecs */
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)Y, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match,PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of output vectors not stored in a dense Mat");
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match,PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of input vectors not stored in a dense Mat");
  PetscCall(PCSetUp(pc));
  if (pc->ops->matapply) {
    /* the implementation supports applying to a block directly */
    PetscCall(PetscLogEventBegin(PC_MatApply, pc, X, Y, 0));
    PetscUseTypeMethod(pc,matapply , X, Y);
    PetscCall(PetscLogEventEnd(PC_MatApply, pc, X, Y, 0));
  } else {
    /* fallback: apply the preconditioner one column at a time via PCApply() */
    PetscCall(PetscInfo(pc, "PC type %s applying column by column\n", ((PetscObject)pc)->type_name));
    for (n1 = 0; n1 < N1; ++n1) {
      PetscCall(MatDenseGetColumnVecRead(X, n1, &cx));
      PetscCall(MatDenseGetColumnVecWrite(Y, n1, &cy));
      PetscCall(PCApply(pc, cx, cy));
      PetscCall(MatDenseRestoreColumnVecWrite(Y, n1, &cy));
      PetscCall(MatDenseRestoreColumnVecRead(X, n1, &cx));
    }
  }
  PetscFunctionReturn(0);
}
500 
/*@
   PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector

   Notes:
   Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.

   Level: developer

.seealso: `PCApply()`, `PCApplySymmetricRight()`
@*/
PetscErrorCode  PCApplySymmetricLeft(PC pc,Vec x,Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  /* optionally verify the input contains no NaN/Inf before applying */
  if (pc->erroriffailure) PetscCall(VecValidValues(x,2,PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  /* guard x against modification while the implementation runs; errors if the PC type lacks applysymmetricleft */
  PetscCall(VecLockReadPush(x));
  PetscCall(PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0));
  PetscUseTypeMethod(pc,applysymmetricleft ,x,y);
  PetscCall(PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues(y,3,PETSC_FALSE));
  PetscFunctionReturn(0);
}
537 
/*@
   PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector

   Level: developer

   Notes:
   Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.

.seealso: `PCApply()`, `PCApplySymmetricLeft()`
@*/
PetscErrorCode  PCApplySymmetricRight(PC pc,Vec x,Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  /* optionally verify the input contains no NaN/Inf before applying */
  if (pc->erroriffailure) PetscCall(VecValidValues(x,2,PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  /* guard x against modification while the implementation runs; errors if the PC type lacks applysymmetricright */
  PetscCall(VecLockReadPush(x));
  PetscCall(PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0));
  PetscUseTypeMethod(pc,applysymmetricright ,x,y);
  PetscCall(PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues(y,3,PETSC_FALSE));
  PetscFunctionReturn(0);
}
574 
/*@
   PCApplyTranspose - Applies the transpose of preconditioner to a vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector

   Notes:
    For complex numbers this applies the non-Hermitian transpose.

   Developer Notes:
    We need to implement a PCApplyHermitianTranspose()

   Level: developer

.seealso: `PCApply()`, `PCApplyBAorAB()`, `PCApplyBAorABTranspose()`, `PCApplyTransposeExists()`
@*/
PetscErrorCode  PCApplyTranspose(PC pc,Vec x,Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  /* optionally verify the input contains no NaN/Inf before applying */
  if (pc->erroriffailure) PetscCall(VecValidValues(x,2,PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  PetscCall(VecLockReadPush(x));
  /* logged under the same PC_Apply event as PCApply(); there is no separate transpose event */
  PetscCall(PetscLogEventBegin(PC_Apply,pc,x,y,0));
  PetscUseTypeMethod(pc,applytranspose ,x,y);
  PetscCall(PetscLogEventEnd(PC_Apply,pc,x,y,0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues(y,3,PETSC_FALSE));
  PetscFunctionReturn(0);
}
614 
615 /*@
616    PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation
617 
618    Collective on PC
619 
620    Input Parameters:
621 .  pc - the preconditioner context
622 
623    Output Parameter:
624 .  flg - PETSC_TRUE if a transpose operation is defined
625 
626    Level: developer
627 
628 .seealso: `PCApplyTranspose()`
629 @*/
630 PetscErrorCode  PCApplyTransposeExists(PC pc,PetscBool  *flg)
631 {
632   PetscFunctionBegin;
633   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
634   PetscValidBoolPointer(flg,2);
635   if (pc->ops->applytranspose) *flg = PETSC_TRUE;
636   else *flg = PETSC_FALSE;
637   PetscFunctionReturn(0);
638 }
639 
/*@
   PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
.  x - input vector
-  work - work vector

   Output Parameter:
.  y - output vector

   Level: developer

   Notes:
    If the PC has had PCSetDiagonalScale() set then D M A D^{-1} for left preconditioning or  D A M D^{-1} is actually applied. Note that the
   specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling.

.seealso: `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorABTranspose()`
@*/
PetscErrorCode  PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,side,2);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscValidHeaderSpecific(y,VEC_CLASSID,4);
  PetscValidHeaderSpecific(work,VEC_CLASSID,5);
  PetscCheckSameComm(pc,1,x,3);
  PetscCheckSameComm(pc,1,y,4);
  PetscCheckSameComm(pc,1,work,5);
  PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  PetscCheck(side == PC_LEFT || side == PC_SYMMETRIC || side == PC_RIGHT,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric");
  /* diagonal scaling and symmetric application cannot be combined */
  PetscCheck(!pc->diagonalscale || side != PC_SYMMETRIC,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application");
  if (pc->erroriffailure) PetscCall(VecValidValues(x,3,PETSC_TRUE));

  PetscCall(PCSetUp(pc));
  if (pc->diagonalscale) {
    /* apply D ... D^{-1} around the preconditioned operator */
    if (pc->ops->applyBA) {
      Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
      PetscCall(VecDuplicate(x,&work2));
      PetscCall(PCDiagonalScaleRight(pc,x,work2));
      PetscUseTypeMethod(pc,applyBA ,side,work2,y,work);
      PetscCall(PCDiagonalScaleLeft(pc,y,y));
      PetscCall(VecDestroy(&work2));
    } else if (side == PC_RIGHT) {
      /* y = D A B D^{-1} x, built from separate apply + mult steps */
      PetscCall(PCDiagonalScaleRight(pc,x,y));
      PetscCall(PCApply(pc,y,work));
      PetscCall(MatMult(pc->mat,work,y));
      PetscCall(PCDiagonalScaleLeft(pc,y,y));
    } else if (side == PC_LEFT) {
      /* y = D B A D^{-1} x */
      PetscCall(PCDiagonalScaleRight(pc,x,y));
      PetscCall(MatMult(pc->mat,y,work));
      PetscCall(PCApply(pc,work,y));
      PetscCall(PCDiagonalScaleLeft(pc,y,y));
    } else PetscCheck(side != PC_SYMMETRIC,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner"); /* defensive: already rejected above */
  } else {
    if (pc->ops->applyBA) {
      /* implementation provides a fused B*A (or A*B) application */
      PetscUseTypeMethod(pc,applyBA ,side,x,y,work);
    } else if (side == PC_RIGHT) {
      /* y = A (B x) */
      PetscCall(PCApply(pc,x,work));
      PetscCall(MatMult(pc->mat,work,y));
    } else if (side == PC_LEFT) {
      /* y = B (A x) */
      PetscCall(MatMult(pc->mat,x,work));
      PetscCall(PCApply(pc,work,y));
    } else if (side == PC_SYMMETRIC) {
      /* There's an extra copy here; maybe should provide 2 work vectors instead? */
      PetscCall(PCApplySymmetricRight(pc,x,work));
      PetscCall(MatMult(pc->mat,work,y));
      PetscCall(VecCopy(y,work));
      PetscCall(PCApplySymmetricLeft(pc,work,y));
    }
  }
  if (pc->erroriffailure) PetscCall(VecValidValues(y,4,PETSC_FALSE));
  PetscFunctionReturn(0);
}
718 
719 /*@
720    PCApplyBAorABTranspose - Applies the transpose of the preconditioner
721    and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning,
722    NOT tr(B*A) = tr(A)*tr(B).
723 
724    Collective on PC
725 
726    Input Parameters:
727 +  pc - the preconditioner context
728 .  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
729 .  x - input vector
730 -  work - work vector
731 
732    Output Parameter:
733 .  y - output vector
734 
735    Notes:
736     this routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner
737       defined by B'. This is why this has the funny form that it computes tr(B) * tr(A)
738 
739     Level: developer
740 
741 .seealso: `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorAB()`
742 @*/
743 PetscErrorCode  PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work)
744 {
745   PetscFunctionBegin;
746   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
747   PetscValidHeaderSpecific(x,VEC_CLASSID,3);
748   PetscValidHeaderSpecific(y,VEC_CLASSID,4);
749   PetscValidHeaderSpecific(work,VEC_CLASSID,5);
750   PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
751   if (pc->erroriffailure) PetscCall(VecValidValues(x,3,PETSC_TRUE));
752   if (pc->ops->applyBAtranspose) {
753     PetscUseTypeMethod(pc,applyBAtranspose ,side,x,y,work);
754     if (pc->erroriffailure) PetscCall(VecValidValues(y,4,PETSC_FALSE));
755     PetscFunctionReturn(0);
756   }
757   PetscCheck(side == PC_LEFT || side == PC_RIGHT,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left");
758 
759   PetscCall(PCSetUp(pc));
760   if (side == PC_RIGHT) {
761     PetscCall(PCApplyTranspose(pc,x,work));
762     PetscCall(MatMultTranspose(pc->mat,work,y));
763   } else if (side == PC_LEFT) {
764     PetscCall(MatMultTranspose(pc->mat,x,work));
765     PetscCall(PCApplyTranspose(pc,work,y));
766   }
767   /* add support for PC_SYMMETRIC */
768   if (pc->erroriffailure) PetscCall(VecValidValues(y,4,PETSC_FALSE));
769   PetscFunctionReturn(0);
770 }
771 
772 /* -------------------------------------------------------------------------------*/
773 
774 /*@
775    PCApplyRichardsonExists - Determines whether a particular preconditioner has a
776    built-in fast application of Richardson's method.
777 
778    Not Collective
779 
780    Input Parameter:
781 .  pc - the preconditioner
782 
783    Output Parameter:
784 .  exists - PETSC_TRUE or PETSC_FALSE
785 
786    Level: developer
787 
788 .seealso: `PCApplyRichardson()`
789 @*/
790 PetscErrorCode  PCApplyRichardsonExists(PC pc,PetscBool  *exists)
791 {
792   PetscFunctionBegin;
793   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
794   PetscValidBoolPointer(exists,2);
795   if (pc->ops->applyrichardson) *exists = PETSC_TRUE;
796   else *exists = PETSC_FALSE;
797   PetscFunctionReturn(0);
798 }
799 
/*@
   PCApplyRichardson - Applies several steps of Richardson iteration with
   the particular preconditioner. This routine is usually used by the
   Krylov solvers and not the application code directly.

   Collective on PC

   Input Parameters:
+  pc  - the preconditioner context
.  b   - the right hand side
.  w   - one work vector
.  rtol - relative decrease in residual norm convergence criteria
.  abstol - absolute residual norm convergence criteria
.  dtol - divergence residual norm increase criteria
.  its - the number of iterations to apply.
-  guesszero - PETSC_TRUE indicates the initial guess in y is zero (presumably; name suggests this — the flag is passed straight through to the implementation)

   Output Parameters:
+  outits - number of iterations actually used (for SOR this always equals its)
.  reason - the reason the apply terminated
-  y - the solution (also contains initial guess if guesszero is PETSC_FALSE)

   Notes:
   Most preconditioners do not support this function. Use the command
   PCApplyRichardsonExists() to determine if one does.

   Except for the multigrid PC this routine ignores the convergence tolerances
   and always runs for the number of iterations

   Level: developer

.seealso: `PCApplyRichardsonExists()`
@*/
PetscErrorCode  PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(b,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscValidHeaderSpecific(w,VEC_CLASSID,4);
  PetscCheck(b != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"b and y must be different vectors");
  PetscCall(PCSetUp(pc));
  /* errors if the PC type does not implement applyrichardson; use PCApplyRichardsonExists() to test */
  PetscUseTypeMethod(pc,applyrichardson ,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason);
  PetscFunctionReturn(0);
}
845 
846 /*@
847    PCSetFailedReason - Sets the reason a PCSetUp() failed or PC_NOERROR if it did not fail
848 
849    Logically Collective on PC
850 
851    Input Parameters:
852 +  pc - the preconditioner context
-  reason - the reason it failed
854 
855    Level: advanced
856 
857 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCFailedReason`
858 @*/
859 PetscErrorCode PCSetFailedReason(PC pc,PCFailedReason reason)
860 {
861   PetscFunctionBegin;
862   pc->failedreason = reason;
863   PetscFunctionReturn(0);
864 }
865 
866 /*@
867    PCGetFailedReason - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail
868 
869    Logically Collective on PC
870 
871    Input Parameter:
872 .  pc - the preconditioner context
873 
874    Output Parameter:
875 .  reason - the reason it failed
876 
877    Level: advanced
878 
   Notes: This is the maximum of the reason over all ranks in the PC communicator. It is only valid after
   a call to KSPCheckDot() or KSPCheckNorm() inside a KSPSolve(). It is not valid immediately after a PCSetUp()
   or PCApply(); in that case use PCGetFailedReasonRank()
882 
883 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReasonRank()`, `PCSetFailedReason()`
884 @*/
885 PetscErrorCode PCGetFailedReason(PC pc,PCFailedReason *reason)
886 {
887   PetscFunctionBegin;
888   if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
889   else *reason = pc->failedreason;
890   PetscFunctionReturn(0);
891 }
892 
893 /*@
894    PCGetFailedReasonRank - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail on this MPI rank
895 
896   Not Collective on PC
897 
898    Input Parameter:
899 .  pc - the preconditioner context
900 
901    Output Parameter:
902 .  reason - the reason it failed
903 
904    Notes:
905      Different ranks may have different reasons or no reason, see PCGetFailedReason()
906 
907    Level: advanced
908 
909 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReason()`, `PCSetFailedReason()`
910 @*/
911 PetscErrorCode PCGetFailedReasonRank(PC pc,PCFailedReason *reason)
912 {
913   PetscFunctionBegin;
914   if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
915   else *reason = pc->failedreason;
916   PetscFunctionReturn(0);
917 }
918 
919 /*  Next line needed to deactivate KSP_Solve logging */
920 #include <petsc/private/kspimpl.h>
921 
922 /*
923       a setupcall of 0 indicates never setup,
924                      1 indicates has been previously setup
925                     -1 indicates a PCSetUp() was attempted and failed
926 */
927 /*@
928    PCSetUp - Prepares for the use of a preconditioner.
929 
930    Collective on PC
931 
932    Input Parameter:
933 .  pc - the preconditioner context
934 
935    Level: developer
936 
937 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`
938 @*/
PetscErrorCode  PCSetUp(PC pc)
{
  const char       *def;
  PetscObjectState matstate, matnonzerostate;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscCheck(pc->mat,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first");

  /* the user explicitly asked to keep the current preconditioner even if the operator changed */
  if (pc->setupcalled && pc->reusepreconditioner) {
    PetscCall(PetscInfo(pc,"Leaving PC with identical preconditioner since reuse preconditioner is set\n"));
    PetscFunctionReturn(0);
  }

  /* compare the current state of the preconditioning matrix against the state recorded at the last
     setup to decide whether a rebuild is needed and, if so, whether the nonzero pattern changed */
  PetscCall(PetscObjectStateGet((PetscObject)pc->pmat,&matstate));
  PetscCall(MatGetNonzeroState(pc->pmat,&matnonzerostate));
  if (!pc->setupcalled) {
    PetscCall(PetscInfo(pc,"Setting up PC for first time\n"));
    pc->flag = DIFFERENT_NONZERO_PATTERN;
  } else if (matstate == pc->matstate) {
    /* matrix values unchanged since the last setup: the existing preconditioner is still valid */
    PetscCall(PetscInfo(pc,"Leaving PC with identical preconditioner since operator is unchanged\n"));
    PetscFunctionReturn(0);
  } else {
    if (matnonzerostate > pc->matnonzerostate) {
      PetscCall(PetscInfo(pc,"Setting up PC with different nonzero pattern\n"));
      pc->flag = DIFFERENT_NONZERO_PATTERN;
    } else {
      PetscCall(PetscInfo(pc,"Setting up PC with same nonzero pattern\n"));
      pc->flag = SAME_NONZERO_PATTERN;
    }
  }
  /* remember the states used for this build for comparison on the next call */
  pc->matstate        = matstate;
  pc->matnonzerostate = matnonzerostate;

  /* no type chosen yet: pick a default based on the matrix and the communicator size */
  if (!((PetscObject)pc)->type_name) {
    PetscCall(PCGetDefaultType_Private(pc,&def));
    PetscCall(PCSetType(pc,def));
  }

  PetscCall(MatSetErrorIfFailure(pc->pmat,pc->erroriffailure));
  PetscCall(MatSetErrorIfFailure(pc->mat,pc->erroriffailure));
  PetscCall(PetscLogEventBegin(PC_SetUp,pc,0,0,0));
  if (pc->ops->setup) {
    /* do not log solves and applications of preconditioners while constructing preconditioners; perhaps they should be logged separately from the regular solves */
    PetscCall(KSPInitializePackage());
    PetscCall(PetscLogEventDeactivatePush(KSP_Solve));
    PetscCall(PetscLogEventDeactivatePush(PC_Apply));
    PetscUseTypeMethod(pc,setup);
    PetscCall(PetscLogEventDeactivatePop(KSP_Solve));
    PetscCall(PetscLogEventDeactivatePop(PC_Apply));
  }
  PetscCall(PetscLogEventEnd(PC_SetUp,pc,0,0,0));
  /* only promote 0 -> 1; a negative (failed-setup) state is preserved (see the note above this function) */
  if (!pc->setupcalled) pc->setupcalled = 1;
  PetscFunctionReturn(0);
}
993 }
994 
995 /*@
996    PCSetUpOnBlocks - Sets up the preconditioner for each block in
997    the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
998    methods.
999 
1000    Collective on PC
1001 
1002    Input Parameters:
1003 .  pc - the preconditioner context
1004 
1005    Level: developer
1006 
1007 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCSetUp()`
1008 @*/
PetscErrorCode  PCSetUpOnBlocks(PC pc)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* not an error: most PC types have no block structure and simply have nothing to do here */
  if (!pc->ops->setuponblocks) PetscFunctionReturn(0);
  PetscCall(PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0));
  PetscUseTypeMethod(pc,setuponblocks);
  PetscCall(PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0));
  PetscFunctionReturn(0);
}
1019 
1020 /*@C
1021    PCSetModifySubMatrices - Sets a user-defined routine for modifying the
1022    submatrices that arise within certain subdomain-based preconditioners.
1023    The basic submatrices are extracted from the preconditioner matrix as
1024    usual; the user can then alter these (for example, to set different boundary
1025    conditions for each submatrix) before they are used for the local solves.
1026 
1027    Logically Collective on PC
1028 
1029    Input Parameters:
1030 +  pc - the preconditioner context
1031 .  func - routine for modifying the submatrices
1032 -  ctx - optional user-defined context (may be null)
1033 
1034    Calling sequence of func:
1035 $     func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx);
1036 
1037 +  row - an array of index sets that contain the global row numbers
1038          that comprise each local submatrix
1039 .  col - an array of index sets that contain the global column numbers
1040          that comprise each local submatrix
1041 .  submat - array of local submatrices
1042 -  ctx - optional user-defined context for private data for the
1043          user-defined func routine (may be null)
1044 
1045    Notes:
1046    PCSetModifySubMatrices() MUST be called before KSPSetUp() and
1047    KSPSolve().
1048 
1049    A routine set by PCSetModifySubMatrices() is currently called within
1050    the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM)
1051    preconditioners.  All other preconditioners ignore this routine.
1052 
1053    Level: advanced
1054 
1055 .seealso: `PCModifySubMatrices()`
1056 @*/
1057 PetscErrorCode  PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx)
1058 {
1059   PetscFunctionBegin;
1060   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1061   pc->modifysubmatrices  = func;
1062   pc->modifysubmatricesP = ctx;
1063   PetscFunctionReturn(0);
1064 }
1065 
1066 /*@C
1067    PCModifySubMatrices - Calls an optional user-defined routine within
1068    certain preconditioners if one has been set with PCSetModifySubMatrices().
1069 
1070    Collective on PC
1071 
1072    Input Parameters:
1073 +  pc - the preconditioner context
1074 .  nsub - the number of local submatrices
1075 .  row - an array of index sets that contain the global row numbers
1076          that comprise each local submatrix
1077 .  col - an array of index sets that contain the global column numbers
1078          that comprise each local submatrix
1079 .  submat - array of local submatrices
1080 -  ctx - optional user-defined context for private data for the
1081          user-defined routine (may be null)
1082 
1083    Output Parameter:
1084 .  submat - array of local submatrices (the entries of which may
1085             have been modified)
1086 
1087    Notes:
1088    The user should NOT generally call this routine, as it will
1089    automatically be called within certain preconditioners (currently
1090    block Jacobi, additive Schwarz) if set.
1091 
1092    The basic submatrices are extracted from the preconditioner matrix
1093    as usual; the user can then alter these (for example, to set different
1094    boundary conditions for each submatrix) before they are used for the
1095    local solves.
1096 
1097    Level: developer
1098 
1099 .seealso: `PCSetModifySubMatrices()`
1100 @*/
PetscErrorCode  PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* no callback registered via PCSetModifySubMatrices(): nothing to do */
  if (!pc->modifysubmatrices) PetscFunctionReturn(0);
  /* time the user callback so it appears as its own event in the logs */
  PetscCall(PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0));
  PetscCall((*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx));
  PetscCall(PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0));
  PetscFunctionReturn(0);
}
1111 
1112 /*@
1113    PCSetOperators - Sets the matrix associated with the linear system and
1114    a (possibly) different one associated with the preconditioner.
1115 
1116    Logically Collective on PC
1117 
1118    Input Parameters:
1119 +  pc - the preconditioner context
1120 .  Amat - the matrix that defines the linear system
1121 -  Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat.
1122 
1123    Notes:
1124     Passing a NULL for Amat or Pmat removes the matrix that is currently used.
1125 
1126     If you wish to replace either Amat or Pmat but leave the other one untouched then
1127     first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
1128     on it and then pass it back in in your call to KSPSetOperators().
1129 
1130    More Notes about Repeated Solution of Linear Systems:
1131    PETSc does NOT reset the matrix entries of either Amat or Pmat
1132    to zero after a linear solve; the user is completely responsible for
1133    matrix assembly.  See the routine MatZeroEntries() if desiring to
1134    zero all elements of a matrix.
1135 
1136    Level: intermediate
1137 
1138 .seealso: `PCGetOperators()`, `MatZeroEntries()`
1139  @*/
PetscErrorCode  PCSetOperators(PC pc,Mat Amat,Mat Pmat)
{
  PetscInt         m1,n1,m2,n2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (Amat) PetscValidHeaderSpecific(Amat,MAT_CLASSID,2);
  if (Pmat) PetscValidHeaderSpecific(Pmat,MAT_CLASSID,3);
  if (Amat) PetscCheckSameComm(pc,1,Amat,2);
  if (Pmat) PetscCheckSameComm(pc,1,Pmat,3);
  /* once the PC has been set up the local sizes of the replacement operators must match the old ones */
  if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) {
    PetscCall(MatGetLocalSize(Amat,&m1,&n1));
    PetscCall(MatGetLocalSize(pc->mat,&m2,&n2));
    PetscCheck(m1 == m2 && n1 == n2,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT,m2,n2,m1,n1);
    PetscCall(MatGetLocalSize(Pmat,&m1,&n1));
    PetscCall(MatGetLocalSize(pc->pmat,&m2,&n2));
    PetscCheck(m1 == m2 && n1 == n2,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT,m2,n2,m1,n1);
  }

  if (Pmat != pc->pmat) {
    /* the matrix that defines the preconditioner is changing; invalidate the cached states so
       the next PCSetUp() rebuilds the preconditioner */
    pc->matnonzerostate = -1;
    pc->matstate        = -1;
  }

  /* reference first in case the matrices are the same, so destroying the old ones cannot free them */
  if (Amat) PetscCall(PetscObjectReference((PetscObject)Amat));
  PetscCall(MatDestroy(&pc->mat));
  if (Pmat) PetscCall(PetscObjectReference((PetscObject)Pmat));
  PetscCall(MatDestroy(&pc->pmat));
  pc->mat  = Amat;
  pc->pmat = Pmat;
  PetscFunctionReturn(0);
}
1174 
1175 /*@
1176    PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner has changed.
1177 
1178    Logically Collective on PC
1179 
1180    Input Parameters:
1181 +  pc - the preconditioner context
1182 -  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner
1183 
1184     Level: intermediate
1185 
1186 .seealso: `PCGetOperators()`, `MatZeroEntries()`, `PCGetReusePreconditioner()`, `KSPSetReusePreconditioner()`
1187  @*/
PetscErrorCode  PCSetReusePreconditioner(PC pc,PetscBool flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,flag,2);
  /* honored by PCSetUp(): when set, an already-built preconditioner is kept even if the operator changes */
  pc->reusepreconditioner = flag;
  PetscFunctionReturn(0);
}
1196 
1197 /*@
1198    PCGetReusePreconditioner - Determines if the PC reuses the current preconditioner even if the operator in the preconditioner has changed.
1199 
1200    Not Collective
1201 
1202    Input Parameter:
1203 .  pc - the preconditioner context
1204 
1205    Output Parameter:
1206 .  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner
1207 
1208    Level: intermediate
1209 
1210 .seealso: `PCGetOperators()`, `MatZeroEntries()`, `PCSetReusePreconditioner()`
1211  @*/
PetscErrorCode  PCGetReusePreconditioner(PC pc,PetscBool *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidBoolPointer(flag,2);
  /* simple accessor for the flag set by PCSetReusePreconditioner() */
  *flag = pc->reusepreconditioner;
  PetscFunctionReturn(0);
}
1220 
1221 /*@
1222    PCGetOperators - Gets the matrix associated with the linear system and
1223    possibly a different one associated with the preconditioner.
1224 
1225    Not collective, though parallel Mats are returned if the PC is parallel
1226 
1227    Input Parameter:
1228 .  pc - the preconditioner context
1229 
1230    Output Parameters:
1231 +  Amat - the matrix defining the linear system
1232 -  Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat.
1233 
1234    Level: intermediate
1235 
1236    Notes:
1237     Does not increase the reference count of the matrices, so you should not destroy them
1238 
1239    Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators
1240       are created in PC and returned to the user. In this case, if both operators
1241       mat and pmat are requested, two DIFFERENT operators will be returned. If
1242       only one is requested both operators in the PC will be the same (i.e. as
1243       if one had called KSP/PCSetOperators() with the same argument for both Mats).
1244       The user must set the sizes of the returned matrices and their type etc just
1245       as if the user created them with MatCreate(). For example,
1246 
1247 $         KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to
1248 $           set size, type, etc of Amat
1249 
1250 $         MatCreate(comm,&mat);
1251 $         KSP/PCSetOperators(ksp/pc,Amat,Amat);
1252 $         PetscObjectDereference((PetscObject)mat);
1253 $           set size, type, etc of Amat
1254 
1255      and
1256 
1257 $         KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to
1258 $           set size, type, etc of Amat and Pmat
1259 
1260 $         MatCreate(comm,&Amat);
1261 $         MatCreate(comm,&Pmat);
1262 $         KSP/PCSetOperators(ksp/pc,Amat,Pmat);
1263 $         PetscObjectDereference((PetscObject)Amat);
1264 $         PetscObjectDereference((PetscObject)Pmat);
1265 $           set size, type, etc of Amat and Pmat
1266 
    The rationale for this support is that when creating a TS, SNES, or KSP the hierarchy
    of underlying objects (i.e. SNES, KSP, PC, Mat) and their lifespans can be completely
1269     managed by the top most level object (i.e. the TS, SNES, or KSP). Another way to look
1270     at this is when you create a SNES you do not NEED to create a KSP and attach it to
1271     the SNES object (the SNES object manages it for you). Similarly when you create a KSP
1272     you do not need to attach a PC to it (the KSP object manages the PC object for you).
1273     Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when
1274     it can be created for you?
1275 
1276 .seealso: `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperatorsSet()`
1277 @*/
PetscErrorCode  PCGetOperators(PC pc,Mat *Amat,Mat *Pmat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* operators are created lazily here if the user never called PCSetOperators(); see the
     "Alternative usage" discussion in the manual page above */
  if (Amat) {
    if (!pc->mat) {
      if (pc->pmat && !Pmat) {  /* Pmat has been set, but user did not request it, so use for Amat */
        pc->mat = pc->pmat;
        PetscCall(PetscObjectReference((PetscObject)pc->mat));
      } else {                  /* both Amat and Pmat are empty */
        PetscCall(MatCreate(PetscObjectComm((PetscObject)pc),&pc->mat));
        if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */
          pc->pmat = pc->mat;
          PetscCall(PetscObjectReference((PetscObject)pc->pmat));
        }
      }
    }
    *Amat = pc->mat;
  }
  if (Pmat) {
    if (!pc->pmat) {
      if (pc->mat && !Amat) {    /* Amat has been set but was not requested, so use for pmat */
        pc->pmat = pc->mat;
        PetscCall(PetscObjectReference((PetscObject)pc->pmat));
      } else {
        PetscCall(MatCreate(PetscObjectComm((PetscObject)pc),&pc->pmat));
        if (!Amat) { /* user did NOT request Amat, so make same as Pmat */
          pc->mat = pc->pmat;
          PetscCall(PetscObjectReference((PetscObject)pc->mat));
        }
      }
    }
    *Pmat = pc->pmat;
  }
  PetscFunctionReturn(0);
}
1314 
1315 /*@C
1316    PCGetOperatorsSet - Determines if the matrix associated with the linear system and
1317    possibly a different one associated with the preconditioner have been set in the PC.
1318 
1319    Not collective, though the results on all processes should be the same
1320 
1321    Input Parameter:
1322 .  pc - the preconditioner context
1323 
1324    Output Parameters:
1325 +  mat - the matrix associated with the linear system was set
1326 -  pmat - matrix associated with the preconditioner was set, usually the same
1327 
1328    Level: intermediate
1329 
1330 .seealso: `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperators()`
1331 @*/
1332 PetscErrorCode  PCGetOperatorsSet(PC pc,PetscBool  *mat,PetscBool  *pmat)
1333 {
1334   PetscFunctionBegin;
1335   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1336   if (mat) *mat = (pc->mat)  ? PETSC_TRUE : PETSC_FALSE;
1337   if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE;
1338   PetscFunctionReturn(0);
1339 }
1340 
1341 /*@
1342    PCFactorGetMatrix - Gets the factored matrix from the
1343    preconditioner context.  This routine is valid only for the LU,
1344    incomplete LU, Cholesky, and incomplete Cholesky methods.
1345 
1346    Not Collective on PC though Mat is parallel if PC is parallel
1347 
1348    Input Parameters:
1349 .  pc - the preconditioner context
1350 
1351    Output parameters:
1352 .  mat - the factored matrix
1353 
1354    Level: advanced
1355 
1356    Notes:
1357     Does not increase the reference count for the matrix so DO NOT destroy it
1358 
1359 @*/
PetscErrorCode  PCFactorGetMatrix(PC pc,Mat *mat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(mat,2);
  /* errors if the PC type does not provide a factored matrix (only factorization-based types do) */
  PetscUseTypeMethod(pc,getfactoredmatrix ,mat);
  PetscFunctionReturn(0);
}
1368 
1369 /*@C
1370    PCSetOptionsPrefix - Sets the prefix used for searching for all
1371    PC options in the database.
1372 
1373    Logically Collective on PC
1374 
1375    Input Parameters:
1376 +  pc - the preconditioner context
1377 -  prefix - the prefix string to prepend to all PC option requests
1378 
1379    Notes:
1380    A hyphen (-) must NOT be given at the beginning of the prefix name.
1381    The first character of all runtime options is AUTOMATICALLY the
1382    hyphen.
1383 
1384    Level: advanced
1385 
1386 .seealso: `PCAppendOptionsPrefix()`, `PCGetOptionsPrefix()`
1387 @*/
PetscErrorCode  PCSetOptionsPrefix(PC pc,const char prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* thin wrapper: prefix handling is implemented at the PetscObject level */
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)pc,prefix));
  PetscFunctionReturn(0);
}
1395 
1396 /*@C
1397    PCAppendOptionsPrefix - Appends to the prefix used for searching for all
1398    PC options in the database.
1399 
1400    Logically Collective on PC
1401 
1402    Input Parameters:
1403 +  pc - the preconditioner context
1404 -  prefix - the prefix string to prepend to all PC option requests
1405 
1406    Notes:
1407    A hyphen (-) must NOT be given at the beginning of the prefix name.
1408    The first character of all runtime options is AUTOMATICALLY the
1409    hyphen.
1410 
1411    Level: advanced
1412 
1413 .seealso: `PCSetOptionsPrefix()`, `PCGetOptionsPrefix()`
1414 @*/
PetscErrorCode  PCAppendOptionsPrefix(PC pc,const char prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* thin wrapper: prefix handling is implemented at the PetscObject level */
  PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix));
  PetscFunctionReturn(0);
}
1422 
1423 /*@C
1424    PCGetOptionsPrefix - Gets the prefix used for searching for all
1425    PC options in the database.
1426 
1427    Not Collective
1428 
1429    Input Parameters:
1430 .  pc - the preconditioner context
1431 
1432    Output Parameters:
1433 .  prefix - pointer to the prefix string used, is returned
1434 
1435    Notes:
1436     On the fortran side, the user should pass in a string 'prefix' of
1437    sufficient length to hold the prefix.
1438 
1439    Level: advanced
1440 
1441 .seealso: `PCSetOptionsPrefix()`, `PCAppendOptionsPrefix()`
1442 @*/
PetscErrorCode  PCGetOptionsPrefix(PC pc,const char *prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(prefix,2);
  /* thin wrapper: returns a pointer to the prefix owned by the object; caller must not free it */
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc,prefix));
  PetscFunctionReturn(0);
}
1451 
1452 /*
1453    Indicates the right hand side will be changed by KSPSolve(), this occurs for a few
  preconditioners including BDDC and Eisenstat that transform the equations before applying
1455   the Krylov methods
1456 */
PETSC_INTERN PetscErrorCode  PCPreSolveChangeRHS(PC pc,PetscBool *change)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(change,2);
  /* default answer is "no"; PC implementations that transform the system override this by
     composing a "PCPreSolveChangeRHS_C" function with the object (hence Try, not Use) */
  *change = PETSC_FALSE;
  PetscTryMethod(pc,"PCPreSolveChangeRHS_C",(PC,PetscBool*),(pc,change));
  PetscFunctionReturn(0);
}
1466 
1467 /*@
1468    PCPreSolve - Optional pre-solve phase, intended for any
1469    preconditioner-specific actions that must be performed before
1470    the iterative solve itself.
1471 
1472    Collective on PC
1473 
1474    Input Parameters:
1475 +  pc - the preconditioner context
1476 -  ksp - the Krylov subspace context
1477 
1478    Level: developer
1479 
1480    Sample of Usage:
1481 .vb
1482     PCPreSolve(pc,ksp);
1483     KSPSolve(ksp,b,x);
1484     PCPostSolve(pc,ksp);
1485 .ve
1486 
1487    Notes:
1488    The pre-solve phase is distinct from the PCSetUp() phase.
1489 
1490    KSPSolve() calls this directly, so is rarely called by the user.
1491 
1492 .seealso: `PCPostSolve()`
1493 @*/
PetscErrorCode PCPreSolve(PC pc,KSP ksp)
{
  Vec            x,rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
  /* count nesting (decremented in PCPostSolve()); at most two levels of nesting are supported */
  pc->presolvedone++;
  PetscCheck(pc->presolvedone <= 2,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot embed PCPreSolve() more than twice");
  PetscCall(KSPGetSolution(ksp,&x));
  PetscCall(KSPGetRhs(ksp,&rhs));

  /* the type-specific hook takes precedence over a user-supplied callback set with PCSetPreSolve() */
  if (pc->ops->presolve) PetscUseTypeMethod(pc,presolve,ksp,rhs,x);
  else if (pc->presolve) PetscCall((pc->presolve)(pc,ksp));
  PetscFunctionReturn(0);
}
1510 
1511 /*@C
1512    PCSetPreSolve - Sets function PCPreSolve() which is intended for any
1513    preconditioner-specific actions that must be performed before
1514    the iterative solve itself.
1515 
1516    Logically Collective on pc
1517 
1518    Input Parameters:
1519 +   pc - the preconditioner object
1520 -   presolve - the function to call before the solve
1521 
1522    Calling sequence of presolve:
1523 $  func(PC pc,KSP ksp)
1524 
1525 +  pc - the PC context
1526 -  ksp - the KSP context
1527 
1528    Level: developer
1529 
1530 .seealso: `PC`, `PCSetUp()`, `PCPreSolve()`
1531 @*/
PetscErrorCode PCSetPreSolve(PC pc,PetscErrorCode (*presolve)(PC,KSP))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* stored callback is invoked by PCPreSolve() only when the PC type has no presolve of its own */
  pc->presolve = presolve;
  PetscFunctionReturn(0);
}
1539 
1540 /*@
1541    PCPostSolve - Optional post-solve phase, intended for any
1542    preconditioner-specific actions that must be performed after
1543    the iterative solve itself.
1544 
1545    Collective on PC
1546 
1547    Input Parameters:
1548 +  pc - the preconditioner context
1549 -  ksp - the Krylov subspace context
1550 
1551    Sample of Usage:
1552 .vb
1553     PCPreSolve(pc,ksp);
1554     KSPSolve(ksp,b,x);
1555     PCPostSolve(pc,ksp);
1556 .ve
1557 
1558    Note:
1559    KSPSolve() calls this routine directly, so it is rarely called by the user.
1560 
1561    Level: developer
1562 
1563 .seealso: `PCPreSolve()`, `KSPSolve()`
1564 @*/
PetscErrorCode  PCPostSolve(PC pc,KSP ksp)
{
  Vec            x,rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
  /* unwind the nesting counter incremented in PCPreSolve() */
  pc->presolvedone--;
  PetscCall(KSPGetSolution(ksp,&x));
  PetscCall(KSPGetRhs(ksp,&rhs));
  /* optional hook: PC types without a postsolve are silently skipped */
  PetscTryTypeMethod(pc,postsolve,ksp,rhs,x);
  PetscFunctionReturn(0);
}
1578 
1579 /*@C
1580   PCLoad - Loads a PC that has been stored in binary  with PCView().
1581 
1582   Collective on PetscViewer
1583 
1584   Input Parameters:
1585 + newdm - the newly loaded PC, this needs to have been created with PCCreate() or
1586            some related function before a call to PCLoad().
1587 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen()
1588 
1589    Level: intermediate
1590 
1591   Notes:
1592    The type is determined by the data in the file, any type set into the PC before this call is ignored.
1593 
1594   Notes for advanced users:
1595   Most users should not need to know the details of the binary storage
1596   format, since PCLoad() and PCView() completely hide these details.
1597   But for anyone who's interested, the standard binary matrix storage
1598   format is
1599 .vb
1600      has not yet been determined
1601 .ve
1602 
1603 .seealso: `PetscViewerBinaryOpen()`, `PCView()`, `MatLoad()`, `VecLoad()`
1604 @*/
PetscErrorCode  PCLoad(PC newdm, PetscViewer viewer)
{
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];          /* type name buffer; 256 matches the width written by PCView() */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,PC_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  /* only binary viewers are supported for loading */
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary));
  PetscCheck(isbinary,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* verify the stream really contains a PC next, then read the type name and set it */
  PetscCall(PetscViewerBinaryRead(viewer,&classid,1,NULL,PETSC_INT));
  PetscCheck(classid == PC_FILE_CLASSID,PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not PC next in file");
  PetscCall(PetscViewerBinaryRead(viewer,type,256,NULL,PETSC_CHAR));
  PetscCall(PCSetType(newdm, type));
  /* let the concrete PC type load any implementation-specific data, if it implements load */
  PetscTryTypeMethod(newdm,load,viewer);
  PetscFunctionReturn(0);
}
1624 
1625 #include <petscdraw.h>
1626 #if defined(PETSC_HAVE_SAWS)
1627 #include <petscviewersaws.h>
1628 #endif
1629 
1630 /*@C
1631    PCViewFromOptions - View from Options
1632 
1633    Collective on PC
1634 
1635    Input Parameters:
1636 +  A - the PC context
1637 .  obj - Optional object
1638 -  name - command line option
1639 
1640    Level: intermediate
1641 .seealso: `PC`, `PCView`, `PetscObjectViewFromOptions()`, `PCCreate()`
1642 @*/
PetscErrorCode  PCViewFromOptions(PC A,PetscObject obj,const char name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,PC_CLASSID,1);
  /* thin wrapper: the options-database lookup and viewer creation happen at the PetscObject level */
  PetscCall(PetscObjectViewFromOptions((PetscObject)A,obj,name));
  PetscFunctionReturn(0);
}
1650 
1651 /*@C
1652    PCView - Prints the PC data structure.
1653 
1654    Collective on PC
1655 
1656    Input Parameters:
+  pc - the PC context
1658 -  viewer - optional visualization context
1659 
1660    Note:
1661    The available visualization contexts include
1662 +     PETSC_VIEWER_STDOUT_SELF - standard output (default)
1663 -     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
1664          output where only the first processor opens
1665          the file.  All other processors send their
1666          data to the first processor to print.
1667 
1668    The user can open an alternative visualization contexts with
1669    PetscViewerASCIIOpen() (output to a specified file).
1670 
1671    Level: developer
1672 
1673 .seealso: `KSPView()`, `PetscViewerASCIIOpen()`
1674 @*/
1675 PetscErrorCode  PCView(PC pc,PetscViewer viewer)
1676 {
1677   PCType         cstr;
1678   PetscBool      iascii,isstring,isbinary,isdraw;
1679 #if defined(PETSC_HAVE_SAWS)
1680   PetscBool      issaws;
1681 #endif
1682 
1683   PetscFunctionBegin;
1684   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1685   if (!viewer) {
1686     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer));
1687   }
1688   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
1689   PetscCheckSameComm(pc,1,viewer,2);
1690 
1691   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
1692   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring));
1693   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary));
1694   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw));
1695 #if defined(PETSC_HAVE_SAWS)
1696   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSAWS,&issaws));
1697 #endif
1698 
1699   if (iascii) {
1700     PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer));
1701     if (!pc->setupcalled) {
1702       PetscCall(PetscViewerASCIIPrintf(viewer,"  PC has not been set up so information may be incomplete\n"));
1703     }
1704     PetscCall(PetscViewerASCIIPushTab(viewer));
1705     PetscTryTypeMethod(pc,view ,viewer);
1706     PetscCall(PetscViewerASCIIPopTab(viewer));
1707     if (pc->mat) {
1708       PetscCall(PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO));
1709       if (pc->pmat == pc->mat) {
1710         PetscCall(PetscViewerASCIIPrintf(viewer,"  linear system matrix = precond matrix:\n"));
1711         PetscCall(PetscViewerASCIIPushTab(viewer));
1712         PetscCall(MatView(pc->mat,viewer));
1713         PetscCall(PetscViewerASCIIPopTab(viewer));
1714       } else {
1715         if (pc->pmat) {
1716           PetscCall(PetscViewerASCIIPrintf(viewer,"  linear system matrix followed by preconditioner matrix:\n"));
1717         } else {
1718           PetscCall(PetscViewerASCIIPrintf(viewer,"  linear system matrix:\n"));
1719         }
1720         PetscCall(PetscViewerASCIIPushTab(viewer));
1721         PetscCall(MatView(pc->mat,viewer));
1722         if (pc->pmat) PetscCall(MatView(pc->pmat,viewer));
1723         PetscCall(PetscViewerASCIIPopTab(viewer));
1724       }
1725       PetscCall(PetscViewerPopFormat(viewer));
1726     }
1727   } else if (isstring) {
1728     PetscCall(PCGetType(pc,&cstr));
1729     PetscCall(PetscViewerStringSPrintf(viewer," PCType: %-7.7s",cstr));
1730     PetscTryTypeMethod(pc,view,viewer);
1731     if (pc->mat) PetscCall(MatView(pc->mat,viewer));
1732     if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat,viewer));
1733   } else if (isbinary) {
1734     PetscInt    classid = PC_FILE_CLASSID;
1735     MPI_Comm    comm;
1736     PetscMPIInt rank;
1737     char        type[256];
1738 
1739     PetscCall(PetscObjectGetComm((PetscObject)pc,&comm));
1740     PetscCallMPI(MPI_Comm_rank(comm,&rank));
1741     if (rank == 0) {
1742       PetscCall(PetscViewerBinaryWrite(viewer,&classid,1,PETSC_INT));
1743       PetscCall(PetscStrncpy(type,((PetscObject)pc)->type_name,256));
1744       PetscCall(PetscViewerBinaryWrite(viewer,type,256,PETSC_CHAR));
1745     }
1746     PetscTryTypeMethod(pc,view,viewer);
1747   } else if (isdraw) {
1748     PetscDraw draw;
1749     char      str[25];
1750     PetscReal x,y,bottom,h;
1751     PetscInt  n;
1752 
1753     PetscCall(PetscViewerDrawGetDraw(viewer,0,&draw));
1754     PetscCall(PetscDrawGetCurrentPoint(draw,&x,&y));
1755     if (pc->mat) {
1756       PetscCall(MatGetSize(pc->mat,&n,NULL));
1757       PetscCall(PetscSNPrintf(str,25,"PC: %s (%" PetscInt_FMT ")",((PetscObject)pc)->type_name,n));
1758     } else {
1759       PetscCall(PetscSNPrintf(str,25,"PC: %s",((PetscObject)pc)->type_name));
1760     }
1761     PetscCall(PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h));
1762     bottom = y - h;
1763     PetscCall(PetscDrawPushCurrentPoint(draw,x,bottom));
1764     PetscTryTypeMethod(pc,view,viewer);
1765     PetscCall(PetscDrawPopCurrentPoint(draw));
1766 #if defined(PETSC_HAVE_SAWS)
1767   } else if (issaws) {
1768     PetscMPIInt rank;
1769 
1770     PetscCall(PetscObjectName((PetscObject)pc));
1771     PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
1772     if (!((PetscObject)pc)->amsmem && rank == 0) {
1773       PetscCall(PetscObjectViewSAWs((PetscObject)pc,viewer));
1774     }
1775     if (pc->mat) PetscCall(MatView(pc->mat,viewer));
1776     if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat,viewer));
1777 #endif
1778   }
1779   PetscFunctionReturn(0);
1780 }
1781 
1782 /*@C
1783   PCRegister -  Adds a method to the preconditioner package.
1784 
1785    Not collective
1786 
1787    Input Parameters:
1788 +  name_solver - name of a new user-defined solver
1789 -  routine_create - routine to create method context
1790 
1791    Notes:
1792    PCRegister() may be called multiple times to add several user-defined preconditioners.
1793 
1794    Sample usage:
1795 .vb
1796    PCRegister("my_solver", MySolverCreate);
1797 .ve
1798 
1799    Then, your solver can be chosen with the procedural interface via
1800 $     PCSetType(pc,"my_solver")
1801    or at runtime via the option
1802 $     -pc_type my_solver
1803 
1804    Level: advanced
1805 
1806 .seealso: `PCRegisterAll()`
1807 @*/
PetscErrorCode  PCRegister(const char sname[],PetscErrorCode (*function)(PC))
{
  PetscFunctionBegin;
  /* Make sure the PC package (and hence the PCList registry) exists before adding to it */
  PetscCall(PCInitializePackage());
  /* Record the creation routine under the given name so PCSetType()/-pc_type can find it */
  PetscCall(PetscFunctionListAdd(&PCList,sname,function));
  PetscFunctionReturn(0);
}
1815 
1816 static PetscErrorCode MatMult_PC(Mat A,Vec X,Vec Y)
1817 {
1818   PC             pc;
1819 
1820   PetscFunctionBegin;
1821   PetscCall(MatShellGetContext(A,&pc));
1822   PetscCall(PCApply(pc,X,Y));
1823   PetscFunctionReturn(0);
1824 }
1825 
1826 /*@
1827     PCComputeOperator - Computes the explicit preconditioned operator.
1828 
1829     Collective on PC
1830 
1831     Input Parameters:
1832 +   pc - the preconditioner object
1833 -   mattype - the matrix type to be used for the operator
1834 
1835     Output Parameter:
1836 .   mat - the explicit preconditioned operator
1837 
1838     Notes:
1839     This computation is done by applying the operators to columns of the identity matrix.
1840     This routine is costly in general, and is recommended for use only with relatively small systems.
1841     Currently, this routine uses a dense matrix format when mattype == NULL
1842 
1843     Level: advanced
1844 
1845 .seealso: `KSPComputeOperator()`, `MatType`
1846 
1847 @*/
1848 PetscErrorCode  PCComputeOperator(PC pc,MatType mattype,Mat *mat)
1849 {
1850   PetscInt       N,M,m,n;
1851   Mat            A,Apc;
1852 
1853   PetscFunctionBegin;
1854   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1855   PetscValidPointer(mat,3);
1856   PetscCall(PCGetOperators(pc,&A,NULL));
1857   PetscCall(MatGetLocalSize(A,&m,&n));
1858   PetscCall(MatGetSize(A,&M,&N));
1859   PetscCall(MatCreateShell(PetscObjectComm((PetscObject)pc),m,n,M,N,pc,&Apc));
1860   PetscCall(MatShellSetOperation(Apc,MATOP_MULT,(void (*)(void))MatMult_PC));
1861   PetscCall(MatComputeOperator(Apc,mattype,mat));
1862   PetscCall(MatDestroy(&Apc));
1863   PetscFunctionReturn(0);
1864 }
1865 
1866 /*@
1867    PCSetCoordinates - sets the coordinates of all the nodes on the local process
1868 
1869    Collective on PC
1870 
1871    Input Parameters:
1872 +  pc - the solver context
1873 .  dim - the dimension of the coordinates 1, 2, or 3
1874 .  nloc - the blocked size of the coordinates array
1875 -  coords - the coordinates array
1876 
1877    Level: intermediate
1878 
1879    Notes:
1880    coords is an array of the dim coordinates for the nodes on
1881    the local processor, of size dim*nloc.
1882    If there are 108 equations on a processor
1883    for a displacement finite element discretization of elasticity (so
1884    that there are nloc = 36 = 108/3 nodes) then the array must have 108
1885    double precision values (ie, 3 * 36).  These x y z coordinates
1886    should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x,
1887    ... , N-1.z ].
1888 
1889 .seealso: `MatSetNearNullSpace()`
1890 @*/
PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,dim,2);
  /* Forward to the type-specific implementation if one is registered;
     PetscTryMethod makes this a silent no-op for PC types without coordinate support */
  PetscTryMethod(pc,"PCSetCoordinates_C",(PC,PetscInt,PetscInt,PetscReal*),(pc,dim,nloc,coords));
  PetscFunctionReturn(0);
}
1899 
1900 /*@
1901    PCGetInterpolations - Gets interpolation matrices for all levels (except level 0)
1902 
1903    Logically Collective on PC
1904 
1905    Input Parameter:
1906 .  pc - the preconditioner context
1907 
1908    Output Parameters:
1909 +  num_levels - the number of levels
1910 -  interpolations - the interpolation matrices (size of num_levels-1)
1911 
1912    Level: advanced
1913 
1914 .keywords: MG, GAMG, BoomerAMG, multigrid, interpolation, level
1915 
1916 .seealso: `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetInterpolation()`, `PCGetCoarseOperators()`
1917 @*/
PetscErrorCode PCGetInterpolations(PC pc,PetscInt *num_levels,Mat *interpolations[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidIntPointer(num_levels,2);
  PetscValidPointer(interpolations,3);
  /* Unlike PetscTryMethod, PetscUseMethod errors if the PC type does not
     provide PCGetInterpolations_C (i.e. it is not a multilevel method) */
  PetscUseMethod(pc,"PCGetInterpolations_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,interpolations));
  PetscFunctionReturn(0);
}
1927 
1928 /*@
1929    PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level)
1930 
1931    Logically Collective on PC
1932 
1933    Input Parameter:
1934 .  pc - the preconditioner context
1935 
1936    Output Parameters:
1937 +  num_levels - the number of levels
1938 -  coarseOperators - the coarse operator matrices (size of num_levels-1)
1939 
1940    Level: advanced
1941 
1942 .keywords: MG, GAMG, BoomerAMG, get, multigrid, interpolation, level
1943 
1944 .seealso: `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetRScale()`, `PCMGGetInterpolation()`, `PCGetInterpolations()`
1945 @*/
PetscErrorCode PCGetCoarseOperators(PC pc,PetscInt *num_levels,Mat *coarseOperators[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidIntPointer(num_levels,2);
  PetscValidPointer(coarseOperators,3);
  /* Unlike PetscTryMethod, PetscUseMethod errors if the PC type does not
     provide PCGetCoarseOperators_C (i.e. it is not a multilevel method) */
  PetscUseMethod(pc,"PCGetCoarseOperators_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,coarseOperators));
  PetscFunctionReturn(0);
}
1955