xref: /petsc/src/ksp/pc/interface/precon.c (revision ebead697dbf761eb322f829370bbe90b3bd93fa3)
1 
2 /*
3     The PC (preconditioner) interface routines, callable by users.
4 */
5 #include <petsc/private/pcimpl.h>            /*I "petscksp.h" I*/
6 #include <petscdm.h>
7 
/* Logging support: class id for PC objects and profiling events for the PC operations below */
PetscClassId  PC_CLASSID;
PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_MatApply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks;
/* NOTE(review): presumably the id used to compose multigrid level information with objects — confirm against the MG implementation */
PetscInt      PetscMGLevelId;
13 
14 PetscErrorCode PCGetDefaultType_Private(PC pc,const char *type[])
15 {
16   PetscMPIInt    size;
17   PetscBool      hasop,flg1,flg2,set,flg3,isnormal;
18 
19   PetscFunctionBegin;
20   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size));
21   if (pc->pmat) {
22     PetscCall(MatHasOperation(pc->pmat,MATOP_GET_DIAGONAL_BLOCK,&hasop));
23     if (size == 1) {
24       PetscCall(MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1));
25       PetscCall(MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2));
26       PetscCall(MatIsSymmetricKnown(pc->pmat,&set,&flg3));
27       PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat,&isnormal,MATNORMAL,MATNORMALHERMITIAN,NULL));
28       if (flg1 && (!flg2 || (set && flg3))) {
29         *type = PCICC;
30       } else if (flg2) {
31         *type = PCILU;
32       } else if (isnormal) {
33         *type = PCNONE;
34       } else if (hasop) { /* likely is a parallel matrix run on one processor */
35         *type = PCBJACOBI;
36       } else {
37         *type = PCNONE;
38       }
39     } else {
40        if (hasop) {
41         *type = PCBJACOBI;
42       } else {
43         *type = PCNONE;
44       }
45     }
46   } else {
47     if (size == 1) {
48       *type = PCILU;
49     } else {
50       *type = PCBJACOBI;
51     }
52   }
53   PetscFunctionReturn(0);
54 }
55 
56 /*@
57    PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats
58 
59    Collective on PC
60 
61    Input Parameter:
62 .  pc - the preconditioner context
63 
64    Level: developer
65 
66    Notes:
67     This allows a PC to be reused for a different sized linear system but using the same options that have been previously set in the PC
68 
69 .seealso: `PCCreate()`, `PCSetUp()`
70 @*/
PetscErrorCode  PCReset(PC pc)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* let the implementation release its own internal data first */
  if (pc->ops->reset) PetscCall((*pc->ops->reset)(pc));
  /* drop the optional diagonal-scaling work vectors and the operator references */
  PetscCall(VecDestroy(&pc->diagonalscaleright));
  PetscCall(VecDestroy(&pc->diagonalscaleleft));
  PetscCall(MatDestroy(&pc->pmat));
  PetscCall(MatDestroy(&pc->mat));

  /* force a full rebuild on the next PCSetUp() */
  pc->setupcalled = 0;
  PetscFunctionReturn(0);
}
84 
85 /*@C
86    PCDestroy - Destroys PC context that was created with PCCreate().
87 
88    Collective on PC
89 
90    Input Parameter:
91 .  pc - the preconditioner context
92 
93    Level: developer
94 
95 .seealso: `PCCreate()`, `PCSetUp()`
96 @*/
PetscErrorCode  PCDestroy(PC *pc)
{
  PetscFunctionBegin;
  if (!*pc) PetscFunctionReturn(0);                     /* destroying a NULL PC is a no-op */
  PetscValidHeaderSpecific((*pc),PC_CLASSID,1);
  /* only the final reference actually tears the object down */
  if (--((PetscObject)(*pc))->refct > 0) {*pc = NULL; PetscFunctionReturn(0);}

  PetscCall(PCReset(*pc));                              /* releases operators and work vectors */

  /* if memory was published with SAWs then destroy it */
  PetscCall(PetscObjectSAWsViewOff((PetscObject)*pc));
  if ((*pc)->ops->destroy) PetscCall((*(*pc)->ops->destroy)((*pc)));
  PetscCall(DMDestroy(&(*pc)->dm));
  PetscCall(PetscHeaderDestroy(pc));                    /* frees the header; takes the address of the pointer */
  PetscFunctionReturn(0);
}
113 
114 /*@C
115    PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right
116       scaling as needed by certain time-stepping codes.
117 
118    Logically Collective on PC
119 
120    Input Parameter:
121 .  pc - the preconditioner context
122 
123    Output Parameter:
124 .  flag - PETSC_TRUE if it applies the scaling
125 
126    Level: developer
127 
128    Notes:
129     If this returns PETSC_TRUE then the system solved via the Krylov method is
130 $           D M A D^{-1} y = D M b  for left preconditioning or
131 $           D A M D^{-1} z = D b for right preconditioning
132 
133 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCSetDiagonalScale()`
134 @*/
PetscErrorCode  PCGetDiagonalScale(PC pc,PetscBool  *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidBoolPointer(flag,2);
  /* set to PETSC_TRUE by PCSetDiagonalScale() */
  *flag = pc->diagonalscale;
  PetscFunctionReturn(0);
}
143 
144 /*@
145    PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right
146       scaling as needed by certain time-stepping codes.
147 
148    Logically Collective on PC
149 
150    Input Parameters:
151 +  pc - the preconditioner context
152 -  s - scaling vector
153 
154    Level: intermediate
155 
156    Notes:
157     The system solved via the Krylov method is
158 $           D M A D^{-1} y = D M b  for left preconditioning or
159 $           D A M D^{-1} z = D b for right preconditioning
160 
161    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
162 
163 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCGetDiagonalScale()`
164 @*/
165 PetscErrorCode  PCSetDiagonalScale(PC pc,Vec s)
166 {
167   PetscFunctionBegin;
168   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
169   PetscValidHeaderSpecific(s,VEC_CLASSID,2);
170   pc->diagonalscale     = PETSC_TRUE;
171 
172   PetscCall(PetscObjectReference((PetscObject)s));
173   PetscCall(VecDestroy(&pc->diagonalscaleleft));
174 
175   pc->diagonalscaleleft = s;
176 
177   PetscCall(VecDuplicate(s,&pc->diagonalscaleright));
178   PetscCall(VecCopy(s,pc->diagonalscaleright));
179   PetscCall(VecReciprocal(pc->diagonalscaleright));
180   PetscFunctionReturn(0);
181 }
182 
183 /*@
184    PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes.
185 
186    Logically Collective on PC
187 
188    Input Parameters:
189 +  pc - the preconditioner context
190 .  in - input vector
191 -  out - scaled vector (maybe the same as in)
192 
193    Level: intermediate
194 
195    Notes:
196     The system solved via the Krylov method is
197 $           D M A D^{-1} y = D M b  for left preconditioning or
198 $           D A M D^{-1} z = D b for right preconditioning
199 
200    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
201 
202    If diagonal scaling is turned off and in is not out then in is copied to out
203 
.seealso: `PCCreate()`, `PCSetUp()`, `PCSetDiagonalScale()`, `PCDiagonalScaleRight()`, `PCGetDiagonalScale()`
205 @*/
PetscErrorCode  PCDiagonalScaleLeft(PC pc,Vec in,Vec out)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_CLASSID,3);
  if (pc->diagonalscale) {
    /* out = D .* in, where D is the vector given to PCSetDiagonalScale() */
    PetscCall(VecPointwiseMult(out,pc->diagonalscaleleft,in));
  } else if (in != out) {
    /* scaling disabled: pass the vector through unchanged */
    PetscCall(VecCopy(in,out));
  }
  PetscFunctionReturn(0);
}
219 
220 /*@
221    PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes.
222 
223    Logically Collective on PC
224 
225    Input Parameters:
226 +  pc - the preconditioner context
227 .  in - input vector
228 -  out - scaled vector (maybe the same as in)
229 
230    Level: intermediate
231 
232    Notes:
233     The system solved via the Krylov method is
234 $           D M A D^{-1} y = D M b  for left preconditioning or
235 $           D A M D^{-1} z = D b for right preconditioning
236 
237    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
238 
239    If diagonal scaling is turned off and in is not out then in is copied to out
240 
.seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCSetDiagonalScale()`, `PCGetDiagonalScale()`
242 @*/
PetscErrorCode  PCDiagonalScaleRight(PC pc,Vec in,Vec out)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_CLASSID,3);
  if (pc->diagonalscale) {
    /* out = D^{-1} .* in; diagonalscaleright holds the reciprocal built by PCSetDiagonalScale() */
    PetscCall(VecPointwiseMult(out,pc->diagonalscaleright,in));
  } else if (in != out) {
    /* scaling disabled: pass the vector through unchanged */
    PetscCall(VecCopy(in,out));
  }
  PetscFunctionReturn(0);
}
256 
257 /*@
258    PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the
259    operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
260    TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators() not the Pmat.
261 
262    Logically Collective on PC
263 
264    Input Parameters:
265 +  pc - the preconditioner context
266 -  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)
267 
268    Options Database Key:
269 .  -pc_use_amat <true,false> - use the amat to apply the operator
270 
271    Notes:
272    For the common case in which the linear system matrix and the matrix used to construct the
   preconditioner are identical, this routine does nothing.
274 
275    Level: intermediate
276 
.seealso: `PCGetUseAmat()`, `PCBJACOBI`, `PCMG`, `PCFIELDSPLIT`, `PCCOMPOSITE`
278 @*/
279 PetscErrorCode  PCSetUseAmat(PC pc,PetscBool flg)
280 {
281   PetscFunctionBegin;
282   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
283   pc->useAmat = flg;
284   PetscFunctionReturn(0);
285 }
286 
287 /*@
288    PCSetErrorIfFailure - Causes PC to generate an error if a FPE, for example a zero pivot, is detected.
289 
290    Logically Collective on PC
291 
292    Input Parameters:
293 +  pc - iterative context obtained from PCCreate()
294 -  flg - PETSC_TRUE indicates you want the error generated
295 
296    Level: advanced
297 
298    Notes:
299     Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner, you can call KSPGetConvergedReason() after a KSPSolve()
300     to determine if it has converged or failed. Or use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is
301     detected.
302 
303     This is propagated into KSPs used by this PC, which then propagate it into PCs used by those KSPs
304 
305 .seealso: `PCGetInitialGuessNonzero()`, `PCSetInitialGuessKnoll()`, `PCGetInitialGuessKnoll()`
306 @*/
PetscErrorCode  PCSetErrorIfFailure(PC pc,PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,flg,2);
  /* consulted by the apply routines to decide whether to run VecValidValues() checks */
  pc->erroriffailure = flg;
  PetscFunctionReturn(0);
}
315 
316 /*@
317    PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the
318    operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
319    TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators() not the Pmat.
320 
321    Logically Collective on PC
322 
323    Input Parameter:
324 .  pc - the preconditioner context
325 
326    Output Parameter:
327 .  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)
328 
329    Notes:
330    For the common case in which the linear system matrix and the matrix used to construct the
   preconditioner are identical, this routine does nothing.
332 
333    Level: intermediate
334 
.seealso: `PCSetUseAmat()`, `PCBJACOBI`, `PCMG`, `PCFIELDSPLIT`, `PCCOMPOSITE`
336 @*/
337 PetscErrorCode  PCGetUseAmat(PC pc,PetscBool *flg)
338 {
339   PetscFunctionBegin;
340   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
341   *flg = pc->useAmat;
342   PetscFunctionReturn(0);
343 }
344 
345 /*@
346    PCCreate - Creates a preconditioner context.
347 
348    Collective
349 
350    Input Parameter:
351 .  comm - MPI communicator
352 
353    Output Parameter:
354 .  pc - location to put the preconditioner context
355 
356    Notes:
357    The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi with PCILU or PCICC
358    in parallel. For dense matrices it is always PCNONE.
359 
360    Level: developer
361 
362 .seealso: `PCSetUp()`, `PCApply()`, `PCDestroy()`
363 @*/
364 PetscErrorCode  PCCreate(MPI_Comm comm,PC *newpc)
365 {
366   PC             pc;
367 
368   PetscFunctionBegin;
369   PetscValidPointer(newpc,2);
370   *newpc = NULL;
371   PetscCall(PCInitializePackage());
372 
373   PetscCall(PetscHeaderCreate(pc,PC_CLASSID,"PC","Preconditioner","PC",comm,PCDestroy,PCView));
374 
375   pc->mat                  = NULL;
376   pc->pmat                 = NULL;
377   pc->setupcalled          = 0;
378   pc->setfromoptionscalled = 0;
379   pc->data                 = NULL;
380   pc->diagonalscale        = PETSC_FALSE;
381   pc->diagonalscaleleft    = NULL;
382   pc->diagonalscaleright   = NULL;
383 
384   pc->modifysubmatrices  = NULL;
385   pc->modifysubmatricesP = NULL;
386 
387   *newpc = pc;
388   PetscFunctionReturn(0);
389 
390 }
391 
392 /* -------------------------------------------------------------------------------*/
393 
394 /*@
395    PCApply - Applies the preconditioner to a vector.
396 
397    Collective on PC
398 
399    Input Parameters:
400 +  pc - the preconditioner context
401 -  x - input vector
402 
403    Output Parameter:
404 .  y - output vector
405 
406    Level: developer
407 
408 .seealso: `PCApplyTranspose()`, `PCApplyBAorAB()`
409 @*/
PetscErrorCode  PCApply(PC pc,Vec x,Vec y)
{
  PetscInt       m,n,mv,nv;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  /* optionally catch NaN/Inf in the input before doing any work */
  if (pc->erroriffailure) PetscCall(VecValidValues(x,2,PETSC_TRUE));
  /* use pmat to check vector sizes since for KSPLSQR the pmat may be of a different size than mat */
  PetscCall(MatGetLocalSize(pc->pmat,&m,&n));
  PetscCall(VecGetLocalSize(x,&mv));
  PetscCall(VecGetLocalSize(y,&nv));
  /* check pmat * y = x is feasible */
  PetscCheck(mv == m,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local rows %" PetscInt_FMT " does not equal input vector size %" PetscInt_FMT,m,mv);
  PetscCheck(nv == n,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local columns %" PetscInt_FMT " does not equal output vector size %" PetscInt_FMT,n,nv);
  PetscCall(VecSetErrorIfLocked(y,3));

  /* (re)build the preconditioner if needed, then dispatch to the implementation */
  PetscCall(PCSetUp(pc));
  PetscCheck(pc->ops->apply,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply");
  PetscCall(VecLockReadPush(x));   /* the implementation must not modify x */
  PetscCall(PetscLogEventBegin(PC_Apply,pc,x,y,0));
  PetscCall((*pc->ops->apply)(pc,x,y));
  PetscCall(PetscLogEventEnd(PC_Apply,pc,x,y,0));
  if (pc->erroriffailure) PetscCall(VecValidValues(y,3,PETSC_FALSE));
  PetscCall(VecLockReadPop(x));
  PetscFunctionReturn(0);
}
439 
440 /*@
441    PCMatApply - Applies the preconditioner to multiple vectors stored as a MATDENSE. Like PCApply(), Y and X must be different matrices.
442 
443    Collective on PC
444 
445    Input Parameters:
446 +  pc - the preconditioner context
447 -  X - block of input vectors
448 
449    Output Parameter:
450 .  Y - block of output vectors
451 
452    Level: developer
453 
454 .seealso: `PCApply()`, `KSPMatSolve()`
455 @*/
PetscErrorCode  PCMatApply(PC pc,Mat X,Mat Y)
{
  Mat            A;            /* the Pmat, used only for size compatibility checks */
  Vec            cy, cx;       /* column views used in the fallback path */
  PetscInt       m1, M1, m2, M2, n1, N1, n2, N2, m3, M3, n3, N3;
  PetscBool      match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(X, MAT_CLASSID, 2);
  PetscValidHeaderSpecific(Y, MAT_CLASSID, 3);
  PetscCheckSameComm(pc, 1, X, 2);
  PetscCheckSameComm(pc, 1, Y, 3);
  PetscCheck(Y != X,PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "Y and X must be different matrices");
  PetscCall(PCGetOperators(pc, NULL, &A));
  /* gather local and global sizes of Pmat (3), input block (2), output block (1) */
  PetscCall(MatGetLocalSize(A, &m3, &n3));
  PetscCall(MatGetLocalSize(X, &m2, &n2));
  PetscCall(MatGetLocalSize(Y, &m1, &n1));
  PetscCall(MatGetSize(A, &M3, &N3));
  PetscCall(MatGetSize(X, &M2, &N2));
  PetscCall(MatGetSize(Y, &M1, &N1));
  /* X and Y must hold the same number of vectors; X must be conforming with Pmat rows, Y with Pmat columns */
  PetscCheck(n1 == n2 && N1 == N2,PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of input vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and block of output vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")", n2, N2, n1, N1);
  PetscCheck(m2 == m3 && M2 == M3,PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of input vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m2, M2, m3, M3, n3, N3);
  PetscCheck(m1 == n3 && M1 == N3,PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of output vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m1, M1, m3, M3, n3, N3);
  /* both blocks must be (sequential or parallel) dense matrices */
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)Y, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match,PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of output vectors not stored in a dense Mat");
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match,PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of input vectors not stored in a dense Mat");
  PetscCall(PCSetUp(pc));
  if (pc->ops->matapply) {
    /* the implementation supports applying to a whole block at once */
    PetscCall(PetscLogEventBegin(PC_MatApply, pc, X, Y, 0));
    PetscCall((*pc->ops->matapply)(pc, X, Y));
    PetscCall(PetscLogEventEnd(PC_MatApply, pc, X, Y, 0));
  } else {
    /* fallback: apply the preconditioner one column at a time via PCApply() */
    PetscCall(PetscInfo(pc, "PC type %s applying column by column\n", ((PetscObject)pc)->type_name));
    for (n1 = 0; n1 < N1; ++n1) {
      PetscCall(MatDenseGetColumnVecRead(X, n1, &cx));
      PetscCall(MatDenseGetColumnVecWrite(Y, n1, &cy));
      PetscCall(PCApply(pc, cx, cy));
      PetscCall(MatDenseRestoreColumnVecWrite(Y, n1, &cy));
      PetscCall(MatDenseRestoreColumnVecRead(X, n1, &cx));
    }
  }
  PetscFunctionReturn(0);
}
501 
502 /*@
503    PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.
504 
505    Collective on PC
506 
507    Input Parameters:
508 +  pc - the preconditioner context
509 -  x - input vector
510 
511    Output Parameter:
512 .  y - output vector
513 
514    Notes:
515    Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.
516 
517    Level: developer
518 
519 .seealso: `PCApply()`, `PCApplySymmetricRight()`
520 @*/
PetscErrorCode  PCApplySymmetricLeft(PC pc,Vec x,Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  /* optionally catch NaN/Inf in the input before doing any work */
  if (pc->erroriffailure) PetscCall(VecValidValues(x,2,PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  PetscCheck(pc->ops->applysymmetricleft,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply");
  PetscCall(VecLockReadPush(x));   /* the implementation must not modify x */
  PetscCall(PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0));
  PetscCall((*pc->ops->applysymmetricleft)(pc,x,y));
  PetscCall(PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues(y,3,PETSC_FALSE));
  PetscFunctionReturn(0);
}
539 
540 /*@
541    PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.
542 
543    Collective on PC
544 
545    Input Parameters:
546 +  pc - the preconditioner context
547 -  x - input vector
548 
549    Output Parameter:
550 .  y - output vector
551 
552    Level: developer
553 
554    Notes:
555    Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.
556 
557 .seealso: `PCApply()`, `PCApplySymmetricLeft()`
558 @*/
559 PetscErrorCode  PCApplySymmetricRight(PC pc,Vec x,Vec y)
560 {
561   PetscFunctionBegin;
562   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
563   PetscValidHeaderSpecific(x,VEC_CLASSID,2);
564   PetscValidHeaderSpecific(y,VEC_CLASSID,3);
565   PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
566   if (pc->erroriffailure) PetscCall(VecValidValues(x,2,PETSC_TRUE));
567   PetscCall(PCSetUp(pc));
568   PetscCheck(pc->ops->applysymmetricright,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply");
569   PetscCall(VecLockReadPush(x));
570   PetscCall(PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0));
571   PetscCall((*pc->ops->applysymmetricright)(pc,x,y));
572   PetscCall(PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0));
573   PetscCall(VecLockReadPop(x));
574   if (pc->erroriffailure) PetscCall(VecValidValues(y,3,PETSC_FALSE));
575   PetscFunctionReturn(0);
576 }
577 
578 /*@
579    PCApplyTranspose - Applies the transpose of preconditioner to a vector.
580 
581    Collective on PC
582 
583    Input Parameters:
584 +  pc - the preconditioner context
585 -  x - input vector
586 
587    Output Parameter:
588 .  y - output vector
589 
590    Notes:
591     For complex numbers this applies the non-Hermitian transpose.
592 
593    Developer Notes:
594     We need to implement a PCApplyHermitianTranspose()
595 
596    Level: developer
597 
598 .seealso: `PCApply()`, `PCApplyBAorAB()`, `PCApplyBAorABTranspose()`, `PCApplyTransposeExists()`
599 @*/
PetscErrorCode  PCApplyTranspose(PC pc,Vec x,Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  /* optionally catch NaN/Inf in the input before doing any work */
  if (pc->erroriffailure) PetscCall(VecValidValues(x,2,PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  PetscCheck(pc->ops->applytranspose,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply transpose");
  PetscCall(VecLockReadPush(x));   /* the implementation must not modify x */
  /* note: logged under the PC_Apply event, not a separate transpose event */
  PetscCall(PetscLogEventBegin(PC_Apply,pc,x,y,0));
  PetscCall((*pc->ops->applytranspose)(pc,x,y));
  PetscCall(PetscLogEventEnd(PC_Apply,pc,x,y,0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues(y,3,PETSC_FALSE));
  PetscFunctionReturn(0);
}
618 
619 /*@
620    PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation
621 
622    Collective on PC
623 
624    Input Parameters:
625 .  pc - the preconditioner context
626 
627    Output Parameter:
628 .  flg - PETSC_TRUE if a transpose operation is defined
629 
630    Level: developer
631 
632 .seealso: `PCApplyTranspose()`
633 @*/
634 PetscErrorCode  PCApplyTransposeExists(PC pc,PetscBool  *flg)
635 {
636   PetscFunctionBegin;
637   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
638   PetscValidBoolPointer(flg,2);
639   if (pc->ops->applytranspose) *flg = PETSC_TRUE;
640   else *flg = PETSC_FALSE;
641   PetscFunctionReturn(0);
642 }
643 
644 /*@
645    PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x.
646 
647    Collective on PC
648 
649    Input Parameters:
650 +  pc - the preconditioner context
651 .  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
652 .  x - input vector
653 -  work - work vector
654 
655    Output Parameter:
656 .  y - output vector
657 
658    Level: developer
659 
660    Notes:
661     If the PC has had PCSetDiagonalScale() set then D M A D^{-1} for left preconditioning or  D A M D^{-1} is actually applied. Note that the
662    specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling.
663 
664 .seealso: `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorABTranspose()`
665 @*/
PetscErrorCode  PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,side,2);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscValidHeaderSpecific(y,VEC_CLASSID,4);
  PetscValidHeaderSpecific(work,VEC_CLASSID,5);
  PetscCheckSameComm(pc,1,x,3);
  PetscCheckSameComm(pc,1,y,4);
  PetscCheckSameComm(pc,1,work,5);
  PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  PetscCheck(side == PC_LEFT || side == PC_SYMMETRIC || side == PC_RIGHT,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric");
  /* diagonal scaling and symmetric application cannot be combined */
  PetscCheck(!pc->diagonalscale || side != PC_SYMMETRIC,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application");
  if (pc->erroriffailure) PetscCall(VecValidValues(x,3,PETSC_TRUE));

  PetscCall(PCSetUp(pc));
  if (pc->diagonalscale) {
    /* apply D (M A | A M) D^{-1}, wrapping the core product in the scaling */
    if (pc->ops->applyBA) {
      Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
      PetscCall(VecDuplicate(x,&work2));
      PetscCall(PCDiagonalScaleRight(pc,x,work2));
      PetscCall((*pc->ops->applyBA)(pc,side,work2,y,work));
      PetscCall(PCDiagonalScaleLeft(pc,y,y));
      PetscCall(VecDestroy(&work2));
    } else if (side == PC_RIGHT) {
      /* y = D A M D^{-1} x */
      PetscCall(PCDiagonalScaleRight(pc,x,y));
      PetscCall(PCApply(pc,y,work));
      PetscCall(MatMult(pc->mat,work,y));
      PetscCall(PCDiagonalScaleLeft(pc,y,y));
    } else if (side == PC_LEFT) {
      /* y = D M A D^{-1} x */
      PetscCall(PCDiagonalScaleRight(pc,x,y));
      PetscCall(MatMult(pc->mat,y,work));
      PetscCall(PCApply(pc,work,y));
      PetscCall(PCDiagonalScaleLeft(pc,y,y));
    } else PetscCheck(side != PC_SYMMETRIC,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner"); /* defensive: the earlier check above already rules this out */
  } else {
    if (pc->ops->applyBA) {
      /* implementation provides a fused B*A / A*B product */
      PetscCall((*pc->ops->applyBA)(pc,side,x,y,work));
    } else if (side == PC_RIGHT) {
      /* y = A (B x) */
      PetscCall(PCApply(pc,x,work));
      PetscCall(MatMult(pc->mat,work,y));
    } else if (side == PC_LEFT) {
      /* y = B (A x) */
      PetscCall(MatMult(pc->mat,x,work));
      PetscCall(PCApply(pc,work,y));
    } else if (side == PC_SYMMETRIC) {
      /* There's an extra copy here; maybe should provide 2 work vectors instead? */
      PetscCall(PCApplySymmetricRight(pc,x,work));
      PetscCall(MatMult(pc->mat,work,y));
      PetscCall(VecCopy(y,work));
      PetscCall(PCApplySymmetricLeft(pc,work,y));
    }
  }
  if (pc->erroriffailure) PetscCall(VecValidValues(y,4,PETSC_FALSE));
  PetscFunctionReturn(0);
}
722 
723 /*@
724    PCApplyBAorABTranspose - Applies the transpose of the preconditioner
725    and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning,
726    NOT tr(B*A) = tr(A)*tr(B).
727 
728    Collective on PC
729 
730    Input Parameters:
731 +  pc - the preconditioner context
732 .  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
733 .  x - input vector
734 -  work - work vector
735 
736    Output Parameter:
737 .  y - output vector
738 
739    Notes:
740     this routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner
741       defined by B'. This is why this has the funny form that it computes tr(B) * tr(A)
742 
743     Level: developer
744 
745 .seealso: `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorAB()`
746 @*/
747 PetscErrorCode  PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work)
748 {
749   PetscFunctionBegin;
750   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
751   PetscValidHeaderSpecific(x,VEC_CLASSID,3);
752   PetscValidHeaderSpecific(y,VEC_CLASSID,4);
753   PetscValidHeaderSpecific(work,VEC_CLASSID,5);
754   PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
755   if (pc->erroriffailure) PetscCall(VecValidValues(x,3,PETSC_TRUE));
756   if (pc->ops->applyBAtranspose) {
757     PetscCall((*pc->ops->applyBAtranspose)(pc,side,x,y,work));
758     if (pc->erroriffailure) PetscCall(VecValidValues(y,4,PETSC_FALSE));
759     PetscFunctionReturn(0);
760   }
761   PetscCheck(side == PC_LEFT || side == PC_RIGHT,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left");
762 
763   PetscCall(PCSetUp(pc));
764   if (side == PC_RIGHT) {
765     PetscCall(PCApplyTranspose(pc,x,work));
766     PetscCall(MatMultTranspose(pc->mat,work,y));
767   } else if (side == PC_LEFT) {
768     PetscCall(MatMultTranspose(pc->mat,x,work));
769     PetscCall(PCApplyTranspose(pc,work,y));
770   }
771   /* add support for PC_SYMMETRIC */
772   if (pc->erroriffailure) PetscCall(VecValidValues(y,4,PETSC_FALSE));
773   PetscFunctionReturn(0);
774 }
775 
776 /* -------------------------------------------------------------------------------*/
777 
778 /*@
779    PCApplyRichardsonExists - Determines whether a particular preconditioner has a
780    built-in fast application of Richardson's method.
781 
782    Not Collective
783 
784    Input Parameter:
785 .  pc - the preconditioner
786 
787    Output Parameter:
788 .  exists - PETSC_TRUE or PETSC_FALSE
789 
790    Level: developer
791 
792 .seealso: `PCApplyRichardson()`
793 @*/
794 PetscErrorCode  PCApplyRichardsonExists(PC pc,PetscBool  *exists)
795 {
796   PetscFunctionBegin;
797   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
798   PetscValidBoolPointer(exists,2);
799   if (pc->ops->applyrichardson) *exists = PETSC_TRUE;
800   else *exists = PETSC_FALSE;
801   PetscFunctionReturn(0);
802 }
803 
804 /*@
805    PCApplyRichardson - Applies several steps of Richardson iteration with
806    the particular preconditioner. This routine is usually used by the
807    Krylov solvers and not the application code directly.
808 
809    Collective on PC
810 
811    Input Parameters:
812 +  pc  - the preconditioner context
813 .  b   - the right hand side
814 .  w   - one work vector
815 .  rtol - relative decrease in residual norm convergence criteria
816 .  abstol - absolute residual norm convergence criteria
817 .  dtol - divergence residual norm increase criteria
818 .  its - the number of iterations to apply.
819 -  guesszero - if the input x contains nonzero initial guess
820 
821    Output Parameters:
822 +  outits - number of iterations actually used (for SOR this always equals its)
823 .  reason - the reason the apply terminated
824 -  y - the solution (also contains initial guess if guesszero is PETSC_FALSE
825 
826    Notes:
827    Most preconditioners do not support this function. Use the command
828    PCApplyRichardsonExists() to determine if one does.
829 
830    Except for the multigrid PC this routine ignores the convergence tolerances
831    and always runs for the number of iterations
832 
833    Level: developer
834 
835 .seealso: `PCApplyRichardsonExists()`
836 @*/
PetscErrorCode  PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(b,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscValidHeaderSpecific(w,VEC_CLASSID,4);
  PetscCheck(b != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"b and y must be different vectors");
  /* (re)build the preconditioner if needed; the sweep itself is entirely implementation-defined */
  PetscCall(PCSetUp(pc));
  PetscCheck(pc->ops->applyrichardson,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply richardson");
  PetscCall((*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason));
  PetscFunctionReturn(0);
}
850 
851 /*@
852    PCSetFailedReason - Sets the reason a PCSetUp() failed or PC_NOERROR if it did not fail
853 
854    Logically Collective on PC
855 
856    Input Parameters:
857 +  pc - the preconditioner context
-  reason - the reason it failed
859 
860    Level: advanced
861 
862 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCFailedReason`
863 @*/
864 PetscErrorCode PCSetFailedReason(PC pc,PCFailedReason reason)
865 {
866   PetscFunctionBegin;
867   pc->failedreason = reason;
868   PetscFunctionReturn(0);
869 }
870 
871 /*@
872    PCGetFailedReason - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail
873 
874    Logically Collective on PC
875 
876    Input Parameter:
877 .  pc - the preconditioner context
878 
879    Output Parameter:
880 .  reason - the reason it failed
881 
882    Level: advanced
883 
   Notes: This is the maximum of the reason over all ranks in the PC communicator. It is only valid after
   a call to KSPCheckDot() or KSPCheckNorm() inside a KSPSolve(). It is not valid immediately after a PCSetUp()
   or PCApply(); in those cases use PCGetFailedReasonRank()
887 
888 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReasonRank()`, `PCSetFailedReason()`
889 @*/
890 PetscErrorCode PCGetFailedReason(PC pc,PCFailedReason *reason)
891 {
892   PetscFunctionBegin;
893   if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
894   else *reason = pc->failedreason;
895   PetscFunctionReturn(0);
896 }
897 
898 /*@
899    PCGetFailedReasonRank - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail on this MPI rank
900 
901   Not Collective on PC
902 
903    Input Parameter:
904 .  pc - the preconditioner context
905 
906    Output Parameter:
907 .  reason - the reason it failed
908 
909    Notes:
910      Different ranks may have different reasons or no reason, see PCGetFailedReason()
911 
912    Level: advanced
913 
914 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReason()`, `PCSetFailedReason()`
915 @*/
916 PetscErrorCode PCGetFailedReasonRank(PC pc,PCFailedReason *reason)
917 {
918   PetscFunctionBegin;
919   if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
920   else *reason = pc->failedreason;
921   PetscFunctionReturn(0);
922 }
923 
924 /*  Next line needed to deactivate KSP_Solve logging */
925 #include <petsc/private/kspimpl.h>
926 
927 /*
928       a setupcall of 0 indicates never setup,
929                      1 indicates has been previously setup
930                     -1 indicates a PCSetUp() was attempted and failed
931 */
932 /*@
933    PCSetUp - Prepares for the use of a preconditioner.
934 
935    Collective on PC
936 
937    Input Parameter:
938 .  pc - the preconditioner context
939 
940    Level: developer
941 
942 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`
943 @*/
PetscErrorCode  PCSetUp(PC pc)
{
  const char       *def;
  PetscObjectState matstate, matnonzerostate;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscCheck(pc->mat,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first");

  /* honor an explicit user request (PCSetReusePreconditioner) to keep the existing preconditioner */
  if (pc->setupcalled && pc->reusepreconditioner) {
    PetscCall(PetscInfo(pc,"Leaving PC with identical preconditioner since reuse preconditioner is set\n"));
    PetscFunctionReturn(0);
  }

  /* compare the preconditioner matrix's current state with what was recorded at the last setup */
  PetscCall(PetscObjectStateGet((PetscObject)pc->pmat,&matstate));
  PetscCall(MatGetNonzeroState(pc->pmat,&matnonzerostate));
  if (!pc->setupcalled) {
    PetscCall(PetscInfo(pc,"Setting up PC for first time\n"));
    pc->flag = DIFFERENT_NONZERO_PATTERN;
  } else if (matstate == pc->matstate) {
    /* operator values unchanged since last setup: nothing to rebuild */
    PetscCall(PetscInfo(pc,"Leaving PC with identical preconditioner since operator is unchanged\n"));
    PetscFunctionReturn(0);
  } else {
    /* values changed; decide whether the sparsity pattern changed as well */
    if (matnonzerostate > pc->matnonzerostate) {
      PetscCall(PetscInfo(pc,"Setting up PC with different nonzero pattern\n"));
      pc->flag = DIFFERENT_NONZERO_PATTERN;
    } else {
      PetscCall(PetscInfo(pc,"Setting up PC with same nonzero pattern\n"));
      pc->flag = SAME_NONZERO_PATTERN;
    }
  }
  /* remember the states the preconditioner is now being built from */
  pc->matstate        = matstate;
  pc->matnonzerostate = matnonzerostate;

  /* choose a default type (based on the matrix and communicator size) if the user never set one */
  if (!((PetscObject)pc)->type_name) {
    PetscCall(PCGetDefaultType_Private(pc,&def));
    PetscCall(PCSetType(pc,def));
  }

  PetscCall(MatSetErrorIfFailure(pc->pmat,pc->erroriffailure));
  PetscCall(MatSetErrorIfFailure(pc->mat,pc->erroriffailure));
  PetscCall(PetscLogEventBegin(PC_SetUp,pc,0,0,0));
  if (pc->ops->setup) {
    /* do not log solves and applications of preconditioners while constructing preconditioners; perhaps they should be logged separately from the regular solves */
    PetscCall(KSPInitializePackage());
    PetscCall(PetscLogEventDeactivatePush(KSP_Solve));
    PetscCall(PetscLogEventDeactivatePush(PC_Apply));
    PetscCall((*pc->ops->setup)(pc));
    PetscCall(PetscLogEventDeactivatePop(KSP_Solve));
    PetscCall(PetscLogEventDeactivatePop(PC_Apply));
  }
  PetscCall(PetscLogEventEnd(PC_SetUp,pc,0,0,0));
  /* preserve a negative (failed) setupcalled; only promote "never setup" (0) to "setup" (1) */
  if (!pc->setupcalled) pc->setupcalled = 1;
  PetscFunctionReturn(0);
}
999 
1000 /*@
1001    PCSetUpOnBlocks - Sets up the preconditioner for each block in
1002    the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
1003    methods.
1004 
1005    Collective on PC
1006 
   Input Parameter:
1008 .  pc - the preconditioner context
1009 
1010    Level: developer
1011 
1012 .seealso: `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCSetUp()`
1013 @*/
1014 PetscErrorCode  PCSetUpOnBlocks(PC pc)
1015 {
1016   PetscFunctionBegin;
1017   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1018   if (!pc->ops->setuponblocks) PetscFunctionReturn(0);
1019   PetscCall(PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0));
1020   PetscCall((*pc->ops->setuponblocks)(pc));
1021   PetscCall(PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0));
1022   PetscFunctionReturn(0);
1023 }
1024 
1025 /*@C
1026    PCSetModifySubMatrices - Sets a user-defined routine for modifying the
1027    submatrices that arise within certain subdomain-based preconditioners.
1028    The basic submatrices are extracted from the preconditioner matrix as
1029    usual; the user can then alter these (for example, to set different boundary
1030    conditions for each submatrix) before they are used for the local solves.
1031 
1032    Logically Collective on PC
1033 
1034    Input Parameters:
1035 +  pc - the preconditioner context
1036 .  func - routine for modifying the submatrices
1037 -  ctx - optional user-defined context (may be null)
1038 
1039    Calling sequence of func:
1040 $     func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx);
1041 
1042 +  row - an array of index sets that contain the global row numbers
1043          that comprise each local submatrix
1044 .  col - an array of index sets that contain the global column numbers
1045          that comprise each local submatrix
1046 .  submat - array of local submatrices
1047 -  ctx - optional user-defined context for private data for the
1048          user-defined func routine (may be null)
1049 
1050    Notes:
1051    PCSetModifySubMatrices() MUST be called before KSPSetUp() and
1052    KSPSolve().
1053 
1054    A routine set by PCSetModifySubMatrices() is currently called within
1055    the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM)
1056    preconditioners.  All other preconditioners ignore this routine.
1057 
1058    Level: advanced
1059 
1060 .seealso: `PCModifySubMatrices()`
1061 @*/
1062 PetscErrorCode  PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx)
1063 {
1064   PetscFunctionBegin;
1065   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1066   pc->modifysubmatrices  = func;
1067   pc->modifysubmatricesP = ctx;
1068   PetscFunctionReturn(0);
1069 }
1070 
1071 /*@C
1072    PCModifySubMatrices - Calls an optional user-defined routine within
1073    certain preconditioners if one has been set with PCSetModifySubMatrices().
1074 
1075    Collective on PC
1076 
1077    Input Parameters:
1078 +  pc - the preconditioner context
1079 .  nsub - the number of local submatrices
1080 .  row - an array of index sets that contain the global row numbers
1081          that comprise each local submatrix
1082 .  col - an array of index sets that contain the global column numbers
1083          that comprise each local submatrix
1084 .  submat - array of local submatrices
1085 -  ctx - optional user-defined context for private data for the
1086          user-defined routine (may be null)
1087 
1088    Output Parameter:
1089 .  submat - array of local submatrices (the entries of which may
1090             have been modified)
1091 
1092    Notes:
1093    The user should NOT generally call this routine, as it will
1094    automatically be called within certain preconditioners (currently
1095    block Jacobi, additive Schwarz) if set.
1096 
1097    The basic submatrices are extracted from the preconditioner matrix
1098    as usual; the user can then alter these (for example, to set different
1099    boundary conditions for each submatrix) before they are used for the
1100    local solves.
1101 
1102    Level: developer
1103 
1104 .seealso: `PCSetModifySubMatrices()`
1105 @*/
1106 PetscErrorCode  PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
1107 {
1108   PetscFunctionBegin;
1109   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1110   if (!pc->modifysubmatrices) PetscFunctionReturn(0);
1111   PetscCall(PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0));
1112   PetscCall((*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx));
1113   PetscCall(PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0));
1114   PetscFunctionReturn(0);
1115 }
1116 
1117 /*@
1118    PCSetOperators - Sets the matrix associated with the linear system and
1119    a (possibly) different one associated with the preconditioner.
1120 
1121    Logically Collective on PC
1122 
1123    Input Parameters:
1124 +  pc - the preconditioner context
1125 .  Amat - the matrix that defines the linear system
1126 -  Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat.
1127 
1128    Notes:
1129     Passing a NULL for Amat or Pmat removes the matrix that is currently used.
1130 
1131     If you wish to replace either Amat or Pmat but leave the other one untouched then
1132     first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
1133     on it and then pass it back in in your call to KSPSetOperators().
1134 
1135    More Notes about Repeated Solution of Linear Systems:
1136    PETSc does NOT reset the matrix entries of either Amat or Pmat
1137    to zero after a linear solve; the user is completely responsible for
1138    matrix assembly.  See the routine MatZeroEntries() if desiring to
1139    zero all elements of a matrix.
1140 
1141    Level: intermediate
1142 
1143 .seealso: `PCGetOperators()`, `MatZeroEntries()`
1144  @*/
PetscErrorCode  PCSetOperators(PC pc,Mat Amat,Mat Pmat)
{
  PetscInt         m1,n1,m2,n2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (Amat) PetscValidHeaderSpecific(Amat,MAT_CLASSID,2);
  if (Pmat) PetscValidHeaderSpecific(Pmat,MAT_CLASSID,3);
  if (Amat) PetscCheckSameComm(pc,1,Amat,2);
  if (Pmat) PetscCheckSameComm(pc,1,Pmat,3);
  /* once the PC has been set up, replacement operators must keep the same local sizes */
  if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) {
    PetscCall(MatGetLocalSize(Amat,&m1,&n1));
    PetscCall(MatGetLocalSize(pc->mat,&m2,&n2));
    PetscCheck(m1 == m2 && n1 == n2,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT,m2,n2,m1,n1);
    PetscCall(MatGetLocalSize(Pmat,&m1,&n1));
    PetscCall(MatGetLocalSize(pc->pmat,&m2,&n2));
    PetscCheck(m1 == m2 && n1 == n2,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT,m2,n2,m1,n1);
  }

  if (Pmat != pc->pmat) {
    /* changing the operator that defines the preconditioner; clear the recorded states so a new preconditioner is built */
    pc->matnonzerostate = -1;
    pc->matstate        = -1;
  }

  /* reference first in case the matrices are the same (destroying first could free them) */
  if (Amat) PetscCall(PetscObjectReference((PetscObject)Amat));
  PetscCall(MatDestroy(&pc->mat));
  if (Pmat) PetscCall(PetscObjectReference((PetscObject)Pmat));
  PetscCall(MatDestroy(&pc->pmat));
  pc->mat  = Amat;
  pc->pmat = Pmat;
  PetscFunctionReturn(0);
}
1179 
1180 /*@
1181    PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner has changed.
1182 
1183    Logically Collective on PC
1184 
1185    Input Parameters:
1186 +  pc - the preconditioner context
1187 -  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner
1188 
1189     Level: intermediate
1190 
1191 .seealso: `PCGetOperators()`, `MatZeroEntries()`, `PCGetReusePreconditioner()`, `KSPSetReusePreconditioner()`
1192  @*/
PetscErrorCode  PCSetReusePreconditioner(PC pc,PetscBool flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* flag is logically collective: it must match on all ranks of the PC's communicator */
  PetscValidLogicalCollectiveBool(pc,flag,2);
  /* when PETSC_TRUE, PCSetUp() returns early and keeps the current preconditioner */
  pc->reusepreconditioner = flag;
  PetscFunctionReturn(0);
}
1201 
1202 /*@
1203    PCGetReusePreconditioner - Determines if the PC reuses the current preconditioner even if the operator in the preconditioner has changed.
1204 
1205    Not Collective
1206 
1207    Input Parameter:
1208 .  pc - the preconditioner context
1209 
1210    Output Parameter:
1211 .  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner
1212 
1213    Level: intermediate
1214 
1215 .seealso: `PCGetOperators()`, `MatZeroEntries()`, `PCSetReusePreconditioner()`
1216  @*/
PetscErrorCode  PCGetReusePreconditioner(PC pc,PetscBool *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidBoolPointer(flag,2);
  /* simple accessor for the flag set by PCSetReusePreconditioner() */
  *flag = pc->reusepreconditioner;
  PetscFunctionReturn(0);
}
1225 
1226 /*@
1227    PCGetOperators - Gets the matrix associated with the linear system and
1228    possibly a different one associated with the preconditioner.
1229 
1230    Not collective, though parallel Mats are returned if the PC is parallel
1231 
1232    Input Parameter:
1233 .  pc - the preconditioner context
1234 
1235    Output Parameters:
1236 +  Amat - the matrix defining the linear system
1237 -  Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat.
1238 
1239    Level: intermediate
1240 
1241    Notes:
1242     Does not increase the reference count of the matrices, so you should not destroy them
1243 
1244    Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators
1245       are created in PC and returned to the user. In this case, if both operators
1246       mat and pmat are requested, two DIFFERENT operators will be returned. If
1247       only one is requested both operators in the PC will be the same (i.e. as
1248       if one had called KSP/PCSetOperators() with the same argument for both Mats).
1249       The user must set the sizes of the returned matrices and their type etc just
1250       as if the user created them with MatCreate(). For example,
1251 
1252 $         KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to
1253 $           set size, type, etc of Amat
1254 
1255 $         MatCreate(comm,&mat);
1256 $         KSP/PCSetOperators(ksp/pc,Amat,Amat);
1257 $         PetscObjectDereference((PetscObject)mat);
1258 $           set size, type, etc of Amat
1259 
1260      and
1261 
1262 $         KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to
1263 $           set size, type, etc of Amat and Pmat
1264 
1265 $         MatCreate(comm,&Amat);
1266 $         MatCreate(comm,&Pmat);
1267 $         KSP/PCSetOperators(ksp/pc,Amat,Pmat);
1268 $         PetscObjectDereference((PetscObject)Amat);
1269 $         PetscObjectDereference((PetscObject)Pmat);
1270 $           set size, type, etc of Amat and Pmat
1271 
    The rationale for this support is so that when creating a TS, SNES, or KSP the hierarchy
    of underlying objects (i.e. SNES, KSP, PC, Mat) and their lifespans can be completely
1274     managed by the top most level object (i.e. the TS, SNES, or KSP). Another way to look
1275     at this is when you create a SNES you do not NEED to create a KSP and attach it to
1276     the SNES object (the SNES object manages it for you). Similarly when you create a KSP
1277     you do not need to attach a PC to it (the KSP object manages the PC object for you).
1278     Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when
1279     it can be created for you?
1280 
1281 .seealso: `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperatorsSet()`
1282 @*/
PetscErrorCode  PCGetOperators(PC pc,Mat *Amat,Mat *Pmat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* operators are lazily created here if the user never called PC/KSPSetOperators();
     when only one of the two is requested, the other slot is aliased to it */
  if (Amat) {
    if (!pc->mat) {
      if (pc->pmat && !Pmat) {  /* Pmat has been set, but user did not request it, so use for Amat */
        pc->mat = pc->pmat;
        PetscCall(PetscObjectReference((PetscObject)pc->mat));
      } else {                  /* both Amat and Pmat are empty */
        PetscCall(MatCreate(PetscObjectComm((PetscObject)pc),&pc->mat));
        if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */
          pc->pmat = pc->mat;
          PetscCall(PetscObjectReference((PetscObject)pc->pmat));
        }
      }
    }
    *Amat = pc->mat;
  }
  if (Pmat) {
    if (!pc->pmat) {
      if (pc->mat && !Amat) {    /* Amat has been set but was not requested, so use for pmat */
        pc->pmat = pc->mat;
        PetscCall(PetscObjectReference((PetscObject)pc->pmat));
      } else {
        PetscCall(MatCreate(PetscObjectComm((PetscObject)pc),&pc->pmat));
        if (!Amat) { /* user did NOT request Amat, so make same as Pmat */
          pc->mat = pc->pmat;
          PetscCall(PetscObjectReference((PetscObject)pc->mat));
        }
      }
    }
    *Pmat = pc->pmat;
  }
  /* note: reference counts are NOT incremented on the returned matrices */
  PetscFunctionReturn(0);
}
1319 
1320 /*@C
1321    PCGetOperatorsSet - Determines if the matrix associated with the linear system and
1322    possibly a different one associated with the preconditioner have been set in the PC.
1323 
1324    Not collective, though the results on all processes should be the same
1325 
1326    Input Parameter:
1327 .  pc - the preconditioner context
1328 
1329    Output Parameters:
1330 +  mat - the matrix associated with the linear system was set
1331 -  pmat - matrix associated with the preconditioner was set, usually the same
1332 
1333    Level: intermediate
1334 
1335 .seealso: `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperators()`
1336 @*/
1337 PetscErrorCode  PCGetOperatorsSet(PC pc,PetscBool  *mat,PetscBool  *pmat)
1338 {
1339   PetscFunctionBegin;
1340   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1341   if (mat) *mat = (pc->mat)  ? PETSC_TRUE : PETSC_FALSE;
1342   if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE;
1343   PetscFunctionReturn(0);
1344 }
1345 
1346 /*@
1347    PCFactorGetMatrix - Gets the factored matrix from the
1348    preconditioner context.  This routine is valid only for the LU,
1349    incomplete LU, Cholesky, and incomplete Cholesky methods.
1350 
1351    Not Collective on PC though Mat is parallel if PC is parallel
1352 
1353    Input Parameters:
1354 .  pc - the preconditioner context
1355 
1356    Output parameters:
1357 .  mat - the factored matrix
1358 
1359    Level: advanced
1360 
1361    Notes:
1362     Does not increase the reference count for the matrix so DO NOT destroy it
1363 
1364 @*/
1365 PetscErrorCode  PCFactorGetMatrix(PC pc,Mat *mat)
1366 {
1367   PetscFunctionBegin;
1368   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1369   PetscValidPointer(mat,2);
1370   if (pc->ops->getfactoredmatrix) {
1371     PetscCall((*pc->ops->getfactoredmatrix)(pc,mat));
1372   } else SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC type does not support getting factor matrix");
1373   PetscFunctionReturn(0);
1374 }
1375 
1376 /*@C
1377    PCSetOptionsPrefix - Sets the prefix used for searching for all
1378    PC options in the database.
1379 
1380    Logically Collective on PC
1381 
1382    Input Parameters:
1383 +  pc - the preconditioner context
1384 -  prefix - the prefix string to prepend to all PC option requests
1385 
1386    Notes:
1387    A hyphen (-) must NOT be given at the beginning of the prefix name.
1388    The first character of all runtime options is AUTOMATICALLY the
1389    hyphen.
1390 
1391    Level: advanced
1392 
1393 .seealso: `PCAppendOptionsPrefix()`, `PCGetOptionsPrefix()`
1394 @*/
PetscErrorCode  PCSetOptionsPrefix(PC pc,const char prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* thin wrapper: prefix handling is implemented once at the PetscObject level */
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)pc,prefix));
  PetscFunctionReturn(0);
}
1402 
1403 /*@C
1404    PCAppendOptionsPrefix - Appends to the prefix used for searching for all
1405    PC options in the database.
1406 
1407    Logically Collective on PC
1408 
1409    Input Parameters:
1410 +  pc - the preconditioner context
1411 -  prefix - the prefix string to prepend to all PC option requests
1412 
1413    Notes:
1414    A hyphen (-) must NOT be given at the beginning of the prefix name.
1415    The first character of all runtime options is AUTOMATICALLY the
1416    hyphen.
1417 
1418    Level: advanced
1419 
1420 .seealso: `PCSetOptionsPrefix()`, `PCGetOptionsPrefix()`
1421 @*/
PetscErrorCode  PCAppendOptionsPrefix(PC pc,const char prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* thin wrapper: prefix handling is implemented once at the PetscObject level */
  PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix));
  PetscFunctionReturn(0);
}
1429 
1430 /*@C
1431    PCGetOptionsPrefix - Gets the prefix used for searching for all
1432    PC options in the database.
1433 
1434    Not Collective
1435 
1436    Input Parameters:
1437 .  pc - the preconditioner context
1438 
1439    Output Parameters:
1440 .  prefix - pointer to the prefix string used, is returned
1441 
1442    Notes:
1443     On the fortran side, the user should pass in a string 'prefix' of
1444    sufficient length to hold the prefix.
1445 
1446    Level: advanced
1447 
1448 .seealso: `PCSetOptionsPrefix()`, `PCAppendOptionsPrefix()`
1449 @*/
PetscErrorCode  PCGetOptionsPrefix(PC pc,const char *prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(prefix,2);
  /* returns a pointer to the object's internal prefix string; caller must not free it */
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc,prefix));
  PetscFunctionReturn(0);
}
1458 
1459 /*
1460    Indicates the right hand side will be changed by KSPSolve(), this occurs for a few
  preconditioners including BDDC and Eisenstat that transform the equations before applying
1462   the Krylov methods
1463 */
PETSC_INTERN PetscErrorCode  PCPreSolveChangeRHS(PC pc,PetscBool *change)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(change,2);
  /* default to "RHS unchanged"; PC types that transform the RHS override via the composed method */
  *change = PETSC_FALSE;
  PetscTryMethod(pc,"PCPreSolveChangeRHS_C",(PC,PetscBool*),(pc,change));
  PetscFunctionReturn(0);
}
1473 
1474 /*@
1475    PCPreSolve - Optional pre-solve phase, intended for any
1476    preconditioner-specific actions that must be performed before
1477    the iterative solve itself.
1478 
1479    Collective on PC
1480 
1481    Input Parameters:
1482 +  pc - the preconditioner context
1483 -  ksp - the Krylov subspace context
1484 
1485    Level: developer
1486 
1487    Sample of Usage:
1488 .vb
1489     PCPreSolve(pc,ksp);
1490     KSPSolve(ksp,b,x);
1491     PCPostSolve(pc,ksp);
1492 .ve
1493 
1494    Notes:
1495    The pre-solve phase is distinct from the PCSetUp() phase.
1496 
1497    KSPSolve() calls this directly, so is rarely called by the user.
1498 
1499 .seealso: `PCPostSolve()`
1500 @*/
1501 PetscErrorCode PCPreSolve(PC pc,KSP ksp)
1502 {
1503   Vec            x,rhs;
1504 
1505   PetscFunctionBegin;
1506   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1507   PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
1508   pc->presolvedone++;
1509   PetscCheck(pc->presolvedone <= 2,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot embed PCPreSolve() more than twice");
1510   PetscCall(KSPGetSolution(ksp,&x));
1511   PetscCall(KSPGetRhs(ksp,&rhs));
1512 
1513   if (pc->ops->presolve) PetscCall((*pc->ops->presolve)(pc,ksp,rhs,x));
1514   else if (pc->presolve) PetscCall((pc->presolve)(pc,ksp));
1515   PetscFunctionReturn(0);
1516 }
1517 
1518 /*@C
1519    PCSetPreSolve - Sets function PCPreSolve() which is intended for any
1520    preconditioner-specific actions that must be performed before
1521    the iterative solve itself.
1522 
1523    Logically Collective on pc
1524 
1525    Input Parameters:
1526 +   pc - the preconditioner object
1527 -   presolve - the function to call before the solve
1528 
1529    Calling sequence of presolve:
1530 $  func(PC pc,KSP ksp)
1531 
1532 +  pc - the PC context
1533 -  ksp - the KSP context
1534 
1535    Level: developer
1536 
1537 .seealso: `PC`, `PCSetUp()`, `PCPreSolve()`
1538 @*/
PetscErrorCode PCSetPreSolve(PC pc,PetscErrorCode (*presolve)(PC,KSP))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* user callback invoked by PCPreSolve() when the PC type does not provide its own presolve */
  pc->presolve = presolve;
  PetscFunctionReturn(0);
}
1546 
1547 /*@
1548    PCPostSolve - Optional post-solve phase, intended for any
1549    preconditioner-specific actions that must be performed after
1550    the iterative solve itself.
1551 
1552    Collective on PC
1553 
1554    Input Parameters:
1555 +  pc - the preconditioner context
1556 -  ksp - the Krylov subspace context
1557 
1558    Sample of Usage:
1559 .vb
1560     PCPreSolve(pc,ksp);
1561     KSPSolve(ksp,b,x);
1562     PCPostSolve(pc,ksp);
1563 .ve
1564 
1565    Note:
1566    KSPSolve() calls this routine directly, so it is rarely called by the user.
1567 
1568    Level: developer
1569 
1570 .seealso: `PCPreSolve()`, `KSPSolve()`
1571 @*/
1572 PetscErrorCode  PCPostSolve(PC pc,KSP ksp)
1573 {
1574   Vec            x,rhs;
1575 
1576   PetscFunctionBegin;
1577   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1578   PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
1579   pc->presolvedone--;
1580   PetscCall(KSPGetSolution(ksp,&x));
1581   PetscCall(KSPGetRhs(ksp,&rhs));
1582   if (pc->ops->postsolve) PetscCall((*pc->ops->postsolve)(pc,ksp,rhs,x));
1583   PetscFunctionReturn(0);
1584 }
1585 
1586 /*@C
1587   PCLoad - Loads a PC that has been stored in binary  with PCView().
1588 
1589   Collective on PetscViewer
1590 
1591   Input Parameters:
1592 + newdm - the newly loaded PC, this needs to have been created with PCCreate() or
1593            some related function before a call to PCLoad().
1594 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen()
1595 
1596    Level: intermediate
1597 
1598   Notes:
1599    The type is determined by the data in the file, any type set into the PC before this call is ignored.
1600 
1601   Notes for advanced users:
1602   Most users should not need to know the details of the binary storage
1603   format, since PCLoad() and PCView() completely hide these details.
1604   But for anyone who's interested, the standard binary matrix storage
1605   format is
1606 .vb
1607      has not yet been determined
1608 .ve
1609 
1610 .seealso: `PetscViewerBinaryOpen()`, `PCView()`, `MatLoad()`, `VecLoad()`
1611 @*/
1612 PetscErrorCode  PCLoad(PC newdm, PetscViewer viewer)
1613 {
1614   PetscBool      isbinary;
1615   PetscInt       classid;
1616   char           type[256];
1617 
1618   PetscFunctionBegin;
1619   PetscValidHeaderSpecific(newdm,PC_CLASSID,1);
1620   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
1621   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary));
1622   PetscCheck(isbinary,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");
1623 
1624   PetscCall(PetscViewerBinaryRead(viewer,&classid,1,NULL,PETSC_INT));
1625   PetscCheck(classid == PC_FILE_CLASSID,PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not PC next in file");
1626   PetscCall(PetscViewerBinaryRead(viewer,type,256,NULL,PETSC_CHAR));
1627   PetscCall(PCSetType(newdm, type));
1628   if (newdm->ops->load) PetscCall((*newdm->ops->load)(newdm,viewer));
1629   PetscFunctionReturn(0);
1630 }
1631 
1632 #include <petscdraw.h>
1633 #if defined(PETSC_HAVE_SAWS)
1634 #include <petscviewersaws.h>
1635 #endif
1636 
1637 /*@C
1638    PCViewFromOptions - View from Options
1639 
1640    Collective on PC
1641 
1642    Input Parameters:
1643 +  A - the PC context
1644 .  obj - Optional object
1645 -  name - command line option
1646 
1647    Level: intermediate
1648 .seealso: `PC`, `PCView`, `PetscObjectViewFromOptions()`, `PCCreate()`
1649 @*/
PetscErrorCode  PCViewFromOptions(PC A,PetscObject obj,const char name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,PC_CLASSID,1);
  /* thin wrapper: the option lookup and viewer dispatch live at the PetscObject level */
  PetscCall(PetscObjectViewFromOptions((PetscObject)A,obj,name));
  PetscFunctionReturn(0);
}
1657 
1658 /*@C
1659    PCView - Prints the PC data structure.
1660 
1661    Collective on PC
1662 
1663    Input Parameters:
1664 +  PC - the PC context
1665 -  viewer - optional visualization context
1666 
1667    Note:
1668    The available visualization contexts include
1669 +     PETSC_VIEWER_STDOUT_SELF - standard output (default)
1670 -     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
1671          output where only the first processor opens
1672          the file.  All other processors send their
1673          data to the first processor to print.
1674 
1675    The user can open an alternative visualization contexts with
1676    PetscViewerASCIIOpen() (output to a specified file).
1677 
1678    Level: developer
1679 
1680 .seealso: `KSPView()`, `PetscViewerASCIIOpen()`
1681 @*/
PetscErrorCode  PCView(PC pc,PetscViewer viewer)
{
  PCType         cstr;
  PetscBool      iascii,isstring,isbinary,isdraw;
#if defined(PETSC_HAVE_SAWS)
  PetscBool      issaws;
#endif

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* default to the communicator's shared stdout ASCII viewer when none is supplied */
  if (!viewer) {
    PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer));
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  PetscCheckSameComm(pc,1,viewer,2);

  /* determine which of the supported viewer backends we were handed */
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw));
#if defined(PETSC_HAVE_SAWS)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSAWS,&issaws));
#endif

  if (iascii) {
    /* header line with class/name/prefix/type, then type-specific detail via ops->view */
    PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer));
    if (!pc->setupcalled) {
      PetscCall(PetscViewerASCIIPrintf(viewer,"  PC has not been set up so information may be incomplete\n"));
    }
    if (pc->ops->view) {
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall((*pc->ops->view)(pc,viewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    if (pc->mat) {
      /* print only summary information for the operator matrices, not their entries */
      PetscCall(PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO));
      if (pc->pmat == pc->mat) {
        PetscCall(PetscViewerASCIIPrintf(viewer,"  linear system matrix = precond matrix:\n"));
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(MatView(pc->mat,viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      } else {
        if (pc->pmat) {
          PetscCall(PetscViewerASCIIPrintf(viewer,"  linear system matrix followed by preconditioner matrix:\n"));
        } else {
          PetscCall(PetscViewerASCIIPrintf(viewer,"  linear system matrix:\n"));
        }
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(MatView(pc->mat,viewer));
        if (pc->pmat) PetscCall(MatView(pc->pmat,viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      }
      PetscCall(PetscViewerPopFormat(viewer));
    }
  } else if (isstring) {
    /* compact single-line summary: type name plus whatever ops->view appends */
    PetscCall(PCGetType(pc,&cstr));
    PetscCall(PetscViewerStringSPrintf(viewer," PCType: %-7.7s",cstr));
    if (pc->ops->view) PetscCall((*pc->ops->view)(pc,viewer));
    if (pc->mat) PetscCall(MatView(pc->mat,viewer));
    if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat,viewer));
  } else if (isbinary) {
    PetscInt    classid = PC_FILE_CLASSID;
    MPI_Comm    comm;
    PetscMPIInt rank;
    char        type[256];

    PetscCall(PetscObjectGetComm((PetscObject)pc,&comm));
    PetscCallMPI(MPI_Comm_rank(comm,&rank));
    /* only rank 0 writes the header (classid + fixed-width type name);
       type-specific data, if any, is written collectively by ops->view */
    if (rank == 0) {
      PetscCall(PetscViewerBinaryWrite(viewer,&classid,1,PETSC_INT));
      PetscCall(PetscStrncpy(type,((PetscObject)pc)->type_name,256));
      PetscCall(PetscViewerBinaryWrite(viewer,type,256,PETSC_CHAR));
    }
    if (pc->ops->view) PetscCall((*pc->ops->view)(pc,viewer));
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x,y,bottom,h;
    PetscInt  n;

    /* draw a labeled box at the current point, then move the current point
       below it so nested ops->view drawing stacks underneath */
    PetscCall(PetscViewerDrawGetDraw(viewer,0,&draw));
    PetscCall(PetscDrawGetCurrentPoint(draw,&x,&y));
    if (pc->mat) {
      PetscCall(MatGetSize(pc->mat,&n,NULL));
      PetscCall(PetscSNPrintf(str,25,"PC: %s (%" PetscInt_FMT ")",((PetscObject)pc)->type_name,n));
    } else {
      PetscCall(PetscSNPrintf(str,25,"PC: %s",((PetscObject)pc)->type_name));
    }
    PetscCall(PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h));
    bottom = y - h;
    PetscCall(PetscDrawPushCurrentPoint(draw,x,bottom));
    if (pc->ops->view) PetscCall((*pc->ops->view)(pc,viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
#if defined(PETSC_HAVE_SAWS)
  } else if (issaws) {
    PetscMPIInt rank;

    /* publish the object to SAWs once (rank 0 only); amsmem guards republishing */
    PetscCall(PetscObjectName((PetscObject)pc));
    PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
    if (!((PetscObject)pc)->amsmem && rank == 0) {
      PetscCall(PetscObjectViewSAWs((PetscObject)pc,viewer));
    }
    if (pc->mat) PetscCall(MatView(pc->mat,viewer));
    if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat,viewer));
#endif
  }
  PetscFunctionReturn(0);
}
1790 
1791 /*@C
1792   PCRegister -  Adds a method to the preconditioner package.
1793 
1794    Not collective
1795 
1796    Input Parameters:
1797 +  name_solver - name of a new user-defined solver
1798 -  routine_create - routine to create method context
1799 
1800    Notes:
1801    PCRegister() may be called multiple times to add several user-defined preconditioners.
1802 
1803    Sample usage:
1804 .vb
1805    PCRegister("my_solver", MySolverCreate);
1806 .ve
1807 
1808    Then, your solver can be chosen with the procedural interface via
1809 $     PCSetType(pc,"my_solver")
1810    or at runtime via the option
1811 $     -pc_type my_solver
1812 
1813    Level: advanced
1814 
1815 .seealso: `PCRegisterAll()`
1816 @*/
PetscErrorCode  PCRegister(const char sname[],PetscErrorCode (*function)(PC))
{
  PetscFunctionBegin;
  /* ensure the PC package (and hence PCList) is initialized before adding to it */
  PetscCall(PCInitializePackage());
  PetscCall(PetscFunctionListAdd(&PCList,sname,function));
  PetscFunctionReturn(0);
}
1824 
/* MatMult callback for the shell matrix created in PCComputeOperator():
   retrieves the PC stored as the shell's context and applies it to X,
   placing the result in Y, so the shell's "multiply" is the PC's action. */
static PetscErrorCode MatMult_PC(Mat A,Vec X,Vec Y)
{
  PC             pc;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(A,&pc));
  PetscCall(PCApply(pc,X,Y));
  PetscFunctionReturn(0);
}
1834 
1835 /*@
1836     PCComputeOperator - Computes the explicit preconditioned operator.
1837 
1838     Collective on PC
1839 
1840     Input Parameters:
1841 +   pc - the preconditioner object
1842 -   mattype - the matrix type to be used for the operator
1843 
1844     Output Parameter:
1845 .   mat - the explicit preconditioned operator
1846 
1847     Notes:
1848     This computation is done by applying the operators to columns of the identity matrix.
1849     This routine is costly in general, and is recommended for use only with relatively small systems.
1850     Currently, this routine uses a dense matrix format when mattype == NULL
1851 
1852     Level: advanced
1853 
1854 .seealso: `KSPComputeOperator()`, `MatType`
1855 
1856 @*/
1857 PetscErrorCode  PCComputeOperator(PC pc,MatType mattype,Mat *mat)
1858 {
1859   PetscInt       N,M,m,n;
1860   Mat            A,Apc;
1861 
1862   PetscFunctionBegin;
1863   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1864   PetscValidPointer(mat,3);
1865   PetscCall(PCGetOperators(pc,&A,NULL));
1866   PetscCall(MatGetLocalSize(A,&m,&n));
1867   PetscCall(MatGetSize(A,&M,&N));
1868   PetscCall(MatCreateShell(PetscObjectComm((PetscObject)pc),m,n,M,N,pc,&Apc));
1869   PetscCall(MatShellSetOperation(Apc,MATOP_MULT,(void (*)(void))MatMult_PC));
1870   PetscCall(MatComputeOperator(Apc,mattype,mat));
1871   PetscCall(MatDestroy(&Apc));
1872   PetscFunctionReturn(0);
1873 }
1874 
1875 /*@
1876    PCSetCoordinates - sets the coordinates of all the nodes on the local process
1877 
1878    Collective on PC
1879 
1880    Input Parameters:
1881 +  pc - the solver context
1882 .  dim - the dimension of the coordinates 1, 2, or 3
1883 .  nloc - the blocked size of the coordinates array
1884 -  coords - the coordinates array
1885 
1886    Level: intermediate
1887 
1888    Notes:
1889    coords is an array of the dim coordinates for the nodes on
1890    the local processor, of size dim*nloc.
   If there are 108 equations on a processor
1892    for a displacement finite element discretization of elasticity (so
1893    that there are nloc = 36 = 108/3 nodes) then the array must have 108
1894    double precision values (ie, 3 * 36).  These x y z coordinates
1895    should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x,
1896    ... , N-1.z ].
1897 
1898 .seealso: `MatSetNearNullSpace()`
1899 @*/
PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,dim,2);
  /* dispatch to the PC implementation's PCSetCoordinates_C method if it has one;
     PC types that do not use coordinates silently ignore this call */
  PetscTryMethod(pc,"PCSetCoordinates_C",(PC,PetscInt,PetscInt,PetscReal*),(pc,dim,nloc,coords));
  PetscFunctionReturn(0);
}
1908 
1909 /*@
1910    PCGetInterpolations - Gets interpolation matrices for all levels (except level 0)
1911 
1912    Logically Collective on PC
1913 
1914    Input Parameter:
.  pc - the preconditioner context
1916 
1917    Output Parameters:
1918 +  num_levels - the number of levels
1919 -  interpolations - the interpolation matrices (size of num_levels-1)
1920 
1921    Level: advanced
1922 
1923 .keywords: MG, GAMG, BoomerAMG, multigrid, interpolation, level
1924 
1925 .seealso: `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetInterpolation()`, `PCGetCoarseOperators()`
1926 @*/
PetscErrorCode PCGetInterpolations(PC pc,PetscInt *num_levels,Mat *interpolations[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidIntPointer(num_levels,2);
  PetscValidPointer(interpolations,3);
  /* PetscUseMethod (not Try) so an error is raised for PC types that do not
     provide PCGetInterpolations_C (i.e. non-multigrid preconditioners) */
  PetscUseMethod(pc,"PCGetInterpolations_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,interpolations));
  PetscFunctionReturn(0);
}
1936 
1937 /*@
1938    PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level)
1939 
1940    Logically Collective on PC
1941 
1942    Input Parameter:
.  pc - the preconditioner context
1944 
1945    Output Parameters:
1946 +  num_levels - the number of levels
1947 -  coarseOperators - the coarse operator matrices (size of num_levels-1)
1948 
1949    Level: advanced
1950 
1951 .keywords: MG, GAMG, BoomerAMG, get, multigrid, interpolation, level
1952 
1953 .seealso: `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetRScale()`, `PCMGGetInterpolation()`, `PCGetInterpolations()`
1954 @*/
PetscErrorCode PCGetCoarseOperators(PC pc,PetscInt *num_levels,Mat *coarseOperators[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidIntPointer(num_levels,2);
  PetscValidPointer(coarseOperators,3);
  /* PetscUseMethod (not Try) so an error is raised for PC types that do not
     provide PCGetCoarseOperators_C (i.e. non-multigrid preconditioners) */
  PetscUseMethod(pc,"PCGetCoarseOperators_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,coarseOperators));
  PetscFunctionReturn(0);
}
1964