xref: /petsc/src/ksp/pc/interface/precon.c (revision dfd676b1a855b7f967ece75a22ee7f6626d10f89)
1 
2 /*
3     The PC (preconditioner) interface routines, callable by users.
4 */
5 #include <petsc/private/pcimpl.h>            /*I "petscksp.h" I*/
6 #include <petscdm.h>
7 
8 /* Logging support */
9 PetscClassId  PC_CLASSID;
10 PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_MatApply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
11 PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks;
12 PetscInt      PetscMGLevelId;
13 
/*
  PCGetDefaultType_Private - Selects a default PC type name based on the attached matrix and the communicator size.

  Heuristic: on one process prefer ICC when the matrix is known symmetric (or ILU is unavailable),
  otherwise ILU; in parallel prefer block Jacobi when the matrix can provide its diagonal block;
  fall back to PCNONE when neither a factorization nor diagonal-block support exists.
*/
PetscErrorCode PCGetDefaultType_Private(PC pc,const char *type[])
{
  PetscMPIInt    size;
  PetscBool      hasop,flg1,flg2,set,flg3;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size));
  if (pc->pmat) {
    /* hasop: matrix can extract its local diagonal block, a prerequisite for PCBJACOBI */
    PetscCall(MatHasOperation(pc->pmat,MATOP_GET_DIAGONAL_BLOCK,&hasop));
    if (size == 1) {
      PetscCall(MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1));
      PetscCall(MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2));
      PetscCall(MatIsSymmetricKnown(pc->pmat,&set,&flg3));
      /* choose ICC when it is the only factorization available, or the matrix is known symmetric */
      if (flg1 && (!flg2 || (set && flg3))) {
        *type = PCICC;
      } else if (flg2) {
        *type = PCILU;
      } else if (hasop) { /* likely is a parallel matrix run on one processor */
        *type = PCBJACOBI;
      } else {
        *type = PCNONE;
      }
    } else {
       if (hasop) {
        *type = PCBJACOBI;
      } else {
        *type = PCNONE;
      }
    }
  } else {
    /* no matrix attached yet: guess from the communicator size alone */
    if (size == 1) {
      *type = PCILU;
    } else {
      *type = PCBJACOBI;
    }
  }
  PetscFunctionReturn(0);
}
52 
53 /*@
54    PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats
55 
56    Collective on PC
57 
58    Input Parameter:
59 .  pc - the preconditioner context
60 
61    Level: developer
62 
63    Notes:
64     This allows a PC to be reused for a different sized linear system but using the same options that have been previously set in the PC
65 
66 .seealso: PCCreate(), PCSetUp()
67 @*/
68 PetscErrorCode  PCReset(PC pc)
69 {
70   PetscFunctionBegin;
71   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
72   if (pc->ops->reset) {
73     PetscCall((*pc->ops->reset)(pc));
74   }
75   PetscCall(VecDestroy(&pc->diagonalscaleright));
76   PetscCall(VecDestroy(&pc->diagonalscaleleft));
77   PetscCall(MatDestroy(&pc->pmat));
78   PetscCall(MatDestroy(&pc->mat));
79 
80   pc->setupcalled = 0;
81   PetscFunctionReturn(0);
82 }
83 
84 /*@C
85    PCDestroy - Destroys PC context that was created with PCCreate().
86 
87    Collective on PC
88 
89    Input Parameter:
90 .  pc - the preconditioner context
91 
92    Level: developer
93 
94 .seealso: PCCreate(), PCSetUp()
95 @*/
96 PetscErrorCode  PCDestroy(PC *pc)
97 {
98   PetscFunctionBegin;
99   if (!*pc) PetscFunctionReturn(0);
100   PetscValidHeaderSpecific((*pc),PC_CLASSID,1);
101   if (--((PetscObject)(*pc))->refct > 0) {*pc = NULL; PetscFunctionReturn(0);}
102 
103   PetscCall(PCReset(*pc));
104 
105   /* if memory was published with SAWs then destroy it */
106   PetscCall(PetscObjectSAWsViewOff((PetscObject)*pc));
107   if ((*pc)->ops->destroy) PetscCall((*(*pc)->ops->destroy)((*pc)));
108   PetscCall(DMDestroy(&(*pc)->dm));
109   PetscCall(PetscHeaderDestroy(pc));
110   PetscFunctionReturn(0);
111 }
112 
113 /*@C
114    PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right
115       scaling as needed by certain time-stepping codes.
116 
117    Logically Collective on PC
118 
119    Input Parameter:
120 .  pc - the preconditioner context
121 
122    Output Parameter:
123 .  flag - PETSC_TRUE if it applies the scaling
124 
125    Level: developer
126 
127    Notes:
128     If this returns PETSC_TRUE then the system solved via the Krylov method is
129 $           D M A D^{-1} y = D M b  for left preconditioning or
130 $           D A M D^{-1} z = D b for right preconditioning
131 
132 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCSetDiagonalScale()
133 @*/
134 PetscErrorCode  PCGetDiagonalScale(PC pc,PetscBool  *flag)
135 {
136   PetscFunctionBegin;
137   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
138   PetscValidBoolPointer(flag,2);
139   *flag = pc->diagonalscale;
140   PetscFunctionReturn(0);
141 }
142 
143 /*@
144    PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right
145       scaling as needed by certain time-stepping codes.
146 
147    Logically Collective on PC
148 
149    Input Parameters:
150 +  pc - the preconditioner context
151 -  s - scaling vector
152 
153    Level: intermediate
154 
155    Notes:
156     The system solved via the Krylov method is
157 $           D M A D^{-1} y = D M b  for left preconditioning or
158 $           D A M D^{-1} z = D b for right preconditioning
159 
160    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
161 
162 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCGetDiagonalScale()
163 @*/
164 PetscErrorCode  PCSetDiagonalScale(PC pc,Vec s)
165 {
166   PetscFunctionBegin;
167   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
168   PetscValidHeaderSpecific(s,VEC_CLASSID,2);
169   pc->diagonalscale     = PETSC_TRUE;
170 
171   PetscCall(PetscObjectReference((PetscObject)s));
172   PetscCall(VecDestroy(&pc->diagonalscaleleft));
173 
174   pc->diagonalscaleleft = s;
175 
176   PetscCall(VecDuplicate(s,&pc->diagonalscaleright));
177   PetscCall(VecCopy(s,pc->diagonalscaleright));
178   PetscCall(VecReciprocal(pc->diagonalscaleright));
179   PetscFunctionReturn(0);
180 }
181 
182 /*@
183    PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes.
184 
185    Logically Collective on PC
186 
187    Input Parameters:
188 +  pc - the preconditioner context
189 .  in - input vector
190 -  out - scaled vector (maybe the same as in)
191 
192    Level: intermediate
193 
194    Notes:
195     The system solved via the Krylov method is
196 $           D M A D^{-1} y = D M b  for left preconditioning or
197 $           D A M D^{-1} z = D b for right preconditioning
198 
199    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
200 
201    If diagonal scaling is turned off and in is not out then in is copied to out
202 
203 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleSet(), PCDiagonalScaleRight(), PCDiagonalScale()
204 @*/
205 PetscErrorCode  PCDiagonalScaleLeft(PC pc,Vec in,Vec out)
206 {
207   PetscFunctionBegin;
208   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
209   PetscValidHeaderSpecific(in,VEC_CLASSID,2);
210   PetscValidHeaderSpecific(out,VEC_CLASSID,3);
211   if (pc->diagonalscale) {
212     PetscCall(VecPointwiseMult(out,pc->diagonalscaleleft,in));
213   } else if (in != out) {
214     PetscCall(VecCopy(in,out));
215   }
216   PetscFunctionReturn(0);
217 }
218 
219 /*@
220    PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes.
221 
222    Logically Collective on PC
223 
224    Input Parameters:
225 +  pc - the preconditioner context
226 .  in - input vector
227 -  out - scaled vector (maybe the same as in)
228 
229    Level: intermediate
230 
231    Notes:
232     The system solved via the Krylov method is
233 $           D M A D^{-1} y = D M b  for left preconditioning or
234 $           D A M D^{-1} z = D b for right preconditioning
235 
236    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
237 
238    If diagonal scaling is turned off and in is not out then in is copied to out
239 
240 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleSet(), PCDiagonalScale()
241 @*/
242 PetscErrorCode  PCDiagonalScaleRight(PC pc,Vec in,Vec out)
243 {
244   PetscFunctionBegin;
245   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
246   PetscValidHeaderSpecific(in,VEC_CLASSID,2);
247   PetscValidHeaderSpecific(out,VEC_CLASSID,3);
248   if (pc->diagonalscale) {
249     PetscCall(VecPointwiseMult(out,pc->diagonalscaleright,in));
250   } else if (in != out) {
251     PetscCall(VecCopy(in,out));
252   }
253   PetscFunctionReturn(0);
254 }
255 
256 /*@
257    PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the
258    operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
259    TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators() not the Pmat.
260 
261    Logically Collective on PC
262 
263    Input Parameters:
264 +  pc - the preconditioner context
265 -  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)
266 
267    Options Database Key:
268 .  -pc_use_amat <true,false> - use the amat to apply the operator
269 
270    Notes:
271    For the common case in which the linear system matrix and the matrix used to construct the
272    preconditioner are identical, this routine is does nothing.
273 
274    Level: intermediate
275 
276 .seealso: PCGetUseAmat(), PCBJACOBI, PGMG, PCFIELDSPLIT, PCCOMPOSITE
277 @*/
278 PetscErrorCode  PCSetUseAmat(PC pc,PetscBool flg)
279 {
280   PetscFunctionBegin;
281   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
282   pc->useAmat = flg;
283   PetscFunctionReturn(0);
284 }
285 
286 /*@
287    PCSetErrorIfFailure - Causes PC to generate an error if a FPE, for example a zero pivot, is detected.
288 
289    Logically Collective on PC
290 
291    Input Parameters:
292 +  pc - iterative context obtained from PCCreate()
293 -  flg - PETSC_TRUE indicates you want the error generated
294 
295    Level: advanced
296 
297    Notes:
298     Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner, you can call KSPGetConvergedReason() after a KSPSolve()
299     to determine if it has converged or failed. Or use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is
300     detected.
301 
302     This is propagated into KSPs used by this PC, which then propagate it into PCs used by those KSPs
303 
304 .seealso: PCGetInitialGuessNonzero(), PCSetInitialGuessKnoll(), PCGetInitialGuessKnoll()
305 @*/
306 PetscErrorCode  PCSetErrorIfFailure(PC pc,PetscBool flg)
307 {
308   PetscFunctionBegin;
309   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
310   PetscValidLogicalCollectiveBool(pc,flg,2);
311   pc->erroriffailure = flg;
312   PetscFunctionReturn(0);
313 }
314 
315 /*@
316    PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the
317    operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
318    TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators() not the Pmat.
319 
320    Logically Collective on PC
321 
322    Input Parameter:
323 .  pc - the preconditioner context
324 
325    Output Parameter:
326 .  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)
327 
328    Notes:
329    For the common case in which the linear system matrix and the matrix used to construct the
330    preconditioner are identical, this routine is does nothing.
331 
332    Level: intermediate
333 
334 .seealso: PCSetUseAmat(), PCBJACOBI, PGMG, PCFIELDSPLIT, PCCOMPOSITE
335 @*/
336 PetscErrorCode  PCGetUseAmat(PC pc,PetscBool *flg)
337 {
338   PetscFunctionBegin;
339   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
340   *flg = pc->useAmat;
341   PetscFunctionReturn(0);
342 }
343 
344 /*@
345    PCCreate - Creates a preconditioner context.
346 
347    Collective
348 
349    Input Parameter:
350 .  comm - MPI communicator
351 
352    Output Parameter:
353 .  pc - location to put the preconditioner context
354 
355    Notes:
356    The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi with PCILU or PCICC
357    in parallel. For dense matrices it is always PCNONE.
358 
359    Level: developer
360 
361 .seealso: PCSetUp(), PCApply(), PCDestroy()
362 @*/
363 PetscErrorCode  PCCreate(MPI_Comm comm,PC *newpc)
364 {
365   PC             pc;
366 
367   PetscFunctionBegin;
368   PetscValidPointer(newpc,2);
369   *newpc = NULL;
370   PetscCall(PCInitializePackage());
371 
372   PetscCall(PetscHeaderCreate(pc,PC_CLASSID,"PC","Preconditioner","PC",comm,PCDestroy,PCView));
373 
374   pc->mat                  = NULL;
375   pc->pmat                 = NULL;
376   pc->setupcalled          = 0;
377   pc->setfromoptionscalled = 0;
378   pc->data                 = NULL;
379   pc->diagonalscale        = PETSC_FALSE;
380   pc->diagonalscaleleft    = NULL;
381   pc->diagonalscaleright   = NULL;
382 
383   pc->modifysubmatrices  = NULL;
384   pc->modifysubmatricesP = NULL;
385 
386   *newpc = pc;
387   PetscFunctionReturn(0);
388 
389 }
390 
391 /* -------------------------------------------------------------------------------*/
392 
/*@
   PCApply - Applies the preconditioner to a vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector

   Level: developer

.seealso: PCApplyTranspose(), PCApplyBAorAB()
@*/
PetscErrorCode  PCApply(PC pc,Vec x,Vec y)
{
  PetscInt       m,n,mv,nv;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues(x,2,PETSC_TRUE));
  /* use pmat to check vector sizes since for KSPLSQR the pmat may be of a different size than mat */
  PetscCall(MatGetLocalSize(pc->pmat,&m,&n));
  PetscCall(VecGetLocalSize(x,&mv));
  PetscCall(VecGetLocalSize(y,&nv));
  /* check pmat * y = x is feasible */
  PetscCheck(mv == m,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local rows %" PetscInt_FMT " does not equal input vector size %" PetscInt_FMT,m,mv);
  PetscCheck(nv == n,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local columns %" PetscInt_FMT " does not equal output vector size %" PetscInt_FMT,n,nv);
  /* the output vector must be writable: fail early if a caller holds a read lock on it */
  PetscCall(VecSetErrorIfLocked(y,3));

  PetscCall(PCSetUp(pc));
  PetscCheck(pc->ops->apply,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply");
  /* read-lock x for the duration of the apply so implementations cannot modify the input */
  PetscCall(VecLockReadPush(x));
  PetscCall(PetscLogEventBegin(PC_Apply,pc,x,y,0));
  PetscCall((*pc->ops->apply)(pc,x,y));
  PetscCall(PetscLogEventEnd(PC_Apply,pc,x,y,0));
  if (pc->erroriffailure) PetscCall(VecValidValues(y,3,PETSC_FALSE));
  PetscCall(VecLockReadPop(x));
  PetscFunctionReturn(0);
}
438 
/*@
   PCMatApply - Applies the preconditioner to multiple vectors stored as a MATDENSE. Like PCApply(), Y and X must be different matrices.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  X - block of input vectors

   Output Parameter:
.  Y - block of output vectors

   Level: developer

.seealso: PCApply(), KSPMatSolve()
@*/
PetscErrorCode  PCMatApply(PC pc,Mat X,Mat Y)
{
  Mat            A;
  Vec            cy, cx;
  PetscInt       m1, M1, m2, M2, n1, N1, n2, N2, m3, M3, n3, N3;
  PetscBool      match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(X, MAT_CLASSID, 2);
  PetscValidHeaderSpecific(Y, MAT_CLASSID, 3);
  PetscCheckSameComm(pc, 1, X, 2);
  PetscCheckSameComm(pc, 1, Y, 3);
  PetscCheck(Y != X,PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "Y and X must be different matrices");
  /* sizes are checked against the Pmat (second operator) */
  PetscCall(PCGetOperators(pc, NULL, &A));
  PetscCall(MatGetLocalSize(A, &m3, &n3));
  PetscCall(MatGetLocalSize(X, &m2, &n2));
  PetscCall(MatGetLocalSize(Y, &m1, &n1));
  PetscCall(MatGetSize(A, &M3, &N3));
  PetscCall(MatGetSize(X, &M2, &N2));
  PetscCall(MatGetSize(Y, &M1, &N1));
  /* X and Y must hold the same number of columns (vectors); X must conform to the Pmat rows, Y to its columns */
  PetscCheck(n1 == n2 && N1 == N2,PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of input vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and block of output vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")", n2, N2, n1, N1);
  PetscCheck(m2 == m3 && M2 == M3,PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of input vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m2, M2, m3, M3, n3, N3);
  PetscCheck(m1 == n3 && M1 == N3,PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of output vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m1, M1, m3, M3, n3, N3);
  /* both blocks must be dense so individual columns can be addressed as Vecs */
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)Y, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match,PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of output vectors not stored in a dense Mat");
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match,PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of input vectors not stored in a dense Mat");
  PetscCall(PCSetUp(pc));
  if (pc->ops->matapply) {
    /* implementation supports applying to all columns at once */
    PetscCall(PetscLogEventBegin(PC_MatApply, pc, X, Y, 0));
    PetscCall((*pc->ops->matapply)(pc, X, Y));
    PetscCall(PetscLogEventEnd(PC_MatApply, pc, X, Y, 0));
  } else {
    /* fallback: apply the preconditioner one column at a time with PCApply() */
    PetscCall(PetscInfo(pc, "PC type %s applying column by column\n", ((PetscObject)pc)->type_name));
    for (n1 = 0; n1 < N1; ++n1) {
      PetscCall(MatDenseGetColumnVecRead(X, n1, &cx));
      PetscCall(MatDenseGetColumnVecWrite(Y, n1, &cy));
      PetscCall(PCApply(pc, cx, cy));
      PetscCall(MatDenseRestoreColumnVecWrite(Y, n1, &cy));
      PetscCall(MatDenseRestoreColumnVecRead(X, n1, &cx));
    }
  }
  PetscFunctionReturn(0);
}
500 
/*@
   PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector

   Notes:
   Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.

   Level: developer

.seealso: PCApply(), PCApplySymmetricRight()
@*/
PetscErrorCode  PCApplySymmetricLeft(PC pc,Vec x,Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues(x,2,PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  PetscCheck(pc->ops->applysymmetricleft,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply");
  /* read-lock x so the implementation cannot modify the input */
  PetscCall(VecLockReadPush(x));
  PetscCall(PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0));
  PetscCall((*pc->ops->applysymmetricleft)(pc,x,y));
  PetscCall(PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues(y,3,PETSC_FALSE));
  PetscFunctionReturn(0);
}
538 
539 /*@
540    PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.
541 
542    Collective on PC
543 
544    Input Parameters:
545 +  pc - the preconditioner context
546 -  x - input vector
547 
548    Output Parameter:
549 .  y - output vector
550 
551    Level: developer
552 
553    Notes:
554    Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.
555 
556 .seealso: PCApply(), PCApplySymmetricLeft()
557 @*/
558 PetscErrorCode  PCApplySymmetricRight(PC pc,Vec x,Vec y)
559 {
560   PetscFunctionBegin;
561   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
562   PetscValidHeaderSpecific(x,VEC_CLASSID,2);
563   PetscValidHeaderSpecific(y,VEC_CLASSID,3);
564   PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
565   if (pc->erroriffailure) PetscCall(VecValidValues(x,2,PETSC_TRUE));
566   PetscCall(PCSetUp(pc));
567   PetscCheck(pc->ops->applysymmetricright,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply");
568   PetscCall(VecLockReadPush(x));
569   PetscCall(PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0));
570   PetscCall((*pc->ops->applysymmetricright)(pc,x,y));
571   PetscCall(PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0));
572   PetscCall(VecLockReadPop(x));
573   if (pc->erroriffailure) PetscCall(VecValidValues(y,3,PETSC_FALSE));
574   PetscFunctionReturn(0);
575 }
576 
/*@
   PCApplyTranspose - Applies the transpose of preconditioner to a vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector

   Notes:
    For complex numbers this applies the non-Hermitian transpose.

   Developer Notes:
    We need to implement a PCApplyHermitianTranspose()

   Level: developer

.seealso: PCApply(), PCApplyBAorAB(), PCApplyBAorABTranspose(), PCApplyTransposeExists()
@*/
PetscErrorCode  PCApplyTranspose(PC pc,Vec x,Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues(x,2,PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  PetscCheck(pc->ops->applytranspose,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply transpose");
  /* read-lock x so the implementation cannot modify the input */
  PetscCall(VecLockReadPush(x));
  /* the transpose apply is logged under the same PC_Apply event as PCApply() */
  PetscCall(PetscLogEventBegin(PC_Apply,pc,x,y,0));
  PetscCall((*pc->ops->applytranspose)(pc,x,y));
  PetscCall(PetscLogEventEnd(PC_Apply,pc,x,y,0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues(y,3,PETSC_FALSE));
  PetscFunctionReturn(0);
}
617 
618 /*@
619    PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation
620 
621    Collective on PC
622 
623    Input Parameters:
624 .  pc - the preconditioner context
625 
626    Output Parameter:
627 .  flg - PETSC_TRUE if a transpose operation is defined
628 
629    Level: developer
630 
631 .seealso: PCApplyTranspose()
632 @*/
633 PetscErrorCode  PCApplyTransposeExists(PC pc,PetscBool  *flg)
634 {
635   PetscFunctionBegin;
636   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
637   PetscValidBoolPointer(flg,2);
638   if (pc->ops->applytranspose) *flg = PETSC_TRUE;
639   else *flg = PETSC_FALSE;
640   PetscFunctionReturn(0);
641 }
642 
/*@
   PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
.  x - input vector
-  work - work vector

   Output Parameter:
.  y - output vector

   Level: developer

   Notes:
    If the PC has had PCSetDiagonalScale() set then D M A D^{-1} for left preconditioning or  D A M D^{-1} is actually applied. Note that the
   specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling.

.seealso: PCApply(), PCApplyTranspose(), PCApplyBAorABTranspose()
@*/
PetscErrorCode  PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,side,2);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscValidHeaderSpecific(y,VEC_CLASSID,4);
  PetscValidHeaderSpecific(work,VEC_CLASSID,5);
  PetscCheckSameComm(pc,1,x,3);
  PetscCheckSameComm(pc,1,y,4);
  PetscCheckSameComm(pc,1,work,5);
  PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  PetscCheck(side == PC_LEFT || side == PC_SYMMETRIC || side == PC_RIGHT,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric");
  PetscCheck(!pc->diagonalscale || side != PC_SYMMETRIC,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application");
  if (pc->erroriffailure) PetscCall(VecValidValues(x,3,PETSC_TRUE));

  PetscCall(PCSetUp(pc));
  if (pc->diagonalscale) {
    /* diagonal scaling path: apply D ... D^{-1} around the preconditioned operator */
    if (pc->ops->applyBA) {
      Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
      PetscCall(VecDuplicate(x,&work2));
      PetscCall(PCDiagonalScaleRight(pc,x,work2));
      PetscCall((*pc->ops->applyBA)(pc,side,work2,y,work));
      PetscCall(PCDiagonalScaleLeft(pc,y,y));
      PetscCall(VecDestroy(&work2));
    } else if (side == PC_RIGHT) {
      /* y = D A B D^{-1} x, built from separate apply and mult steps */
      PetscCall(PCDiagonalScaleRight(pc,x,y));
      PetscCall(PCApply(pc,y,work));
      PetscCall(MatMult(pc->mat,work,y));
      PetscCall(PCDiagonalScaleLeft(pc,y,y));
    } else if (side == PC_LEFT) {
      /* y = D B A D^{-1} x */
      PetscCall(PCDiagonalScaleRight(pc,x,y));
      PetscCall(MatMult(pc->mat,y,work));
      PetscCall(PCApply(pc,work,y));
      PetscCall(PCDiagonalScaleLeft(pc,y,y));
    } else PetscCheck(side != PC_SYMMETRIC,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner");
  } else {
    /* no diagonal scaling: use the fused applyBA when available, otherwise compose PCApply and MatMult */
    if (pc->ops->applyBA) {
      PetscCall((*pc->ops->applyBA)(pc,side,x,y,work));
    } else if (side == PC_RIGHT) {
      PetscCall(PCApply(pc,x,work));
      PetscCall(MatMult(pc->mat,work,y));
    } else if (side == PC_LEFT) {
      PetscCall(MatMult(pc->mat,x,work));
      PetscCall(PCApply(pc,work,y));
    } else if (side == PC_SYMMETRIC) {
      /* There's an extra copy here; maybe should provide 2 work vectors instead? */
      PetscCall(PCApplySymmetricRight(pc,x,work));
      PetscCall(MatMult(pc->mat,work,y));
      PetscCall(VecCopy(y,work));
      PetscCall(PCApplySymmetricLeft(pc,work,y));
    }
  }
  if (pc->erroriffailure) PetscCall(VecValidValues(y,4,PETSC_FALSE));
  PetscFunctionReturn(0);
}
721 
/*@
   PCApplyBAorABTranspose - Applies the transpose of the preconditioner
   and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning,
   NOT tr(B*A) = tr(A)*tr(B).

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
.  x - input vector
-  work - work vector

   Output Parameter:
.  y - output vector

   Notes:
    this routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner
      defined by B'. This is why this has the funny form that it computes tr(B) * tr(A)

    Level: developer

.seealso: PCApply(), PCApplyTranspose(), PCApplyBAorAB()
@*/
PetscErrorCode  PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscValidHeaderSpecific(y,VEC_CLASSID,4);
  PetscValidHeaderSpecific(work,VEC_CLASSID,5);
  PetscCheck(x != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues(x,3,PETSC_TRUE));
  /* use the fused implementation when the PC provides one; note it returns early */
  if (pc->ops->applyBAtranspose) {
    PetscCall((*pc->ops->applyBAtranspose)(pc,side,x,y,work));
    if (pc->erroriffailure) PetscCall(VecValidValues(y,4,PETSC_FALSE));
    PetscFunctionReturn(0);
  }
  PetscCheck(side == PC_LEFT || side == PC_RIGHT,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left");

  PetscCall(PCSetUp(pc));
  if (side == PC_RIGHT) {
    /* y = tr(A) tr(B) x */
    PetscCall(PCApplyTranspose(pc,x,work));
    PetscCall(MatMultTranspose(pc->mat,work,y));
  } else if (side == PC_LEFT) {
    /* y = tr(B) tr(A) x */
    PetscCall(MatMultTranspose(pc->mat,x,work));
    PetscCall(PCApplyTranspose(pc,work,y));
  }
  /* add support for PC_SYMMETRIC */
  if (pc->erroriffailure) PetscCall(VecValidValues(y,4,PETSC_FALSE));
  PetscFunctionReturn(0);
}
774 
775 /* -------------------------------------------------------------------------------*/
776 
777 /*@
778    PCApplyRichardsonExists - Determines whether a particular preconditioner has a
779    built-in fast application of Richardson's method.
780 
781    Not Collective
782 
783    Input Parameter:
784 .  pc - the preconditioner
785 
786    Output Parameter:
787 .  exists - PETSC_TRUE or PETSC_FALSE
788 
789    Level: developer
790 
791 .seealso: PCApplyRichardson()
792 @*/
793 PetscErrorCode  PCApplyRichardsonExists(PC pc,PetscBool  *exists)
794 {
795   PetscFunctionBegin;
796   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
797   PetscValidBoolPointer(exists,2);
798   if (pc->ops->applyrichardson) *exists = PETSC_TRUE;
799   else *exists = PETSC_FALSE;
800   PetscFunctionReturn(0);
801 }
802 
/*@
   PCApplyRichardson - Applies several steps of Richardson iteration with
   the particular preconditioner. This routine is usually used by the
   Krylov solvers and not the application code directly.

   Collective on PC

   Input Parameters:
+  pc  - the preconditioner context
.  b   - the right hand side
.  w   - one work vector
.  rtol - relative decrease in residual norm convergence criteria
.  abstol - absolute residual norm convergence criteria
.  dtol - divergence residual norm increase criteria
.  its - the number of iterations to apply.
-  guesszero - if the input x contains nonzero initial guess

   Output Parameters:
+  outits - number of iterations actually used (for SOR this always equals its)
.  reason - the reason the apply terminated
-  y - the solution (also contains initial guess if guesszero is PETSC_FALSE)

   Notes:
   Most preconditioners do not support this function. Use the command
   PCApplyRichardsonExists() to determine if one does.

   Except for the multigrid PC this routine ignores the convergence tolerances
   and always runs for the number of iterations

   Level: developer

.seealso: PCApplyRichardsonExists()
@*/
PetscErrorCode  PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(b,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscValidHeaderSpecific(w,VEC_CLASSID,4);
  PetscCheck(b != y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"b and y must be different vectors");
  PetscCall(PCSetUp(pc));
  /* delegate entirely to the implementation; tolerances/iteration handling are its responsibility */
  PetscCheck(pc->ops->applyrichardson,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply richardson");
  PetscCall((*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason));
  PetscFunctionReturn(0);
}
849 
850 /*@
851    PCSetFailedReason - Sets the reason a PCSetUp() failed or PC_NOERROR if it did not fail
852 
853    Logically Collective on PC
854 
855    Input Parameters:
856 +  pc - the preconditioner context
-  reason - the reason it failed
858 
859    Level: advanced
860 
861 .seealso: PCCreate(), PCApply(), PCDestroy(), PCFailedReason
862 @*/
863 PetscErrorCode PCSetFailedReason(PC pc,PCFailedReason reason)
864 {
865   PetscFunctionBegin;
866   pc->failedreason = reason;
867   PetscFunctionReturn(0);
868 }
869 
870 /*@
871    PCGetFailedReason - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail
872 
873    Logically Collective on PC
874 
875    Input Parameter:
876 .  pc - the preconditioner context
877 
878    Output Parameter:
879 .  reason - the reason it failed
880 
881    Level: advanced
882 
   Notes: This is the maximum of the failed reason over all ranks in the PC communicator. It is only valid after
   a call to KSPCheckDot() or KSPCheckNorm() inside a KSPSolve(). It is not valid immediately after a PCSetUp()
   or PCApply(); in that case use PCGetFailedReasonRank()
886 
887 .seealso: PCCreate(), PCApply(), PCDestroy(), PCGetFailedReasonRank(), PCSetFailedReason()
888 @*/
889 PetscErrorCode PCGetFailedReason(PC pc,PCFailedReason *reason)
890 {
891   PetscFunctionBegin;
892   if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
893   else *reason = pc->failedreason;
894   PetscFunctionReturn(0);
895 }
896 
897 /*@
898    PCGetFailedReasonRank - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail on this MPI rank
899 
900   Not Collective on PC
901 
902    Input Parameter:
903 .  pc - the preconditioner context
904 
905    Output Parameter:
906 .  reason - the reason it failed
907 
908    Notes:
909      Different ranks may have different reasons or no reason, see PCGetFailedReason()
910 
911    Level: advanced
912 
913 .seealso: PCCreate(), PCApply(), PCDestroy(), PCGetFailedReason(), PCSetFailedReason()
914 @*/
915 PetscErrorCode PCGetFailedReasonRank(PC pc,PCFailedReason *reason)
916 {
917   PetscFunctionBegin;
918   if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
919   else *reason = pc->failedreason;
920   PetscFunctionReturn(0);
921 }
922 
923 /*  Next line needed to deactivate KSP_Solve logging */
924 #include <petsc/private/kspimpl.h>
925 
926 /*
927       a setupcall of 0 indicates never setup,
928                      1 indicates has been previously setup
929                     -1 indicates a PCSetUp() was attempted and failed
930 */
931 /*@
932    PCSetUp - Prepares for the use of a preconditioner.
933 
934    Collective on PC
935 
936    Input Parameter:
937 .  pc - the preconditioner context
938 
939    Level: developer
940 
941 .seealso: PCCreate(), PCApply(), PCDestroy()
942 @*/
PetscErrorCode  PCSetUp(PC pc)
{
  const char       *def;
  PetscObjectState matstate, matnonzerostate;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscCheck(pc->mat,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first");

  /* user explicitly asked (via PCSetReusePreconditioner()) to keep the existing
     preconditioner even though the operator may have changed */
  if (pc->setupcalled && pc->reusepreconditioner) {
    PetscCall(PetscInfo(pc,"Leaving PC with identical preconditioner since reuse preconditioner is set\n"));
    PetscFunctionReturn(0);
  }

  /* compare the current state of the preconditioner matrix against the state
     recorded at the last setup to decide whether (and how much) work is needed */
  PetscCall(PetscObjectStateGet((PetscObject)pc->pmat,&matstate));
  PetscCall(MatGetNonzeroState(pc->pmat,&matnonzerostate));
  if (!pc->setupcalled) {
    PetscCall(PetscInfo(pc,"Setting up PC for first time\n"));
    pc->flag = DIFFERENT_NONZERO_PATTERN;
  } else if (matstate == pc->matstate) {
    /* operator values unchanged since last setup: nothing to rebuild */
    PetscCall(PetscInfo(pc,"Leaving PC with identical preconditioner since operator is unchanged\n"));
    PetscFunctionReturn(0);
  } else {
    if (matnonzerostate > pc->matnonzerostate) {
      PetscCall(PetscInfo(pc,"Setting up PC with different nonzero pattern\n"));
      pc->flag = DIFFERENT_NONZERO_PATTERN;
    } else {
      /* values changed but the sparsity pattern did not: implementations may reuse symbolic work */
      PetscCall(PetscInfo(pc,"Setting up PC with same nonzero pattern\n"));
      pc->flag = SAME_NONZERO_PATTERN;
    }
  }
  pc->matstate        = matstate;
  pc->matnonzerostate = matnonzerostate;

  /* no type was ever selected: choose a default based on the matrix and communicator size */
  if (!((PetscObject)pc)->type_name) {
    PetscCall(PCGetDefaultType_Private(pc,&def));
    PetscCall(PCSetType(pc,def));
  }

  PetscCall(MatSetErrorIfFailure(pc->pmat,pc->erroriffailure));
  PetscCall(MatSetErrorIfFailure(pc->mat,pc->erroriffailure));
  PetscCall(PetscLogEventBegin(PC_SetUp,pc,0,0,0));
  if (pc->ops->setup) {
    /* do not log solves and applications of preconditioners while constructing preconditioners; perhaps they should be logged separately from the regular solves */
    PetscCall(KSPInitializePackage());
    PetscCall(PetscLogEventDeactivatePush(KSP_Solve));
    PetscCall(PetscLogEventDeactivatePush(PC_Apply));
    PetscCall((*pc->ops->setup)(pc));
    PetscCall(PetscLogEventDeactivatePop(KSP_Solve));
    PetscCall(PetscLogEventDeactivatePop(PC_Apply));
  }
  PetscCall(PetscLogEventEnd(PC_SetUp,pc,0,0,0));
  /* only promote 0 -> 1; a negative value recorded by a failed setup (see the
     comment above this function) must be preserved */
  if (!pc->setupcalled) pc->setupcalled = 1;
  PetscFunctionReturn(0);
}
998 
999 /*@
1000    PCSetUpOnBlocks - Sets up the preconditioner for each block in
1001    the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
1002    methods.
1003 
1004    Collective on PC
1005 
1006    Input Parameters:
1007 .  pc - the preconditioner context
1008 
1009    Level: developer
1010 
1011 .seealso: PCCreate(), PCApply(), PCDestroy(), PCSetUp()
1012 @*/
1013 PetscErrorCode  PCSetUpOnBlocks(PC pc)
1014 {
1015   PetscFunctionBegin;
1016   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1017   if (!pc->ops->setuponblocks) PetscFunctionReturn(0);
1018   PetscCall(PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0));
1019   PetscCall((*pc->ops->setuponblocks)(pc));
1020   PetscCall(PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0));
1021   PetscFunctionReturn(0);
1022 }
1023 
1024 /*@C
1025    PCSetModifySubMatrices - Sets a user-defined routine for modifying the
1026    submatrices that arise within certain subdomain-based preconditioners.
1027    The basic submatrices are extracted from the preconditioner matrix as
1028    usual; the user can then alter these (for example, to set different boundary
1029    conditions for each submatrix) before they are used for the local solves.
1030 
1031    Logically Collective on PC
1032 
1033    Input Parameters:
1034 +  pc - the preconditioner context
1035 .  func - routine for modifying the submatrices
1036 -  ctx - optional user-defined context (may be null)
1037 
1038    Calling sequence of func:
1039 $     func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx);
1040 
1041 +  row - an array of index sets that contain the global row numbers
1042          that comprise each local submatrix
1043 .  col - an array of index sets that contain the global column numbers
1044          that comprise each local submatrix
1045 .  submat - array of local submatrices
1046 -  ctx - optional user-defined context for private data for the
1047          user-defined func routine (may be null)
1048 
1049    Notes:
1050    PCSetModifySubMatrices() MUST be called before KSPSetUp() and
1051    KSPSolve().
1052 
1053    A routine set by PCSetModifySubMatrices() is currently called within
1054    the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM)
1055    preconditioners.  All other preconditioners ignore this routine.
1056 
1057    Level: advanced
1058 
1059 .seealso: PCModifySubMatrices()
1060 @*/
1061 PetscErrorCode  PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx)
1062 {
1063   PetscFunctionBegin;
1064   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1065   pc->modifysubmatrices  = func;
1066   pc->modifysubmatricesP = ctx;
1067   PetscFunctionReturn(0);
1068 }
1069 
1070 /*@C
1071    PCModifySubMatrices - Calls an optional user-defined routine within
1072    certain preconditioners if one has been set with PCSetModifySubMatrices().
1073 
1074    Collective on PC
1075 
1076    Input Parameters:
1077 +  pc - the preconditioner context
1078 .  nsub - the number of local submatrices
1079 .  row - an array of index sets that contain the global row numbers
1080          that comprise each local submatrix
1081 .  col - an array of index sets that contain the global column numbers
1082          that comprise each local submatrix
1083 .  submat - array of local submatrices
1084 -  ctx - optional user-defined context for private data for the
1085          user-defined routine (may be null)
1086 
1087    Output Parameter:
1088 .  submat - array of local submatrices (the entries of which may
1089             have been modified)
1090 
1091    Notes:
1092    The user should NOT generally call this routine, as it will
1093    automatically be called within certain preconditioners (currently
1094    block Jacobi, additive Schwarz) if set.
1095 
1096    The basic submatrices are extracted from the preconditioner matrix
1097    as usual; the user can then alter these (for example, to set different
1098    boundary conditions for each submatrix) before they are used for the
1099    local solves.
1100 
1101    Level: developer
1102 
1103 .seealso: PCSetModifySubMatrices()
1104 @*/
1105 PetscErrorCode  PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
1106 {
1107   PetscFunctionBegin;
1108   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1109   if (!pc->modifysubmatrices) PetscFunctionReturn(0);
1110   PetscCall(PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0));
1111   PetscCall((*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx));
1112   PetscCall(PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0));
1113   PetscFunctionReturn(0);
1114 }
1115 
1116 /*@
1117    PCSetOperators - Sets the matrix associated with the linear system and
1118    a (possibly) different one associated with the preconditioner.
1119 
1120    Logically Collective on PC
1121 
1122    Input Parameters:
1123 +  pc - the preconditioner context
1124 .  Amat - the matrix that defines the linear system
1125 -  Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat.
1126 
1127    Notes:
1128     Passing a NULL for Amat or Pmat removes the matrix that is currently used.
1129 
1130     If you wish to replace either Amat or Pmat but leave the other one untouched then
1131     first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
1132     on it and then pass it back in in your call to KSPSetOperators().
1133 
1134    More Notes about Repeated Solution of Linear Systems:
1135    PETSc does NOT reset the matrix entries of either Amat or Pmat
1136    to zero after a linear solve; the user is completely responsible for
1137    matrix assembly.  See the routine MatZeroEntries() if desiring to
1138    zero all elements of a matrix.
1139 
1140    Level: intermediate
1141 
1142 .seealso: PCGetOperators(), MatZeroEntries()
1143  @*/
PetscErrorCode  PCSetOperators(PC pc,Mat Amat,Mat Pmat)
{
  PetscInt         m1,n1,m2,n2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (Amat) PetscValidHeaderSpecific(Amat,MAT_CLASSID,2);
  if (Pmat) PetscValidHeaderSpecific(Pmat,MAT_CLASSID,3);
  if (Amat) PetscCheckSameComm(pc,1,Amat,2);
  if (Pmat) PetscCheckSameComm(pc,1,Pmat,3);
  /* once the PC has been set up the local sizes of replacement operators must
     match those already in use */
  if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) {
    PetscCall(MatGetLocalSize(Amat,&m1,&n1));
    PetscCall(MatGetLocalSize(pc->mat,&m2,&n2));
    PetscCheck(m1 == m2 && n1 == n2,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT,m2,n2,m1,n1);
    PetscCall(MatGetLocalSize(Pmat,&m1,&n1));
    PetscCall(MatGetLocalSize(pc->pmat,&m2,&n2));
    PetscCheck(m1 == m2 && n1 == n2,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT,m2,n2,m1,n1);
  }

  if (Pmat != pc->pmat) {
    /* the matrix defining the preconditioner is being replaced, so invalidate the
       recorded states; PCSetUp() will then rebuild the preconditioner */
    pc->matnonzerostate = -1;
    pc->matstate        = -1;
  }

  /* reference first in case the matrices are the same: destroying before
     referencing could free an object that is about to be stored again */
  if (Amat) PetscCall(PetscObjectReference((PetscObject)Amat));
  PetscCall(MatDestroy(&pc->mat));
  if (Pmat) PetscCall(PetscObjectReference((PetscObject)Pmat));
  PetscCall(MatDestroy(&pc->pmat));
  pc->mat  = Amat;
  pc->pmat = Pmat;
  PetscFunctionReturn(0);
}
1178 
1179 /*@
1180    PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner has changed.
1181 
1182    Logically Collective on PC
1183 
1184    Input Parameters:
1185 +  pc - the preconditioner context
1186 -  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner
1187 
1188     Level: intermediate
1189 
1190 .seealso: PCGetOperators(), MatZeroEntries(), PCGetReusePreconditioner(), KSPSetReusePreconditioner()
1191  @*/
PetscErrorCode  PCSetReusePreconditioner(PC pc,PetscBool flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* logically collective: the flag must agree on every rank of the PC communicator */
  PetscValidLogicalCollectiveBool(pc,flag,2);
  pc->reusepreconditioner = flag;  /* consulted by PCSetUp() to skip rebuilding the preconditioner */
  PetscFunctionReturn(0);
}
1200 
1201 /*@
1202    PCGetReusePreconditioner - Determines if the PC reuses the current preconditioner even if the operator in the preconditioner has changed.
1203 
1204    Not Collective
1205 
1206    Input Parameter:
1207 .  pc - the preconditioner context
1208 
1209    Output Parameter:
1210 .  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner
1211 
1212    Level: intermediate
1213 
1214 .seealso: PCGetOperators(), MatZeroEntries(), PCSetReusePreconditioner()
1215  @*/
PetscErrorCode  PCGetReusePreconditioner(PC pc,PetscBool *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidBoolPointer(flag,2);
  /* report the value last stored by PCSetReusePreconditioner() */
  *flag = pc->reusepreconditioner;
  PetscFunctionReturn(0);
}
1224 
1225 /*@
1226    PCGetOperators - Gets the matrix associated with the linear system and
1227    possibly a different one associated with the preconditioner.
1228 
1229    Not collective, though parallel Mats are returned if the PC is parallel
1230 
1231    Input Parameter:
1232 .  pc - the preconditioner context
1233 
1234    Output Parameters:
1235 +  Amat - the matrix defining the linear system
1236 -  Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat.
1237 
1238    Level: intermediate
1239 
1240    Notes:
1241     Does not increase the reference count of the matrices, so you should not destroy them
1242 
1243    Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators
1244       are created in PC and returned to the user. In this case, if both operators
1245       mat and pmat are requested, two DIFFERENT operators will be returned. If
1246       only one is requested both operators in the PC will be the same (i.e. as
1247       if one had called KSP/PCSetOperators() with the same argument for both Mats).
1248       The user must set the sizes of the returned matrices and their type etc just
1249       as if the user created them with MatCreate(). For example,
1250 
1251 $         KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to
1252 $           set size, type, etc of Amat
1253 
1254 $         MatCreate(comm,&mat);
1255 $         KSP/PCSetOperators(ksp/pc,Amat,Amat);
1256 $         PetscObjectDereference((PetscObject)mat);
1257 $           set size, type, etc of Amat
1258 
1259      and
1260 
1261 $         KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to
1262 $           set size, type, etc of Amat and Pmat
1263 
1264 $         MatCreate(comm,&Amat);
1265 $         MatCreate(comm,&Pmat);
1266 $         KSP/PCSetOperators(ksp/pc,Amat,Pmat);
1267 $         PetscObjectDereference((PetscObject)Amat);
1268 $         PetscObjectDereference((PetscObject)Pmat);
1269 $           set size, type, etc of Amat and Pmat
1270 
    The rationale for this support is so that when creating a TS, SNES, or KSP the hierarchy
    of underlying objects (i.e. SNES, KSP, PC, Mat) and their lifespans can be completely
1273     managed by the top most level object (i.e. the TS, SNES, or KSP). Another way to look
1274     at this is when you create a SNES you do not NEED to create a KSP and attach it to
1275     the SNES object (the SNES object manages it for you). Similarly when you create a KSP
1276     you do not need to attach a PC to it (the KSP object manages the PC object for you).
1277     Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when
1278     it can be created for you?
1279 
1280 .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperatorsSet()
1281 @*/
PetscErrorCode  PCGetOperators(PC pc,Mat *Amat,Mat *Pmat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* if an operator is missing it is created on demand; whether Amat and Pmat end up
     as the same object depends on which outputs the caller requested (see manpage) */
  if (Amat) {
    if (!pc->mat) {
      if (pc->pmat && !Pmat) {  /* Pmat has been set, but user did not request it, so use it for Amat */
        pc->mat = pc->pmat;
        PetscCall(PetscObjectReference((PetscObject)pc->mat));
      } else {                  /* both Amat and Pmat are empty */
        PetscCall(MatCreate(PetscObjectComm((PetscObject)pc),&pc->mat));
        if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */
          pc->pmat = pc->mat;
          PetscCall(PetscObjectReference((PetscObject)pc->pmat));
        }
      }
    }
    *Amat = pc->mat;
  }
  if (Pmat) {
    if (!pc->pmat) {
      if (pc->mat && !Amat) {    /* Amat has been set but was not requested, so use it for pmat */
        pc->pmat = pc->mat;
        PetscCall(PetscObjectReference((PetscObject)pc->pmat));
      } else {                   /* create an empty Pmat; share it with Amat only when Amat was not requested */
        PetscCall(MatCreate(PetscObjectComm((PetscObject)pc),&pc->pmat));
        if (!Amat) { /* user did NOT request Amat, so make same as Pmat */
          pc->mat = pc->pmat;
          PetscCall(PetscObjectReference((PetscObject)pc->mat));
        }
      }
    }
    *Pmat = pc->pmat;
  }
  /* note: no references are added for the returned handles; callers must not destroy them */
  PetscFunctionReturn(0);
}
1318 
1319 /*@C
1320    PCGetOperatorsSet - Determines if the matrix associated with the linear system and
1321    possibly a different one associated with the preconditioner have been set in the PC.
1322 
1323    Not collective, though the results on all processes should be the same
1324 
1325    Input Parameter:
1326 .  pc - the preconditioner context
1327 
1328    Output Parameters:
1329 +  mat - the matrix associated with the linear system was set
1330 -  pmat - matrix associated with the preconditioner was set, usually the same
1331 
1332    Level: intermediate
1333 
1334 .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperators()
1335 @*/
1336 PetscErrorCode  PCGetOperatorsSet(PC pc,PetscBool  *mat,PetscBool  *pmat)
1337 {
1338   PetscFunctionBegin;
1339   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1340   if (mat) *mat = (pc->mat)  ? PETSC_TRUE : PETSC_FALSE;
1341   if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE;
1342   PetscFunctionReturn(0);
1343 }
1344 
1345 /*@
1346    PCFactorGetMatrix - Gets the factored matrix from the
1347    preconditioner context.  This routine is valid only for the LU,
1348    incomplete LU, Cholesky, and incomplete Cholesky methods.
1349 
1350    Not Collective on PC though Mat is parallel if PC is parallel
1351 
1352    Input Parameters:
1353 .  pc - the preconditioner context
1354 
1355    Output parameters:
1356 .  mat - the factored matrix
1357 
1358    Level: advanced
1359 
1360    Notes:
1361     Does not increase the reference count for the matrix so DO NOT destroy it
1362 
1363 @*/
1364 PetscErrorCode  PCFactorGetMatrix(PC pc,Mat *mat)
1365 {
1366   PetscFunctionBegin;
1367   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1368   PetscValidPointer(mat,2);
1369   if (pc->ops->getfactoredmatrix) {
1370     PetscCall((*pc->ops->getfactoredmatrix)(pc,mat));
1371   } else SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC type does not support getting factor matrix");
1372   PetscFunctionReturn(0);
1373 }
1374 
1375 /*@C
1376    PCSetOptionsPrefix - Sets the prefix used for searching for all
1377    PC options in the database.
1378 
1379    Logically Collective on PC
1380 
1381    Input Parameters:
1382 +  pc - the preconditioner context
1383 -  prefix - the prefix string to prepend to all PC option requests
1384 
1385    Notes:
1386    A hyphen (-) must NOT be given at the beginning of the prefix name.
1387    The first character of all runtime options is AUTOMATICALLY the
1388    hyphen.
1389 
1390    Level: advanced
1391 
1392 .seealso: PCAppendOptionsPrefix(), PCGetOptionsPrefix()
1393 @*/
PetscErrorCode  PCSetOptionsPrefix(PC pc,const char prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* thin wrapper: the prefix is stored on the base PetscObject */
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)pc,prefix));
  PetscFunctionReturn(0);
}
1401 
1402 /*@C
1403    PCAppendOptionsPrefix - Appends to the prefix used for searching for all
1404    PC options in the database.
1405 
1406    Logically Collective on PC
1407 
1408    Input Parameters:
1409 +  pc - the preconditioner context
1410 -  prefix - the prefix string to prepend to all PC option requests
1411 
1412    Notes:
1413    A hyphen (-) must NOT be given at the beginning of the prefix name.
1414    The first character of all runtime options is AUTOMATICALLY the
1415    hyphen.
1416 
1417    Level: advanced
1418 
1419 .seealso: PCSetOptionsPrefix(), PCGetOptionsPrefix()
1420 @*/
PetscErrorCode  PCAppendOptionsPrefix(PC pc,const char prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* thin wrapper: appends to the prefix stored on the base PetscObject */
  PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix));
  PetscFunctionReturn(0);
}
1428 
1429 /*@C
1430    PCGetOptionsPrefix - Gets the prefix used for searching for all
1431    PC options in the database.
1432 
1433    Not Collective
1434 
1435    Input Parameters:
1436 .  pc - the preconditioner context
1437 
1438    Output Parameters:
1439 .  prefix - pointer to the prefix string used, is returned
1440 
1441    Notes:
    On the Fortran side, the user should pass in a string 'prefix' of
   sufficient length to hold the prefix.
1444 
1445    Level: advanced
1446 
1447 .seealso: PCSetOptionsPrefix(), PCAppendOptionsPrefix()
1448 @*/
PetscErrorCode  PCGetOptionsPrefix(PC pc,const char *prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(prefix,2);
  /* thin wrapper: returns a pointer to the prefix owned by the base PetscObject
     (caller must not free it) */
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc,prefix));
  PetscFunctionReturn(0);
}
1457 
1458 /*
   Indicates the right hand side will be changed by KSPSolve(); this occurs for a few
  preconditioners, including BDDC and Eisenstat, that transform the equations before applying
  the Krylov methods
1462 */
PETSC_INTERN PetscErrorCode  PCPreSolveChangeRHS(PC pc,PetscBool *change)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(change,2);
  /* default answer is "no change"; a PC type that transforms the right hand side
     overrides this by composing a "PCPreSolveChangeRHS_C" method on the object */
  *change = PETSC_FALSE;
  PetscTryMethod(pc,"PCPreSolveChangeRHS_C",(PC,PetscBool*),(pc,change));
  PetscFunctionReturn(0);
}
1472 
1473 /*@
1474    PCPreSolve - Optional pre-solve phase, intended for any
1475    preconditioner-specific actions that must be performed before
1476    the iterative solve itself.
1477 
1478    Collective on PC
1479 
1480    Input Parameters:
1481 +  pc - the preconditioner context
1482 -  ksp - the Krylov subspace context
1483 
1484    Level: developer
1485 
1486    Sample of Usage:
1487 .vb
1488     PCPreSolve(pc,ksp);
1489     KSPSolve(ksp,b,x);
1490     PCPostSolve(pc,ksp);
1491 .ve
1492 
1493    Notes:
1494    The pre-solve phase is distinct from the PCSetUp() phase.
1495 
1496    KSPSolve() calls this directly, so is rarely called by the user.
1497 
1498 .seealso: PCPostSolve()
1499 @*/
1500 PetscErrorCode PCPreSolve(PC pc,KSP ksp)
1501 {
1502   Vec            x,rhs;
1503 
1504   PetscFunctionBegin;
1505   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1506   PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
1507   pc->presolvedone++;
1508   PetscCheck(pc->presolvedone <= 2,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot embed PCPreSolve() more than twice");
1509   PetscCall(KSPGetSolution(ksp,&x));
1510   PetscCall(KSPGetRhs(ksp,&rhs));
1511 
1512   if (pc->ops->presolve) {
1513     PetscCall((*pc->ops->presolve)(pc,ksp,rhs,x));
1514   } else if (pc->presolve) {
1515     PetscCall((pc->presolve)(pc,ksp));
1516   }
1517   PetscFunctionReturn(0);
1518 }
1519 
1520 /*@C
1521    PCSetPreSolve - Sets function PCPreSolve() which is intended for any
1522    preconditioner-specific actions that must be performed before
1523    the iterative solve itself.
1524 
1525    Logically Collective on pc
1526 
1527    Input Parameters:
1528 +   pc - the preconditioner object
1529 -   presolve - the function to call before the solve
1530 
1531    Calling sequence of presolve:
1532 $  func(PC pc,KSP ksp)
1533 
1534 +  pc - the PC context
1535 -  ksp - the KSP context
1536 
1537    Level: developer
1538 
1539 .seealso: PC, PCSetUp(), PCPreSolve()
1540 @*/
PetscErrorCode PCSetPreSolve(PC pc,PetscErrorCode (*presolve)(PC,KSP))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* stored callback is invoked by PCPreSolve() when the PC type itself provides no presolve */
  pc->presolve = presolve;
  PetscFunctionReturn(0);
}
1548 
1549 /*@
1550    PCPostSolve - Optional post-solve phase, intended for any
1551    preconditioner-specific actions that must be performed after
1552    the iterative solve itself.
1553 
1554    Collective on PC
1555 
1556    Input Parameters:
1557 +  pc - the preconditioner context
1558 -  ksp - the Krylov subspace context
1559 
1560    Sample of Usage:
1561 .vb
1562     PCPreSolve(pc,ksp);
1563     KSPSolve(ksp,b,x);
1564     PCPostSolve(pc,ksp);
1565 .ve
1566 
1567    Note:
1568    KSPSolve() calls this routine directly, so it is rarely called by the user.
1569 
1570    Level: developer
1571 
1572 .seealso: PCPreSolve(), KSPSolve()
1573 @*/
1574 PetscErrorCode  PCPostSolve(PC pc,KSP ksp)
1575 {
1576   Vec            x,rhs;
1577 
1578   PetscFunctionBegin;
1579   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1580   PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
1581   pc->presolvedone--;
1582   PetscCall(KSPGetSolution(ksp,&x));
1583   PetscCall(KSPGetRhs(ksp,&rhs));
1584   if (pc->ops->postsolve) {
1585     PetscCall((*pc->ops->postsolve)(pc,ksp,rhs,x));
1586   }
1587   PetscFunctionReturn(0);
1588 }
1589 
1590 /*@C
1591   PCLoad - Loads a PC that has been stored in binary  with PCView().
1592 
1593   Collective on PetscViewer
1594 
1595   Input Parameters:
1596 + newdm - the newly loaded PC, this needs to have been created with PCCreate() or
1597            some related function before a call to PCLoad().
1598 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen()
1599 
1600    Level: intermediate
1601 
1602   Notes:
1603    The type is determined by the data in the file, any type set into the PC before this call is ignored.
1604 
1605   Notes for advanced users:
1606   Most users should not need to know the details of the binary storage
1607   format, since PCLoad() and PCView() completely hide these details.
1608   But for anyone who's interested, the standard binary matrix storage
1609   format is
1610 .vb
1611      has not yet been determined
1612 .ve
1613 
1614 .seealso: PetscViewerBinaryOpen(), PCView(), MatLoad(), VecLoad()
1615 @*/
PetscErrorCode  PCLoad(PC newdm, PetscViewer viewer)
{
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,PC_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  /* only binary viewers are supported for loading */
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary));
  PetscCheck(isbinary,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* the stream must contain the PC file classid followed by the type name string */
  PetscCall(PetscViewerBinaryRead(viewer,&classid,1,NULL,PETSC_INT));
  PetscCheck(classid == PC_FILE_CLASSID,PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not PC next in file");
  PetscCall(PetscViewerBinaryRead(viewer,type,256,NULL,PETSC_CHAR));
  /* the type from the file overrides any type previously set on newdm */
  PetscCall(PCSetType(newdm, type));
  /* let the concrete type read any implementation-specific data that follows */
  if (newdm->ops->load) {
    PetscCall((*newdm->ops->load)(newdm,viewer));
  }
  PetscFunctionReturn(0);
}
1637 
1638 #include <petscdraw.h>
1639 #if defined(PETSC_HAVE_SAWS)
1640 #include <petscviewersaws.h>
1641 #endif
1642 
1643 /*@C
1644    PCViewFromOptions - View from Options
1645 
1646    Collective on PC
1647 
1648    Input Parameters:
1649 +  A - the PC context
1650 .  obj - Optional object
1651 -  name - command line option
1652 
1653    Level: intermediate
1654 .seealso:  PC, PCView, PetscObjectViewFromOptions(), PCCreate()
1655 @*/
PetscErrorCode  PCViewFromOptions(PC A,PetscObject obj,const char name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,PC_CLASSID,1);
  /* thin wrapper over the generic PetscObject options-driven viewing */
  PetscCall(PetscObjectViewFromOptions((PetscObject)A,obj,name));
  PetscFunctionReturn(0);
}
1663 
/*@C
   PCView - Prints the PC data structure.

   Collective on PC

   Input Parameters:
+  pc - the PC context
-  viewer - optional visualization context

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
         output where only the first processor opens
         the file.  All other processors send their
         data to the first processor to print.

   The user can open an alternative visualization context with
   PetscViewerASCIIOpen() (output to a specified file).

   Level: developer

.seealso: KSPView(), PetscViewerASCIIOpen()
@*/
PetscErrorCode  PCView(PC pc,PetscViewer viewer)
{
  PCType         cstr;
  PetscBool      iascii,isstring,isbinary,isdraw;
#if defined(PETSC_HAVE_SAWS)
  PetscBool      issaws;
#endif

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (!viewer) {
    /* default viewer: ASCII stdout on the PC's communicator */
    PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer));
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  PetscCheckSameComm(pc,1,viewer,2);

  /* dispatch on the concrete viewer implementation type */
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw));
#if defined(PETSC_HAVE_SAWS)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSAWS,&issaws));
#endif

  if (iascii) {
    PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer));
    if (!pc->setupcalled) {
      PetscCall(PetscViewerASCIIPrintf(viewer,"  PC has not been set up so information may be incomplete\n"));
    }
    /* let the PC implementation (e.g. PCILU, PCMG) print its own state */
    if (pc->ops->view) {
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall((*pc->ops->view)(pc,viewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    /* summarize the system matrix and (if distinct) the preconditioner matrix */
    if (pc->mat) {
      PetscCall(PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO));
      if (pc->pmat == pc->mat) {
        PetscCall(PetscViewerASCIIPrintf(viewer,"  linear system matrix = precond matrix:\n"));
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(MatView(pc->mat,viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      } else {
        if (pc->pmat) {
          PetscCall(PetscViewerASCIIPrintf(viewer,"  linear system matrix followed by preconditioner matrix:\n"));
        } else {
          PetscCall(PetscViewerASCIIPrintf(viewer,"  linear system matrix:\n"));
        }
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(MatView(pc->mat,viewer));
        if (pc->pmat) PetscCall(MatView(pc->pmat,viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      }
      PetscCall(PetscViewerPopFormat(viewer));
    }
  } else if (isstring) {
    PetscCall(PCGetType(pc,&cstr));
    PetscCall(PetscViewerStringSPrintf(viewer," PCType: %-7.7s",cstr));
    if (pc->ops->view) PetscCall((*pc->ops->view)(pc,viewer));
    if (pc->mat) PetscCall(MatView(pc->mat,viewer));
    if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat,viewer));
  } else if (isbinary) {
    PetscInt    classid = PC_FILE_CLASSID;
    MPI_Comm    comm;
    PetscMPIInt rank;
    char        type[256];

    PetscCall(PetscObjectGetComm((PetscObject)pc,&comm));
    PetscCallMPI(MPI_Comm_rank(comm,&rank));
    /* only rank 0 writes the file header: classid followed by the type name */
    if (rank == 0) {
      PetscCall(PetscViewerBinaryWrite(viewer,&classid,1,PETSC_INT));
      PetscCall(PetscStrncpy(type,((PetscObject)pc)->type_name,256));
      PetscCall(PetscViewerBinaryWrite(viewer,type,256,PETSC_CHAR));
    }
    if (pc->ops->view) {
      PetscCall((*pc->ops->view)(pc,viewer));
    }
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x,y,bottom,h;
    PetscInt  n;

    PetscCall(PetscViewerDrawGetDraw(viewer,0,&draw));
    PetscCall(PetscDrawGetCurrentPoint(draw,&x,&y));
    /* label box shows the type name and, when available, the global row count */
    if (pc->mat) {
      PetscCall(MatGetSize(pc->mat,&n,NULL));
      PetscCall(PetscSNPrintf(str,25,"PC: %s (%" PetscInt_FMT ")",((PetscObject)pc)->type_name,n));
    } else {
      PetscCall(PetscSNPrintf(str,25,"PC: %s",((PetscObject)pc)->type_name));
    }
    PetscCall(PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h));
    /* move the current point below the label box before the implementation draws */
    bottom = y - h;
    PetscCall(PetscDrawPushCurrentPoint(draw,x,bottom));
    if (pc->ops->view) {
      PetscCall((*pc->ops->view)(pc,viewer));
    }
    PetscCall(PetscDrawPopCurrentPoint(draw));
#if defined(PETSC_HAVE_SAWS)
  } else if (issaws) {
    PetscMPIInt rank;

    PetscCall(PetscObjectName((PetscObject)pc));
    PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
    /* publish to SAWs once only (amsmem flags previous publication) */
    if (!((PetscObject)pc)->amsmem && rank == 0) {
      PetscCall(PetscObjectViewSAWs((PetscObject)pc,viewer));
    }
    if (pc->mat) PetscCall(MatView(pc->mat,viewer));
    if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat,viewer));
#endif
  }
  PetscFunctionReturn(0);
}
1800 
/*@C
  PCRegister -  Adds a method to the preconditioner package.

   Not collective

   Input Parameters:
+  name_solver - name of a new user-defined solver
-  routine_create - routine to create method context

   Notes:
   PCRegister() may be called multiple times to add several user-defined preconditioners.

   Sample usage:
.vb
   PCRegister("my_solver", MySolverCreate);
.ve

   Then, your solver can be chosen with the procedural interface via
$     PCSetType(pc,"my_solver")
   or at runtime via the option
$     -pc_type my_solver

   Level: advanced

.seealso: PCRegisterAll()
@*/
PetscErrorCode  PCRegister(const char sname[],PetscErrorCode (*function)(PC))
{
  PetscFunctionBegin;
  /* make sure the PC package (and hence PCList) exists before adding to it */
  PetscCall(PCInitializePackage());
  PetscCall(PetscFunctionListAdd(&PCList,sname,function));
  PetscFunctionReturn(0);
}
1834 
1835 static PetscErrorCode MatMult_PC(Mat A,Vec X,Vec Y)
1836 {
1837   PC             pc;
1838 
1839   PetscFunctionBegin;
1840   PetscCall(MatShellGetContext(A,&pc));
1841   PetscCall(PCApply(pc,X,Y));
1842   PetscFunctionReturn(0);
1843 }
1844 
1845 /*@
1846     PCComputeOperator - Computes the explicit preconditioned operator.
1847 
1848     Collective on PC
1849 
1850     Input Parameters:
1851 +   pc - the preconditioner object
1852 -   mattype - the matrix type to be used for the operator
1853 
1854     Output Parameter:
1855 .   mat - the explicit preconditioned operator
1856 
1857     Notes:
1858     This computation is done by applying the operators to columns of the identity matrix.
1859     This routine is costly in general, and is recommended for use only with relatively small systems.
1860     Currently, this routine uses a dense matrix format when mattype == NULL
1861 
1862     Level: advanced
1863 
1864 .seealso: KSPComputeOperator(), MatType
1865 
1866 @*/
1867 PetscErrorCode  PCComputeOperator(PC pc,MatType mattype,Mat *mat)
1868 {
1869   PetscInt       N,M,m,n;
1870   Mat            A,Apc;
1871 
1872   PetscFunctionBegin;
1873   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1874   PetscValidPointer(mat,3);
1875   PetscCall(PCGetOperators(pc,&A,NULL));
1876   PetscCall(MatGetLocalSize(A,&m,&n));
1877   PetscCall(MatGetSize(A,&M,&N));
1878   PetscCall(MatCreateShell(PetscObjectComm((PetscObject)pc),m,n,M,N,pc,&Apc));
1879   PetscCall(MatShellSetOperation(Apc,MATOP_MULT,(void (*)(void))MatMult_PC));
1880   PetscCall(MatComputeOperator(Apc,mattype,mat));
1881   PetscCall(MatDestroy(&Apc));
1882   PetscFunctionReturn(0);
1883 }
1884 
1885 /*@
1886    PCSetCoordinates - sets the coordinates of all the nodes on the local process
1887 
1888    Collective on PC
1889 
1890    Input Parameters:
1891 +  pc - the solver context
1892 .  dim - the dimension of the coordinates 1, 2, or 3
1893 .  nloc - the blocked size of the coordinates array
1894 -  coords - the coordinates array
1895 
1896    Level: intermediate
1897 
1898    Notes:
1899    coords is an array of the dim coordinates for the nodes on
1900    the local processor, of size dim*nloc.
1901    If there are 108 equation on a processor
1902    for a displacement finite element discretization of elasticity (so
1903    that there are nloc = 36 = 108/3 nodes) then the array must have 108
1904    double precision values (ie, 3 * 36).  These x y z coordinates
1905    should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x,
1906    ... , N-1.z ].
1907 
1908 .seealso: MatSetNearNullSpace()
1909 @*/
1910 PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
1911 {
1912   PetscFunctionBegin;
1913   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1914   PetscValidLogicalCollectiveInt(pc,dim,2);
1915   PetscTryMethod(pc,"PCSetCoordinates_C",(PC,PetscInt,PetscInt,PetscReal*),(pc,dim,nloc,coords));
1916   PetscFunctionReturn(0);
1917 }
1918 
1919 /*@
1920    PCGetInterpolations - Gets interpolation matrices for all levels (except level 0)
1921 
1922    Logically Collective on PC
1923 
1924    Input Parameter:
1925 .  pc - the precondition context
1926 
1927    Output Parameters:
1928 +  num_levels - the number of levels
1929 -  interpolations - the interpolation matrices (size of num_levels-1)
1930 
1931    Level: advanced
1932 
1933 .keywords: MG, GAMG, BoomerAMG, multigrid, interpolation, level
1934 
1935 .seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetInterpolation(), PCGetCoarseOperators()
1936 @*/
1937 PetscErrorCode PCGetInterpolations(PC pc,PetscInt *num_levels,Mat *interpolations[])
1938 {
1939   PetscFunctionBegin;
1940   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1941   PetscValidIntPointer(num_levels,2);
1942   PetscValidPointer(interpolations,3);
1943   PetscUseMethod(pc,"PCGetInterpolations_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,interpolations));
1944   PetscFunctionReturn(0);
1945 }
1946 
1947 /*@
1948    PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level)
1949 
1950    Logically Collective on PC
1951 
1952    Input Parameter:
1953 .  pc - the precondition context
1954 
1955    Output Parameters:
1956 +  num_levels - the number of levels
1957 -  coarseOperators - the coarse operator matrices (size of num_levels-1)
1958 
1959    Level: advanced
1960 
1961 .keywords: MG, GAMG, BoomerAMG, get, multigrid, interpolation, level
1962 
1963 .seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetRScale(), PCMGGetInterpolation(), PCGetInterpolations()
1964 @*/
1965 PetscErrorCode PCGetCoarseOperators(PC pc,PetscInt *num_levels,Mat *coarseOperators[])
1966 {
1967   PetscFunctionBegin;
1968   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1969   PetscValidIntPointer(num_levels,2);
1970   PetscValidPointer(coarseOperators,3);
1971   PetscUseMethod(pc,"PCGetCoarseOperators_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,coarseOperators));
1972   PetscFunctionReturn(0);
1973 }
1974