xref: /petsc/src/ksp/pc/interface/precon.c (revision 670f3ff9a12f3667ff7d4cebfc50ebc8579c9e33)
1 
2 /*
3     The PC (preconditioner) interface routines, callable by users.
4 */
5 #include <private/pcimpl.h>            /*I "petscksp.h" I*/
6 
7 /* Logging support */
8 PetscClassId   PC_CLASSID;
9 PetscLogEvent  PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
10 PetscLogEvent  PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks, PC_ApplyOnMproc;
11 
12 #undef __FUNCT__
13 #define __FUNCT__ "PCGetDefaultType_Private"
14 PetscErrorCode PCGetDefaultType_Private(PC pc,const char* type[])
15 {
16   PetscErrorCode ierr;
17   PetscMPIInt    size;
18   PetscBool      flg1,flg2,set,flg3;
19 
20   PetscFunctionBegin;
21   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
22   if (pc->pmat) {
23     PetscErrorCode (*f)(Mat,MatReuse,Mat*);
24     ierr = PetscObjectQueryFunction((PetscObject)pc->pmat,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
25     if (size == 1) {
26       ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1);CHKERRQ(ierr);
27       ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2);CHKERRQ(ierr);
28       ierr = MatIsSymmetricKnown(pc->pmat,&set,&flg3);CHKERRQ(ierr);
29       if (flg1 && (!flg2 || (set && flg3))) {
30 	*type = PCICC;
31       } else if (flg2) {
32 	*type = PCILU;
33       } else if (f) { /* likely is a parallel matrix run on one processor */
34 	*type = PCBJACOBI;
35       } else {
36 	*type = PCNONE;
37       }
38     } else {
39        if (f) {
40 	*type = PCBJACOBI;
41       } else {
42 	*type = PCNONE;
43       }
44     }
45   } else {
46     if (size == 1) {
47       *type = PCILU;
48     } else {
49       *type = PCBJACOBI;
50     }
51   }
52   PetscFunctionReturn(0);
53 }
54 
55 #undef __FUNCT__
56 #define __FUNCT__ "PCReset"
57 /*@
58    PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats
59 
60    Collective on PC
61 
62    Input Parameter:
63 .  pc - the preconditioner context
64 
65    Level: developer
66 
67    Notes: This allows a PC to be reused for a different sized linear system but using the same options that have been previously set in the PC
68 
69 .keywords: PC, destroy
70 
71 .seealso: PCCreate(), PCSetUp()
72 @*/
73 PetscErrorCode  PCReset(PC pc)
74 {
75   PetscErrorCode ierr;
76 
77   PetscFunctionBegin;
78   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
79   if (pc->ops->reset) {
80     ierr = (*pc->ops->reset)(pc);CHKERRQ(ierr);
81   }
82   ierr = VecDestroy(&pc->diagonalscaleright);CHKERRQ(ierr);
83   ierr = VecDestroy(&pc->diagonalscaleleft);CHKERRQ(ierr);
84   ierr = MatDestroy(&pc->pmat);CHKERRQ(ierr);
85   ierr = MatDestroy(&pc->mat);CHKERRQ(ierr);
86   pc->setupcalled = 0;
87   PetscFunctionReturn(0);
88 }
89 
#undef __FUNCT__
#define __FUNCT__ "PCDestroy"
/*@
   PCDestroy - Destroys PC context that was created with PCCreate().

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Level: developer

   Notes: The pointer is set to zero on return; passing a pointer holding NULL is a no-op.

.keywords: PC, destroy

.seealso: PCCreate(), PCSetUp()
@*/
PetscErrorCode  PCDestroy(PC *pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*pc) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*pc),PC_CLASSID,1);
  /* other references remain: just drop this one and zero the caller's handle */
  if (--((PetscObject)(*pc))->refct > 0) {*pc = 0; PetscFunctionReturn(0);}

  /* last reference: release interface-held Vecs/Mats before type-specific teardown */
  ierr = PCReset(*pc);CHKERRQ(ierr);

  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectDepublish((*pc));CHKERRQ(ierr);
  if ((*pc)->ops->destroy) {ierr = (*(*pc)->ops->destroy)((*pc));CHKERRQ(ierr);}
  ierr = DMDestroy(&(*pc)->dm);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
124 
125 #undef __FUNCT__
126 #define __FUNCT__ "PCGetDiagonalScale"
127 /*@C
128    PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right
129       scaling as needed by certain time-stepping codes.
130 
131    Logically Collective on PC
132 
133    Input Parameter:
134 .  pc - the preconditioner context
135 
136    Output Parameter:
137 .  flag - PETSC_TRUE if it applies the scaling
138 
139    Level: developer
140 
141    Notes: If this returns PETSC_TRUE then the system solved via the Krylov method is
142 $           D M A D^{-1} y = D M b  for left preconditioning or
143 $           D A M D^{-1} z = D b for right preconditioning
144 
145 .keywords: PC
146 
147 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCSetDiagonalScale()
148 @*/
149 PetscErrorCode  PCGetDiagonalScale(PC pc,PetscBool  *flag)
150 {
151   PetscFunctionBegin;
152   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
153   PetscValidPointer(flag,2);
154   *flag = pc->diagonalscale;
155   PetscFunctionReturn(0);
156 }
157 
158 #undef __FUNCT__
159 #define __FUNCT__ "PCSetDiagonalScale"
160 /*@
161    PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right
162       scaling as needed by certain time-stepping codes.
163 
164    Logically Collective on PC
165 
166    Input Parameters:
167 +  pc - the preconditioner context
168 -  s - scaling vector
169 
170    Level: intermediate
171 
172    Notes: The system solved via the Krylov method is
173 $           D M A D^{-1} y = D M b  for left preconditioning or
174 $           D A M D^{-1} z = D b for right preconditioning
175 
176    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
177 
178 .keywords: PC
179 
180 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCGetDiagonalScale()
181 @*/
182 PetscErrorCode  PCSetDiagonalScale(PC pc,Vec s)
183 {
184   PetscErrorCode ierr;
185 
186   PetscFunctionBegin;
187   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
188   PetscValidHeaderSpecific(s,VEC_CLASSID,2);
189   pc->diagonalscale     = PETSC_TRUE;
190   ierr = PetscObjectReference((PetscObject)s);CHKERRQ(ierr);
191   ierr = VecDestroy(&pc->diagonalscaleleft);CHKERRQ(ierr);
192   pc->diagonalscaleleft = s;
193   ierr = VecDuplicate(s,&pc->diagonalscaleright);CHKERRQ(ierr);
194   ierr = VecCopy(s,pc->diagonalscaleright);CHKERRQ(ierr);
195   ierr = VecReciprocal(pc->diagonalscaleright);CHKERRQ(ierr);
196   PetscFunctionReturn(0);
197 }
198 
199 #undef __FUNCT__
200 #define __FUNCT__ "PCDiagonalScaleLeft"
201 /*@
202    PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes.
203 
204    Logically Collective on PC
205 
206    Input Parameters:
207 +  pc - the preconditioner context
208 .  in - input vector
209 +  out - scaled vector (maybe the same as in)
210 
211    Level: intermediate
212 
213    Notes: The system solved via the Krylov method is
214 $           D M A D^{-1} y = D M b  for left preconditioning or
215 $           D A M D^{-1} z = D b for right preconditioning
216 
217    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
218 
219    If diagonal scaling is turned off and in is not out then in is copied to out
220 
221 .keywords: PC
222 
223 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleSet(), PCDiagonalScaleRight(), PCDiagonalScale()
224 @*/
225 PetscErrorCode  PCDiagonalScaleLeft(PC pc,Vec in,Vec out)
226 {
227   PetscErrorCode ierr;
228 
229   PetscFunctionBegin;
230   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
231   PetscValidHeaderSpecific(in,VEC_CLASSID,2);
232   PetscValidHeaderSpecific(out,VEC_CLASSID,3);
233   if (pc->diagonalscale) {
234     ierr = VecPointwiseMult(out,pc->diagonalscaleleft,in);CHKERRQ(ierr);
235   } else if (in != out) {
236     ierr = VecCopy(in,out);CHKERRQ(ierr);
237   }
238   PetscFunctionReturn(0);
239 }
240 
241 #undef __FUNCT__
242 #define __FUNCT__ "PCDiagonalScaleRight"
243 /*@
244    PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes.
245 
246    Logically Collective on PC
247 
248    Input Parameters:
249 +  pc - the preconditioner context
250 .  in - input vector
251 +  out - scaled vector (maybe the same as in)
252 
253    Level: intermediate
254 
255    Notes: The system solved via the Krylov method is
256 $           D M A D^{-1} y = D M b  for left preconditioning or
257 $           D A M D^{-1} z = D b for right preconditioning
258 
259    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
260 
261    If diagonal scaling is turned off and in is not out then in is copied to out
262 
263 .keywords: PC
264 
265 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleSet(), PCDiagonalScale()
266 @*/
267 PetscErrorCode  PCDiagonalScaleRight(PC pc,Vec in,Vec out)
268 {
269   PetscErrorCode ierr;
270 
271   PetscFunctionBegin;
272   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
273   PetscValidHeaderSpecific(in,VEC_CLASSID,2);
274   PetscValidHeaderSpecific(out,VEC_CLASSID,3);
275   if (pc->diagonalscale) {
276     ierr = VecPointwiseMult(out,pc->diagonalscaleright,in);CHKERRQ(ierr);
277   } else if (in != out) {
278     ierr = VecCopy(in,out);CHKERRQ(ierr);
279   }
280   PetscFunctionReturn(0);
281 }
282 
#if 0
#undef __FUNCT__
#define __FUNCT__ "PCPublish_Petsc"
/* Placeholder AMS publish routine; intentionally compiled out (see enclosing #if 0) */
static PetscErrorCode PCPublish_Petsc(PetscObject obj)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
#endif
292 
293 #undef __FUNCT__
294 #define __FUNCT__ "PCCreate"
295 /*@
296    PCCreate - Creates a preconditioner context.
297 
298    Collective on MPI_Comm
299 
300    Input Parameter:
301 .  comm - MPI communicator
302 
303    Output Parameter:
304 .  pc - location to put the preconditioner context
305 
306    Notes:
307    The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi with PCILU or ICC
308    in parallel. For dense matrices it is always PCNONE.
309 
310    Level: developer
311 
312 .keywords: PC, create, context
313 
314 .seealso: PCSetUp(), PCApply(), PCDestroy()
315 @*/
316 PetscErrorCode  PCCreate(MPI_Comm comm,PC *newpc)
317 {
318   PC             pc;
319   PetscErrorCode ierr;
320 
321   PetscFunctionBegin;
322   PetscValidPointer(newpc,1);
323   *newpc = 0;
324 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
325   ierr = PCInitializePackage(PETSC_NULL);CHKERRQ(ierr);
326 #endif
327 
328   ierr = PetscHeaderCreate(pc,_p_PC,struct _PCOps,PC_CLASSID,-1,"PC","Preconditioner","PC",comm,PCDestroy,PCView);CHKERRQ(ierr);
329 
330   pc->mat                  = 0;
331   pc->pmat                 = 0;
332   pc->setupcalled          = 0;
333   pc->setfromoptionscalled = 0;
334   pc->data                 = 0;
335   pc->diagonalscale        = PETSC_FALSE;
336   pc->diagonalscaleleft    = 0;
337   pc->diagonalscaleright   = 0;
338   pc->reuse                = 0;
339 
340   pc->modifysubmatrices   = 0;
341   pc->modifysubmatricesP  = 0;
342   *newpc = pc;
343   PetscFunctionReturn(0);
344 
345 }
346 
347 /* -------------------------------------------------------------------------------*/
348 
349 #undef __FUNCT__
350 #define __FUNCT__ "PCApply"
351 /*@
352    PCApply - Applies the preconditioner to a vector.
353 
354    Collective on PC and Vec
355 
356    Input Parameters:
357 +  pc - the preconditioner context
358 -  x - input vector
359 
360    Output Parameter:
361 .  y - output vector
362 
363    Level: developer
364 
365 .keywords: PC, apply
366 
367 .seealso: PCApplyTranspose(), PCApplyBAorAB()
368 @*/
369 PetscErrorCode  PCApply(PC pc,Vec x,Vec y)
370 {
371   PetscErrorCode ierr;
372 
373   PetscFunctionBegin;
374   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
375   PetscValidHeaderSpecific(x,VEC_CLASSID,2);
376   PetscValidHeaderSpecific(y,VEC_CLASSID,3);
377   if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors");
378   VecValidValues(x,2,PETSC_TRUE);
379   if (pc->setupcalled < 2) {
380     ierr = PCSetUp(pc);CHKERRQ(ierr);
381   }
382   if (!pc->ops->apply) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have apply");
383   ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
384   ierr = (*pc->ops->apply)(pc,x,y);CHKERRQ(ierr);
385   ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
386   VecValidValues(y,3,PETSC_FALSE);
387   PetscFunctionReturn(0);
388 }
389 
390 #undef __FUNCT__
391 #define __FUNCT__ "PCApplySymmetricLeft"
392 /*@
393    PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.
394 
395    Collective on PC and Vec
396 
397    Input Parameters:
398 +  pc - the preconditioner context
399 -  x - input vector
400 
401    Output Parameter:
402 .  y - output vector
403 
404    Notes:
405    Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.
406 
407    Level: developer
408 
409 .keywords: PC, apply, symmetric, left
410 
411 .seealso: PCApply(), PCApplySymmetricRight()
412 @*/
413 PetscErrorCode  PCApplySymmetricLeft(PC pc,Vec x,Vec y)
414 {
415   PetscErrorCode ierr;
416 
417   PetscFunctionBegin;
418   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
419   PetscValidHeaderSpecific(x,VEC_CLASSID,2);
420   PetscValidHeaderSpecific(y,VEC_CLASSID,3);
421   if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors");
422   VecValidValues(x,2,PETSC_TRUE);
423   if (pc->setupcalled < 2) {
424     ierr = PCSetUp(pc);CHKERRQ(ierr);
425   }
426   if (!pc->ops->applysymmetricleft) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have left symmetric apply");
427   ierr = PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr);
428   ierr = (*pc->ops->applysymmetricleft)(pc,x,y);CHKERRQ(ierr);
429   ierr = PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr);
430   VecValidValues(y,3,PETSC_FALSE);
431   PetscFunctionReturn(0);
432 }
433 
434 #undef __FUNCT__
435 #define __FUNCT__ "PCApplySymmetricRight"
436 /*@
437    PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.
438 
439    Collective on PC and Vec
440 
441    Input Parameters:
442 +  pc - the preconditioner context
443 -  x - input vector
444 
445    Output Parameter:
446 .  y - output vector
447 
448    Level: developer
449 
450    Notes:
451    Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.
452 
453 .keywords: PC, apply, symmetric, right
454 
455 .seealso: PCApply(), PCApplySymmetricLeft()
456 @*/
457 PetscErrorCode  PCApplySymmetricRight(PC pc,Vec x,Vec y)
458 {
459   PetscErrorCode ierr;
460 
461   PetscFunctionBegin;
462   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
463   PetscValidHeaderSpecific(x,VEC_CLASSID,2);
464   PetscValidHeaderSpecific(y,VEC_CLASSID,3);
465   if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors");
466   VecValidValues(x,2,PETSC_TRUE);
467   if (pc->setupcalled < 2) {
468     ierr = PCSetUp(pc);CHKERRQ(ierr);
469   }
470   if (!pc->ops->applysymmetricright) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have left symmetric apply");
471   ierr = PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr);
472   ierr = (*pc->ops->applysymmetricright)(pc,x,y);CHKERRQ(ierr);
473   ierr = PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr);
474   VecValidValues(y,3,PETSC_FALSE);
475   PetscFunctionReturn(0);
476 }
477 
478 #undef __FUNCT__
479 #define __FUNCT__ "PCApplyTranspose"
480 /*@
481    PCApplyTranspose - Applies the transpose of preconditioner to a vector.
482 
483    Collective on PC and Vec
484 
485    Input Parameters:
486 +  pc - the preconditioner context
487 -  x - input vector
488 
489    Output Parameter:
490 .  y - output vector
491 
492    Notes: For complex numbers this applies the non-Hermitian transpose.
493 
494    Developer Notes: We need to implement a PCApplyHermitianTranspose()
495 
496    Level: developer
497 
498 .keywords: PC, apply, transpose
499 
500 .seealso: PCApply(), PCApplyBAorAB(), PCApplyBAorABTranspose(), PCApplyTransposeExists()
501 @*/
502 PetscErrorCode  PCApplyTranspose(PC pc,Vec x,Vec y)
503 {
504   PetscErrorCode ierr;
505 
506   PetscFunctionBegin;
507   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
508   PetscValidHeaderSpecific(x,VEC_CLASSID,2);
509   PetscValidHeaderSpecific(y,VEC_CLASSID,3);
510   if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors");
511   VecValidValues(x,2,PETSC_TRUE);
512   if (pc->setupcalled < 2) {
513     ierr = PCSetUp(pc);CHKERRQ(ierr);
514   }
515   if (!pc->ops->applytranspose) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have apply transpose");
516   ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
517   ierr = (*pc->ops->applytranspose)(pc,x,y);CHKERRQ(ierr);
518   ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
519   VecValidValues(y,3,PETSC_FALSE);
520   PetscFunctionReturn(0);
521 }
522 
523 #undef __FUNCT__
524 #define __FUNCT__ "PCApplyTransposeExists"
525 /*@
526    PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation
527 
528    Collective on PC and Vec
529 
530    Input Parameters:
531 .  pc - the preconditioner context
532 
533    Output Parameter:
534 .  flg - PETSC_TRUE if a transpose operation is defined
535 
536    Level: developer
537 
538 .keywords: PC, apply, transpose
539 
540 .seealso: PCApplyTranspose()
541 @*/
542 PetscErrorCode  PCApplyTransposeExists(PC pc,PetscBool  *flg)
543 {
544   PetscFunctionBegin;
545   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
546   PetscValidPointer(flg,2);
547   if (pc->ops->applytranspose) *flg = PETSC_TRUE;
548   else                         *flg = PETSC_FALSE;
549   PetscFunctionReturn(0);
550 }
551 
552 #undef __FUNCT__
553 #define __FUNCT__ "PCApplyBAorAB"
554 /*@
555    PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x.
556 
557    Collective on PC and Vec
558 
559    Input Parameters:
560 +  pc - the preconditioner context
561 .  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
562 .  x - input vector
563 -  work - work vector
564 
565    Output Parameter:
566 .  y - output vector
567 
568    Level: developer
569 
570    Notes: If the PC has had PCSetDiagonalScale() set then D M A D^{-1} for left preconditioning or  D A M D^{-1} is actually applied. Note that the
571    specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling.
572 
573 .keywords: PC, apply, operator
574 
575 .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorABTranspose()
576 @*/
577 PetscErrorCode  PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work)
578 {
579   PetscErrorCode ierr;
580 
581   PetscFunctionBegin;
582   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
583   PetscValidHeaderSpecific(x,VEC_CLASSID,3);
584   PetscValidHeaderSpecific(y,VEC_CLASSID,4);
585   PetscValidHeaderSpecific(work,VEC_CLASSID,5);
586   if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors");
587   VecValidValues(x,3,PETSC_TRUE);
588   if (side != PC_LEFT && side != PC_SYMMETRIC && side != PC_RIGHT) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric");
589   if (pc->diagonalscale && side == PC_SYMMETRIC) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application");
590 
591   if (pc->setupcalled < 2) {
592     ierr = PCSetUp(pc);CHKERRQ(ierr);
593   }
594 
595   if (pc->diagonalscale) {
596     if (pc->ops->applyBA) {
597       Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
598       ierr = VecDuplicate(x,&work2);CHKERRQ(ierr);
599       ierr = PCDiagonalScaleRight(pc,x,work2);CHKERRQ(ierr);
600       ierr = (*pc->ops->applyBA)(pc,side,work2,y,work);CHKERRQ(ierr);
601       ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
602       ierr = VecDestroy(&work2);CHKERRQ(ierr);
603     } else if (side == PC_RIGHT) {
604       ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr);
605       ierr = PCApply(pc,y,work);CHKERRQ(ierr);
606       ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
607       ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
608     } else if (side == PC_LEFT) {
609       ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr);
610       ierr = MatMult(pc->mat,y,work);CHKERRQ(ierr);
611       ierr = PCApply(pc,work,y);CHKERRQ(ierr);
612       ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
613     } else if (side == PC_SYMMETRIC) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner");
614   } else {
615     if (pc->ops->applyBA) {
616       ierr = (*pc->ops->applyBA)(pc,side,x,y,work);CHKERRQ(ierr);
617     } else if (side == PC_RIGHT) {
618       ierr = PCApply(pc,x,work);CHKERRQ(ierr);
619       ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
620     } else if (side == PC_LEFT) {
621       ierr = MatMult(pc->mat,x,work);CHKERRQ(ierr);
622       ierr = PCApply(pc,work,y);CHKERRQ(ierr);
623     } else if (side == PC_SYMMETRIC) {
624       /* There's an extra copy here; maybe should provide 2 work vectors instead? */
625       ierr = PCApplySymmetricRight(pc,x,work);CHKERRQ(ierr);
626       ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
627       ierr = VecCopy(y,work);CHKERRQ(ierr);
628       ierr = PCApplySymmetricLeft(pc,work,y);CHKERRQ(ierr);
629     }
630   }
631   VecValidValues(y,4,PETSC_FALSE);
632   PetscFunctionReturn(0);
633 }
634 
635 #undef __FUNCT__
636 #define __FUNCT__ "PCApplyBAorABTranspose"
637 /*@
638    PCApplyBAorABTranspose - Applies the transpose of the preconditioner
639    and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning,
640    NOT tr(B*A) = tr(A)*tr(B).
641 
642    Collective on PC and Vec
643 
644    Input Parameters:
645 +  pc - the preconditioner context
646 .  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
647 .  x - input vector
648 -  work - work vector
649 
650    Output Parameter:
651 .  y - output vector
652 
653 
654    Notes: this routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner
655       defined by B'. This is why this has the funny form that it computes tr(B) * tr(A)
656 
657     Level: developer
658 
659 .keywords: PC, apply, operator, transpose
660 
661 .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorAB()
662 @*/
663 PetscErrorCode  PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work)
664 {
665   PetscErrorCode ierr;
666 
667   PetscFunctionBegin;
668   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
669   PetscValidHeaderSpecific(x,VEC_CLASSID,3);
670   PetscValidHeaderSpecific(y,VEC_CLASSID,4);
671   PetscValidHeaderSpecific(work,VEC_CLASSID,5);
672   if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors");
673   VecValidValues(x,3,PETSC_TRUE);
674   if (pc->ops->applyBAtranspose) {
675     ierr = (*pc->ops->applyBAtranspose)(pc,side,x,y,work);CHKERRQ(ierr);
676     VecValidValues(y,4,PETSC_FALSE);
677     PetscFunctionReturn(0);
678   }
679   if (side != PC_LEFT && side != PC_RIGHT) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left");
680 
681   if (pc->setupcalled < 2) {
682     ierr = PCSetUp(pc);CHKERRQ(ierr);
683   }
684 
685   if (side == PC_RIGHT) {
686     ierr = PCApplyTranspose(pc,x,work);CHKERRQ(ierr);
687     ierr = MatMultTranspose(pc->mat,work,y);CHKERRQ(ierr);
688   } else if (side == PC_LEFT) {
689     ierr = MatMultTranspose(pc->mat,x,work);CHKERRQ(ierr);
690     ierr = PCApplyTranspose(pc,work,y);CHKERRQ(ierr);
691   }
692   /* add support for PC_SYMMETRIC */
693   VecValidValues(y,4,PETSC_FALSE);
694   PetscFunctionReturn(0);
695 }
696 
697 /* -------------------------------------------------------------------------------*/
698 
699 #undef __FUNCT__
700 #define __FUNCT__ "PCApplyRichardsonExists"
701 /*@
702    PCApplyRichardsonExists - Determines whether a particular preconditioner has a
703    built-in fast application of Richardson's method.
704 
705    Not Collective
706 
707    Input Parameter:
708 .  pc - the preconditioner
709 
710    Output Parameter:
711 .  exists - PETSC_TRUE or PETSC_FALSE
712 
713    Level: developer
714 
715 .keywords: PC, apply, Richardson, exists
716 
717 .seealso: PCApplyRichardson()
718 @*/
719 PetscErrorCode  PCApplyRichardsonExists(PC pc,PetscBool  *exists)
720 {
721   PetscFunctionBegin;
722   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
723   PetscValidIntPointer(exists,2);
724   if (pc->ops->applyrichardson) *exists = PETSC_TRUE;
725   else                          *exists = PETSC_FALSE;
726   PetscFunctionReturn(0);
727 }
728 
#undef __FUNCT__
#define __FUNCT__ "PCApplyRichardson"
/*@
   PCApplyRichardson - Applies several steps of Richardson iteration with
   the particular preconditioner. This routine is usually used by the
   Krylov solvers and not the application code directly.

   Collective on PC

   Input Parameters:
+  pc  - the preconditioner context
.  b   - the right hand side
.  w   - one work vector
.  rtol - relative decrease in residual norm convergence criteria
.  abstol - absolute residual norm convergence criteria
.  dtol - divergence residual norm increase criteria
.  its - the number of iterations to apply.
-  guesszero - if the input x contains nonzero initial guess

   Output Parameter:
+  outits - number of iterations actually used (for SOR this always equals its)
.  reason - the reason the apply terminated
-  y - the solution (also contains initial guess if guesszero is PETSC_FALSE)

   Notes:
   Most preconditioners do not support this function. Use the command
   PCApplyRichardsonExists() to determine if one does.

   Except for the multigrid PC this routine ignores the convergence tolerances
   and always runs for the number of iterations

   Level: developer

.keywords: PC, apply, Richardson

.seealso: PCApplyRichardsonExists()
@*/
PetscErrorCode  PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool  guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(b,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscValidHeaderSpecific(w,VEC_CLASSID,4);
  if (b == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"b and y must be different vectors");
  /* setupcalled < 2 means the PC was never set up or needs re-setup */
  if (pc->setupcalled < 2) {
    ierr = PCSetUp(pc);CHKERRQ(ierr);
  }
  /* delegate entirely to the implementation; there is no generic fallback */
  if (!pc->ops->applyrichardson) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have apply richardson");
  ierr = (*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
783 
784 /*
785       a setupcall of 0 indicates never setup,
786                      1 needs to be resetup,
787                      2 does not need any changes.
788 */
789 #undef __FUNCT__
790 #define __FUNCT__ "PCSetUp"
791 /*@
792    PCSetUp - Prepares for the use of a preconditioner.
793 
794    Collective on PC
795 
796    Input Parameter:
797 .  pc - the preconditioner context
798 
799    Level: developer
800 
801 .keywords: PC, setup
802 
803 .seealso: PCCreate(), PCApply(), PCDestroy()
804 @*/
805 PetscErrorCode  PCSetUp(PC pc)
806 {
807   PetscErrorCode ierr;
808   const char     *def;
809 
810   PetscFunctionBegin;
811   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
812   if (!pc->mat) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first");
813 
814   if (pc->setupcalled > 1) {
815     ierr = PetscInfo(pc,"Setting PC with identical preconditioner\n");CHKERRQ(ierr);
816     PetscFunctionReturn(0);
817   } else if (!pc->setupcalled) {
818     ierr = PetscInfo(pc,"Setting up new PC\n");CHKERRQ(ierr);
819   } else if (pc->flag == SAME_NONZERO_PATTERN) {
820     ierr = PetscInfo(pc,"Setting up PC with same nonzero pattern\n");CHKERRQ(ierr);
821   } else {
822     ierr = PetscInfo(pc,"Setting up PC with different nonzero pattern\n");CHKERRQ(ierr);
823   }
824 
825   if (!((PetscObject)pc)->type_name) {
826     ierr = PCGetDefaultType_Private(pc,&def);CHKERRQ(ierr);
827     ierr = PCSetType(pc,def);CHKERRQ(ierr);
828   }
829 
830   ierr = PetscLogEventBegin(PC_SetUp,pc,0,0,0);CHKERRQ(ierr);
831   if (pc->ops->setup) {
832     ierr = (*pc->ops->setup)(pc);CHKERRQ(ierr);
833   }
834   pc->setupcalled = 2;
835   ierr = PetscLogEventEnd(PC_SetUp,pc,0,0,0);CHKERRQ(ierr);
836   PetscFunctionReturn(0);
837 }
838 
839 #undef __FUNCT__
840 #define __FUNCT__ "PCSetUpOnBlocks"
841 /*@
842    PCSetUpOnBlocks - Sets up the preconditioner for each block in
843    the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
844    methods.
845 
846    Collective on PC
847 
848    Input Parameters:
849 .  pc - the preconditioner context
850 
851    Level: developer
852 
853 .keywords: PC, setup, blocks
854 
855 .seealso: PCCreate(), PCApply(), PCDestroy(), PCSetUp()
856 @*/
857 PetscErrorCode  PCSetUpOnBlocks(PC pc)
858 {
859   PetscErrorCode ierr;
860 
861   PetscFunctionBegin;
862   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
863   if (!pc->ops->setuponblocks) PetscFunctionReturn(0);
864   ierr = PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr);
865   ierr = (*pc->ops->setuponblocks)(pc);CHKERRQ(ierr);
866   ierr = PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr);
867   PetscFunctionReturn(0);
868 }
869 
870 #undef __FUNCT__
871 #define __FUNCT__ "PCSetModifySubMatrices"
872 /*@C
873    PCSetModifySubMatrices - Sets a user-defined routine for modifying the
874    submatrices that arise within certain subdomain-based preconditioners.
875    The basic submatrices are extracted from the preconditioner matrix as
876    usual; the user can then alter these (for example, to set different boundary
877    conditions for each submatrix) before they are used for the local solves.
878 
879    Logically Collective on PC
880 
881    Input Parameters:
882 +  pc - the preconditioner context
883 .  func - routine for modifying the submatrices
884 -  ctx - optional user-defined context (may be null)
885 
886    Calling sequence of func:
887 $     func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx);
888 
889 .  row - an array of index sets that contain the global row numbers
890          that comprise each local submatrix
891 .  col - an array of index sets that contain the global column numbers
892          that comprise each local submatrix
893 .  submat - array of local submatrices
894 -  ctx - optional user-defined context for private data for the
895          user-defined func routine (may be null)
896 
897    Notes:
898    PCSetModifySubMatrices() MUST be called before KSPSetUp() and
899    KSPSolve().
900 
901    A routine set by PCSetModifySubMatrices() is currently called within
902    the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM)
903    preconditioners.  All other preconditioners ignore this routine.
904 
905    Level: advanced
906 
907 .keywords: PC, set, modify, submatrices
908 
909 .seealso: PCModifySubMatrices(), PCASMGetSubMatrices()
910 @*/
911 PetscErrorCode  PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx)
912 {
913   PetscFunctionBegin;
914   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
915   pc->modifysubmatrices  = func;
916   pc->modifysubmatricesP = ctx;
917   PetscFunctionReturn(0);
918 }
919 
920 #undef __FUNCT__
921 #define __FUNCT__ "PCModifySubMatrices"
922 /*@C
923    PCModifySubMatrices - Calls an optional user-defined routine within
924    certain preconditioners if one has been set with PCSetModifySubMarices().
925 
926    Collective on PC
927 
928    Input Parameters:
929 +  pc - the preconditioner context
930 .  nsub - the number of local submatrices
931 .  row - an array of index sets that contain the global row numbers
932          that comprise each local submatrix
933 .  col - an array of index sets that contain the global column numbers
934          that comprise each local submatrix
935 .  submat - array of local submatrices
936 -  ctx - optional user-defined context for private data for the
937          user-defined routine (may be null)
938 
939    Output Parameter:
940 .  submat - array of local submatrices (the entries of which may
941             have been modified)
942 
943    Notes:
944    The user should NOT generally call this routine, as it will
945    automatically be called within certain preconditioners (currently
946    block Jacobi, additive Schwarz) if set.
947 
948    The basic submatrices are extracted from the preconditioner matrix
949    as usual; the user can then alter these (for example, to set different
950    boundary conditions for each submatrix) before they are used for the
951    local solves.
952 
953    Level: developer
954 
955 .keywords: PC, modify, submatrices
956 
957 .seealso: PCSetModifySubMatrices()
958 @*/
959 PetscErrorCode  PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
960 {
961   PetscErrorCode ierr;
962 
963   PetscFunctionBegin;
964   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
965   if (!pc->modifysubmatrices) PetscFunctionReturn(0);
966   ierr = PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr);
967   ierr = (*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx);CHKERRQ(ierr);
968   ierr = PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr);
969   PetscFunctionReturn(0);
970 }
971 
#undef __FUNCT__
#define __FUNCT__ "PCSetOperators"
/*@
   PCSetOperators - Sets the matrix associated with the linear system and
   a (possibly) different one associated with the preconditioner.

   Logically Collective on PC and Mat

   Input Parameters:
+  pc - the preconditioner context
.  Amat - the matrix associated with the linear system
.  Pmat - the matrix to be used in constructing the preconditioner, usually
          the same as Amat.
-  flag - flag indicating information about the preconditioner matrix structure
   during successive linear solves.  This flag is ignored the first time a
   linear system is solved, and thus is irrelevant when solving just one linear
   system.

   Notes:
   The flag can be used to eliminate unnecessary work in the preconditioner
   during the repeated solution of linear systems of the same size.  The
   available options are
+    SAME_PRECONDITIONER -
       Pmat is identical during successive linear solves.
       This option is intended for folks who are using
       different Amat and Pmat matrices and wish to reuse the
       same preconditioner matrix.  For example, this option
       saves work by not recomputing incomplete factorization
       for ILU/ICC preconditioners.
.     SAME_NONZERO_PATTERN -
       Pmat has the same nonzero structure during
       successive linear solves.
-     DIFFERENT_NONZERO_PATTERN -
       Pmat does not have the same nonzero structure.

    Passing a PETSC_NULL for Amat or Pmat removes the matrix that is currently used.

    If you wish to replace either Amat or Pmat but leave the other one untouched then
    first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
    on it and then pass it back in in your call to KSPSetOperators().

   Caution:
   If you specify SAME_NONZERO_PATTERN, PETSc believes your assertion
   and does not check the structure of the matrix.  If you erroneously
   claim that the structure is the same when it actually is not, the new
   preconditioner will not function correctly.  Thus, use this optimization
   feature carefully!

   If in doubt about whether your preconditioner matrix has changed
   structure or not, use the flag DIFFERENT_NONZERO_PATTERN.

   More Notes about Repeated Solution of Linear Systems:
   PETSc does NOT reset the matrix entries of either Amat or Pmat
   to zero after a linear solve; the user is completely responsible for
   matrix assembly.  See the routine MatZeroEntries() if desiring to
   zero all elements of a matrix.

   Level: intermediate

.keywords: PC, set, operators, matrix, linear system

.seealso: PCGetOperators(), MatZeroEntries()
 @*/
PetscErrorCode  PCSetOperators(PC pc,Mat Amat,Mat Pmat,MatStructure flag)
{
  PetscErrorCode ierr;
  PetscInt       m1,n1,m2,n2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* either matrix may be PETSC_NULL (which removes the currently held one),
     so each validation/communicator check is guarded */
  if (Amat) PetscValidHeaderSpecific(Amat,MAT_CLASSID,2);
  if (Pmat) PetscValidHeaderSpecific(Pmat,MAT_CLASSID,3);
  if (Amat) PetscCheckSameComm(pc,1,Amat,2);
  if (Pmat) PetscCheckSameComm(pc,1,Pmat,3);
  /* once the PC has been set up, the local sizes of the replacement operators
     must match the ones it was set up with */
  if (pc->setupcalled && Amat && Pmat) {
    ierr = MatGetLocalSize(Amat,&m1,&n1);CHKERRQ(ierr);
    ierr = MatGetLocalSize(pc->mat,&m2,&n2);CHKERRQ(ierr);
    if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
    ierr = MatGetLocalSize(Pmat,&m1,&n1);CHKERRQ(ierr);
    ierr = MatGetLocalSize(pc->pmat,&m2,&n2);CHKERRQ(ierr);
    if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
  }

  /* reference first in case the matrices are the same as the ones currently
     held; destroying first could drop the reference count to zero */
  if (Amat) {ierr = PetscObjectReference((PetscObject)Amat);CHKERRQ(ierr);}
  ierr = MatDestroy(&pc->mat);CHKERRQ(ierr);
  if (Pmat) {ierr = PetscObjectReference((PetscObject)Pmat);CHKERRQ(ierr);}
  ierr = MatDestroy(&pc->pmat);CHKERRQ(ierr);
  pc->mat  = Amat;
  pc->pmat = Pmat;

  /* setupcalled == 2 marks a fully set-up PC; unless the caller promises the
     preconditioner matrix is unchanged, drop back to 1 so PCSetUp() rebuilds */
  if (pc->setupcalled == 2 && flag != SAME_PRECONDITIONER) {
    pc->setupcalled = 1;
  }
  pc->flag = flag;
  PetscFunctionReturn(0);
}
1069 
#undef __FUNCT__
#define __FUNCT__ "PCGetOperators"
/*@C
   PCGetOperators - Gets the matrix associated with the linear system and
   possibly a different one associated with the preconditioner.

   Not collective, though parallel Mats are returned if the PC is parallel

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix associated with the linear system
.  pmat - matrix associated with the preconditioner, usually the same
          as mat.
-  flag - flag indicating information about the preconditioner
          matrix structure.  See PCSetOperators() for details.

   Level: intermediate

   Notes: Does not increase the reference count of the matrices, so you should not destroy them

   Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators
      are created in PC and returned to the user. In this case, if both operators
      mat and pmat are requested, two DIFFERENT operators will be returned. If
      only one is requested both operators in the PC will be the same (i.e. as
      if one had called KSP/PCSetOperators() with the same argument for both Mats).
      The user must set the sizes of the returned matrices and their type etc just
      as if the user created them with MatCreate(). For example,

$         KSP/PCGetOperators(ksp/pc,&mat,PETSC_NULL,PETSC_NULL); is equivalent to
$           set size, type, etc of mat

$         MatCreate(comm,&mat);
$         KSP/PCSetOperators(ksp/pc,mat,mat,SAME_NONZERO_PATTERN);
$         PetscObjectDereference((PetscObject)mat);
$           set size, type, etc of mat

     and

$         KSP/PCGetOperators(ksp/pc,&mat,&pmat,PETSC_NULL); is equivalent to
$           set size, type, etc of mat and pmat

$         MatCreate(comm,&mat);
$         MatCreate(comm,&pmat);
$         KSP/PCSetOperators(ksp/pc,mat,pmat,SAME_NONZERO_PATTERN);
$         PetscObjectDereference((PetscObject)mat);
$         PetscObjectDereference((PetscObject)pmat);
$           set size, type, etc of mat and pmat

    The rationale for this support is so that when creating a TS, SNES, or KSP the hierarchy
    of underlying objects (i.e. SNES, KSP, PC, Mat) and their lifespans can be completely
    managed by the top most level object (i.e. the TS, SNES, or KSP). Another way to look
    at this is when you create a SNES you do not NEED to create a KSP and attach it to
    the SNES object (the SNES object manages it for you). Similarly when you create a KSP
    you do not need to attach a PC to it (the KSP object manages the PC object for you).
    Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when
    it can be created for you?


.keywords: PC, get, operators, matrix, linear system

.seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperatorsSet()
@*/
PetscErrorCode  PCGetOperators(PC pc,Mat *mat,Mat *pmat,MatStructure *flag)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* all three outputs are optional; matrices are created lazily when requested
     but not yet set (see "Alternative usage" above) */
  if (mat) {
    if (!pc->mat) {
      if (pc->pmat && !pmat) {  /* pmat has been set, but user did not request it, so use for mat */
        pc->mat = pc->pmat;
        ierr = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr);
      } else {                  /* both mat and pmat are empty */
        ierr = MatCreate(((PetscObject)pc)->comm,&pc->mat);CHKERRQ(ierr);
        if (!pmat) { /* user did NOT request pmat, so make same as mat */
          pc->pmat = pc->mat;
          ierr     = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr);
        }
      }
    }
    *mat  = pc->mat;
  }
  if (pmat) {
    if (!pc->pmat) {
      if (pc->mat && !mat) {    /* mat has been set but was not requested, so use for pmat */
        pc->pmat = pc->mat;
        ierr    = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr);
      } else {                  /* create a fresh pmat (and alias mat to it when mat was not requested) */
        ierr = MatCreate(((PetscObject)pc)->comm,&pc->pmat);CHKERRQ(ierr);
        if (!mat) { /* user did NOT request mat, so make same as pmat */
          pc->mat = pc->pmat;
          ierr    = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr);
        }
      }
    }
    *pmat = pc->pmat;
  }
  if (flag) *flag = pc->flag;
  PetscFunctionReturn(0);
}
1173 
1174 #undef __FUNCT__
1175 #define __FUNCT__ "PCGetOperatorsSet"
1176 /*@C
1177    PCGetOperatorsSet - Determines if the matrix associated with the linear system and
1178    possibly a different one associated with the preconditioner have been set in the PC.
1179 
1180    Not collective, though the results on all processes should be the same
1181 
1182    Input Parameter:
1183 .  pc - the preconditioner context
1184 
1185    Output Parameters:
1186 +  mat - the matrix associated with the linear system was set
1187 -  pmat - matrix associated with the preconditioner was set, usually the same
1188 
1189    Level: intermediate
1190 
1191 .keywords: PC, get, operators, matrix, linear system
1192 
1193 .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperators()
1194 @*/
1195 PetscErrorCode  PCGetOperatorsSet(PC pc,PetscBool  *mat,PetscBool  *pmat)
1196 {
1197   PetscFunctionBegin;
1198   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1199   if (mat)  *mat  = (pc->mat)  ? PETSC_TRUE : PETSC_FALSE;
1200   if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE;
1201   PetscFunctionReturn(0);
1202 }
1203 
1204 #undef __FUNCT__
1205 #define __FUNCT__ "PCFactorGetMatrix"
1206 /*@
1207    PCFactorGetMatrix - Gets the factored matrix from the
1208    preconditioner context.  This routine is valid only for the LU,
1209    incomplete LU, Cholesky, and incomplete Cholesky methods.
1210 
1211    Not Collective on PC though Mat is parallel if PC is parallel
1212 
1213    Input Parameters:
1214 .  pc - the preconditioner context
1215 
1216    Output parameters:
1217 .  mat - the factored matrix
1218 
1219    Level: advanced
1220 
1221    Notes: Does not increase the reference count for the matrix so DO NOT destroy it
1222 
1223 .keywords: PC, get, factored, matrix
1224 @*/
1225 PetscErrorCode  PCFactorGetMatrix(PC pc,Mat *mat)
1226 {
1227   PetscErrorCode ierr;
1228 
1229   PetscFunctionBegin;
1230   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1231   PetscValidPointer(mat,2);
1232   if (pc->ops->getfactoredmatrix) {
1233     ierr = (*pc->ops->getfactoredmatrix)(pc,mat);CHKERRQ(ierr);
1234   } else {
1235     SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC type does not support getting factor matrix");
1236   }
1237   PetscFunctionReturn(0);
1238 }
1239 
1240 #undef __FUNCT__
1241 #define __FUNCT__ "PCSetOptionsPrefix"
1242 /*@C
1243    PCSetOptionsPrefix - Sets the prefix used for searching for all
1244    PC options in the database.
1245 
1246    Logically Collective on PC
1247 
1248    Input Parameters:
1249 +  pc - the preconditioner context
1250 -  prefix - the prefix string to prepend to all PC option requests
1251 
1252    Notes:
1253    A hyphen (-) must NOT be given at the beginning of the prefix name.
1254    The first character of all runtime options is AUTOMATICALLY the
1255    hyphen.
1256 
1257    Level: advanced
1258 
1259 .keywords: PC, set, options, prefix, database
1260 
1261 .seealso: PCAppendOptionsPrefix(), PCGetOptionsPrefix()
1262 @*/
1263 PetscErrorCode  PCSetOptionsPrefix(PC pc,const char prefix[])
1264 {
1265   PetscErrorCode ierr;
1266 
1267   PetscFunctionBegin;
1268   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1269   ierr = PetscObjectSetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
1270   PetscFunctionReturn(0);
1271 }
1272 
1273 #undef __FUNCT__
1274 #define __FUNCT__ "PCAppendOptionsPrefix"
1275 /*@C
1276    PCAppendOptionsPrefix - Appends to the prefix used for searching for all
1277    PC options in the database.
1278 
1279    Logically Collective on PC
1280 
1281    Input Parameters:
1282 +  pc - the preconditioner context
1283 -  prefix - the prefix string to prepend to all PC option requests
1284 
1285    Notes:
1286    A hyphen (-) must NOT be given at the beginning of the prefix name.
1287    The first character of all runtime options is AUTOMATICALLY the
1288    hyphen.
1289 
1290    Level: advanced
1291 
1292 .keywords: PC, append, options, prefix, database
1293 
1294 .seealso: PCSetOptionsPrefix(), PCGetOptionsPrefix()
1295 @*/
1296 PetscErrorCode  PCAppendOptionsPrefix(PC pc,const char prefix[])
1297 {
1298   PetscErrorCode ierr;
1299 
1300   PetscFunctionBegin;
1301   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1302   ierr = PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
1303   PetscFunctionReturn(0);
1304 }
1305 
1306 #undef __FUNCT__
1307 #define __FUNCT__ "PCGetOptionsPrefix"
1308 /*@C
1309    PCGetOptionsPrefix - Gets the prefix used for searching for all
1310    PC options in the database.
1311 
1312    Not Collective
1313 
1314    Input Parameters:
1315 .  pc - the preconditioner context
1316 
1317    Output Parameters:
1318 .  prefix - pointer to the prefix string used, is returned
1319 
1320    Notes: On the fortran side, the user should pass in a string 'prifix' of
1321    sufficient length to hold the prefix.
1322 
1323    Level: advanced
1324 
1325 .keywords: PC, get, options, prefix, database
1326 
1327 .seealso: PCSetOptionsPrefix(), PCAppendOptionsPrefix()
1328 @*/
1329 PetscErrorCode  PCGetOptionsPrefix(PC pc,const char *prefix[])
1330 {
1331   PetscErrorCode ierr;
1332 
1333   PetscFunctionBegin;
1334   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1335   PetscValidPointer(prefix,2);
1336   ierr = PetscObjectGetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
1337   PetscFunctionReturn(0);
1338 }
1339 
1340 #undef __FUNCT__
1341 #define __FUNCT__ "PCPreSolve"
1342 /*@
1343    PCPreSolve - Optional pre-solve phase, intended for any
1344    preconditioner-specific actions that must be performed before
1345    the iterative solve itself.
1346 
1347    Collective on PC
1348 
1349    Input Parameters:
1350 +  pc - the preconditioner context
1351 -  ksp - the Krylov subspace context
1352 
1353    Level: developer
1354 
1355    Sample of Usage:
1356 .vb
1357     PCPreSolve(pc,ksp);
1358     KSPSolve(ksp,b,x);
1359     PCPostSolve(pc,ksp);
1360 .ve
1361 
1362    Notes:
1363    The pre-solve phase is distinct from the PCSetUp() phase.
1364 
1365    KSPSolve() calls this directly, so is rarely called by the user.
1366 
1367 .keywords: PC, pre-solve
1368 
1369 .seealso: PCPostSolve()
1370 @*/
1371 PetscErrorCode  PCPreSolve(PC pc,KSP ksp)
1372 {
1373   PetscErrorCode ierr;
1374   Vec            x,rhs;
1375   Mat            A,B;
1376 
1377   PetscFunctionBegin;
1378   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1379   PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
1380   ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr);
1381   ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr);
1382   /*
1383       Scale the system and have the matrices use the scaled form
1384     only if the two matrices are actually the same (and hence
1385     have the same scaling
1386   */
1387   ierr = PCGetOperators(pc,&A,&B,PETSC_NULL);CHKERRQ(ierr);
1388   if (A == B) {
1389     ierr = MatScaleSystem(pc->mat,rhs,x);CHKERRQ(ierr);
1390     ierr = MatUseScaledForm(pc->mat,PETSC_TRUE);CHKERRQ(ierr);
1391   }
1392 
1393   if (pc->ops->presolve) {
1394     ierr = (*pc->ops->presolve)(pc,ksp,rhs,x);CHKERRQ(ierr);
1395   }
1396   PetscFunctionReturn(0);
1397 }
1398 
1399 #undef __FUNCT__
1400 #define __FUNCT__ "PCPostSolve"
1401 /*@
1402    PCPostSolve - Optional post-solve phase, intended for any
1403    preconditioner-specific actions that must be performed after
1404    the iterative solve itself.
1405 
1406    Collective on PC
1407 
1408    Input Parameters:
1409 +  pc - the preconditioner context
1410 -  ksp - the Krylov subspace context
1411 
1412    Sample of Usage:
1413 .vb
1414     PCPreSolve(pc,ksp);
1415     KSPSolve(ksp,b,x);
1416     PCPostSolve(pc,ksp);
1417 .ve
1418 
1419    Note:
1420    KSPSolve() calls this routine directly, so it is rarely called by the user.
1421 
1422    Level: developer
1423 
1424 .keywords: PC, post-solve
1425 
1426 .seealso: PCPreSolve(), KSPSolve()
1427 @*/
1428 PetscErrorCode  PCPostSolve(PC pc,KSP ksp)
1429 {
1430   PetscErrorCode ierr;
1431   Vec            x,rhs;
1432   Mat            A,B;
1433 
1434   PetscFunctionBegin;
1435   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1436   PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
1437   ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr);
1438   ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr);
1439   if (pc->ops->postsolve) {
1440     ierr =  (*pc->ops->postsolve)(pc,ksp,rhs,x);CHKERRQ(ierr);
1441   }
1442   /*
1443       Scale the system and have the matrices use the scaled form
1444     only if the two matrices are actually the same (and hence
1445     have the same scaling
1446   */
1447   ierr = PCGetOperators(pc,&A,&B,PETSC_NULL);CHKERRQ(ierr);
1448   if (A == B) {
1449     ierr = MatUnScaleSystem(pc->mat,rhs,x);CHKERRQ(ierr);
1450     ierr = MatUseScaledForm(pc->mat,PETSC_FALSE);CHKERRQ(ierr);
1451   }
1452   PetscFunctionReturn(0);
1453 }
1454 
#undef __FUNCT__
#define __FUNCT__ "PCView"
/*@C
   PCView - Prints the PC data structure.

   Collective on PC

   Input Parameters:
+  PC - the PC context
-  viewer - optional visualization context

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
         output where only the first processor opens
         the file.  All other processors send their
         data to the first processor to print.

   The user can open an alternative visualization contexts with
   PetscViewerASCIIOpen() (output to a specified file).

   Level: developer

.keywords: PC, view

.seealso: KSPView(), PetscViewerASCIIOpen()
@*/
PetscErrorCode  PCView(PC pc,PetscViewer viewer)
{
  const PCType      cstr;
  PetscErrorCode    ierr;
  PetscBool         iascii,isstring;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* default to stdout on the PC's communicator when no viewer is supplied */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)pc)->comm,&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  PetscCheckSameComm(pc,1,viewer,2);

  /* only ASCII and string viewers are supported */
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    /* NOTE(review): format is fetched here but never read below — confirm whether it is still needed */
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer,"PC Object");CHKERRQ(ierr);
    /* let the implementation print its own options/state, indented one tab */
    if (pc->ops->view) {
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
    }
    /* print the operators (INFO format) — one matrix when mat == pmat, otherwise both */
    if (pc->mat) {
      ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);CHKERRQ(ierr);
      if (pc->pmat == pc->mat) {
        ierr = PetscViewerASCIIPrintf(viewer,"  linear system matrix = precond matrix:\n");CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      } else {
        if (pc->pmat) {
          ierr = PetscViewerASCIIPrintf(viewer,"  linear system matrix followed by preconditioner matrix:\n");CHKERRQ(ierr);
        } else {
          ierr = PetscViewerASCIIPrintf(viewer,"  linear system matrix:\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);
        if (pc->pmat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);}
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    /* compact single-token form: type name plus any implementation-specific text */
    ierr = PCGetType(pc,&cstr);CHKERRQ(ierr);
    ierr = PetscViewerStringSPrintf(viewer," %-7.7s",cstr);CHKERRQ(ierr);
    if (pc->ops->view) {ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);}
  } else {
    SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported by PC",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}
1537 
1538 
1539 #undef __FUNCT__
1540 #define __FUNCT__ "PCSetInitialGuessNonzero"
1541 /*@
1542    PCSetInitialGuessNonzero - Tells the iterative solver that the
1543    initial guess is nonzero; otherwise PC assumes the initial guess
1544    is to be zero (and thus zeros it out before solving).
1545 
1546    Logically Collective on PC
1547 
1548    Input Parameters:
1549 +  pc - iterative context obtained from PCCreate()
1550 -  flg - PETSC_TRUE indicates the guess is non-zero, PETSC_FALSE indicates the guess is zero
1551 
1552    Level: Developer
1553 
1554    Notes:
1555     This is a weird function. Since PC's are linear operators on the right hand side they
1556     CANNOT use an initial guess. This function is for the "pass-through" preconditioners
1557     PCKSP, PCREDUNDANT and PCHMPI and causes the inner KSP object to use the nonzero
1558     initial guess. Not currently working for PCREDUNDANT, that has to be rewritten to use KSP.
1559 
1560 
1561 .keywords: PC, set, initial guess, nonzero
1562 
1563 .seealso: PCGetInitialGuessNonzero(), PCSetInitialGuessKnoll(), PCGetInitialGuessKnoll()
1564 @*/
1565 PetscErrorCode  PCSetInitialGuessNonzero(PC pc,PetscBool  flg)
1566 {
1567   PetscFunctionBegin;
1568   PetscValidLogicalCollectiveBool(pc,flg,2);
1569   pc->nonzero_guess   = flg;
1570   PetscFunctionReturn(0);
1571 }
1572 
1573 #undef __FUNCT__
1574 #define __FUNCT__ "PCRegister"
1575 /*@C
1576   PCRegister - See PCRegisterDynamic()
1577 
1578   Level: advanced
1579 @*/
1580 PetscErrorCode  PCRegister(const char sname[],const char path[],const char name[],PetscErrorCode (*function)(PC))
1581 {
1582   PetscErrorCode ierr;
1583   char           fullname[PETSC_MAX_PATH_LEN];
1584 
1585   PetscFunctionBegin;
1586 
1587   ierr = PetscFListConcat(path,name,fullname);CHKERRQ(ierr);
1588   ierr = PetscFListAdd(&PCList,sname,fullname,(void (*)(void))function);CHKERRQ(ierr);
1589   PetscFunctionReturn(0);
1590 }
1591 
1592 #undef __FUNCT__
1593 #define __FUNCT__ "PCComputeExplicitOperator"
1594 /*@
1595     PCComputeExplicitOperator - Computes the explicit preconditioned operator.
1596 
1597     Collective on PC
1598 
1599     Input Parameter:
1600 .   pc - the preconditioner object
1601 
1602     Output Parameter:
1603 .   mat - the explict preconditioned operator
1604 
1605     Notes:
1606     This computation is done by applying the operators to columns of the
1607     identity matrix.
1608 
1609     Currently, this routine uses a dense matrix format when 1 processor
1610     is used and a sparse format otherwise.  This routine is costly in general,
1611     and is recommended for use only with relatively small systems.
1612 
1613     Level: advanced
1614 
1615 .keywords: PC, compute, explicit, operator
1616 
1617 .seealso: KSPComputeExplicitOperator()
1618 
1619 @*/
1620 PetscErrorCode  PCComputeExplicitOperator(PC pc,Mat *mat)
1621 {
1622   Vec            in,out;
1623   PetscErrorCode ierr;
1624   PetscInt       i,M,m,*rows,start,end;
1625   PetscMPIInt    size;
1626   MPI_Comm       comm;
1627   PetscScalar    *array,one = 1.0;
1628 
1629   PetscFunctionBegin;
1630   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1631   PetscValidPointer(mat,2);
1632 
1633   comm = ((PetscObject)pc)->comm;
1634   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1635 
1636   if (!pc->pmat) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"You must call KSPSetOperators() or PCSetOperators() before this call");
1637   ierr = MatGetVecs(pc->pmat,&in,0);CHKERRQ(ierr);
1638   ierr = VecDuplicate(in,&out);CHKERRQ(ierr);
1639   ierr = VecGetOwnershipRange(in,&start,&end);CHKERRQ(ierr);
1640   ierr = VecGetSize(in,&M);CHKERRQ(ierr);
1641   ierr = VecGetLocalSize(in,&m);CHKERRQ(ierr);
1642   ierr = PetscMalloc((m+1)*sizeof(PetscInt),&rows);CHKERRQ(ierr);
1643   for (i=0; i<m; i++) {rows[i] = start + i;}
1644 
1645   ierr = MatCreate(comm,mat);CHKERRQ(ierr);
1646   ierr = MatSetSizes(*mat,m,m,M,M);CHKERRQ(ierr);
1647   if (size == 1) {
1648     ierr = MatSetType(*mat,MATSEQDENSE);CHKERRQ(ierr);
1649     ierr = MatSeqDenseSetPreallocation(*mat,PETSC_NULL);CHKERRQ(ierr);
1650   } else {
1651     ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr);
1652     ierr = MatMPIAIJSetPreallocation(*mat,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
1653   }
1654 
1655   for (i=0; i<M; i++) {
1656 
1657     ierr = VecSet(in,0.0);CHKERRQ(ierr);
1658     ierr = VecSetValues(in,1,&i,&one,INSERT_VALUES);CHKERRQ(ierr);
1659     ierr = VecAssemblyBegin(in);CHKERRQ(ierr);
1660     ierr = VecAssemblyEnd(in);CHKERRQ(ierr);
1661 
1662     /* should fix, allowing user to choose side */
1663     ierr = PCApply(pc,in,out);CHKERRQ(ierr);
1664 
1665     ierr = VecGetArray(out,&array);CHKERRQ(ierr);
1666     ierr = MatSetValues(*mat,m,rows,1,&i,array,INSERT_VALUES);CHKERRQ(ierr);
1667     ierr = VecRestoreArray(out,&array);CHKERRQ(ierr);
1668 
1669   }
1670   ierr = PetscFree(rows);CHKERRQ(ierr);
1671   ierr = VecDestroy(&out);CHKERRQ(ierr);
1672   ierr = MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1673   ierr = MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1674   PetscFunctionReturn(0);
1675 }
1676 
1677 #undef __FUNCT__
1678 #define __FUNCT__ "PCSetCoordinates"
1679 /*@
1680    PCSetCoordinates - sets the coordinates of all the nodes on the local process
1681 
1682    Collective on PC
1683 
1684    Input Parameters:
1685 +  pc - the solver context
1686 .  dim - the dimension of the coordinates 1, 2, or 3
1687 -  coords - the coordinates
1688 
1689    Level: intermediate
1690 
1691    Notes: coords is an array of the 3D coordinates for the nodes on
1692    the local processor.  So if there are 108 equation on a processor
1693    for a displacement finite element discretization of elasticity (so
1694    that there are 36 = 108/3 nodes) then the array must have 108
1695    double precision values (ie, 3 * 36).  These x y z coordinates
1696    should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x,
1697    ... , N-1.z ].
1698 
1699 .seealso: PCPROMETHEUS
1700 @*/
1701 PetscErrorCode  PCSetCoordinates(PC pc,PetscInt dim,PetscReal *coords)
1702 {
1703   PetscErrorCode ierr;
1704 
1705   PetscFunctionBegin;
1706   ierr = PetscTryMethod(pc,"PCSetCoordinates_C",(PC,PetscInt,PetscReal*),(pc,dim,coords));CHKERRQ(ierr);
1707   PetscFunctionReturn(0);
1708 }
1709