xref: /petsc/src/ksp/pc/impls/bjacobi/bjacobi.c (revision d60b7d5cda9a3148fab971d2544cf20a2ffedfa6)
1 
2 /*
3    Defines a block Jacobi preconditioner.
4 */
5 
6 #include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /*I "petscpc.h" I*/
7 
8 static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC,Mat,Mat);
9 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC,Mat,Mat);
10 static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC);
11 
/*
   PCSetUp_BJacobi - Determines the block layout (how many blocks on each
   process and their row counts), extracts the local diagonal-block matrices,
   and dispatches to the single-block, multi-block, or multi-process setup.

   Collective on PC. Called by PCSetUp()/KSPSetUp().
*/
static PetscErrorCode PCSetUp_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  Mat            mat  = pc->mat,pmat = pc->pmat;
  PetscErrorCode ierr;
  PetscBool      hasop;
  PetscInt       N,M,start,i,sum,end;
  PetscInt       bs,i_start=-1,i_end=-1;
  PetscMPIInt    rank,size;
  const char     *pprefix,*mprefix;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
  ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);

  /* Fewer blocks than processes: each block is shared by several processes */
  if (jac->n > 0 && jac->n < size) {
    ierr = PCSetUp_BJacobi_Multiproc(pc);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* --------------------------------------------------------------------------
      Determines the number of blocks assigned to each processor
  -----------------------------------------------------------------------------*/

  /*   local block count  given */
  if (jac->n_local > 0 && jac->n < 0) {
    ierr = MPIU_Allreduce(&jac->n_local,&jac->n,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    if (jac->l_lens) { /* check that user set these correctly */
      sum = 0;
      for (i=0; i<jac->n_local; i++) {
        /* every block length must be a multiple of the matrix block size */
        if (jac->l_lens[i]/bs*bs !=jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
        sum += jac->l_lens[i];
      }
      if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local lens set incorrectly");
    } else {
      /* no sizes supplied: split the local rows as evenly as possible in units of bs */
      ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
    }
  } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */
    /* global blocks given: determine which ones are local */
    if (jac->g_lens) {
      /* check that g_lens has valid entries */
      for (i=0; i<jac->n; i++) {
        if (!jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Zero block not allowed");
        if (jac->g_lens[i]/bs*bs != jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
      }
      if (size == 1) {
        jac->n_local = jac->n;
        ierr         = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
        ierr         = PetscArraycpy(jac->l_lens,jac->g_lens,jac->n_local);CHKERRQ(ierr);
        /* check that user set these correctly */
        sum = 0;
        for (i=0; i<jac->n_local; i++) sum += jac->l_lens[i];
        if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Global lens set incorrectly");
      } else {
        ierr = MatGetOwnershipRange(pc->pmat,&start,&end);CHKERRQ(ierr);
        /* loop over blocks determining the first one owned by me; block
           boundaries must line up exactly with the ownership range */
        sum = 0;
        for (i=0; i<jac->n+1; i++) {
          if (sum == start) { i_start = i; goto start_1;}
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
start_1:
        /* keep scanning (sum carries over) for the block ending exactly at my ownership end */
        for (i=i_start; i<jac->n+1; i++) {
          if (sum == end) { i_end = i; goto end_1; }
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
end_1:
        jac->n_local = i_end - i_start;
        ierr         = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
        ierr         = PetscArraycpy(jac->l_lens,jac->g_lens+i_start,jac->n_local);CHKERRQ(ierr);
      }
    } else { /* no global blocks given, determine them using default layout */
      jac->n_local = jac->n/size + ((jac->n % size) > rank);
      ierr         = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        jac->l_lens[i] = ((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i))*bs;
        if (!jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Too many blocks given");
      }
    }
  } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given: default to one block per process */
    jac->n         = size;
    jac->n_local   = 1;
    ierr           = PetscMalloc1(1,&jac->l_lens);CHKERRQ(ierr);
    jac->l_lens[0] = M;
  } else { /* jac->n > 0 && jac->n_local > 0 */
    if (!jac->l_lens) {
      ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
    }
  }
  if (jac->n_local < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of blocks is less than number of processors");

  /* -------------------------
      Determines mat and pmat
  ---------------------------*/
  ierr = MatHasOperation(pc->mat,MATOP_GET_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr);
  if (!hasop && size == 1) {
    /* sequential and no diagonal-block operation: use the matrices directly */
    mat  = pc->mat;
    pmat = pc->pmat;
  } else {
    if (pc->useAmat) {
      /* use block from Amat matrix, not Pmat for local MatMult() */
      ierr = MatGetDiagonalBlock(pc->mat,&mat);CHKERRQ(ierr);
      /* make submatrix have same prefix as entire matrix */
      ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->mat,&mprefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)mat,mprefix);CHKERRQ(ierr);
    }
    if (pc->pmat != pc->mat || !pc->useAmat) {
      ierr = MatGetDiagonalBlock(pc->pmat,&pmat);CHKERRQ(ierr);
      /* make submatrix have same prefix as entire matrix */
      ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)pmat,pprefix);CHKERRQ(ierr);
    } else pmat = mat;
  }

  /* ------
     Setup code depends on the number of blocks
  */
  if (jac->n_local == 1) {
    ierr = PCSetUp_BJacobi_Singleblock(pc,mat,pmat);CHKERRQ(ierr);
  } else {
    ierr = PCSetUp_BJacobi_Multiblock(pc,mat,pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
142 
143 /* Default destroy, if it has never been setup */
144 static PetscErrorCode PCDestroy_BJacobi(PC pc)
145 {
146   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
147   PetscErrorCode ierr;
148 
149   PetscFunctionBegin;
150   ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
151   ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
152   ierr = PetscFree(pc->data);CHKERRQ(ierr);
153   PetscFunctionReturn(0);
154 }
155 
156 
157 static PetscErrorCode PCSetFromOptions_BJacobi(PetscOptionItems *PetscOptionsObject,PC pc)
158 {
159   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
160   PetscErrorCode ierr;
161   PetscInt       blocks,i;
162   PetscBool      flg;
163 
164   PetscFunctionBegin;
165   ierr = PetscOptionsHead(PetscOptionsObject,"Block Jacobi options");CHKERRQ(ierr);
166   ierr = PetscOptionsInt("-pc_bjacobi_blocks","Total number of blocks","PCBJacobiSetTotalBlocks",jac->n,&blocks,&flg);CHKERRQ(ierr);
167   if (flg) {ierr = PCBJacobiSetTotalBlocks(pc,blocks,NULL);CHKERRQ(ierr);}
168   ierr = PetscOptionsInt("-pc_bjacobi_local_blocks","Local number of blocks","PCBJacobiSetLocalBlocks",jac->n_local,&blocks,&flg);CHKERRQ(ierr);
169   if (flg) {ierr = PCBJacobiSetLocalBlocks(pc,blocks,NULL);CHKERRQ(ierr);}
170   if (jac->ksp) {
171     /* The sub-KSP has already been set up (e.g., PCSetUp_BJacobi_Singleblock), but KSPSetFromOptions was not called
172      * unless we had already been called. */
173     for (i=0; i<jac->n_local; i++) {
174       ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr);
175     }
176   }
177   ierr = PetscOptionsTail();CHKERRQ(ierr);
178   PetscFunctionReturn(0);
179 }
180 
181 #include <petscdraw.h>
/*
   PCView_BJacobi - Prints the block layout and (depending on viewer type)
   the sub-KSP configuration.  Supports ASCII, string, and draw viewers.
*/
static PetscErrorCode PCView_BJacobi(PC pc,PetscViewer viewer)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscMPIInt          rank;
  PetscInt             i;
  PetscBool            iascii,isstring,isdraw;
  PetscViewer          sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (iascii) {
    if (pc->useAmat) {
      ierr = PetscViewerASCIIPrintf(viewer,"  using Amat local matrix, number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer,"  number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (jac->same_local_solves) {
      /* all blocks use the same solver: only rank 0 (or subcomm color 0) prints one representative KSP */
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solver is the same for all blocks, as in the following KSP and PC objects on rank 0:\n");CHKERRQ(ierr);
      if (jac->ksp && !jac->psubcomm) {
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr);
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      } else if (mpjac && jac->ksp && mpjac->psubcomm) {
        /* multi-process blocks: sub-viewer lives on the subcommunicator */
        ierr = PetscViewerGetSubViewer(viewer,mpjac->psubcomm->child,&sviewer);CHKERRQ(ierr);
        if (!mpjac->psubcomm->color) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(*(jac->ksp),sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr);
        ierr = PetscViewerRestoreSubViewer(viewer,mpjac->psubcomm->child,&sviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      /*  extra call needed because of the two calls to PetscViewerASCIIPushSynchronized() in PetscViewerGetSubViewer() */
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    } else {
      PetscInt n_global;
      /* blocks may differ: every rank prints all of its local block solvers */
      ierr = MPIU_Allreduce(&jac->n_local,&n_global,1,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D, first local block number = %D\n",
                                                rank,jac->n_local,jac->first_local);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr);
        ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blks=%D",jac->n);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x,y,bottom,h;

    ierr   = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr   = PetscDrawGetCurrentPoint(draw,&x,&y);CHKERRQ(ierr);
    ierr   = PetscSNPrintf(str,25,"Number blocks %D",jac->n);CHKERRQ(ierr);
    ierr   = PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);CHKERRQ(ierr);
    bottom = y - h;
    ierr   = PetscDrawPushCurrentPoint(draw,x,bottom);CHKERRQ(ierr);
    /* warning the communicator on viewer is different than on ksp in parallel */
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],viewer);CHKERRQ(ierr);}
    ierr = PetscDrawPopCurrentPoint(draw);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
267 
268 /* -------------------------------------------------------------------------------------*/
269 
270 static PetscErrorCode  PCBJacobiGetSubKSP_BJacobi(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
271 {
272   PC_BJacobi *jac = (PC_BJacobi*)pc->data;
273 
274   PetscFunctionBegin;
275   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call KSPSetUp() or PCSetUp() first");
276 
277   if (n_local) *n_local = jac->n_local;
278   if (first_local) *first_local = jac->first_local;
279   *ksp                   = jac->ksp;
280   jac->same_local_solves = PETSC_FALSE;        /* Assume that local solves are now different;
281                                                   not necessarily true though!  This flag is
282                                                   used only for PCView_BJacobi() */
283   PetscFunctionReturn(0);
284 }
285 
286 static PetscErrorCode  PCBJacobiSetTotalBlocks_BJacobi(PC pc,PetscInt blocks,PetscInt *lens)
287 {
288   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
289   PetscErrorCode ierr;
290 
291   PetscFunctionBegin;
292   if (pc->setupcalled > 0 && jac->n!=blocks) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Cannot alter number of blocks after PCSetUp()/KSPSetUp() has been called");
293   jac->n = blocks;
294   if (!lens) jac->g_lens = NULL;
295   else {
296     ierr = PetscMalloc1(blocks,&jac->g_lens);CHKERRQ(ierr);
297     ierr = PetscLogObjectMemory((PetscObject)pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
298     ierr = PetscArraycpy(jac->g_lens,lens,blocks);CHKERRQ(ierr);
299   }
300   PetscFunctionReturn(0);
301 }
302 
303 static PetscErrorCode  PCBJacobiGetTotalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
304 {
305   PC_BJacobi *jac = (PC_BJacobi*) pc->data;
306 
307   PetscFunctionBegin;
308   *blocks = jac->n;
309   if (lens) *lens = jac->g_lens;
310   PetscFunctionReturn(0);
311 }
312 
313 static PetscErrorCode  PCBJacobiSetLocalBlocks_BJacobi(PC pc,PetscInt blocks,const PetscInt lens[])
314 {
315   PC_BJacobi     *jac;
316   PetscErrorCode ierr;
317 
318   PetscFunctionBegin;
319   jac = (PC_BJacobi*)pc->data;
320 
321   jac->n_local = blocks;
322   if (!lens) jac->l_lens = NULL;
323   else {
324     ierr = PetscMalloc1(blocks,&jac->l_lens);CHKERRQ(ierr);
325     ierr = PetscLogObjectMemory((PetscObject)pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
326     ierr = PetscArraycpy(jac->l_lens,lens,blocks);CHKERRQ(ierr);
327   }
328   PetscFunctionReturn(0);
329 }
330 
331 static PetscErrorCode  PCBJacobiGetLocalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
332 {
333   PC_BJacobi *jac = (PC_BJacobi*) pc->data;
334 
335   PetscFunctionBegin;
336   *blocks = jac->n_local;
337   if (lens) *lens = jac->l_lens;
338   PetscFunctionReturn(0);
339 }
340 
341 /* -------------------------------------------------------------------------------------*/
342 
343 /*@C
344    PCBJacobiGetSubKSP - Gets the local KSP contexts for all blocks on
345    this processor.
346 
347    Not Collective
348 
349    Input Parameter:
350 .  pc - the preconditioner context
351 
352    Output Parameters:
353 +  n_local - the number of blocks on this processor, or NULL
354 .  first_local - the global number of the first block on this processor, or NULL
355 -  ksp - the array of KSP contexts
356 
357    Notes:
358    After PCBJacobiGetSubKSP() the array of KSP contexts is not to be freed.
359 
360    Currently for some matrix implementations only 1 block per processor
361    is supported.
362 
363    You must call KSPSetUp() or PCSetUp() before calling PCBJacobiGetSubKSP().
364 
365    Fortran Usage: You must pass in a KSP array that is large enough to contain all the local KSPs.
366       You can call PCBJacobiGetSubKSP(pc,nlocal,firstlocal,PETSC_NULL_KSP,ierr) to determine how large the
367       KSP array must be.
368 
369    Level: advanced
370 
.seealso: PCBJacobiSetTotalBlocks(), PCBJacobiSetLocalBlocks(), PCASM
372 @*/
373 PetscErrorCode  PCBJacobiGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
374 {
375   PetscErrorCode ierr;
376 
377   PetscFunctionBegin;
378   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
379   ierr = PetscUseMethod(pc,"PCBJacobiGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
380   PetscFunctionReturn(0);
381 }
382 
383 /*@
384    PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block
385    Jacobi preconditioner.
386 
387    Collective on PC
388 
389    Input Parameters:
390 +  pc - the preconditioner context
391 .  blocks - the number of blocks
392 -  lens - [optional] integer array containing the size of each block
393 
394    Options Database Key:
395 .  -pc_bjacobi_blocks <blocks> - Sets the number of global blocks
396 
397    Notes:
398    Currently only a limited number of blocking configurations are supported.
399    All processors sharing the PC must call this routine with the same data.
400 
401    Level: intermediate
402 
403 .seealso: PCSetUseAmat(), PCBJacobiSetLocalBlocks()
404 @*/
405 PetscErrorCode  PCBJacobiSetTotalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
406 {
407   PetscErrorCode ierr;
408 
409   PetscFunctionBegin;
410   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
411   if (blocks <= 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Must have positive blocks");
412   ierr = PetscTryMethod(pc,"PCBJacobiSetTotalBlocks_C",(PC,PetscInt,const PetscInt[]),(pc,blocks,lens));CHKERRQ(ierr);
413   PetscFunctionReturn(0);
414 }
415 
416 /*@C
417    PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block
418    Jacobi preconditioner.
419 
420    Not Collective
421 
422    Input Parameter:
423 .  pc - the preconditioner context
424 
425    Output parameters:
426 +  blocks - the number of blocks
427 -  lens - integer array containing the size of each block
428 
429    Level: intermediate
430 
431 .seealso: PCSetUseAmat(), PCBJacobiGetLocalBlocks()
432 @*/
433 PetscErrorCode  PCBJacobiGetTotalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
434 {
435   PetscErrorCode ierr;
436 
437   PetscFunctionBegin;
438   PetscValidHeaderSpecific(pc, PC_CLASSID,1);
439   PetscValidIntPointer(blocks,2);
440   ierr = PetscUseMethod(pc,"PCBJacobiGetTotalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
441   PetscFunctionReturn(0);
442 }
443 
444 /*@
445    PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block
446    Jacobi preconditioner.
447 
448    Not Collective
449 
450    Input Parameters:
451 +  pc - the preconditioner context
452 .  blocks - the number of blocks
453 -  lens - [optional] integer array containing size of each block
454 
455    Options Database Key:
456 .  -pc_bjacobi_local_blocks <blocks> - Sets the number of local blocks
457 
458    Note:
459    Currently only a limited number of blocking configurations are supported.
460 
461    Level: intermediate
462 
463 .seealso: PCSetUseAmat(), PCBJacobiSetTotalBlocks()
464 @*/
465 PetscErrorCode  PCBJacobiSetLocalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
466 {
467   PetscErrorCode ierr;
468 
469   PetscFunctionBegin;
470   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
471   if (blocks < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have nonegative blocks");
472   ierr = PetscTryMethod(pc,"PCBJacobiSetLocalBlocks_C",(PC,PetscInt,const PetscInt []),(pc,blocks,lens));CHKERRQ(ierr);
473   PetscFunctionReturn(0);
474 }
475 
476 /*@C
477    PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block
478    Jacobi preconditioner.
479 
480    Not Collective
481 
482    Input Parameters:
483 +  pc - the preconditioner context
484 .  blocks - the number of blocks
485 -  lens - [optional] integer array containing size of each block
486 
487    Note:
488    Currently only a limited number of blocking configurations are supported.
489 
490    Level: intermediate
491 
492 .seealso: PCSetUseAmat(), PCBJacobiGetTotalBlocks()
493 @*/
494 PetscErrorCode  PCBJacobiGetLocalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
495 {
496   PetscErrorCode ierr;
497 
498   PetscFunctionBegin;
499   PetscValidHeaderSpecific(pc, PC_CLASSID,1);
500   PetscValidIntPointer(blocks,2);
501   ierr = PetscUseMethod(pc,"PCBJacobiGetLocalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
502   PetscFunctionReturn(0);
503 }
504 
505 /* -----------------------------------------------------------------------------------*/
506 
507 /*MC
508    PCBJACOBI - Use block Jacobi preconditioning, each block is (approximately) solved with
509            its own KSP object.
510 
511    Options Database Keys:
512 +  -pc_use_amat - use Amat to apply block of operator in inner Krylov method
513 -  -pc_bjacobi_blocks <n> - use n total blocks
514 
515    Notes:
516     Each processor can have one or more blocks, or a single block can be shared by several processes. Defaults to one block per processor.
517 
     To set options on the solvers for each block append -sub_ to all the KSP and PC
519         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
520 
521      To set the options on the solvers separate for each block call PCBJacobiGetSubKSP()
522          and set the options directly on the resulting KSP object (you can access its PC
523          KSPGetPC())
524 
525      For GPU-based vectors (CUDA, ViennaCL) it is recommended to use exactly one block per MPI process for best
526          performance.  Different block partitioning may lead to additional data transfers
527          between host and GPU that lead to degraded performance.
528 
529      The options prefix for each block is sub_, for example -sub_pc_type lu.
530 
     When multiple processes share a single block, each block encompasses exactly all the unknowns owned by its set of processes.
532 
533    Level: beginner
534 
535 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
536            PCASM, PCSetUseAmat(), PCGetUseAmat(), PCBJacobiGetSubKSP(), PCBJacobiSetTotalBlocks(),
537            PCBJacobiSetLocalBlocks(), PCSetModifySubMatrices()
538 M*/
539 
540 PETSC_EXTERN PetscErrorCode PCCreate_BJacobi(PC pc)
541 {
542   PetscErrorCode ierr;
543   PetscMPIInt    rank;
544   PC_BJacobi     *jac;
545 
546   PetscFunctionBegin;
547   ierr = PetscNewLog(pc,&jac);CHKERRQ(ierr);
548   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
549 
550   pc->ops->apply           = NULL;
551   pc->ops->matapply        = NULL;
552   pc->ops->applytranspose  = NULL;
553   pc->ops->setup           = PCSetUp_BJacobi;
554   pc->ops->destroy         = PCDestroy_BJacobi;
555   pc->ops->setfromoptions  = PCSetFromOptions_BJacobi;
556   pc->ops->view            = PCView_BJacobi;
557   pc->ops->applyrichardson = NULL;
558 
559   pc->data               = (void*)jac;
560   jac->n                 = -1;
561   jac->n_local           = -1;
562   jac->first_local       = rank;
563   jac->ksp               = NULL;
564   jac->same_local_solves = PETSC_TRUE;
565   jac->g_lens            = NULL;
566   jac->l_lens            = NULL;
567   jac->psubcomm          = NULL;
568 
569   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetSubKSP_C",PCBJacobiGetSubKSP_BJacobi);CHKERRQ(ierr);
570   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetTotalBlocks_C",PCBJacobiSetTotalBlocks_BJacobi);CHKERRQ(ierr);
571   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetTotalBlocks_C",PCBJacobiGetTotalBlocks_BJacobi);CHKERRQ(ierr);
572   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetLocalBlocks_C",PCBJacobiSetLocalBlocks_BJacobi);CHKERRQ(ierr);
573   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetLocalBlocks_C",PCBJacobiGetLocalBlocks_BJacobi);CHKERRQ(ierr);
574   PetscFunctionReturn(0);
575 }
576 
577 /* --------------------------------------------------------------------------------------------*/
578 /*
579         These are for a single block per processor; works for AIJ, BAIJ; Seq and MPI
580 */
581 static PetscErrorCode PCReset_BJacobi_Singleblock(PC pc)
582 {
583   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
584   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
585   PetscErrorCode         ierr;
586 
587   PetscFunctionBegin;
588   ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);
589   ierr = VecDestroy(&bjac->x);CHKERRQ(ierr);
590   ierr = VecDestroy(&bjac->y);CHKERRQ(ierr);
591   PetscFunctionReturn(0);
592 }
593 
594 static PetscErrorCode PCDestroy_BJacobi_Singleblock(PC pc)
595 {
596   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
597   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
598   PetscErrorCode         ierr;
599 
600   PetscFunctionBegin;
601   ierr = PCReset_BJacobi_Singleblock(pc);CHKERRQ(ierr);
602   ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr);
603   ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
604   ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
605   ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
606   ierr = PetscFree(bjac);CHKERRQ(ierr);
607   ierr = PetscFree(pc->data);CHKERRQ(ierr);
608   PetscFunctionReturn(0);
609 }
610 
611 static PetscErrorCode PCSetUpOnBlocks_BJacobi_Singleblock(PC pc)
612 {
613   PetscErrorCode     ierr;
614   PC_BJacobi         *jac = (PC_BJacobi*)pc->data;
615   KSP                subksp = jac->ksp[0];
616   KSPConvergedReason reason;
617 
618   PetscFunctionBegin;
619   ierr = KSPSetUp(subksp);CHKERRQ(ierr);
620   ierr = KSPGetConvergedReason(subksp,&reason);CHKERRQ(ierr);
621   if (reason == KSP_DIVERGED_PC_FAILED) {
622     pc->failedreason = PC_SUBPC_ERROR;
623   }
624   PetscFunctionReturn(0);
625 }
626 
627 static PetscErrorCode PCApply_BJacobi_Singleblock(PC pc,Vec x,Vec y)
628 {
629   PetscErrorCode         ierr;
630   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
631   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
632 
633   PetscFunctionBegin;
634   ierr = VecGetLocalVectorRead(x, bjac->x);CHKERRQ(ierr);
635   ierr = VecGetLocalVector(y, bjac->y);CHKERRQ(ierr);
636  /* Since the inner KSP matrix may point directly to the diagonal block of an MPI matrix the inner
637      matrix may change even if the outer KSP/PC has not updated the preconditioner, this will trigger a rebuild
638      of the inner preconditioner automatically unless we pass down the outer preconditioners reuse flag.*/
639   ierr = KSPSetReusePreconditioner(jac->ksp[0],pc->reusepreconditioner);CHKERRQ(ierr);
640   ierr = KSPSolve(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
641   ierr = KSPCheckSolve(jac->ksp[0],pc,bjac->y);CHKERRQ(ierr);
642   ierr = VecRestoreLocalVectorRead(x, bjac->x);CHKERRQ(ierr);
643   ierr = VecRestoreLocalVector(y, bjac->y);CHKERRQ(ierr);
644   PetscFunctionReturn(0);
645 }
646 
647 static PetscErrorCode PCMatApply_BJacobi_Singleblock(PC pc,Mat X,Mat Y)
648 {
649   PC_BJacobi     *jac  = (PC_BJacobi*)pc->data;
650   Mat            sX,sY;
651   PetscErrorCode ierr;
652 
653   PetscFunctionBegin;
654  /* Since the inner KSP matrix may point directly to the diagonal block of an MPI matrix the inner
655      matrix may change even if the outer KSP/PC has not updated the preconditioner, this will trigger a rebuild
656      of the inner preconditioner automatically unless we pass down the outer preconditioners reuse flag.*/
657   ierr = KSPSetReusePreconditioner(jac->ksp[0],pc->reusepreconditioner);CHKERRQ(ierr);
658   ierr = MatDenseGetLocalMatrix(X,&sX);CHKERRQ(ierr);
659   ierr = MatDenseGetLocalMatrix(Y,&sY);CHKERRQ(ierr);
660   ierr = KSPMatSolve(jac->ksp[0],sX,sY);CHKERRQ(ierr);
661   PetscFunctionReturn(0);
662 }
663 
664 static PetscErrorCode PCApplySymmetricLeft_BJacobi_Singleblock(PC pc,Vec x,Vec y)
665 {
666   PetscErrorCode         ierr;
667   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
668   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
669   PetscScalar            *y_array;
670   const PetscScalar      *x_array;
671   PC                     subpc;
672 
673   PetscFunctionBegin;
674   /*
675       The VecPlaceArray() is to avoid having to copy the
676     y vector into the bjac->x vector. The reason for
677     the bjac->x vector is that we need a sequential vector
678     for the sequential solve.
679   */
680   ierr = VecGetArrayRead(x,&x_array);CHKERRQ(ierr);
681   ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
682   ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
683   ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
684   /* apply the symmetric left portion of the inner PC operator */
685   /* note this by-passes the inner KSP and its options completely */
686   ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
687   ierr = PCApplySymmetricLeft(subpc,bjac->x,bjac->y);CHKERRQ(ierr);
688   ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
689   ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
690   ierr = VecRestoreArrayRead(x,&x_array);CHKERRQ(ierr);
691   ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
692   PetscFunctionReturn(0);
693 }
694 
695 static PetscErrorCode PCApplySymmetricRight_BJacobi_Singleblock(PC pc,Vec x,Vec y)
696 {
697   PetscErrorCode         ierr;
698   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
699   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
700   PetscScalar            *y_array;
701   const PetscScalar      *x_array;
702   PC                     subpc;
703 
704   PetscFunctionBegin;
705   /*
706       The VecPlaceArray() is to avoid having to copy the
707     y vector into the bjac->x vector. The reason for
708     the bjac->x vector is that we need a sequential vector
709     for the sequential solve.
710   */
711   ierr = VecGetArrayRead(x,&x_array);CHKERRQ(ierr);
712   ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
713   ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
714   ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
715 
716   /* apply the symmetric right portion of the inner PC operator */
717   /* note this by-passes the inner KSP and its options completely */
718 
719   ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
720   ierr = PCApplySymmetricRight(subpc,bjac->x,bjac->y);CHKERRQ(ierr);
721 
722   ierr = VecRestoreArrayRead(x,&x_array);CHKERRQ(ierr);
723   ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
724   PetscFunctionReturn(0);
725 }
726 
727 static PetscErrorCode PCApplyTranspose_BJacobi_Singleblock(PC pc,Vec x,Vec y)
728 {
729   PetscErrorCode         ierr;
730   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
731   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
732   PetscScalar            *y_array;
733   const PetscScalar      *x_array;
734 
735   PetscFunctionBegin;
736   /*
737       The VecPlaceArray() is to avoid having to copy the
738     y vector into the bjac->x vector. The reason for
739     the bjac->x vector is that we need a sequential vector
740     for the sequential solve.
741   */
742   ierr = VecGetArrayRead(x,&x_array);CHKERRQ(ierr);
743   ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
744   ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
745   ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
746   ierr = KSPSolveTranspose(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
747   ierr = KSPCheckSolve(jac->ksp[0],pc,bjac->y);CHKERRQ(ierr);
748   ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
749   ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
750   ierr = VecRestoreArrayRead(x,&x_array);CHKERRQ(ierr);
751   ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
752   PetscFunctionReturn(0);
753 }
754 
755 static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC pc,Mat mat,Mat pmat)
756 {
757   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
758   PetscErrorCode         ierr;
759   PetscInt               m;
760   KSP                    ksp;
761   PC_BJacobi_Singleblock *bjac;
762   PetscBool              wasSetup = PETSC_TRUE;
763   VecType                vectype;
764 
765   PetscFunctionBegin;
766   if (!pc->setupcalled) {
767     const char *prefix;
768 
769     if (!jac->ksp) {
770       wasSetup = PETSC_FALSE;
771 
772       ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
773       ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
774       ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
775       ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
776       ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
777       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
778       ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
779       ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
780 
781       pc->ops->reset               = PCReset_BJacobi_Singleblock;
782       pc->ops->destroy             = PCDestroy_BJacobi_Singleblock;
783       pc->ops->apply               = PCApply_BJacobi_Singleblock;
784       pc->ops->matapply            = PCMatApply_BJacobi_Singleblock;
785       pc->ops->applysymmetricleft  = PCApplySymmetricLeft_BJacobi_Singleblock;
786       pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock;
787       pc->ops->applytranspose      = PCApplyTranspose_BJacobi_Singleblock;
788       pc->ops->setuponblocks       = PCSetUpOnBlocks_BJacobi_Singleblock;
789 
790       ierr        = PetscMalloc1(1,&jac->ksp);CHKERRQ(ierr);
791       jac->ksp[0] = ksp;
792 
793       ierr      = PetscNewLog(pc,&bjac);CHKERRQ(ierr);
794       jac->data = (void*)bjac;
795     } else {
796       ksp  = jac->ksp[0];
797       bjac = (PC_BJacobi_Singleblock*)jac->data;
798     }
799 
800     /*
801       The reason we need to generate these vectors is to serve
802       as the right-hand side and solution vector for the solve on the
803       block. We do not need to allocate space for the vectors since
804       that is provided via VecPlaceArray() just before the call to
805       KSPSolve() on the block.
806     */
807     ierr = MatGetSize(pmat,&m,&m);CHKERRQ(ierr);
808     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->x);CHKERRQ(ierr);
809     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->y);CHKERRQ(ierr);
810     ierr = MatGetVecType(pmat,&vectype);CHKERRQ(ierr);
811     ierr = VecSetType(bjac->x,vectype);CHKERRQ(ierr);
812     ierr = VecSetType(bjac->y,vectype);CHKERRQ(ierr);
813     ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->x);CHKERRQ(ierr);
814     ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->y);CHKERRQ(ierr);
815   } else {
816     ksp  = jac->ksp[0];
817     bjac = (PC_BJacobi_Singleblock*)jac->data;
818   }
819   if (pc->useAmat) {
820     ierr = KSPSetOperators(ksp,mat,pmat);CHKERRQ(ierr);
821   } else {
822     ierr = KSPSetOperators(ksp,pmat,pmat);CHKERRQ(ierr);
823   }
824   if (!wasSetup && pc->setfromoptionscalled) {
825     /* If PCSetFromOptions_BJacobi is called later, KSPSetFromOptions will be called at that time. */
826     ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
827   }
828   PetscFunctionReturn(0);
829 }
830 
831 /* ---------------------------------------------------------------------------------------------*/
832 static PetscErrorCode PCReset_BJacobi_Multiblock(PC pc)
833 {
834   PC_BJacobi            *jac  = (PC_BJacobi*)pc->data;
835   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
836   PetscErrorCode        ierr;
837   PetscInt              i;
838 
839   PetscFunctionBegin;
840   if (bjac && bjac->pmat) {
841     ierr = MatDestroyMatrices(jac->n_local,&bjac->pmat);CHKERRQ(ierr);
842     if (pc->useAmat) {
843       ierr = MatDestroyMatrices(jac->n_local,&bjac->mat);CHKERRQ(ierr);
844     }
845   }
846 
847   for (i=0; i<jac->n_local; i++) {
848     ierr = KSPReset(jac->ksp[i]);CHKERRQ(ierr);
849     if (bjac && bjac->x) {
850       ierr = VecDestroy(&bjac->x[i]);CHKERRQ(ierr);
851       ierr = VecDestroy(&bjac->y[i]);CHKERRQ(ierr);
852       ierr = ISDestroy(&bjac->is[i]);CHKERRQ(ierr);
853     }
854   }
855   ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
856   ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
857   PetscFunctionReturn(0);
858 }
859 
860 static PetscErrorCode PCDestroy_BJacobi_Multiblock(PC pc)
861 {
862   PC_BJacobi            *jac  = (PC_BJacobi*)pc->data;
863   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
864   PetscErrorCode        ierr;
865   PetscInt              i;
866 
867   PetscFunctionBegin;
868   ierr = PCReset_BJacobi_Multiblock(pc);CHKERRQ(ierr);
869   if (bjac) {
870     ierr = PetscFree2(bjac->x,bjac->y);CHKERRQ(ierr);
871     ierr = PetscFree(bjac->starts);CHKERRQ(ierr);
872     ierr = PetscFree(bjac->is);CHKERRQ(ierr);
873   }
874   ierr = PetscFree(jac->data);CHKERRQ(ierr);
875   for (i=0; i<jac->n_local; i++) {
876     ierr = KSPDestroy(&jac->ksp[i]);CHKERRQ(ierr);
877   }
878   ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
879   ierr = PetscFree(pc->data);CHKERRQ(ierr);
880   PetscFunctionReturn(0);
881 }
882 
883 static PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiblock(PC pc)
884 {
885   PC_BJacobi         *jac = (PC_BJacobi*)pc->data;
886   PetscErrorCode     ierr;
887   PetscInt           i,n_local = jac->n_local;
888   KSPConvergedReason reason;
889 
890   PetscFunctionBegin;
891   for (i=0; i<n_local; i++) {
892     ierr = KSPSetUp(jac->ksp[i]);CHKERRQ(ierr);
893     ierr = KSPGetConvergedReason(jac->ksp[i],&reason);CHKERRQ(ierr);
894     if (reason == KSP_DIVERGED_PC_FAILED) {
895       pc->failedreason = PC_SUBPC_ERROR;
896     }
897   }
898   PetscFunctionReturn(0);
899 }
900 
901 /*
902       Preconditioner for block Jacobi
903 */
904 static PetscErrorCode PCApply_BJacobi_Multiblock(PC pc,Vec x,Vec y)
905 {
906   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
907   PetscErrorCode        ierr;
908   PetscInt              i,n_local = jac->n_local;
909   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
910   PetscScalar           *yin;
911   const PetscScalar     *xin;
912 
913   PetscFunctionBegin;
914   ierr = VecGetArrayRead(x,&xin);CHKERRQ(ierr);
915   ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
916   for (i=0; i<n_local; i++) {
917     /*
918        To avoid copying the subvector from x into a workspace we instead
919        make the workspace vector array point to the subpart of the array of
920        the global vector.
921     */
922     ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
923     ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);
924 
925     ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
926     ierr = KSPSolve(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
927     ierr = KSPCheckSolve(jac->ksp[i],pc,bjac->y[i]);CHKERRQ(ierr);
928     ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
929 
930     ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
931     ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
932   }
933   ierr = VecRestoreArrayRead(x,&xin);CHKERRQ(ierr);
934   ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
935   PetscFunctionReturn(0);
936 }
937 
938 /*
939       Preconditioner for block Jacobi
940 */
941 static PetscErrorCode PCApplyTranspose_BJacobi_Multiblock(PC pc,Vec x,Vec y)
942 {
943   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
944   PetscErrorCode        ierr;
945   PetscInt              i,n_local = jac->n_local;
946   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
947   PetscScalar           *yin;
948   const PetscScalar     *xin;
949 
950   PetscFunctionBegin;
951   ierr = VecGetArrayRead(x,&xin);CHKERRQ(ierr);
952   ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
953   for (i=0; i<n_local; i++) {
954     /*
955        To avoid copying the subvector from x into a workspace we instead
956        make the workspace vector array point to the subpart of the array of
957        the global vector.
958     */
959     ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
960     ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);
961 
962     ierr = PetscLogEventBegin(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
963     ierr = KSPSolveTranspose(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
964     ierr = KSPCheckSolve(jac->ksp[i],pc,bjac->y[i]);CHKERRQ(ierr);
965     ierr = PetscLogEventEnd(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
966 
967     ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
968     ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
969   }
970   ierr = VecRestoreArrayRead(x,&xin);CHKERRQ(ierr);
971   ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
972   PetscFunctionReturn(0);
973 }
974 
975 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC pc,Mat mat,Mat pmat)
976 {
977   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
978   PetscErrorCode        ierr;
979   PetscInt              m,n_local,N,M,start,i;
980   const char            *prefix,*pprefix,*mprefix;
981   KSP                   ksp;
982   Vec                   x,y;
983   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
984   PC                    subpc;
985   IS                    is;
986   MatReuse              scall;
987   VecType               vectype;
988 
989   PetscFunctionBegin;
990   ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
991 
992   n_local = jac->n_local;
993 
994   if (pc->useAmat) {
995     PetscBool same;
996     ierr = PetscObjectTypeCompare((PetscObject)mat,((PetscObject)pmat)->type_name,&same);CHKERRQ(ierr);
997     if (!same) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_INCOMP,"Matrices not of same type");
998   }
999 
1000   if (!pc->setupcalled) {
1001     scall = MAT_INITIAL_MATRIX;
1002 
1003     if (!jac->ksp) {
1004       pc->ops->reset         = PCReset_BJacobi_Multiblock;
1005       pc->ops->destroy       = PCDestroy_BJacobi_Multiblock;
1006       pc->ops->apply         = PCApply_BJacobi_Multiblock;
1007       pc->ops->matapply      = NULL;
1008       pc->ops->applytranspose= PCApplyTranspose_BJacobi_Multiblock;
1009       pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Multiblock;
1010 
1011       ierr = PetscNewLog(pc,&bjac);CHKERRQ(ierr);
1012       ierr = PetscMalloc1(n_local,&jac->ksp);CHKERRQ(ierr);
1013       ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(n_local*sizeof(KSP)));CHKERRQ(ierr);
1014       ierr = PetscMalloc2(n_local,&bjac->x,n_local,&bjac->y);CHKERRQ(ierr);
1015       ierr = PetscMalloc1(n_local,&bjac->starts);CHKERRQ(ierr);
1016       ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(n_local*sizeof(PetscScalar)));CHKERRQ(ierr);
1017 
1018       jac->data = (void*)bjac;
1019       ierr      = PetscMalloc1(n_local,&bjac->is);CHKERRQ(ierr);
1020       ierr      = PetscLogObjectMemory((PetscObject)pc,sizeof(n_local*sizeof(IS)));CHKERRQ(ierr);
1021 
1022       for (i=0; i<n_local; i++) {
1023         ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
1024         ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
1025         ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
1026         ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
1027         ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
1028         ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
1029         ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
1030         ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
1031         ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
1032 
1033         jac->ksp[i] = ksp;
1034       }
1035     } else {
1036       bjac = (PC_BJacobi_Multiblock*)jac->data;
1037     }
1038 
1039     start = 0;
1040     ierr = MatGetVecType(pmat,&vectype);CHKERRQ(ierr);
1041     for (i=0; i<n_local; i++) {
1042       m = jac->l_lens[i];
1043       /*
1044       The reason we need to generate these vectors is to serve
1045       as the right-hand side and solution vector for the solve on the
1046       block. We do not need to allocate space for the vectors since
1047       that is provided via VecPlaceArray() just before the call to
1048       KSPSolve() on the block.
1049 
1050       */
1051       ierr = VecCreateSeq(PETSC_COMM_SELF,m,&x);CHKERRQ(ierr);
1052       ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&y);CHKERRQ(ierr);
1053       ierr = VecSetType(x,vectype);CHKERRQ(ierr);
1054       ierr = VecSetType(y,vectype);CHKERRQ(ierr);
1055       ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)x);CHKERRQ(ierr);
1056       ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)y);CHKERRQ(ierr);
1057 
1058       bjac->x[i]      = x;
1059       bjac->y[i]      = y;
1060       bjac->starts[i] = start;
1061 
1062       ierr        = ISCreateStride(PETSC_COMM_SELF,m,start,1,&is);CHKERRQ(ierr);
1063       bjac->is[i] = is;
1064       ierr        = PetscLogObjectParent((PetscObject)pc,(PetscObject)is);CHKERRQ(ierr);
1065 
1066       start += m;
1067     }
1068   } else {
1069     bjac = (PC_BJacobi_Multiblock*)jac->data;
1070     /*
1071        Destroy the blocks from the previous iteration
1072     */
1073     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
1074       ierr = MatDestroyMatrices(n_local,&bjac->pmat);CHKERRQ(ierr);
1075       if (pc->useAmat) {
1076         ierr = MatDestroyMatrices(n_local,&bjac->mat);CHKERRQ(ierr);
1077       }
1078       scall = MAT_INITIAL_MATRIX;
1079     } else scall = MAT_REUSE_MATRIX;
1080   }
1081 
1082   ierr = MatCreateSubMatrices(pmat,n_local,bjac->is,bjac->is,scall,&bjac->pmat);CHKERRQ(ierr);
1083   if (pc->useAmat) {
1084     ierr = MatCreateSubMatrices(mat,n_local,bjac->is,bjac->is,scall,&bjac->mat);CHKERRQ(ierr);
1085   }
1086   /* Return control to the user so that the submatrices can be modified (e.g., to apply
1087      different boundary conditions for the submatrices than for the global problem) */
1088   ierr = PCModifySubMatrices(pc,n_local,bjac->is,bjac->is,bjac->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
1089 
1090   ierr = PetscObjectGetOptionsPrefix((PetscObject)pmat,&pprefix);CHKERRQ(ierr);
1091   for (i=0; i<n_local; i++) {
1092     ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->pmat[i]);CHKERRQ(ierr);
1093     ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->pmat[i],pprefix);CHKERRQ(ierr);
1094     if (pc->useAmat) {
1095       ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->mat[i]);CHKERRQ(ierr);
1096       ierr = PetscObjectGetOptionsPrefix((PetscObject)mat,&mprefix);CHKERRQ(ierr);
1097       ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->mat[i],mprefix);CHKERRQ(ierr);
1098       ierr = KSPSetOperators(jac->ksp[i],bjac->mat[i],bjac->pmat[i]);CHKERRQ(ierr);
1099     } else {
1100       ierr = KSPSetOperators(jac->ksp[i],bjac->pmat[i],bjac->pmat[i]);CHKERRQ(ierr);
1101     }
1102     if (pc->setfromoptionscalled) {
1103       ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr);
1104     }
1105   }
1106   PetscFunctionReturn(0);
1107 }
1108 
1109 /* ---------------------------------------------------------------------------------------------*/
1110 /*
1111       These are for a single block with multiple processes;
1112 */
1113 static PetscErrorCode PCReset_BJacobi_Multiproc(PC pc)
1114 {
1115   PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
1116   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1117   PetscErrorCode       ierr;
1118 
1119   PetscFunctionBegin;
1120   ierr = VecDestroy(&mpjac->ysub);CHKERRQ(ierr);
1121   ierr = VecDestroy(&mpjac->xsub);CHKERRQ(ierr);
1122   ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);
1123   if (jac->ksp) {ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);}
1124   PetscFunctionReturn(0);
1125 }
1126 
1127 static PetscErrorCode PCDestroy_BJacobi_Multiproc(PC pc)
1128 {
1129   PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
1130   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1131   PetscErrorCode       ierr;
1132 
1133   PetscFunctionBegin;
1134   ierr = PCReset_BJacobi_Multiproc(pc);CHKERRQ(ierr);
1135   ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr);
1136   ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
1137   ierr = PetscSubcommDestroy(&mpjac->psubcomm);CHKERRQ(ierr);
1138 
1139   ierr = PetscFree(mpjac);CHKERRQ(ierr);
1140   ierr = PetscFree(pc->data);CHKERRQ(ierr);
1141   PetscFunctionReturn(0);
1142 }
1143 
1144 static PetscErrorCode PCApply_BJacobi_Multiproc(PC pc,Vec x,Vec y)
1145 {
1146   PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
1147   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1148   PetscErrorCode       ierr;
1149   PetscScalar          *yarray;
1150   const PetscScalar    *xarray;
1151   KSPConvergedReason   reason;
1152 
1153   PetscFunctionBegin;
1154   /* place x's and y's local arrays into xsub and ysub */
1155   ierr = VecGetArrayRead(x,&xarray);CHKERRQ(ierr);
1156   ierr = VecGetArray(y,&yarray);CHKERRQ(ierr);
1157   ierr = VecPlaceArray(mpjac->xsub,xarray);CHKERRQ(ierr);
1158   ierr = VecPlaceArray(mpjac->ysub,yarray);CHKERRQ(ierr);
1159 
1160   /* apply preconditioner on each matrix block */
1161   ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr);
1162   ierr = KSPSolve(jac->ksp[0],mpjac->xsub,mpjac->ysub);CHKERRQ(ierr);
1163   ierr = KSPCheckSolve(jac->ksp[0],pc,mpjac->ysub);CHKERRQ(ierr);
1164   ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr);
1165   ierr = KSPGetConvergedReason(jac->ksp[0],&reason);CHKERRQ(ierr);
1166   if (reason == KSP_DIVERGED_PC_FAILED) {
1167     pc->failedreason = PC_SUBPC_ERROR;
1168   }
1169 
1170   ierr = VecResetArray(mpjac->xsub);CHKERRQ(ierr);
1171   ierr = VecResetArray(mpjac->ysub);CHKERRQ(ierr);
1172   ierr = VecRestoreArrayRead(x,&xarray);CHKERRQ(ierr);
1173   ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
1174   PetscFunctionReturn(0);
1175 }
1176 
1177 static PetscErrorCode PCMatApply_BJacobi_Multiproc(PC pc,Mat X,Mat Y)
1178 {
1179   PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
1180   KSPConvergedReason   reason;
1181   Mat                  sX,sY;
1182   const PetscScalar    *x;
1183   PetscScalar          *y;
1184   PetscInt             m,N,lda,ldb;
1185   PetscErrorCode       ierr;
1186 
1187   PetscFunctionBegin;
1188   /* apply preconditioner on each matrix block */
1189   ierr = MatGetLocalSize(X,&m,NULL);CHKERRQ(ierr);
1190   ierr = MatGetSize(X,NULL,&N);CHKERRQ(ierr);
1191   ierr = MatDenseGetLDA(X,&lda);CHKERRQ(ierr);
1192   ierr = MatDenseGetLDA(Y,&ldb);CHKERRQ(ierr);
1193   ierr = MatDenseGetArrayRead(X,&x);CHKERRQ(ierr);
1194   ierr = MatDenseGetArrayWrite(Y,&y);CHKERRQ(ierr);
1195   ierr = MatCreateDense(PetscObjectComm((PetscObject)jac->ksp[0]),m,PETSC_DECIDE,PETSC_DECIDE,N,(PetscScalar*)x,&sX);CHKERRQ(ierr);
1196   ierr = MatCreateDense(PetscObjectComm((PetscObject)jac->ksp[0]),m,PETSC_DECIDE,PETSC_DECIDE,N,y,&sY);CHKERRQ(ierr);
1197   ierr = MatDenseSetLDA(sX,lda);CHKERRQ(ierr);
1198   ierr = MatDenseSetLDA(sY,ldb);CHKERRQ(ierr);
1199   ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[0],X,Y,0);CHKERRQ(ierr);
1200   ierr = KSPMatSolve(jac->ksp[0],sX,sY);CHKERRQ(ierr);
1201   ierr = KSPCheckSolve(jac->ksp[0],pc,NULL);CHKERRQ(ierr);
1202   ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[0],X,Y,0);CHKERRQ(ierr);
1203   ierr = MatDestroy(&sY);CHKERRQ(ierr);
1204   ierr = MatDestroy(&sX);CHKERRQ(ierr);
1205   ierr = MatDenseRestoreArrayWrite(Y,&y);CHKERRQ(ierr);
1206   ierr = MatDenseRestoreArrayRead(X,&x);CHKERRQ(ierr);
1207   ierr = KSPGetConvergedReason(jac->ksp[0],&reason);CHKERRQ(ierr);
1208   if (reason == KSP_DIVERGED_PC_FAILED) {
1209     pc->failedreason = PC_SUBPC_ERROR;
1210   }
1211   PetscFunctionReturn(0);
1212 }
1213 
1214 static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC pc)
1215 {
1216   PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
1217   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1218   PetscErrorCode       ierr;
1219   PetscInt             m,n;
1220   MPI_Comm             comm,subcomm=0;
1221   const char           *prefix;
1222   PetscBool            wasSetup = PETSC_TRUE;
1223   VecType              vectype;
1224 
1225 
1226   PetscFunctionBegin;
1227   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
1228   if (jac->n_local > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only a single block in a subcommunicator is supported");
1229   jac->n_local = 1; /* currently only a single block is supported for a subcommunicator */
1230   if (!pc->setupcalled) {
1231     wasSetup  = PETSC_FALSE;
1232     ierr      = PetscNewLog(pc,&mpjac);CHKERRQ(ierr);
1233     jac->data = (void*)mpjac;
1234 
1235     /* initialize datastructure mpjac */
1236     if (!jac->psubcomm) {
1237       /* Create default contiguous subcommunicatiors if user does not provide them */
1238       ierr = PetscSubcommCreate(comm,&jac->psubcomm);CHKERRQ(ierr);
1239       ierr = PetscSubcommSetNumber(jac->psubcomm,jac->n);CHKERRQ(ierr);
1240       ierr = PetscSubcommSetType(jac->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);
1241       ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
1242     }
1243     mpjac->psubcomm = jac->psubcomm;
1244     subcomm         = PetscSubcommChild(mpjac->psubcomm);
1245 
1246     /* Get matrix blocks of pmat */
1247     ierr = MatGetMultiProcBlock(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr);
1248 
1249     /* create a new PC that processors in each subcomm have copy of */
1250     ierr = PetscMalloc1(1,&jac->ksp);CHKERRQ(ierr);
1251     ierr = KSPCreate(subcomm,&jac->ksp[0]);CHKERRQ(ierr);
1252     ierr = KSPSetErrorIfNotConverged(jac->ksp[0],pc->erroriffailure);CHKERRQ(ierr);
1253     ierr = PetscObjectIncrementTabLevel((PetscObject)jac->ksp[0],(PetscObject)pc,1);CHKERRQ(ierr);
1254     ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)jac->ksp[0]);CHKERRQ(ierr);
1255     ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats);CHKERRQ(ierr);
1256     ierr = KSPGetPC(jac->ksp[0],&mpjac->pc);CHKERRQ(ierr);
1257 
1258     ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
1259     ierr = KSPSetOptionsPrefix(jac->ksp[0],prefix);CHKERRQ(ierr);
1260     ierr = KSPAppendOptionsPrefix(jac->ksp[0],"sub_");CHKERRQ(ierr);
1261 
1262     /* create dummy vectors xsub and ysub */
1263     ierr = MatGetLocalSize(mpjac->submats,&m,&n);CHKERRQ(ierr);
1264     ierr = VecCreateMPIWithArray(subcomm,1,n,PETSC_DECIDE,NULL,&mpjac->xsub);CHKERRQ(ierr);
1265     ierr = VecCreateMPIWithArray(subcomm,1,m,PETSC_DECIDE,NULL,&mpjac->ysub);CHKERRQ(ierr);
1266     ierr = MatGetVecType(mpjac->submats,&vectype);CHKERRQ(ierr);
1267     ierr = VecSetType(mpjac->xsub,vectype);CHKERRQ(ierr);
1268     ierr = VecSetType(mpjac->ysub,vectype);CHKERRQ(ierr);
1269     ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)mpjac->xsub);CHKERRQ(ierr);
1270     ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)mpjac->ysub);CHKERRQ(ierr);
1271 
1272     pc->ops->reset   = PCReset_BJacobi_Multiproc;
1273     pc->ops->destroy = PCDestroy_BJacobi_Multiproc;
1274     pc->ops->apply   = PCApply_BJacobi_Multiproc;
1275     pc->ops->matapply= PCMatApply_BJacobi_Multiproc;
1276   } else { /* pc->setupcalled */
1277     subcomm = PetscSubcommChild(mpjac->psubcomm);
1278     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
1279       /* destroy old matrix blocks, then get new matrix blocks */
1280       if (mpjac->submats) {ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);}
1281       ierr = MatGetMultiProcBlock(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr);
1282     } else {
1283       ierr = MatGetMultiProcBlock(pc->pmat,subcomm,MAT_REUSE_MATRIX,&mpjac->submats);CHKERRQ(ierr);
1284     }
1285     ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats);CHKERRQ(ierr);
1286   }
1287 
1288   if (!wasSetup && pc->setfromoptionscalled) {
1289     ierr = KSPSetFromOptions(jac->ksp[0]);CHKERRQ(ierr);
1290   }
1291   PetscFunctionReturn(0);
1292 }
1293