xref: /petsc/src/ksp/pc/impls/bjacobi/bjacobi.c (revision f5ff9c666c0d37e8a7ec3aa2f8e2aa9e44449bdb)
1 
2 /*
3    Defines a block Jacobi preconditioner.
4 */
5 
6 #include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /*I "petscpc.h" I*/
7 
8 static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC,Mat,Mat);
9 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC,Mat,Mat);
10 static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC);
11 
/*
   PCSetUp_BJacobi - Top-level setup for the block Jacobi preconditioner.

   Determines how many blocks each MPI rank owns and their sizes (from the
   user-supplied global/local layout or a default layout), extracts the
   diagonal block(s) of the operator, and dispatches to the single-block,
   multi-block, or multi-process setup routine.
*/
static PetscErrorCode PCSetUp_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  Mat            mat  = pc->mat,pmat = pc->pmat;
  PetscErrorCode ierr;
  PetscBool      hasop;
  PetscInt       N,M,start,i,sum,end;
  PetscInt       bs,i_start=-1,i_end=-1;
  PetscMPIInt    rank,size;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRMPI(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRMPI(ierr);
  /* M,N are the LOCAL row/column counts of the preconditioning matrix */
  ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);

  /* Fewer blocks than ranks: each block is shared by several processes */
  if (jac->n > 0 && jac->n < size) {
    ierr = PCSetUp_BJacobi_Multiproc(pc);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* --------------------------------------------------------------------------
      Determines the number of blocks assigned to each processor
  -----------------------------------------------------------------------------*/

  /*   local block count  given */
  if (jac->n_local > 0 && jac->n < 0) {
    /* total block count is the sum of the per-rank counts */
    ierr = MPIU_Allreduce(&jac->n_local,&jac->n,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    if (jac->l_lens) { /* check that user set these correctly */
      sum = 0;
      for (i=0; i<jac->n_local; i++) {
        /* each block length must be a multiple of the matrix block size */
        if (jac->l_lens[i]/bs*bs !=jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
        sum += jac->l_lens[i];
      }
      if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local lens set incorrectly");
    } else {
      /* no lengths given: split the local rows as evenly as possible */
      ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
    }
  } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */
    /* global blocks given: determine which ones are local */
    if (jac->g_lens) {
      /* check that g_lens has valid entries */
      for (i=0; i<jac->n; i++) {
        if (!jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Zero block not allowed");
        if (jac->g_lens[i]/bs*bs != jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
      }
      if (size == 1) {
        jac->n_local = jac->n;
        ierr         = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
        ierr         = PetscArraycpy(jac->l_lens,jac->g_lens,jac->n_local);CHKERRQ(ierr);
        /* check that user set these correctly */
        sum = 0;
        for (i=0; i<jac->n_local; i++) sum += jac->l_lens[i];
        if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Global lens set incorrectly");
      } else {
        ierr = MatGetOwnershipRange(pc->pmat,&start,&end);CHKERRQ(ierr);
        /* loop over blocks determining first one owned by me; block boundaries
           must line up exactly with the parallel row distribution */
        sum = 0;
        for (i=0; i<jac->n+1; i++) {
          if (sum == start) { i_start = i; goto start_1;}
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
start_1:
        /* continue accumulating until the end of my ownership range is hit */
        for (i=i_start; i<jac->n+1; i++) {
          if (sum == end) { i_end = i; goto end_1; }
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
end_1:
        jac->n_local = i_end - i_start;
        ierr         = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
        ierr         = PetscArraycpy(jac->l_lens,jac->g_lens+i_start,jac->n_local);CHKERRQ(ierr);
      }
    } else { /* no global blocks given, determine then using default layout */
      /* distribute jac->n blocks across ranks, extras to the lowest ranks */
      jac->n_local = jac->n/size + ((jac->n % size) > rank);
      ierr         = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        jac->l_lens[i] = ((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i))*bs;
        if (!jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Too many blocks given");
      }
    }
  } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */
    /* default: one block per process */
    jac->n         = size;
    jac->n_local   = 1;
    ierr           = PetscMalloc1(1,&jac->l_lens);CHKERRQ(ierr);
    jac->l_lens[0] = M;
  } else { /* jac->n > 0 && jac->n_local > 0 */
    if (!jac->l_lens) {
      ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
    }
  }
  if (jac->n_local < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of blocks is less than number of processors");

  /* -------------------------
      Determines mat and pmat
  ---------------------------*/
  ierr = MatHasOperation(pc->mat,MATOP_GET_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr);
  if (!hasop && size == 1) {
    /* sequential matrix with no diagonal-block extraction: use it directly */
    mat  = pc->mat;
    pmat = pc->pmat;
  } else {
    if (pc->useAmat) {
      /* use block from Amat matrix, not Pmat for local MatMult() */
      ierr = MatGetDiagonalBlock(pc->mat,&mat);CHKERRQ(ierr);
    }
    if (pc->pmat != pc->mat || !pc->useAmat) {
      ierr = MatGetDiagonalBlock(pc->pmat,&pmat);CHKERRQ(ierr);
    } else pmat = mat;
  }

  /* ------
     Setup code depends on the number of blocks
  */
  if (jac->n_local == 1) {
    ierr = PCSetUp_BJacobi_Singleblock(pc,mat,pmat);CHKERRQ(ierr);
  } else {
    ierr = PCSetUp_BJacobi_Multiblock(pc,mat,pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
135 
136 /* Default destroy, if it has never been setup */
137 static PetscErrorCode PCDestroy_BJacobi(PC pc)
138 {
139   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
140   PetscErrorCode ierr;
141 
142   PetscFunctionBegin;
143   ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
144   ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
145   ierr = PetscFree(pc->data);CHKERRQ(ierr);
146   PetscFunctionReturn(0);
147 }
148 
149 
150 static PetscErrorCode PCSetFromOptions_BJacobi(PetscOptionItems *PetscOptionsObject,PC pc)
151 {
152   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
153   PetscErrorCode ierr;
154   PetscInt       blocks,i;
155   PetscBool      flg;
156 
157   PetscFunctionBegin;
158   ierr = PetscOptionsHead(PetscOptionsObject,"Block Jacobi options");CHKERRQ(ierr);
159   ierr = PetscOptionsInt("-pc_bjacobi_blocks","Total number of blocks","PCBJacobiSetTotalBlocks",jac->n,&blocks,&flg);CHKERRQ(ierr);
160   if (flg) {ierr = PCBJacobiSetTotalBlocks(pc,blocks,NULL);CHKERRQ(ierr);}
161   ierr = PetscOptionsInt("-pc_bjacobi_local_blocks","Local number of blocks","PCBJacobiSetLocalBlocks",jac->n_local,&blocks,&flg);CHKERRQ(ierr);
162   if (flg) {ierr = PCBJacobiSetLocalBlocks(pc,blocks,NULL);CHKERRQ(ierr);}
163   if (jac->ksp) {
164     /* The sub-KSP has already been set up (e.g., PCSetUp_BJacobi_Singleblock), but KSPSetFromOptions was not called
165      * unless we had already been called. */
166     for (i=0; i<jac->n_local; i++) {
167       ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr);
168     }
169   }
170   ierr = PetscOptionsTail();CHKERRQ(ierr);
171   PetscFunctionReturn(0);
172 }
173 
174 #include <petscdraw.h>
/*
   PCView_BJacobi - Prints the block Jacobi configuration and (depending on the
   viewer format) the sub-KSP solvers, supporting ASCII, string, and draw
   viewers.  Subviewer get/restore and push/pop synchronized calls must stay
   exactly paired, which makes the control flow here delicate.
*/
static PetscErrorCode PCView_BJacobi(PC pc,PetscViewer viewer)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscMPIInt          rank;
  PetscInt             i;
  PetscBool            iascii,isstring,isdraw;
  PetscViewer          sviewer;
  PetscViewerFormat    format;
  const char           *prefix;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (iascii) {
    if (pc->useAmat) {
      ierr = PetscViewerASCIIPrintf(viewer,"  using Amat local matrix, number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer,"  number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRMPI(ierr);
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
      /* compact format: show only rank 0's first block solver */
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solver information for first block is in the following KSP and PC objects on rank 0:\n");CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n",prefix?prefix:"");CHKERRQ(ierr);
      if (jac->ksp && !jac->psubcomm) {
        /* one or more blocks per rank: view rank 0's first sub-KSP */
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr);
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
        /*  extra call needed because of the two calls to PetscViewerASCIIPushSynchronized() in PetscViewerGetSubViewer() */
        ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      } else if (mpjac && jac->ksp && mpjac->psubcomm) {
        /* multiproc case: blocks shared by subcommunicators; view from color 0 */
        ierr = PetscViewerGetSubViewer(viewer,mpjac->psubcomm->child,&sviewer);CHKERRQ(ierr);
        if (!mpjac->psubcomm->color) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(*(jac->ksp),sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr);
        ierr = PetscViewerRestoreSubViewer(viewer,mpjac->psubcomm->child,&sviewer);CHKERRQ(ierr);
        ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
        /*  extra call needed because of the two calls to PetscViewerASCIIPushSynchronized() in PetscViewerGetSubViewer() */
        ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      } else {
        /* no sub-KSPs yet (PC not set up): nothing more to show */
        ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      }
    } else {
      /* detailed format: every rank prints all of its blocks */
      PetscInt n_global;
      /* MPI_MAX because some ranks may have fewer blocks than others */
      ierr = MPIU_Allreduce(&jac->n_local,&n_global,1,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solver information for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D, first local block number = %D\n",
                                                rank,jac->n_local,jac->first_local);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr);
        ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blks=%D",jac->n);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x,y,bottom,h;

    ierr   = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr   = PetscDrawGetCurrentPoint(draw,&x,&y);CHKERRQ(ierr);
    ierr   = PetscSNPrintf(str,25,"Number blocks %D",jac->n);CHKERRQ(ierr);
    ierr   = PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);CHKERRQ(ierr);
    bottom = y - h;
    ierr   = PetscDrawPushCurrentPoint(draw,x,bottom);CHKERRQ(ierr);
    /* NOTE: the communicator on the viewer may differ from the one on the ksp in parallel */
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],viewer);CHKERRQ(ierr);}
    ierr = PetscDrawPopCurrentPoint(draw);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
270 
271 /* -------------------------------------------------------------------------------------*/
272 
273 static PetscErrorCode  PCBJacobiGetSubKSP_BJacobi(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
274 {
275   PC_BJacobi *jac = (PC_BJacobi*)pc->data;
276 
277   PetscFunctionBegin;
278   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call KSPSetUp() or PCSetUp() first");
279 
280   if (n_local) *n_local = jac->n_local;
281   if (first_local) *first_local = jac->first_local;
282   if (ksp) *ksp                 = jac->ksp;
283   PetscFunctionReturn(0);
284 }
285 
286 static PetscErrorCode  PCBJacobiSetTotalBlocks_BJacobi(PC pc,PetscInt blocks,PetscInt *lens)
287 {
288   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
289   PetscErrorCode ierr;
290 
291   PetscFunctionBegin;
292   if (pc->setupcalled > 0 && jac->n!=blocks) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Cannot alter number of blocks after PCSetUp()/KSPSetUp() has been called");
293   jac->n = blocks;
294   if (!lens) jac->g_lens = NULL;
295   else {
296     ierr = PetscMalloc1(blocks,&jac->g_lens);CHKERRQ(ierr);
297     ierr = PetscLogObjectMemory((PetscObject)pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
298     ierr = PetscArraycpy(jac->g_lens,lens,blocks);CHKERRQ(ierr);
299   }
300   PetscFunctionReturn(0);
301 }
302 
303 static PetscErrorCode  PCBJacobiGetTotalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
304 {
305   PC_BJacobi *jac = (PC_BJacobi*) pc->data;
306 
307   PetscFunctionBegin;
308   *blocks = jac->n;
309   if (lens) *lens = jac->g_lens;
310   PetscFunctionReturn(0);
311 }
312 
313 static PetscErrorCode  PCBJacobiSetLocalBlocks_BJacobi(PC pc,PetscInt blocks,const PetscInt lens[])
314 {
315   PC_BJacobi     *jac;
316   PetscErrorCode ierr;
317 
318   PetscFunctionBegin;
319   jac = (PC_BJacobi*)pc->data;
320 
321   jac->n_local = blocks;
322   if (!lens) jac->l_lens = NULL;
323   else {
324     ierr = PetscMalloc1(blocks,&jac->l_lens);CHKERRQ(ierr);
325     ierr = PetscLogObjectMemory((PetscObject)pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
326     ierr = PetscArraycpy(jac->l_lens,lens,blocks);CHKERRQ(ierr);
327   }
328   PetscFunctionReturn(0);
329 }
330 
331 static PetscErrorCode  PCBJacobiGetLocalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
332 {
333   PC_BJacobi *jac = (PC_BJacobi*) pc->data;
334 
335   PetscFunctionBegin;
336   *blocks = jac->n_local;
337   if (lens) *lens = jac->l_lens;
338   PetscFunctionReturn(0);
339 }
340 
341 /* -------------------------------------------------------------------------------------*/
342 
343 /*@C
344    PCBJacobiGetSubKSP - Gets the local KSP contexts for all blocks on
345    this processor.
346 
347    Not Collective
348 
349    Input Parameter:
350 .  pc - the preconditioner context
351 
352    Output Parameters:
353 +  n_local - the number of blocks on this processor, or NULL
354 .  first_local - the global number of the first block on this processor, or NULL
355 -  ksp - the array of KSP contexts
356 
357    Notes:
358    After PCBJacobiGetSubKSP() the array of KSP contexts is not to be freed.
359 
360    Currently for some matrix implementations only 1 block per processor
361    is supported.
362 
363    You must call KSPSetUp() or PCSetUp() before calling PCBJacobiGetSubKSP().
364 
365    Fortran Usage: You must pass in a KSP array that is large enough to contain all the local KSPs.
366       You can call PCBJacobiGetSubKSP(pc,nlocal,firstlocal,PETSC_NULL_KSP,ierr) to determine how large the
367       KSP array must be.
368 
369    Level: advanced
370 
371 .seealso: PCASMGetSubKSP()
372 @*/
PetscErrorCode  PCBJacobiGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod errors if pc is not of type PCBJACOBI (method not composed) */
  ierr = PetscUseMethod(pc,"PCBJacobiGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
382 
383 /*@
384    PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block
385    Jacobi preconditioner.
386 
387    Collective on PC
388 
389    Input Parameters:
390 +  pc - the preconditioner context
391 .  blocks - the number of blocks
392 -  lens - [optional] integer array containing the size of each block
393 
394    Options Database Key:
395 .  -pc_bjacobi_blocks <blocks> - Sets the number of global blocks
396 
397    Notes:
398    Currently only a limited number of blocking configurations are supported.
399    All processors sharing the PC must call this routine with the same data.
400 
401    Level: intermediate
402 
403 .seealso: PCSetUseAmat(), PCBJacobiSetLocalBlocks()
404 @*/
PetscErrorCode  PCBJacobiSetTotalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* the total block count must be strictly positive */
  if (blocks <= 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Must have positive blocks");
  /* PetscTryMethod silently does nothing if pc is not of type PCBJACOBI */
  ierr = PetscTryMethod(pc,"PCBJacobiSetTotalBlocks_C",(PC,PetscInt,const PetscInt[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
415 
416 /*@C
417    PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block
418    Jacobi preconditioner.
419 
420    Not Collective
421 
422    Input Parameter:
423 .  pc - the preconditioner context
424 
425    Output parameters:
426 +  blocks - the number of blocks
427 -  lens - integer array containing the size of each block
428 
429    Level: intermediate
430 
431 .seealso: PCSetUseAmat(), PCBJacobiGetLocalBlocks()
432 @*/
PetscErrorCode  PCBJacobiGetTotalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID,1);
  PetscValidIntPointer(blocks,2);
  /* PetscUseMethod errors if pc is not of type PCBJACOBI (method not composed) */
  ierr = PetscUseMethod(pc,"PCBJacobiGetTotalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
443 
444 /*@
445    PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block
446    Jacobi preconditioner.
447 
448    Not Collective
449 
450    Input Parameters:
451 +  pc - the preconditioner context
452 .  blocks - the number of blocks
453 -  lens - [optional] integer array containing size of each block
454 
455    Options Database Key:
456 .  -pc_bjacobi_local_blocks <blocks> - Sets the number of local blocks
457 
458    Note:
459    Currently only a limited number of blocking configurations are supported.
460 
461    Level: intermediate
462 
463 .seealso: PCSetUseAmat(), PCBJacobiSetTotalBlocks()
464 @*/
465 PetscErrorCode  PCBJacobiSetLocalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
466 {
467   PetscErrorCode ierr;
468 
469   PetscFunctionBegin;
470   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
471   if (blocks < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have nonegative blocks");
472   ierr = PetscTryMethod(pc,"PCBJacobiSetLocalBlocks_C",(PC,PetscInt,const PetscInt []),(pc,blocks,lens));CHKERRQ(ierr);
473   PetscFunctionReturn(0);
474 }
475 
476 /*@C
477    PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block
478    Jacobi preconditioner.
479 
480    Not Collective
481 
482    Input Parameters:
483 +  pc - the preconditioner context
484 .  blocks - the number of blocks
485 -  lens - [optional] integer array containing size of each block
486 
487    Note:
488    Currently only a limited number of blocking configurations are supported.
489 
490    Level: intermediate
491 
492 .seealso: PCSetUseAmat(), PCBJacobiGetTotalBlocks()
493 @*/
PetscErrorCode  PCBJacobiGetLocalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID,1);
  PetscValidIntPointer(blocks,2);
  /* PetscUseMethod errors if pc is not of type PCBJACOBI (method not composed) */
  ierr = PetscUseMethod(pc,"PCBJacobiGetLocalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
504 
505 /* -----------------------------------------------------------------------------------*/
506 
507 /*MC
508    PCBJACOBI - Use block Jacobi preconditioning, each block is (approximately) solved with
509            its own KSP object.
510 
511    Options Database Keys:
512 +  -pc_use_amat - use Amat to apply block of operator in inner Krylov method
513 -  -pc_bjacobi_blocks <n> - use n total blocks
514 
515    Notes:
516     Each processor can have one or more blocks, or a single block can be shared by several processes. Defaults to one block per processor.
517 
     To set options on the solvers for each block append -sub_ to all the KSP and PC
519         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
520 
521      To set the options on the solvers separate for each block call PCBJacobiGetSubKSP()
522          and set the options directly on the resulting KSP object (you can access its PC
523          KSPGetPC())
524 
525      For GPU-based vectors (CUDA, ViennaCL) it is recommended to use exactly one block per MPI process for best
526          performance.  Different block partitioning may lead to additional data transfers
527          between host and GPU that lead to degraded performance.
528 
529      The options prefix for each block is sub_, for example -sub_pc_type lu.
530 
     When multiple processes share a single block, each block encompasses exactly all the unknowns owned by its set of processes.
532 
533    Level: beginner
534 
535 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
536            PCASM, PCSetUseAmat(), PCGetUseAmat(), PCBJacobiGetSubKSP(), PCBJacobiSetTotalBlocks(),
537            PCBJacobiSetLocalBlocks(), PCSetModifySubMatrices()
538 M*/
539 
540 PETSC_EXTERN PetscErrorCode PCCreate_BJacobi(PC pc)
541 {
542   PetscErrorCode ierr;
543   PetscMPIInt    rank;
544   PC_BJacobi     *jac;
545 
546   PetscFunctionBegin;
547   ierr = PetscNewLog(pc,&jac);CHKERRQ(ierr);
548   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRMPI(ierr);
549 
550   pc->ops->apply           = NULL;
551   pc->ops->matapply        = NULL;
552   pc->ops->applytranspose  = NULL;
553   pc->ops->setup           = PCSetUp_BJacobi;
554   pc->ops->destroy         = PCDestroy_BJacobi;
555   pc->ops->setfromoptions  = PCSetFromOptions_BJacobi;
556   pc->ops->view            = PCView_BJacobi;
557   pc->ops->applyrichardson = NULL;
558 
559   pc->data               = (void*)jac;
560   jac->n                 = -1;
561   jac->n_local           = -1;
562   jac->first_local       = rank;
563   jac->ksp               = NULL;
564   jac->g_lens            = NULL;
565   jac->l_lens            = NULL;
566   jac->psubcomm          = NULL;
567 
568   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetSubKSP_C",PCBJacobiGetSubKSP_BJacobi);CHKERRQ(ierr);
569   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetTotalBlocks_C",PCBJacobiSetTotalBlocks_BJacobi);CHKERRQ(ierr);
570   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetTotalBlocks_C",PCBJacobiGetTotalBlocks_BJacobi);CHKERRQ(ierr);
571   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetLocalBlocks_C",PCBJacobiSetLocalBlocks_BJacobi);CHKERRQ(ierr);
572   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetLocalBlocks_C",PCBJacobiGetLocalBlocks_BJacobi);CHKERRQ(ierr);
573   PetscFunctionReturn(0);
574 }
575 
576 /* --------------------------------------------------------------------------------------------*/
577 /*
578         These are for a single block per processor; works for AIJ, BAIJ; Seq and MPI
579 */
/*
   PCReset_BJacobi_Singleblock - Resets the single-block state: resets the
   sub-KSP and destroys the sequential work vectors (the context itself and
   the KSP survive so the PC can be set up again).
*/
static PetscErrorCode PCReset_BJacobi_Singleblock(PC pc)
{
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);
  ierr = VecDestroy(&bjac->x);CHKERRQ(ierr);
  ierr = VecDestroy(&bjac->y);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
592 
/*
   PCDestroy_BJacobi_Singleblock - Full teardown for the single-block case:
   reset first (frees work vectors), then destroy the sub-KSP and free all
   context storage.  Order matters: bjac is freed after the reset uses it.
*/
static PetscErrorCode PCDestroy_BJacobi_Singleblock(PC pc)
{
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  ierr = PCReset_BJacobi_Singleblock(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr);
  ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  ierr = PetscFree(bjac);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
609 
610 static PetscErrorCode PCSetUpOnBlocks_BJacobi_Singleblock(PC pc)
611 {
612   PetscErrorCode     ierr;
613   PC_BJacobi         *jac = (PC_BJacobi*)pc->data;
614   KSP                subksp = jac->ksp[0];
615   KSPConvergedReason reason;
616 
617   PetscFunctionBegin;
618   ierr = KSPSetUp(subksp);CHKERRQ(ierr);
619   ierr = KSPGetConvergedReason(subksp,&reason);CHKERRQ(ierr);
620   if (reason == KSP_DIVERGED_PC_FAILED) {
621     pc->failedreason = PC_SUBPC_ERROR;
622   }
623   PetscFunctionReturn(0);
624 }
625 
/*
   PCApply_BJacobi_Singleblock - Applies the preconditioner by solving the
   single local block with the sub-KSP.  The local parts of the parallel
   vectors are wrapped in sequential work vectors, so no data is copied.
*/
static PetscErrorCode PCApply_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;

  PetscFunctionBegin;
  /* bjac->x/bjac->y alias the local storage of x and y until restored below */
  ierr = VecGetLocalVectorRead(x, bjac->x);CHKERRQ(ierr);
  ierr = VecGetLocalVector(y, bjac->y);CHKERRQ(ierr);
 /* Since the inner KSP matrix may point directly to the diagonal block of an MPI matrix the inner
     matrix may change even if the outer KSP/PC has not updated the preconditioner, this will trigger a rebuild
     of the inner preconditioner automatically unless we pass down the outer preconditioners reuse flag.*/
  ierr = KSPSetReusePreconditioner(jac->ksp[0],pc->reusepreconditioner);CHKERRQ(ierr);
  ierr = KSPSolve(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
  /* propagate a sub-solver failure to the outer PC instead of erroring */
  ierr = KSPCheckSolve(jac->ksp[0],pc,bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreLocalVectorRead(x, bjac->x);CHKERRQ(ierr);
  ierr = VecRestoreLocalVector(y, bjac->y);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
645 
646 static PetscErrorCode PCMatApply_BJacobi_Singleblock(PC pc,Mat X,Mat Y)
647 {
648   PC_BJacobi     *jac  = (PC_BJacobi*)pc->data;
649   Mat            sX,sY;
650   PetscErrorCode ierr;
651 
652   PetscFunctionBegin;
653  /* Since the inner KSP matrix may point directly to the diagonal block of an MPI matrix the inner
654      matrix may change even if the outer KSP/PC has not updated the preconditioner, this will trigger a rebuild
655      of the inner preconditioner automatically unless we pass down the outer preconditioners reuse flag.*/
656   ierr = KSPSetReusePreconditioner(jac->ksp[0],pc->reusepreconditioner);CHKERRQ(ierr);
657   ierr = MatDenseGetLocalMatrix(X,&sX);CHKERRQ(ierr);
658   ierr = MatDenseGetLocalMatrix(Y,&sY);CHKERRQ(ierr);
659   ierr = KSPMatSolve(jac->ksp[0],sX,sY);CHKERRQ(ierr);
660   PetscFunctionReturn(0);
661 }
662 
/*
   PCApplySymmetricLeft_BJacobi_Singleblock - Applies only the symmetric-left
   half of the inner PC (e.g. the L^{-1} part of a Cholesky-like PC),
   bypassing the inner KSP entirely.
*/
static PetscErrorCode PCApplySymmetricLeft_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *y_array;
  const PetscScalar      *x_array;
  PC                     subpc;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() is to avoid having to copy the
    y vector into the bjac->x vector. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  ierr = VecGetArrayRead(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
  /* apply the symmetric left portion of the inner PC operator */
  /* note this by-passes the inner KSP and its options completely */
  ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
  ierr = PCApplySymmetricLeft(subpc,bjac->x,bjac->y);CHKERRQ(ierr);
  /* detach the borrowed arrays before handing them back to x and y */
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
693 
694 static PetscErrorCode PCApplySymmetricRight_BJacobi_Singleblock(PC pc,Vec x,Vec y)
695 {
696   PetscErrorCode         ierr;
697   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
698   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
699   PetscScalar            *y_array;
700   const PetscScalar      *x_array;
701   PC                     subpc;
702 
703   PetscFunctionBegin;
704   /*
705       The VecPlaceArray() is to avoid having to copy the
706     y vector into the bjac->x vector. The reason for
707     the bjac->x vector is that we need a sequential vector
708     for the sequential solve.
709   */
710   ierr = VecGetArrayRead(x,&x_array);CHKERRQ(ierr);
711   ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
712   ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
713   ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
714 
715   /* apply the symmetric right portion of the inner PC operator */
716   /* note this by-passes the inner KSP and its options completely */
717 
718   ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
719   ierr = PCApplySymmetricRight(subpc,bjac->x,bjac->y);CHKERRQ(ierr);
720 
721   ierr = VecRestoreArrayRead(x,&x_array);CHKERRQ(ierr);
722   ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
723   PetscFunctionReturn(0);
724 }
725 
726 static PetscErrorCode PCApplyTranspose_BJacobi_Singleblock(PC pc,Vec x,Vec y)
727 {
728   PetscErrorCode         ierr;
729   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
730   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
731   PetscScalar            *y_array;
732   const PetscScalar      *x_array;
733 
734   PetscFunctionBegin;
735   /*
736       The VecPlaceArray() is to avoid having to copy the
737     y vector into the bjac->x vector. The reason for
738     the bjac->x vector is that we need a sequential vector
739     for the sequential solve.
740   */
741   ierr = VecGetArrayRead(x,&x_array);CHKERRQ(ierr);
742   ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
743   ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
744   ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
745   ierr = KSPSolveTranspose(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
746   ierr = KSPCheckSolve(jac->ksp[0],pc,bjac->y);CHKERRQ(ierr);
747   ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
748   ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
749   ierr = VecRestoreArrayRead(x,&x_array);CHKERRQ(ierr);
750   ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
751   PetscFunctionReturn(0);
752 }
753 
/*
   Sets up block Jacobi for the case of exactly one (sequential) block per
   process. On the first setup it creates the single sub-KSP, installs the
   singleblock function table on the PC, and creates array-less work vectors;
   later setups only refresh the operators and options.
*/
static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC pc,Mat mat,Mat pmat)
{
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode         ierr;
  PetscInt               m;
  KSP                    ksp;
  PC_BJacobi_Singleblock *bjac;
  PetscBool              wasSetup = PETSC_TRUE;
  VecType                vectype;
  const char             *prefix;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    if (!jac->ksp) {
      /* first time through: create the sub-KSP and the singleblock data */
      wasSetup = PETSC_FALSE;

      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      /* the sub-KSP reads options using this PC's prefix plus "sub_" */
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);

      /* switch the PC's function table to the singleblock implementations */
      pc->ops->reset               = PCReset_BJacobi_Singleblock;
      pc->ops->destroy             = PCDestroy_BJacobi_Singleblock;
      pc->ops->apply               = PCApply_BJacobi_Singleblock;
      pc->ops->matapply            = PCMatApply_BJacobi_Singleblock;
      pc->ops->applysymmetricleft  = PCApplySymmetricLeft_BJacobi_Singleblock;
      pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock;
      pc->ops->applytranspose      = PCApplyTranspose_BJacobi_Singleblock;
      pc->ops->setuponblocks       = PCSetUpOnBlocks_BJacobi_Singleblock;

      ierr        = PetscMalloc1(1,&jac->ksp);CHKERRQ(ierr);
      jac->ksp[0] = ksp;

      ierr      = PetscNewLog(pc,&bjac);CHKERRQ(ierr);
      jac->data = (void*)bjac;
    } else {
      /* the sub-KSP survived a PCReset; reuse it */
      ksp  = jac->ksp[0];
      bjac = (PC_BJacobi_Singleblock*)jac->data;
    }

    /*
      The reason we need to generate these vectors is to serve
      as the right-hand side and solution vector for the solve on the
      block. We do not need to allocate space for the vectors since
      that is provided via VecPlaceArray() just before the call to
      KSPSolve() on the block.
    */
    ierr = MatGetSize(pmat,&m,&m);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->x);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->y);CHKERRQ(ierr);
    /* match the work-vector type to the matrix (e.g. GPU-backed vectors) */
    ierr = MatGetVecType(pmat,&vectype);CHKERRQ(ierr);
    ierr = VecSetType(bjac->x,vectype);CHKERRQ(ierr);
    ierr = VecSetType(bjac->y,vectype);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->x);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->y);CHKERRQ(ierr);
  } else {
    ksp  = jac->ksp[0];
    bjac = (PC_BJacobi_Singleblock*)jac->data;
  }
  /* set the operators; with useAmat the Amat is used for the solve and
     pmat only for building the preconditioner */
  ierr = KSPGetOptionsPrefix(ksp,&prefix);CHKERRQ(ierr);
  if (pc->useAmat) {
    ierr = KSPSetOperators(ksp,mat,pmat);CHKERRQ(ierr);
    ierr = MatSetOptionsPrefix(mat,prefix);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(ksp,pmat,pmat);CHKERRQ(ierr);
  }
  ierr = MatSetOptionsPrefix(pmat,prefix);CHKERRQ(ierr);
  if (!wasSetup && pc->setfromoptionscalled) {
    /* If PCSetFromOptions_BJacobi is called later, KSPSetFromOptions will be called at that time. */
    ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
832 
833 /* ---------------------------------------------------------------------------------------------*/
834 static PetscErrorCode PCReset_BJacobi_Multiblock(PC pc)
835 {
836   PC_BJacobi            *jac  = (PC_BJacobi*)pc->data;
837   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
838   PetscErrorCode        ierr;
839   PetscInt              i;
840 
841   PetscFunctionBegin;
842   if (bjac && bjac->pmat) {
843     ierr = MatDestroyMatrices(jac->n_local,&bjac->pmat);CHKERRQ(ierr);
844     if (pc->useAmat) {
845       ierr = MatDestroyMatrices(jac->n_local,&bjac->mat);CHKERRQ(ierr);
846     }
847   }
848 
849   for (i=0; i<jac->n_local; i++) {
850     ierr = KSPReset(jac->ksp[i]);CHKERRQ(ierr);
851     if (bjac && bjac->x) {
852       ierr = VecDestroy(&bjac->x[i]);CHKERRQ(ierr);
853       ierr = VecDestroy(&bjac->y[i]);CHKERRQ(ierr);
854       ierr = ISDestroy(&bjac->is[i]);CHKERRQ(ierr);
855     }
856   }
857   ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
858   ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
859   PetscFunctionReturn(0);
860 }
861 
862 static PetscErrorCode PCDestroy_BJacobi_Multiblock(PC pc)
863 {
864   PC_BJacobi            *jac  = (PC_BJacobi*)pc->data;
865   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
866   PetscErrorCode        ierr;
867   PetscInt              i;
868 
869   PetscFunctionBegin;
870   ierr = PCReset_BJacobi_Multiblock(pc);CHKERRQ(ierr);
871   if (bjac) {
872     ierr = PetscFree2(bjac->x,bjac->y);CHKERRQ(ierr);
873     ierr = PetscFree(bjac->starts);CHKERRQ(ierr);
874     ierr = PetscFree(bjac->is);CHKERRQ(ierr);
875   }
876   ierr = PetscFree(jac->data);CHKERRQ(ierr);
877   for (i=0; i<jac->n_local; i++) {
878     ierr = KSPDestroy(&jac->ksp[i]);CHKERRQ(ierr);
879   }
880   ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
881   ierr = PetscFree(pc->data);CHKERRQ(ierr);
882   PetscFunctionReturn(0);
883 }
884 
885 static PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiblock(PC pc)
886 {
887   PC_BJacobi         *jac = (PC_BJacobi*)pc->data;
888   PetscErrorCode     ierr;
889   PetscInt           i,n_local = jac->n_local;
890   KSPConvergedReason reason;
891 
892   PetscFunctionBegin;
893   for (i=0; i<n_local; i++) {
894     ierr = KSPSetUp(jac->ksp[i]);CHKERRQ(ierr);
895     ierr = KSPGetConvergedReason(jac->ksp[i],&reason);CHKERRQ(ierr);
896     if (reason == KSP_DIVERGED_PC_FAILED) {
897       pc->failedreason = PC_SUBPC_ERROR;
898     }
899   }
900   PetscFunctionReturn(0);
901 }
902 
903 /*
904       Preconditioner for block Jacobi
905 */
906 static PetscErrorCode PCApply_BJacobi_Multiblock(PC pc,Vec x,Vec y)
907 {
908   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
909   PetscErrorCode        ierr;
910   PetscInt              i,n_local = jac->n_local;
911   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
912   PetscScalar           *yin;
913   const PetscScalar     *xin;
914 
915   PetscFunctionBegin;
916   ierr = VecGetArrayRead(x,&xin);CHKERRQ(ierr);
917   ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
918   for (i=0; i<n_local; i++) {
919     /*
920        To avoid copying the subvector from x into a workspace we instead
921        make the workspace vector array point to the subpart of the array of
922        the global vector.
923     */
924     ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
925     ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);
926 
927     ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
928     ierr = KSPSolve(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
929     ierr = KSPCheckSolve(jac->ksp[i],pc,bjac->y[i]);CHKERRQ(ierr);
930     ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
931 
932     ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
933     ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
934   }
935   ierr = VecRestoreArrayRead(x,&xin);CHKERRQ(ierr);
936   ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
937   PetscFunctionReturn(0);
938 }
939 
940 /*
941       Preconditioner for block Jacobi
942 */
943 static PetscErrorCode PCApplyTranspose_BJacobi_Multiblock(PC pc,Vec x,Vec y)
944 {
945   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
946   PetscErrorCode        ierr;
947   PetscInt              i,n_local = jac->n_local;
948   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
949   PetscScalar           *yin;
950   const PetscScalar     *xin;
951 
952   PetscFunctionBegin;
953   ierr = VecGetArrayRead(x,&xin);CHKERRQ(ierr);
954   ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
955   for (i=0; i<n_local; i++) {
956     /*
957        To avoid copying the subvector from x into a workspace we instead
958        make the workspace vector array point to the subpart of the array of
959        the global vector.
960     */
961     ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
962     ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);
963 
964     ierr = PetscLogEventBegin(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
965     ierr = KSPSolveTranspose(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
966     ierr = KSPCheckSolve(jac->ksp[i],pc,bjac->y[i]);CHKERRQ(ierr);
967     ierr = PetscLogEventEnd(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
968 
969     ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
970     ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
971   }
972   ierr = VecRestoreArrayRead(x,&xin);CHKERRQ(ierr);
973   ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
974   PetscFunctionReturn(0);
975 }
976 
977 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC pc,Mat mat,Mat pmat)
978 {
979   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
980   PetscErrorCode        ierr;
981   PetscInt              m,n_local,N,M,start,i;
982   const char            *prefix;
983   KSP                   ksp;
984   Vec                   x,y;
985   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
986   PC                    subpc;
987   IS                    is;
988   MatReuse              scall;
989   VecType               vectype;
990 
991   PetscFunctionBegin;
992   ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
993 
994   n_local = jac->n_local;
995 
996   if (pc->useAmat) {
997     PetscBool same;
998     ierr = PetscObjectTypeCompare((PetscObject)mat,((PetscObject)pmat)->type_name,&same);CHKERRQ(ierr);
999     if (!same) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_INCOMP,"Matrices not of same type");
1000   }
1001 
1002   if (!pc->setupcalled) {
1003     scall = MAT_INITIAL_MATRIX;
1004 
1005     if (!jac->ksp) {
1006       pc->ops->reset         = PCReset_BJacobi_Multiblock;
1007       pc->ops->destroy       = PCDestroy_BJacobi_Multiblock;
1008       pc->ops->apply         = PCApply_BJacobi_Multiblock;
1009       pc->ops->matapply      = NULL;
1010       pc->ops->applytranspose= PCApplyTranspose_BJacobi_Multiblock;
1011       pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Multiblock;
1012 
1013       ierr = PetscNewLog(pc,&bjac);CHKERRQ(ierr);
1014       ierr = PetscMalloc1(n_local,&jac->ksp);CHKERRQ(ierr);
1015       ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(n_local*sizeof(KSP)));CHKERRQ(ierr);
1016       ierr = PetscMalloc2(n_local,&bjac->x,n_local,&bjac->y);CHKERRQ(ierr);
1017       ierr = PetscMalloc1(n_local,&bjac->starts);CHKERRQ(ierr);
1018       ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(n_local*sizeof(PetscScalar)));CHKERRQ(ierr);
1019 
1020       jac->data = (void*)bjac;
1021       ierr      = PetscMalloc1(n_local,&bjac->is);CHKERRQ(ierr);
1022       ierr      = PetscLogObjectMemory((PetscObject)pc,sizeof(n_local*sizeof(IS)));CHKERRQ(ierr);
1023 
1024       for (i=0; i<n_local; i++) {
1025         ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
1026         ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
1027         ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
1028         ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
1029         ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
1030         ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
1031         ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
1032         ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
1033         ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
1034 
1035         jac->ksp[i] = ksp;
1036       }
1037     } else {
1038       bjac = (PC_BJacobi_Multiblock*)jac->data;
1039     }
1040 
1041     start = 0;
1042     ierr = MatGetVecType(pmat,&vectype);CHKERRQ(ierr);
1043     for (i=0; i<n_local; i++) {
1044       m = jac->l_lens[i];
1045       /*
1046       The reason we need to generate these vectors is to serve
1047       as the right-hand side and solution vector for the solve on the
1048       block. We do not need to allocate space for the vectors since
1049       that is provided via VecPlaceArray() just before the call to
1050       KSPSolve() on the block.
1051 
1052       */
1053       ierr = VecCreateSeq(PETSC_COMM_SELF,m,&x);CHKERRQ(ierr);
1054       ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&y);CHKERRQ(ierr);
1055       ierr = VecSetType(x,vectype);CHKERRQ(ierr);
1056       ierr = VecSetType(y,vectype);CHKERRQ(ierr);
1057       ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)x);CHKERRQ(ierr);
1058       ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)y);CHKERRQ(ierr);
1059 
1060       bjac->x[i]      = x;
1061       bjac->y[i]      = y;
1062       bjac->starts[i] = start;
1063 
1064       ierr        = ISCreateStride(PETSC_COMM_SELF,m,start,1,&is);CHKERRQ(ierr);
1065       bjac->is[i] = is;
1066       ierr        = PetscLogObjectParent((PetscObject)pc,(PetscObject)is);CHKERRQ(ierr);
1067 
1068       start += m;
1069     }
1070   } else {
1071     bjac = (PC_BJacobi_Multiblock*)jac->data;
1072     /*
1073        Destroy the blocks from the previous iteration
1074     */
1075     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
1076       ierr = MatDestroyMatrices(n_local,&bjac->pmat);CHKERRQ(ierr);
1077       if (pc->useAmat) {
1078         ierr = MatDestroyMatrices(n_local,&bjac->mat);CHKERRQ(ierr);
1079       }
1080       scall = MAT_INITIAL_MATRIX;
1081     } else scall = MAT_REUSE_MATRIX;
1082   }
1083 
1084   ierr = MatCreateSubMatrices(pmat,n_local,bjac->is,bjac->is,scall,&bjac->pmat);CHKERRQ(ierr);
1085   if (pc->useAmat) {
1086     ierr = MatCreateSubMatrices(mat,n_local,bjac->is,bjac->is,scall,&bjac->mat);CHKERRQ(ierr);
1087   }
1088   /* Return control to the user so that the submatrices can be modified (e.g., to apply
1089      different boundary conditions for the submatrices than for the global problem) */
1090   ierr = PCModifySubMatrices(pc,n_local,bjac->is,bjac->is,bjac->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
1091 
1092   for (i=0; i<n_local; i++) {
1093     ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->pmat[i]);CHKERRQ(ierr);
1094     ierr = KSPGetOptionsPrefix(jac->ksp[i],&prefix);CHKERRQ(ierr);
1095     if (pc->useAmat) {
1096       ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->mat[i]);CHKERRQ(ierr);
1097       ierr = KSPSetOperators(jac->ksp[i],bjac->mat[i],bjac->pmat[i]);CHKERRQ(ierr);
1098       ierr = MatSetOptionsPrefix(bjac->mat[i],prefix);CHKERRQ(ierr);
1099     } else {
1100       ierr = KSPSetOperators(jac->ksp[i],bjac->pmat[i],bjac->pmat[i]);CHKERRQ(ierr);
1101     }
1102     ierr = MatSetOptionsPrefix(bjac->pmat[i],prefix);CHKERRQ(ierr);
1103     if (pc->setfromoptionscalled) {
1104       ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr);
1105     }
1106   }
1107   PetscFunctionReturn(0);
1108 }
1109 
1110 /* ---------------------------------------------------------------------------------------------*/
1111 /*
1112       These are for a single block with multiple processes;
1113 */
1114 static PetscErrorCode PCReset_BJacobi_Multiproc(PC pc)
1115 {
1116   PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
1117   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1118   PetscErrorCode       ierr;
1119 
1120   PetscFunctionBegin;
1121   ierr = VecDestroy(&mpjac->ysub);CHKERRQ(ierr);
1122   ierr = VecDestroy(&mpjac->xsub);CHKERRQ(ierr);
1123   ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);
1124   if (jac->ksp) {ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);}
1125   PetscFunctionReturn(0);
1126 }
1127 
1128 static PetscErrorCode PCDestroy_BJacobi_Multiproc(PC pc)
1129 {
1130   PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
1131   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1132   PetscErrorCode       ierr;
1133 
1134   PetscFunctionBegin;
1135   ierr = PCReset_BJacobi_Multiproc(pc);CHKERRQ(ierr);
1136   ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr);
1137   ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
1138   ierr = PetscSubcommDestroy(&mpjac->psubcomm);CHKERRQ(ierr);
1139 
1140   ierr = PetscFree(mpjac);CHKERRQ(ierr);
1141   ierr = PetscFree(pc->data);CHKERRQ(ierr);
1142   PetscFunctionReturn(0);
1143 }
1144 
1145 static PetscErrorCode PCApply_BJacobi_Multiproc(PC pc,Vec x,Vec y)
1146 {
1147   PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
1148   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1149   PetscErrorCode       ierr;
1150   PetscScalar          *yarray;
1151   const PetscScalar    *xarray;
1152   KSPConvergedReason   reason;
1153 
1154   PetscFunctionBegin;
1155   /* place x's and y's local arrays into xsub and ysub */
1156   ierr = VecGetArrayRead(x,&xarray);CHKERRQ(ierr);
1157   ierr = VecGetArray(y,&yarray);CHKERRQ(ierr);
1158   ierr = VecPlaceArray(mpjac->xsub,xarray);CHKERRQ(ierr);
1159   ierr = VecPlaceArray(mpjac->ysub,yarray);CHKERRQ(ierr);
1160 
1161   /* apply preconditioner on each matrix block */
1162   ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr);
1163   ierr = KSPSolve(jac->ksp[0],mpjac->xsub,mpjac->ysub);CHKERRQ(ierr);
1164   ierr = KSPCheckSolve(jac->ksp[0],pc,mpjac->ysub);CHKERRQ(ierr);
1165   ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr);
1166   ierr = KSPGetConvergedReason(jac->ksp[0],&reason);CHKERRQ(ierr);
1167   if (reason == KSP_DIVERGED_PC_FAILED) {
1168     pc->failedreason = PC_SUBPC_ERROR;
1169   }
1170 
1171   ierr = VecResetArray(mpjac->xsub);CHKERRQ(ierr);
1172   ierr = VecResetArray(mpjac->ysub);CHKERRQ(ierr);
1173   ierr = VecRestoreArrayRead(x,&xarray);CHKERRQ(ierr);
1174   ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
1175   PetscFunctionReturn(0);
1176 }
1177 
/*
   Applies the multiproc block Jacobi preconditioner to many right-hand
   sides at once: the dense matrices X (inputs) and Y (outputs) are wrapped,
   without copying, in matrices on the sub-KSP's communicator and solved
   with a single KSPMatSolve().
*/
static PetscErrorCode PCMatApply_BJacobi_Multiproc(PC pc,Mat X,Mat Y)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  KSPConvergedReason   reason;
  Mat                  sX,sY;      /* views of X and Y on the subcommunicator */
  const PetscScalar    *x;
  PetscScalar          *y;
  PetscInt             m,N,lda,ldb;
  PetscErrorCode       ierr;

  PetscFunctionBegin;
  /* apply preconditioner on each matrix block */
  ierr = MatGetLocalSize(X,&m,NULL);CHKERRQ(ierr);
  ierr = MatGetSize(X,NULL,&N);CHKERRQ(ierr);
  ierr = MatDenseGetLDA(X,&lda);CHKERRQ(ierr);
  ierr = MatDenseGetLDA(Y,&ldb);CHKERRQ(ierr);
  ierr = MatDenseGetArrayRead(X,&x);CHKERRQ(ierr);
  ierr = MatDenseGetArrayWrite(Y,&y);CHKERRQ(ierr);
  /* build sX/sY on the sub-KSP's communicator directly over the arrays of
     X and Y (no copy); the const cast on x is safe because sX is only read */
  ierr = MatCreateDense(PetscObjectComm((PetscObject)jac->ksp[0]),m,PETSC_DECIDE,PETSC_DECIDE,N,(PetscScalar*)x,&sX);CHKERRQ(ierr);
  ierr = MatCreateDense(PetscObjectComm((PetscObject)jac->ksp[0]),m,PETSC_DECIDE,PETSC_DECIDE,N,y,&sY);CHKERRQ(ierr);
  /* preserve the original leading dimensions so column strides match */
  ierr = MatDenseSetLDA(sX,lda);CHKERRQ(ierr);
  ierr = MatDenseSetLDA(sY,ldb);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[0],X,Y,0);CHKERRQ(ierr);
  ierr = KSPMatSolve(jac->ksp[0],sX,sY);CHKERRQ(ierr);
  ierr = KSPCheckSolve(jac->ksp[0],pc,NULL);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[0],X,Y,0);CHKERRQ(ierr);
  /* destroy the views before restoring the arrays they borrow */
  ierr = MatDestroy(&sY);CHKERRQ(ierr);
  ierr = MatDestroy(&sX);CHKERRQ(ierr);
  ierr = MatDenseRestoreArrayWrite(Y,&y);CHKERRQ(ierr);
  ierr = MatDenseRestoreArrayRead(X,&x);CHKERRQ(ierr);
  /* surface a sub-PC failure on the outer PC */
  ierr = KSPGetConvergedReason(jac->ksp[0],&reason);CHKERRQ(ierr);
  if (reason == KSP_DIVERGED_PC_FAILED) {
    pc->failedreason = PC_SUBPC_ERROR;
  }
  PetscFunctionReturn(0);
}
1214 
/*
   Sets up block Jacobi with one block spanning a subcommunicator of
   processes. On the first setup it creates the subcommunicators, extracts
   the multiprocess matrix block, creates the shared sub-KSP and array-less
   work vectors, and installs the multiproc function table; later setups
   refresh the extracted block.
*/
static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscInt             m,n;
  MPI_Comm             comm,subcomm=0;
  const char           *prefix;
  PetscBool            wasSetup = PETSC_TRUE;
  VecType              vectype;


  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  if (jac->n_local > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only a single block in a subcommunicator is supported");
  jac->n_local = 1; /* currently only a single block is supported for a subcommunicator */
  if (!pc->setupcalled) {
    wasSetup  = PETSC_FALSE;
    ierr      = PetscNewLog(pc,&mpjac);CHKERRQ(ierr);
    jac->data = (void*)mpjac;

    /* initialize datastructure mpjac */
    if (!jac->psubcomm) {
      /* Create default contiguous subcommunicators if user does not provide them */
      ierr = PetscSubcommCreate(comm,&jac->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(jac->psubcomm,jac->n);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(jac->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
    }
    mpjac->psubcomm = jac->psubcomm;
    subcomm         = PetscSubcommChild(mpjac->psubcomm);

    /* Get matrix blocks of pmat */
    ierr = MatGetMultiProcBlock(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr);

    /* create a new PC that processors in each subcomm have copy of */
    ierr = PetscMalloc1(1,&jac->ksp);CHKERRQ(ierr);
    ierr = KSPCreate(subcomm,&jac->ksp[0]);CHKERRQ(ierr);
    ierr = KSPSetErrorIfNotConverged(jac->ksp[0],pc->erroriffailure);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)jac->ksp[0],(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)jac->ksp[0]);CHKERRQ(ierr);
    ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats);CHKERRQ(ierr);
    ierr = KSPGetPC(jac->ksp[0],&mpjac->pc);CHKERRQ(ierr);

    /* the sub-KSP reads options using this PC's prefix plus "sub_" */
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(jac->ksp[0],prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(jac->ksp[0],"sub_");CHKERRQ(ierr);
    ierr = KSPGetOptionsPrefix(jac->ksp[0],&prefix);CHKERRQ(ierr);
    ierr = MatSetOptionsPrefix(mpjac->submats,prefix);CHKERRQ(ierr);

    /* create dummy vectors xsub and ysub; their arrays are supplied later
       via VecPlaceArray() in PCApply_BJacobi_Multiproc() */
    ierr = MatGetLocalSize(mpjac->submats,&m,&n);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(subcomm,1,n,PETSC_DECIDE,NULL,&mpjac->xsub);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(subcomm,1,m,PETSC_DECIDE,NULL,&mpjac->ysub);CHKERRQ(ierr);
    /* match the work-vector type to the matrix (e.g. GPU-backed vectors) */
    ierr = MatGetVecType(mpjac->submats,&vectype);CHKERRQ(ierr);
    ierr = VecSetType(mpjac->xsub,vectype);CHKERRQ(ierr);
    ierr = VecSetType(mpjac->ysub,vectype);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)mpjac->xsub);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)mpjac->ysub);CHKERRQ(ierr);

    /* switch the PC's function table to the multiproc implementations */
    pc->ops->reset   = PCReset_BJacobi_Multiproc;
    pc->ops->destroy = PCDestroy_BJacobi_Multiproc;
    pc->ops->apply   = PCApply_BJacobi_Multiproc;
    pc->ops->matapply= PCMatApply_BJacobi_Multiproc;
  } else { /* pc->setupcalled */
    subcomm = PetscSubcommChild(mpjac->psubcomm);
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrix blocks, then get new matrix blocks */
      if (mpjac->submats) {ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);}
      ierr = MatGetMultiProcBlock(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr);
    } else {
      ierr = MatGetMultiProcBlock(pc->pmat,subcomm,MAT_REUSE_MATRIX,&mpjac->submats);CHKERRQ(ierr);
    }
    ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats);CHKERRQ(ierr);
  }

  if (!wasSetup && pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(jac->ksp[0]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1296