xref: /petsc/src/ksp/pc/impls/bjacobi/bjacobi.c (revision 8cc058d9cd56c1ccb3be12a47760ddfc446aaffc)
1 
2 /*
3    Defines a block Jacobi preconditioner.
4 */
5 #include <petsc-private/pcimpl.h>              /*I "petscpc.h" I*/
6 #include <../src/ksp/pc/impls/bjacobi/bjacobi.h>
7 
8 static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC,Mat,Mat);
9 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC,Mat,Mat);
10 static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC);
11 
12 #undef __FUNCT__
13 #define __FUNCT__ "PCSetUp_BJacobi"
/*
   PCSetUp_BJacobi - Determines how the Jacobi blocks are laid out across
   processes, extracts the diagonal block matrices, and dispatches to the
   single-block, multi-block, or multi-process setup routine.
*/
static PetscErrorCode PCSetUp_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  Mat            mat  = pc->mat,pmat = pc->pmat;
  PetscErrorCode ierr,(*f)(Mat,Mat*);
  PetscInt       N,M,start,i,sum,end;
  PetscInt       bs,i_start=-1,i_end=-1;
  PetscMPIInt    rank,size;
  const char     *pprefix,*mprefix;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
  ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);

  /* fewer blocks than processes: each block is shared by several processes */
  if (jac->n > 0 && jac->n < size) {
    ierr = PCSetUp_BJacobi_Multiproc(pc);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* --------------------------------------------------------------------------
      Determines the number of blocks assigned to each processor
  -----------------------------------------------------------------------------*/

  /*   local block count  given */
  if (jac->n_local > 0 && jac->n < 0) {
    /* the global count is the sum of all per-process counts */
    ierr = MPI_Allreduce(&jac->n_local,&jac->n,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    if (jac->l_lens) { /* check that user set these correctly */
      sum = 0;
      for (i=0; i<jac->n_local; i++) {
        /* each block length must be a multiple of the matrix block size */
        if (jac->l_lens[i]/bs*bs !=jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
        sum += jac->l_lens[i];
      }
      if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local lens set incorrectly");
    } else {
      /* no lengths given: split the local rows as evenly as possible,
         giving the first (M/bs) % n_local blocks one extra bs-chunk */
      ierr = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
    }
  } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */
    /* global blocks given: determine which ones are local */
    if (jac->g_lens) {
      /* check if the g_lens has valid entries */
      for (i=0; i<jac->n; i++) {
        if (!jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Zero block not allowed");
        if (jac->g_lens[i]/bs*bs != jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
      }
      if (size == 1) {
        /* uniprocess: every global block is local */
        jac->n_local = jac->n;
        ierr         = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
        ierr         = PetscMemcpy(jac->l_lens,jac->g_lens,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
        /* check that user set these correctly */
        sum = 0;
        for (i=0; i<jac->n_local; i++) sum += jac->l_lens[i];
        if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Global lens set incorrectly");
      } else {
        ierr = MatGetOwnershipRange(pc->pmat,&start,&end);CHKERRQ(ierr);
        /* loop over blocks determining first one owned by me;
           a block boundary must coincide exactly with the ownership start */
        sum = 0;
        for (i=0; i<jac->n+1; i++) {
          if (sum == start) { i_start = i; goto start_1;}
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
start_1:
        /* continue the running sum to find the block ending at my ownership end */
        for (i=i_start; i<jac->n+1; i++) {
          if (sum == end) { i_end = i; goto end_1; }
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
end_1:
        jac->n_local = i_end - i_start;
        ierr         = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
        ierr         = PetscMemcpy(jac->l_lens,jac->g_lens+i_start,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
      }
    } else { /* no global blocks given, determine then using default layout */
      /* distribute the jac->n blocks round-robin over the processes,
         then split each process's rows evenly among its blocks */
      jac->n_local = jac->n/size + ((jac->n % size) > rank);
      ierr         = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        jac->l_lens[i] = ((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i))*bs;
        if (!jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Too many blocks given");
      }
    }
  } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */
    /* default: exactly one block per process, covering all local rows */
    jac->n         = size;
    jac->n_local   = 1;
    ierr           = PetscMalloc(sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
    jac->l_lens[0] = M;
  } else { /* jac->n > 0 && jac->n_local > 0 */
    if (!jac->l_lens) {
      ierr = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
    }
  }
  if (jac->n_local < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of blocks is less than number of processors");

  /* -------------------------
      Determines mat and pmat
  ---------------------------*/
  /* query whether the operator can supply its diagonal block */
  ierr = PetscObjectQueryFunction((PetscObject)pc->mat,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (!f && size == 1) {
    /* sequential and no diagonal-block routine: use the matrices directly */
    mat  = pc->mat;
    pmat = pc->pmat;
  } else {
    if (jac->use_true_local) {
      /* use block from true matrix, not preconditioner matrix for local MatMult() */
      ierr = MatGetDiagonalBlock(pc->mat,&mat);CHKERRQ(ierr);
      /* make submatrix have same prefix as entire matrix */
      ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->mat,&mprefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)mat,mprefix);CHKERRQ(ierr);
    }
    if (pc->pmat != pc->mat || !jac->use_true_local) {
      ierr = MatGetDiagonalBlock(pc->pmat,&pmat);CHKERRQ(ierr);
      /* make submatrix have same prefix as entire matrix */
      ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)pmat,pprefix);CHKERRQ(ierr);
    } else pmat = mat;
  }

  /* ------
     Setup code depends on the number of blocks
  */
  if (jac->n_local == 1) {
    ierr = PCSetUp_BJacobi_Singleblock(pc,mat,pmat);CHKERRQ(ierr);
  } else {
    ierr = PCSetUp_BJacobi_Multiblock(pc,mat,pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
143 
144 /* Default destroy, if it has never been setup */
145 #undef __FUNCT__
146 #define __FUNCT__ "PCDestroy_BJacobi"
147 static PetscErrorCode PCDestroy_BJacobi(PC pc)
148 {
149   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
150   PetscErrorCode ierr;
151 
152   PetscFunctionBegin;
153   ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
154   ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
155   ierr = PetscFree(pc->data);CHKERRQ(ierr);
156   PetscFunctionReturn(0);
157 }
158 
159 #undef __FUNCT__
160 #define __FUNCT__ "PCSetFromOptions_BJacobi"
161 
162 static PetscErrorCode PCSetFromOptions_BJacobi(PC pc)
163 {
164   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
165   PetscErrorCode ierr;
166   PetscInt       blocks;
167   PetscBool      flg;
168 
169   PetscFunctionBegin;
170   ierr = PetscOptionsHead("Block Jacobi options");CHKERRQ(ierr);
171   ierr = PetscOptionsInt("-pc_bjacobi_blocks","Total number of blocks","PCBJacobiSetTotalBlocks",jac->n,&blocks,&flg);CHKERRQ(ierr);
172   if (flg) {
173     ierr = PCBJacobiSetTotalBlocks(pc,blocks,NULL);CHKERRQ(ierr);
174   }
175   flg  = PETSC_FALSE;
176   ierr = PetscOptionsBool("-pc_bjacobi_truelocal","Use the true matrix, not preconditioner matrix to define matrix vector product in sub-problems","PCBJacobiSetUseTrueLocal",flg,&flg,NULL);CHKERRQ(ierr);
177   if (flg) {
178     ierr = PCBJacobiSetUseTrueLocal(pc);CHKERRQ(ierr);
179   }
180   ierr = PetscOptionsTail();CHKERRQ(ierr);
181   PetscFunctionReturn(0);
182 }
183 
184 #include <petscdraw.h>
185 #undef __FUNCT__
186 #define __FUNCT__ "PCView_BJacobi"
/*
   PCView_BJacobi - Prints block Jacobi state to ASCII, string, or draw
   viewers, including the inner KSP solver(s) when available.
*/
static PetscErrorCode PCView_BJacobi(PC pc,PetscViewer viewer)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscMPIInt          rank;
  PetscInt             i;
  PetscBool            iascii,isstring,isdraw;
  PetscViewer          sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (iascii) {
    if (jac->use_true_local) {
      ierr = PetscViewerASCIIPrintf(viewer,"  block Jacobi: using true local matrix, number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer,"  block Jacobi: number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (jac->same_local_solves) {
      /* all blocks share one solver configuration: view only the first KSP */
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
      if (jac->ksp && !jac->psubcomm) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          /* only rank 0 prints; the singleton viewer keeps output collective-safe */
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      } else if (jac->psubcomm && !jac->psubcomm->color) {
        /* multiproc case: the first subcommunicator views its shared KSP */
        ierr = PetscViewerASCIIGetStdout(mpjac->psubcomm->comm,&sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(*(jac->ksp),sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
    } else {
      PetscInt n_global;
      /* every process iterates max(n_local) times so the collective
         GetSingleton/RestoreSingleton calls stay matched across ranks */
      ierr = MPI_Allreduce(&jac->n_local,&n_global,1,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D, first local block number = %D\n",
                                                rank,jac->n_local,jac->first_local);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      for (i=0; i<n_global; i++) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (i < jac->n_local) {
          ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr);
          ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blks=%D",jac->n);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x,y,bottom,h;

    ierr   = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr   = PetscDrawGetCurrentPoint(draw,&x,&y);CHKERRQ(ierr);
    ierr   = PetscSNPrintf(str,25,"Number blocks %D",jac->n);CHKERRQ(ierr);
    ierr   = PetscDrawBoxedString(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);CHKERRQ(ierr);
    bottom = y - h;
    ierr   = PetscDrawPushCurrentPoint(draw,x,bottom);CHKERRQ(ierr);
    /* warning the communicator on viewer is different than on ksp in parallel */
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],viewer);CHKERRQ(ierr);}
    ierr = PetscDrawPopCurrentPoint(draw);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
266 
267 /* -------------------------------------------------------------------------------------*/
268 
269 #undef __FUNCT__
270 #define __FUNCT__ "PCBJacobiSetUseTrueLocal_BJacobi"
271 static PetscErrorCode  PCBJacobiSetUseTrueLocal_BJacobi(PC pc)
272 {
273   PC_BJacobi *jac;
274 
275   PetscFunctionBegin;
276   jac                 = (PC_BJacobi*)pc->data;
277   jac->use_true_local = PETSC_TRUE;
278   PetscFunctionReturn(0);
279 }
280 
281 #undef __FUNCT__
282 #define __FUNCT__ "PCBJacobiGetSubKSP_BJacobi"
283 static PetscErrorCode  PCBJacobiGetSubKSP_BJacobi(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
284 {
285   PC_BJacobi *jac = (PC_BJacobi*)pc->data;;
286 
287   PetscFunctionBegin;
288   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call KSPSetUp() or PCSetUp() first");
289 
290   if (n_local) *n_local = jac->n_local;
291   if (first_local) *first_local = jac->first_local;
292   *ksp                   = jac->ksp;
293   jac->same_local_solves = PETSC_FALSE;        /* Assume that local solves are now different;
294                                                   not necessarily true though!  This flag is
295                                                   used only for PCView_BJacobi() */
296   PetscFunctionReturn(0);
297 }
298 
299 #undef __FUNCT__
300 #define __FUNCT__ "PCBJacobiSetTotalBlocks_BJacobi"
301 static PetscErrorCode  PCBJacobiSetTotalBlocks_BJacobi(PC pc,PetscInt blocks,PetscInt *lens)
302 {
303   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
304   PetscErrorCode ierr;
305 
306   PetscFunctionBegin;
307   if (pc->setupcalled > 0 && jac->n!=blocks) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Cannot alter number of blocks after PCSetUp()/KSPSetUp() has been called");
308   jac->n = blocks;
309   if (!lens) jac->g_lens = 0;
310   else {
311     ierr = PetscMalloc(blocks*sizeof(PetscInt),&jac->g_lens);CHKERRQ(ierr);
312     ierr = PetscLogObjectMemory(pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
313     ierr = PetscMemcpy(jac->g_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
314   }
315   PetscFunctionReturn(0);
316 }
317 
318 #undef __FUNCT__
319 #define __FUNCT__ "PCBJacobiGetTotalBlocks_BJacobi"
320 static PetscErrorCode  PCBJacobiGetTotalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
321 {
322   PC_BJacobi *jac = (PC_BJacobi*) pc->data;
323 
324   PetscFunctionBegin;
325   *blocks = jac->n;
326   if (lens) *lens = jac->g_lens;
327   PetscFunctionReturn(0);
328 }
329 
330 #undef __FUNCT__
331 #define __FUNCT__ "PCBJacobiSetLocalBlocks_BJacobi"
332 static PetscErrorCode  PCBJacobiSetLocalBlocks_BJacobi(PC pc,PetscInt blocks,const PetscInt lens[])
333 {
334   PC_BJacobi     *jac;
335   PetscErrorCode ierr;
336 
337   PetscFunctionBegin;
338   jac = (PC_BJacobi*)pc->data;
339 
340   jac->n_local = blocks;
341   if (!lens) jac->l_lens = 0;
342   else {
343     ierr = PetscMalloc(blocks*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
344     ierr = PetscLogObjectMemory(pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
345     ierr = PetscMemcpy(jac->l_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
346   }
347   PetscFunctionReturn(0);
348 }
349 
350 #undef __FUNCT__
351 #define __FUNCT__ "PCBJacobiGetLocalBlocks_BJacobi"
352 static PetscErrorCode  PCBJacobiGetLocalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
353 {
354   PC_BJacobi *jac = (PC_BJacobi*) pc->data;
355 
356   PetscFunctionBegin;
357   *blocks = jac->n_local;
358   if (lens) *lens = jac->l_lens;
359   PetscFunctionReturn(0);
360 }
361 
362 /* -------------------------------------------------------------------------------------*/
363 
364 #undef __FUNCT__
365 #define __FUNCT__ "PCBJacobiSetUseTrueLocal"
366 /*@
367    PCBJacobiSetUseTrueLocal - Sets a flag to indicate that the block
368    problem is associated with the linear system matrix instead of the
369    default (where it is associated with the preconditioning matrix).
370    That is, if the local system is solved iteratively then it iterates
371    on the block from the matrix using the block from the preconditioner
372    as the preconditioner for the local block.
373 
374    Logically Collective on PC
375 
376    Input Parameters:
377 .  pc - the preconditioner context
378 
379    Options Database Key:
380 .  -pc_bjacobi_truelocal - Activates PCBJacobiSetUseTrueLocal()
381 
382    Notes:
383    For the common case in which the preconditioning and linear
384    system matrices are identical, this routine is unnecessary.
385 
386    Level: intermediate
387 
388 .keywords:  block, Jacobi, set, true, local, flag
389 
390 .seealso: PCSetOperators(), PCBJacobiSetLocalBlocks()
391 @*/
PetscErrorCode  PCBJacobiSetUseTrueLocal(PC pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscTryMethod: silently a no-op if the PC type is not block Jacobi */
  ierr = PetscTryMethod(pc,"PCBJacobiSetUseTrueLocal_C",(PC),(pc));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
401 
402 #undef __FUNCT__
403 #define __FUNCT__ "PCBJacobiGetSubKSP"
404 /*@C
405    PCBJacobiGetSubKSP - Gets the local KSP contexts for all blocks on
406    this processor.
407 
   Not Collective
409 
410    Input Parameter:
411 .  pc - the preconditioner context
412 
413    Output Parameters:
414 +  n_local - the number of blocks on this processor, or NULL
415 .  first_local - the global number of the first block on this processor, or NULL
416 -  ksp - the array of KSP contexts
417 
418    Notes:
419    After PCBJacobiGetSubKSP() the array of KSP contexts is not to be freed.
420 
421    Currently for some matrix implementations only 1 block per processor
422    is supported.
423 
424    You must call KSPSetUp() or PCSetUp() before calling PCBJacobiGetSubKSP().
425 
426    Fortran Usage: You must pass in a KSP array that is large enough to contain all the local KSPs.
427       You can call PCBJacobiGetSubKSP(pc,nlocal,firstlocal,NULL_OBJECT,ierr) to determine how large the
428       KSP array must be.
429 
430    Level: advanced
431 
432 .keywords:  block, Jacobi, get, sub, KSP, context
433 
434 .seealso: PCBJacobiGetSubKSP()
435 @*/
PetscErrorCode  PCBJacobiGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod: errors if the PC type does not provide this method */
  ierr = PetscUseMethod(pc,"PCBJacobiGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
445 
446 #undef __FUNCT__
447 #define __FUNCT__ "PCBJacobiSetTotalBlocks"
448 /*@
449    PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block
450    Jacobi preconditioner.
451 
452    Collective on PC
453 
454    Input Parameters:
455 +  pc - the preconditioner context
456 .  blocks - the number of blocks
457 -  lens - [optional] integer array containing the size of each block
458 
459    Options Database Key:
460 .  -pc_bjacobi_blocks <blocks> - Sets the number of global blocks
461 
462    Notes:
463    Currently only a limited number of blocking configurations are supported.
464    All processors sharing the PC must call this routine with the same data.
465 
466    Level: intermediate
467 
468 .keywords:  set, number, Jacobi, global, total, blocks
469 
470 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiSetLocalBlocks()
471 @*/
PetscErrorCode  PCBJacobiSetTotalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* at least one block is required globally */
  if (blocks <= 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Must have positive blocks");
  /* PetscTryMethod: silently a no-op if the PC type is not block Jacobi */
  ierr = PetscTryMethod(pc,"PCBJacobiSetTotalBlocks_C",(PC,PetscInt,const PetscInt[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
482 
483 #undef __FUNCT__
484 #define __FUNCT__ "PCBJacobiGetTotalBlocks"
485 /*@C
486    PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block
487    Jacobi preconditioner.
488 
489    Not Collective
490 
491    Input Parameter:
492 .  pc - the preconditioner context
493 
494    Output parameters:
495 +  blocks - the number of blocks
496 -  lens - integer array containing the size of each block
497 
498    Level: intermediate
499 
500 .keywords:  get, number, Jacobi, global, total, blocks
501 
502 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiGetLocalBlocks()
503 @*/
PetscErrorCode  PCBJacobiGetTotalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID,1);
  PetscValidIntPointer(blocks,2);
  /* PetscUseMethod: errors if the PC type does not provide this method */
  ierr = PetscUseMethod(pc,"PCBJacobiGetTotalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
514 
515 #undef __FUNCT__
516 #define __FUNCT__ "PCBJacobiSetLocalBlocks"
517 /*@
518    PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block
519    Jacobi preconditioner.
520 
521    Not Collective
522 
523    Input Parameters:
524 +  pc - the preconditioner context
525 .  blocks - the number of blocks
526 -  lens - [optional] integer array containing size of each block
527 
528    Note:
529    Currently only a limited number of blocking configurations are supported.
530 
531    Level: intermediate
532 
533 .keywords: PC, set, number, Jacobi, local, blocks
534 
535 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiSetTotalBlocks()
536 @*/
537 PetscErrorCode  PCBJacobiSetLocalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
538 {
539   PetscErrorCode ierr;
540 
541   PetscFunctionBegin;
542   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
543   if (blocks < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have nonegative blocks");
544   ierr = PetscTryMethod(pc,"PCBJacobiSetLocalBlocks_C",(PC,PetscInt,const PetscInt []),(pc,blocks,lens));CHKERRQ(ierr);
545   PetscFunctionReturn(0);
546 }
547 
548 #undef __FUNCT__
549 #define __FUNCT__ "PCBJacobiGetLocalBlocks"
550 /*@C
551    PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block
552    Jacobi preconditioner.
553 
554    Not Collective
555 
556    Input Parameters:
557 +  pc - the preconditioner context
558 .  blocks - the number of blocks
559 -  lens - [optional] integer array containing size of each block
560 
561    Note:
562    Currently only a limited number of blocking configurations are supported.
563 
564    Level: intermediate
565 
566 .keywords: PC, get, number, Jacobi, local, blocks
567 
568 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiGetTotalBlocks()
569 @*/
PetscErrorCode  PCBJacobiGetLocalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID,1);
  PetscValidIntPointer(blocks,2);
  /* PetscUseMethod: errors if the PC type does not provide this method */
  ierr = PetscUseMethod(pc,"PCBJacobiGetLocalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
580 
581 /* -----------------------------------------------------------------------------------*/
582 
583 /*MC
584    PCBJACOBI - Use block Jacobi preconditioning, each block is (approximately) solved with
585            its own KSP object.
586 
587    Options Database Keys:
588 .  -pc_bjacobi_truelocal - Activates PCBJacobiSetUseTrueLocal()
589 
590    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
591      than one processor. Defaults to one block per processor.
592 
     To set options on the solvers for each block append -sub_ to all the KSP and PC
594         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
595 
     To set the options on the solvers separately for each block call PCBJacobiGetSubKSP()
         and set the options directly on the resulting KSP object (you can access its PC
         with KSPGetPC())
599 
600    Level: beginner
601 
602    Concepts: block Jacobi
603 
604    Developer Notes: This preconditioner does not currently work with CUDA/CUSP for a couple of reasons.
605        (1) It creates seq vectors as work vectors that should be cusp
606        (2) The use of VecPlaceArray() is not handled properly by CUSP (that is it will not know where
607            the ownership of the vector is so may use wrong values) even if it did know the ownership
608            it may induce extra copy ups and downs. Satish suggests a VecTransplantArray() to handle two
609            vectors sharing the same pointer and handling the CUSP side as well instead of VecGetArray()/VecPlaceArray().
610 
611 
612 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
613            PCASM, PCBJacobiSetUseTrueLocal(), PCBJacobiGetSubKSP(), PCBJacobiSetTotalBlocks(),
614            PCBJacobiSetLocalBlocks(), PCSetModifySubmatrices()
615 M*/
616 
617 #undef __FUNCT__
618 #define __FUNCT__ "PCCreate_BJacobi"
619 PETSC_EXTERN PetscErrorCode PCCreate_BJacobi(PC pc)
620 {
621   PetscErrorCode ierr;
622   PetscMPIInt    rank;
623   PC_BJacobi     *jac;
624 
625   PetscFunctionBegin;
626   ierr = PetscNewLog(pc,PC_BJacobi,&jac);CHKERRQ(ierr);
627   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
628 
629   pc->ops->apply           = 0;
630   pc->ops->applytranspose  = 0;
631   pc->ops->setup           = PCSetUp_BJacobi;
632   pc->ops->destroy         = PCDestroy_BJacobi;
633   pc->ops->setfromoptions  = PCSetFromOptions_BJacobi;
634   pc->ops->view            = PCView_BJacobi;
635   pc->ops->applyrichardson = 0;
636 
637   pc->data               = (void*)jac;
638   jac->n                 = -1;
639   jac->n_local           = -1;
640   jac->first_local       = rank;
641   jac->ksp               = 0;
642   jac->use_true_local    = PETSC_FALSE;
643   jac->same_local_solves = PETSC_TRUE;
644   jac->g_lens            = 0;
645   jac->l_lens            = 0;
646   jac->psubcomm          = 0;
647 
648   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetUseTrueLocal_C","PCBJacobiSetUseTrueLocal_BJacobi",PCBJacobiSetUseTrueLocal_BJacobi);CHKERRQ(ierr);
649   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetSubKSP_C","PCBJacobiGetSubKSP_BJacobi",PCBJacobiGetSubKSP_BJacobi);CHKERRQ(ierr);
650   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetTotalBlocks_C","PCBJacobiSetTotalBlocks_BJacobi",PCBJacobiSetTotalBlocks_BJacobi);CHKERRQ(ierr);
651   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetTotalBlocks_C","PCBJacobiGetTotalBlocks_BJacobi",PCBJacobiGetTotalBlocks_BJacobi);CHKERRQ(ierr);
652   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetLocalBlocks_C","PCBJacobiSetLocalBlocks_BJacobi",PCBJacobiSetLocalBlocks_BJacobi);CHKERRQ(ierr);
653   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetLocalBlocks_C","PCBJacobiGetLocalBlocks_BJacobi",PCBJacobiGetLocalBlocks_BJacobi);CHKERRQ(ierr);
654   PetscFunctionReturn(0);
655 }
656 
657 /* --------------------------------------------------------------------------------------------*/
658 /*
659         These are for a single block per processor; works for AIJ, BAIJ; Seq and MPI
660 */
661 #undef __FUNCT__
662 #define __FUNCT__ "PCReset_BJacobi_Singleblock"
663 PetscErrorCode PCReset_BJacobi_Singleblock(PC pc)
664 {
665   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
666   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
667   PetscErrorCode         ierr;
668 
669   PetscFunctionBegin;
670   ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);
671   ierr = VecDestroy(&bjac->x);CHKERRQ(ierr);
672   ierr = VecDestroy(&bjac->y);CHKERRQ(ierr);
673   PetscFunctionReturn(0);
674 }
675 
676 #undef __FUNCT__
677 #define __FUNCT__ "PCDestroy_BJacobi_Singleblock"
678 PetscErrorCode PCDestroy_BJacobi_Singleblock(PC pc)
679 {
680   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
681   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
682   PetscErrorCode         ierr;
683 
684   PetscFunctionBegin;
685   ierr = PCReset_BJacobi_Singleblock(pc);CHKERRQ(ierr);
686   ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr);
687   ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
688   ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
689   ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
690   ierr = PetscFree(bjac);CHKERRQ(ierr);
691   ierr = PetscFree(pc->data);CHKERRQ(ierr);
692   PetscFunctionReturn(0);
693 }
694 
695 #undef __FUNCT__
696 #define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Singleblock"
697 PetscErrorCode PCSetUpOnBlocks_BJacobi_Singleblock(PC pc)
698 {
699   PetscErrorCode ierr;
700   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
701 
702   PetscFunctionBegin;
703   ierr = KSPSetUp(jac->ksp[0]);CHKERRQ(ierr);
704   PetscFunctionReturn(0);
705 }
706 
707 #undef __FUNCT__
708 #define __FUNCT__ "PCApply_BJacobi_Singleblock"
/*
   PCApply_BJacobi_Singleblock - Applies the preconditioner by solving the
   single local block system with the inner KSP.  The parallel vectors'
   arrays are temporarily aliased into the sequential work vectors, so no
   data is copied.  NOTE: the VecPlaceArray/solve/VecResetArray ordering
   below is strict and must not be rearranged.
*/
PetscErrorCode PCApply_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *x_array,*y_array;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() is to avoid having to copy the
    y vector into the bjac->x vector. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
  ierr = KSPSolve(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
  /* undo the aliasing before restoring the parallel vectors' arrays */
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
734 
735 #undef __FUNCT__
736 #define __FUNCT__ "PCApplySymmetricLeft_BJacobi_Singleblock"
/*
   PCApplySymmetricLeft_BJacobi_Singleblock - Applies only the symmetric
   left factor of the inner block preconditioner (e.g. the L^{-1} part of
   a Cholesky-like PC).  Uses the same zero-copy array aliasing as
   PCApply_BJacobi_Singleblock; the statement ordering is strict.
*/
PetscErrorCode PCApplySymmetricLeft_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *x_array,*y_array;
  PC                     subpc;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() is to avoid having to copy the
    y vector into the bjac->x vector. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
  /* apply the symmetric left portion of the inner PC operator */
  /* note this by-passes the inner KSP and its options completely */
  ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
  ierr = PCApplySymmetricLeft(subpc,bjac->x,bjac->y);CHKERRQ(ierr);
  /* undo the aliasing before restoring the parallel vectors' arrays */
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
766 
767 #undef __FUNCT__
768 #define __FUNCT__ "PCApplySymmetricRight_BJacobi_Singleblock"
769 PetscErrorCode PCApplySymmetricRight_BJacobi_Singleblock(PC pc,Vec x,Vec y)
770 {
771   PetscErrorCode         ierr;
772   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
773   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
774   PetscScalar            *x_array,*y_array;
775   PC                     subpc;
776 
777   PetscFunctionBegin;
778   /*
779       The VecPlaceArray() is to avoid having to copy the
780     y vector into the bjac->x vector. The reason for
781     the bjac->x vector is that we need a sequential vector
782     for the sequential solve.
783   */
784   ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
785   ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
786   ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
787   ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
788 
789   /* apply the symmetric right portion of the inner PC operator */
790   /* note this by-passes the inner KSP and its options completely */
791 
792   ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
793   ierr = PCApplySymmetricRight(subpc,bjac->x,bjac->y);CHKERRQ(ierr);
794 
795   ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
796   ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
797   PetscFunctionReturn(0);
798 }
799 
800 #undef __FUNCT__
801 #define __FUNCT__ "PCApplyTranspose_BJacobi_Singleblock"
802 PetscErrorCode PCApplyTranspose_BJacobi_Singleblock(PC pc,Vec x,Vec y)
803 {
804   PetscErrorCode         ierr;
805   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
806   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
807   PetscScalar            *x_array,*y_array;
808 
809   PetscFunctionBegin;
810   /*
811       The VecPlaceArray() is to avoid having to copy the
812     y vector into the bjac->x vector. The reason for
813     the bjac->x vector is that we need a sequential vector
814     for the sequential solve.
815   */
816   ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
817   ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
818   ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
819   ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
820   ierr = KSPSolveTranspose(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
821   ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
822   ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
823   ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
824   ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
825   PetscFunctionReturn(0);
826 }
827 
828 #undef __FUNCT__
829 #define __FUNCT__ "PCSetUp_BJacobi_Singleblock"
/*
   PCSetUp_BJacobi_Singleblock - Sets up block Jacobi for the case of a single
   block per process: creates (once) the sequential sub-KSP and the two
   array-less work vectors, then (re)attaches the operators on every call.

   Note: mat/pmat here are the local blocks extracted by the caller
   (PCSetUp_BJacobi), not pc->mat/pc->pmat themselves.
*/
static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC pc,Mat mat,Mat pmat)
{
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode         ierr;
  PetscInt               m;
  KSP                    ksp;
  PC_BJacobi_Singleblock *bjac;
  PetscBool              wasSetup = PETSC_TRUE;

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    const char *prefix;

    if (!jac->ksp) {
      /* First time through: create the sub-KSP and install the
         singleblock implementations of the PC operations. */
      wasSetup = PETSC_FALSE;

      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      /* sub-solver options live under <pc_prefix>sub_ */
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);

      pc->ops->reset               = PCReset_BJacobi_Singleblock;
      pc->ops->destroy             = PCDestroy_BJacobi_Singleblock;
      pc->ops->apply               = PCApply_BJacobi_Singleblock;
      pc->ops->applysymmetricleft  = PCApplySymmetricLeft_BJacobi_Singleblock;
      pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock;
      pc->ops->applytranspose      = PCApplyTranspose_BJacobi_Singleblock;
      pc->ops->setuponblocks       = PCSetUpOnBlocks_BJacobi_Singleblock;

      /* a one-element KSP array, so the generic PC_BJacobi code can
         treat the single block uniformly with the multiblock case */
      ierr        = PetscMalloc(sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
      jac->ksp[0] = ksp;

      ierr      = PetscNewLog(pc,PC_BJacobi_Singleblock,&bjac);CHKERRQ(ierr);
      jac->data = (void*)bjac;
    } else {
      /* sub-KSP already exists (e.g. after PCReset): reuse it */
      ksp  = jac->ksp[0];
      bjac = (PC_BJacobi_Singleblock*)jac->data;
    }

    /*
      The reason we need to generate these vectors is to serve
      as the right-hand side and solution vector for the solve on the
      block. We do not need to allocate space for the vectors since
      that is provided via VecPlaceArray() just before the call to
      KSPSolve() on the block.
    */
    ierr = MatGetSize(pmat,&m,&m);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->x);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->y);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,bjac->x);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,bjac->y);CHKERRQ(ierr);
  } else {
    /* subsequent setups: everything already exists */
    ksp  = jac->ksp[0];
    bjac = (PC_BJacobi_Singleblock*)jac->data;
  }
  /* attach (or refresh) the operators on the sub-KSP; with use_true_local
     the true operator mat is used for the solve, otherwise pmat is used
     for both roles */
  if (jac->use_true_local) {
    ierr = KSPSetOperators(ksp,mat,pmat,pc->flag);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(ksp,pmat,pmat,pc->flag);CHKERRQ(ierr);
  }
  /* only pull options the first time the sub-KSP is configured */
  if (!wasSetup && pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
898 
899 /* ---------------------------------------------------------------------------------------------*/
900 #undef __FUNCT__
901 #define __FUNCT__ "PCReset_BJacobi_Multiblock"
902 PetscErrorCode PCReset_BJacobi_Multiblock(PC pc)
903 {
904   PC_BJacobi            *jac  = (PC_BJacobi*)pc->data;
905   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
906   PetscErrorCode        ierr;
907   PetscInt              i;
908 
909   PetscFunctionBegin;
910   if (bjac && bjac->pmat) {
911     ierr = MatDestroyMatrices(jac->n_local,&bjac->pmat);CHKERRQ(ierr);
912     if (jac->use_true_local) {
913       ierr = MatDestroyMatrices(jac->n_local,&bjac->mat);CHKERRQ(ierr);
914     }
915   }
916 
917   for (i=0; i<jac->n_local; i++) {
918     ierr = KSPReset(jac->ksp[i]);CHKERRQ(ierr);
919     if (bjac && bjac->x) {
920       ierr = VecDestroy(&bjac->x[i]);CHKERRQ(ierr);
921       ierr = VecDestroy(&bjac->y[i]);CHKERRQ(ierr);
922       ierr = ISDestroy(&bjac->is[i]);CHKERRQ(ierr);
923     }
924   }
925   ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
926   ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
927   PetscFunctionReturn(0);
928 }
929 
930 #undef __FUNCT__
931 #define __FUNCT__ "PCDestroy_BJacobi_Multiblock"
932 PetscErrorCode PCDestroy_BJacobi_Multiblock(PC pc)
933 {
934   PC_BJacobi            *jac  = (PC_BJacobi*)pc->data;
935   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
936   PetscErrorCode        ierr;
937   PetscInt              i;
938 
939   PetscFunctionBegin;
940   ierr = PCReset_BJacobi_Multiblock(pc);CHKERRQ(ierr);
941   if (bjac) {
942     ierr = PetscFree2(bjac->x,bjac->y);CHKERRQ(ierr);
943     ierr = PetscFree(bjac->starts);CHKERRQ(ierr);
944     ierr = PetscFree(bjac->is);CHKERRQ(ierr);
945   }
946   ierr = PetscFree(jac->data);CHKERRQ(ierr);
947   for (i=0; i<jac->n_local; i++) {
948     ierr = KSPDestroy(&jac->ksp[i]);CHKERRQ(ierr);
949   }
950   ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
951   ierr = PetscFree(pc->data);CHKERRQ(ierr);
952   PetscFunctionReturn(0);
953 }
954 
955 #undef __FUNCT__
956 #define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Multiblock"
957 PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiblock(PC pc)
958 {
959   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
960   PetscErrorCode ierr;
961   PetscInt       i,n_local = jac->n_local;
962 
963   PetscFunctionBegin;
964   for (i=0; i<n_local; i++) {
965     ierr = KSPSetUp(jac->ksp[i]);CHKERRQ(ierr);
966   }
967   PetscFunctionReturn(0);
968 }
969 
970 /*
971       Preconditioner for block Jacobi
972 */
973 #undef __FUNCT__
974 #define __FUNCT__ "PCApply_BJacobi_Multiblock"
975 PetscErrorCode PCApply_BJacobi_Multiblock(PC pc,Vec x,Vec y)
976 {
977   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
978   PetscErrorCode        ierr;
979   PetscInt              i,n_local = jac->n_local;
980   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
981   PetscScalar           *xin,*yin;
982 
983   PetscFunctionBegin;
984   ierr = VecGetArray(x,&xin);CHKERRQ(ierr);
985   ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
986   for (i=0; i<n_local; i++) {
987     /*
988        To avoid copying the subvector from x into a workspace we instead
989        make the workspace vector array point to the subpart of the array of
990        the global vector.
991     */
992     ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
993     ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);
994 
995     ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
996     ierr = KSPSolve(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
997     ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
998 
999     ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
1000     ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
1001   }
1002   ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr);
1003   ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
1004   PetscFunctionReturn(0);
1005 }
1006 
1007 /*
1008       Preconditioner for block Jacobi
1009 */
1010 #undef __FUNCT__
1011 #define __FUNCT__ "PCApplyTranspose_BJacobi_Multiblock"
1012 PetscErrorCode PCApplyTranspose_BJacobi_Multiblock(PC pc,Vec x,Vec y)
1013 {
1014   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
1015   PetscErrorCode        ierr;
1016   PetscInt              i,n_local = jac->n_local;
1017   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
1018   PetscScalar           *xin,*yin;
1019 
1020   PetscFunctionBegin;
1021   ierr = VecGetArray(x,&xin);CHKERRQ(ierr);
1022   ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
1023   for (i=0; i<n_local; i++) {
1024     /*
1025        To avoid copying the subvector from x into a workspace we instead
1026        make the workspace vector array point to the subpart of the array of
1027        the global vector.
1028     */
1029     ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
1030     ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);
1031 
1032     ierr = PetscLogEventBegin(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
1033     ierr = KSPSolveTranspose(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
1034     ierr = PetscLogEventEnd(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
1035 
1036     ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
1037     ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
1038   }
1039   ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr);
1040   ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
1041   PetscFunctionReturn(0);
1042 }
1043 
1044 #undef __FUNCT__
1045 #define __FUNCT__ "PCSetUp_BJacobi_Multiblock"
1046 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC pc,Mat mat,Mat pmat)
1047 {
1048   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
1049   PetscErrorCode        ierr;
1050   PetscInt              m,n_local,N,M,start,i;
1051   const char            *prefix,*pprefix,*mprefix;
1052   KSP                   ksp;
1053   Vec                   x,y;
1054   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
1055   PC                    subpc;
1056   IS                    is;
1057   MatReuse              scall;
1058 
1059   PetscFunctionBegin;
1060   ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
1061 
1062   n_local = jac->n_local;
1063 
1064   if (jac->use_true_local) {
1065     PetscBool same;
1066     ierr = PetscObjectTypeCompare((PetscObject)mat,((PetscObject)pmat)->type_name,&same);CHKERRQ(ierr);
1067     if (!same) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_INCOMP,"Matrices not of same type");
1068   }
1069 
1070   if (!pc->setupcalled) {
1071     scall = MAT_INITIAL_MATRIX;
1072 
1073     if (!jac->ksp) {
1074       pc->ops->reset         = PCReset_BJacobi_Multiblock;
1075       pc->ops->destroy       = PCDestroy_BJacobi_Multiblock;
1076       pc->ops->apply         = PCApply_BJacobi_Multiblock;
1077       pc->ops->applytranspose= PCApplyTranspose_BJacobi_Multiblock;
1078       pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Multiblock;
1079 
1080       ierr = PetscNewLog(pc,PC_BJacobi_Multiblock,&bjac);CHKERRQ(ierr);
1081       ierr = PetscMalloc(n_local*sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
1082       ierr = PetscLogObjectMemory(pc,sizeof(n_local*sizeof(KSP)));CHKERRQ(ierr);
1083       ierr = PetscMalloc2(n_local,Vec,&bjac->x,n_local,Vec,&bjac->y);CHKERRQ(ierr);
1084       ierr = PetscMalloc(n_local*sizeof(PetscScalar),&bjac->starts);CHKERRQ(ierr);
1085       ierr = PetscLogObjectMemory(pc,sizeof(n_local*sizeof(PetscScalar)));CHKERRQ(ierr);
1086 
1087       jac->data = (void*)bjac;
1088       ierr      = PetscMalloc(n_local*sizeof(IS),&bjac->is);CHKERRQ(ierr);
1089       ierr      = PetscLogObjectMemory(pc,sizeof(n_local*sizeof(IS)));CHKERRQ(ierr);
1090 
1091       for (i=0; i<n_local; i++) {
1092         ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
1093         ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
1094         ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
1095         ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
1096         ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
1097         ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
1098         ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
1099         ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
1100 
1101         jac->ksp[i] = ksp;
1102       }
1103     } else {
1104       bjac = (PC_BJacobi_Multiblock*)jac->data;
1105     }
1106 
1107     start = 0;
1108     for (i=0; i<n_local; i++) {
1109       m = jac->l_lens[i];
1110       /*
1111       The reason we need to generate these vectors is to serve
1112       as the right-hand side and solution vector for the solve on the
1113       block. We do not need to allocate space for the vectors since
1114       that is provided via VecPlaceArray() just before the call to
1115       KSPSolve() on the block.
1116 
1117       */
1118       ierr = VecCreateSeq(PETSC_COMM_SELF,m,&x);CHKERRQ(ierr);
1119       ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&y);CHKERRQ(ierr);
1120       ierr = PetscLogObjectParent(pc,x);CHKERRQ(ierr);
1121       ierr = PetscLogObjectParent(pc,y);CHKERRQ(ierr);
1122 
1123       bjac->x[i]      = x;
1124       bjac->y[i]      = y;
1125       bjac->starts[i] = start;
1126 
1127       ierr        = ISCreateStride(PETSC_COMM_SELF,m,start,1,&is);CHKERRQ(ierr);
1128       bjac->is[i] = is;
1129       ierr        = PetscLogObjectParent(pc,is);CHKERRQ(ierr);
1130 
1131       start += m;
1132     }
1133   } else {
1134     bjac = (PC_BJacobi_Multiblock*)jac->data;
1135     /*
1136        Destroy the blocks from the previous iteration
1137     */
1138     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
1139       ierr = MatDestroyMatrices(n_local,&bjac->pmat);CHKERRQ(ierr);
1140       if (jac->use_true_local) {
1141         ierr = MatDestroyMatrices(n_local,&bjac->mat);CHKERRQ(ierr);
1142       }
1143       scall = MAT_INITIAL_MATRIX;
1144     } else scall = MAT_REUSE_MATRIX;
1145   }
1146 
1147   ierr = MatGetSubMatrices(pmat,n_local,bjac->is,bjac->is,scall,&bjac->pmat);CHKERRQ(ierr);
1148   if (jac->use_true_local) {
1149     ierr = PetscObjectGetOptionsPrefix((PetscObject)mat,&mprefix);CHKERRQ(ierr);
1150     ierr = MatGetSubMatrices(mat,n_local,bjac->is,bjac->is,scall,&bjac->mat);CHKERRQ(ierr);
1151   }
1152   /* Return control to the user so that the submatrices can be modified (e.g., to apply
1153      different boundary conditions for the submatrices than for the global problem) */
1154   ierr = PCModifySubMatrices(pc,n_local,bjac->is,bjac->is,bjac->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
1155 
1156   ierr = PetscObjectGetOptionsPrefix((PetscObject)pmat,&pprefix);CHKERRQ(ierr);
1157   for (i=0; i<n_local; i++) {
1158     ierr = PetscLogObjectParent(pc,bjac->pmat[i]);CHKERRQ(ierr);
1159     ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->pmat[i],pprefix);CHKERRQ(ierr);
1160     if (jac->use_true_local) {
1161       ierr = PetscLogObjectParent(pc,bjac->mat[i]);CHKERRQ(ierr);
1162       ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->mat[i],mprefix);CHKERRQ(ierr);
1163       ierr = KSPSetOperators(jac->ksp[i],bjac->mat[i],bjac->pmat[i],pc->flag);CHKERRQ(ierr);
1164     } else {
1165       ierr = KSPSetOperators(jac->ksp[i],bjac->pmat[i],bjac->pmat[i],pc->flag);CHKERRQ(ierr);
1166     }
1167     if (pc->setfromoptionscalled) {
1168       ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr);
1169     }
1170   }
1171   PetscFunctionReturn(0);
1172 }
1173 
1174 /* ---------------------------------------------------------------------------------------------*/
1175 /*
1176       These are for a single block with multiple processes;
1177 */
1178 #undef __FUNCT__
1179 #define __FUNCT__ "PCReset_BJacobi_Multiproc"
1180 static PetscErrorCode PCReset_BJacobi_Multiproc(PC pc)
1181 {
1182   PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
1183   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1184   PetscErrorCode       ierr;
1185 
1186   PetscFunctionBegin;
1187   ierr = VecDestroy(&mpjac->ysub);CHKERRQ(ierr);
1188   ierr = VecDestroy(&mpjac->xsub);CHKERRQ(ierr);
1189   ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);
1190   if (jac->ksp) {ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);}
1191   PetscFunctionReturn(0);
1192 }
1193 
1194 #undef __FUNCT__
1195 #define __FUNCT__ "PCDestroy_BJacobi_Multiproc"
1196 static PetscErrorCode PCDestroy_BJacobi_Multiproc(PC pc)
1197 {
1198   PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
1199   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1200   PetscErrorCode       ierr;
1201 
1202   PetscFunctionBegin;
1203   ierr = PCReset_BJacobi_Multiproc(pc);CHKERRQ(ierr);
1204   ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr);
1205   ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
1206   ierr = PetscSubcommDestroy(&mpjac->psubcomm);CHKERRQ(ierr);
1207 
1208   ierr = PetscFree(mpjac);CHKERRQ(ierr);
1209   ierr = PetscFree(pc->data);CHKERRQ(ierr);
1210   PetscFunctionReturn(0);
1211 }
1212 
1213 #undef __FUNCT__
1214 #define __FUNCT__ "PCApply_BJacobi_Multiproc"
1215 static PetscErrorCode PCApply_BJacobi_Multiproc(PC pc,Vec x,Vec y)
1216 {
1217   PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
1218   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1219   PetscErrorCode       ierr;
1220   PetscScalar          *xarray,*yarray;
1221 
1222   PetscFunctionBegin;
1223   /* place x's and y's local arrays into xsub and ysub */
1224   ierr = VecGetArray(x,&xarray);CHKERRQ(ierr);
1225   ierr = VecGetArray(y,&yarray);CHKERRQ(ierr);
1226   ierr = VecPlaceArray(mpjac->xsub,xarray);CHKERRQ(ierr);
1227   ierr = VecPlaceArray(mpjac->ysub,yarray);CHKERRQ(ierr);
1228 
1229   /* apply preconditioner on each matrix block */
1230   ierr = PetscLogEventBegin(PC_ApplyOnMproc,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr);
1231   ierr = KSPSolve(jac->ksp[0],mpjac->xsub,mpjac->ysub);CHKERRQ(ierr);
1232   ierr = PetscLogEventEnd(PC_ApplyOnMproc,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr);
1233 
1234   ierr = VecResetArray(mpjac->xsub);CHKERRQ(ierr);
1235   ierr = VecResetArray(mpjac->ysub);CHKERRQ(ierr);
1236   ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
1237   ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
1238   PetscFunctionReturn(0);
1239 }
1240 
1241 PETSC_EXTERN PetscErrorCode MatGetMultiProcBlock_MPIAIJ(Mat,MPI_Comm,MatReuse,Mat*);
1242 #undef __FUNCT__
1243 #define __FUNCT__ "PCSetUp_BJacobi_Multiproc"
/*
   PCSetUp_BJacobi_Multiproc - Sets up block Jacobi when a single block spans
   several processes: partitions the communicator into jac->n subcommunicators,
   extracts each subcommunicator's matrix block, and solves it with one
   parallel sub-KSP shared by the processes of that subcommunicator.
*/
static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscInt             m,n;
  MPI_Comm             comm,subcomm=0;
  const char           *prefix;
  PetscBool            wasSetup = PETSC_TRUE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  if (jac->n_local > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only a single block in a subcommunicator is supported");
  jac->n_local = 1; /* currently only a single block is supported for a subcommunicator */
  if (!pc->setupcalled) {
    /* first setup: build the subcommunicators, the matrix block, and the sub-KSP */
    wasSetup  = PETSC_FALSE;
    ierr      = PetscNewLog(pc,PC_BJacobi_Multiproc,&mpjac);CHKERRQ(ierr);
    jac->data = (void*)mpjac;

    /* initialize datastructure mpjac */
    if (!jac->psubcomm) {
      /* Create default contiguous subcommunicatiors if user does not provide them */
      ierr = PetscSubcommCreate(comm,&jac->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(jac->psubcomm,jac->n);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(jac->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
    }
    mpjac->psubcomm = jac->psubcomm;
    subcomm         = mpjac->psubcomm->comm;

    /* Get matrix blocks of pmat */
    ierr = MatGetMultiProcBlock_MPIAIJ(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr);

    /* create a new PC that processors in each subcomm have copy of */
    ierr = PetscMalloc(sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
    ierr = KSPCreate(subcomm,&jac->ksp[0]);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)jac->ksp[0],(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,jac->ksp[0]);CHKERRQ(ierr);
    ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats,pc->flag);CHKERRQ(ierr);
    ierr = KSPGetPC(jac->ksp[0],&mpjac->pc);CHKERRQ(ierr);

    /* sub-solver options live under <pc_prefix>sub_ */
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(jac->ksp[0],prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(jac->ksp[0],"sub_");CHKERRQ(ierr);
    /*
      PetscMPIInt rank,subsize,subrank;
      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
      ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
      ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);

      ierr = MatGetLocalSize(mpjac->submats,&m,NULL);CHKERRQ(ierr);
      ierr = MatGetSize(mpjac->submats,&n,NULL);CHKERRQ(ierr);
      ierr = PetscSynchronizedPrintf(comm,"[%d], sub-size %d,sub-rank %d\n",rank,subsize,subrank);
      ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
    */

    /* create dummy vectors xsub and ysub; their storage is supplied later
       via VecPlaceArray() in PCApply_BJacobi_Multiproc */
    ierr = MatGetLocalSize(mpjac->submats,&m,&n);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(subcomm,1,n,PETSC_DECIDE,NULL,&mpjac->xsub);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(subcomm,1,m,PETSC_DECIDE,NULL,&mpjac->ysub);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,mpjac->xsub);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,mpjac->ysub);CHKERRQ(ierr);

    /* install the multiproc implementations of the PC operations */
    pc->ops->reset   = PCReset_BJacobi_Multiproc;
    pc->ops->destroy = PCDestroy_BJacobi_Multiproc;
    pc->ops->apply   = PCApply_BJacobi_Multiproc;
  } else { /* pc->setupcalled */
    subcomm = mpjac->psubcomm->comm;
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrix blocks, then get new matrix blocks */
      if (mpjac->submats) {ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);}
      ierr = MatGetMultiProcBlock_MPIAIJ(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr);
    } else {
      /* same nonzero pattern: refresh the values in place */
      ierr = MatGetMultiProcBlock_MPIAIJ(pc->pmat,subcomm,MAT_REUSE_MATRIX,&mpjac->submats);CHKERRQ(ierr);
    }
    ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats,pc->flag);CHKERRQ(ierr);
  }

  /* only pull options the first time the sub-KSP is configured */
  if (!wasSetup && pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(jac->ksp[0]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1327