xref: /petsc/src/ksp/pc/impls/bjacobi/bjacobi.c (revision 7d0a6c19129e7069c8a40e210b34ed62989173db) !
1 
2 /*
3    Defines a block Jacobi preconditioner.
4 */
5 #include "private/pcimpl.h"              /*I "petscpc.h" I*/
6 #include "../src/ksp/pc/impls/bjacobi/bjacobi.h"
7 
8 static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC,Mat,Mat);
9 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC,Mat,Mat);
10 static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC);
11 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_BJacobi"
/*
   PCSetUp_BJacobi - Determines how the blocks are distributed across the
   processes, extracts the local diagonal block(s) of the operator and
   preconditioner matrices, and dispatches to the single-block, multi-block,
   or multi-process setup routine.
*/
static PetscErrorCode PCSetUp_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  Mat            mat = pc->mat,pmat = pc->pmat;
  PetscErrorCode ierr,(*f)(Mat,PetscBool *,MatReuse,Mat*);
  PetscInt       N,M,start,i,sum,end;
  PetscInt       bs,i_start=-1,i_end=-1;
  PetscMPIInt    rank,size;
  const char     *pprefix,*mprefix;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);

  /* fewer blocks than processes: each block is shared by several processes */
  if (jac->n > 0 && jac->n < size){
    ierr = PCSetUp_BJacobi_Multiproc(pc);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* --------------------------------------------------------------------------
      Determines the number of blocks assigned to each processor
  -----------------------------------------------------------------------------*/

  /*   local block count  given */
  if (jac->n_local > 0 && jac->n < 0) {
    ierr = MPI_Allreduce(&jac->n_local,&jac->n,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
    if (jac->l_lens) { /* check that user set these correctly */
      sum = 0;
      for (i=0; i<jac->n_local; i++) {
        /* every Jacobi block length must be a multiple of the matrix block size */
        if (jac->l_lens[i]/bs*bs !=jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
        sum += jac->l_lens[i];
      }
      if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local lens set incorrectly");
    } else {
      /* no lengths given: spread the M/bs matrix blocks evenly; the first
         (M/bs) % n_local Jacobi blocks each receive one extra matrix block */
      ierr = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
      }
    }
  } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */
    /* global blocks given: determine which ones are local */
    if (jac->g_lens) {
      /* check that g_lens has valid entries */
      for (i=0; i<jac->n; i++) {
        if (!jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Zero block not allowed");
        if (jac->g_lens[i]/bs*bs != jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
      }
      if (size == 1) {
        jac->n_local = jac->n;
        ierr         = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
        ierr         = PetscMemcpy(jac->l_lens,jac->g_lens,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
        /* check that user set these correctly */
        sum = 0;
        for (i=0; i<jac->n_local; i++) sum += jac->l_lens[i];
        if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Global lens set incorrectly");
      } else {
        ierr = MatGetOwnershipRange(pc->pmat,&start,&end);CHKERRQ(ierr);
        /* loop over blocks determing first one owned by me */
        sum = 0;
        for (i=0; i<jac->n+1; i++) {
          if (sum == start) { i_start = i; goto start_1;}
          if (i < jac->n) sum += jac->g_lens[i];
        }
        /* falling out of the loop means no block boundary coincides with this
           process's first row: the layouts are incompatible */
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
 start_1:
        /* continue accumulating (sum currently equals start) until the block
           boundary matching this process's last row is found */
        for (i=i_start; i<jac->n+1; i++) {
          if (sum == end) { i_end = i; goto end_1; }
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
 end_1:
        jac->n_local = i_end - i_start;
        ierr         = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
        ierr         = PetscMemcpy(jac->l_lens,jac->g_lens+i_start,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
      }
    } else { /* no global blocks given, determine them using default layout */
      jac->n_local = jac->n/size + ((jac->n % size) > rank);
      ierr         = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        jac->l_lens[i] = ((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i))*bs;
        if (!jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Too many blocks given");
      }
    }
  } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */
    jac->n         = size;
    jac->n_local   = 1;
    ierr           = PetscMalloc(sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
    jac->l_lens[0] = M;
  }
  if (jac->n_local < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of blocks is less than number of processors");

  /* -------------------------
      Determines mat and pmat
  ---------------------------*/
  ierr = PetscObjectQueryFunction((PetscObject)pc->mat,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (size == 1 && !f) {
    /* uniprocessor with no diagonal-block routine: use the matrices directly */
    mat  = pc->mat;
    pmat = pc->pmat;
  } else {
    PetscBool  iscopy;
    MatReuse   scall;

    if (jac->use_true_local) {
      /* use block from true matrix, not preconditioner matrix for local MatMult() */
      scall = MAT_INITIAL_MATRIX;
      if (pc->setupcalled) {
        if (pc->flag == SAME_NONZERO_PATTERN) {
          /* same pattern: reuse the previously extracted copy, if one was kept */
          if (jac->tp_mat) {
            scall = MAT_REUSE_MATRIX;
            mat   = jac->tp_mat;
          }
        } else {
          if (jac->tp_mat)  {
            ierr = MatDestroy(jac->tp_mat);CHKERRQ(ierr);
          }
        }
      }
      if (!f) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"This matrix does not support getting diagonal block");
      ierr = (*f)(pc->mat,&iscopy,scall,&mat);CHKERRQ(ierr);
      /* make submatrix have same prefix as entire matrix */
      ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->mat,&mprefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)mat,mprefix);CHKERRQ(ierr);
      if (iscopy) {
        /* the routine handed us a copy; remember it so it can be reused above
           and destroyed in PCReset_BJacobi_Singleblock() */
        jac->tp_mat = mat;
      }
    }
    if (pc->pmat != pc->mat || !jac->use_true_local) {
      scall = MAT_INITIAL_MATRIX;
      if (pc->setupcalled) {
        if (pc->flag == SAME_NONZERO_PATTERN) {
          if (jac->tp_pmat) {
            scall = MAT_REUSE_MATRIX;
            pmat   = jac->tp_pmat;
          }
        } else {
          if (jac->tp_pmat)  {
            ierr = MatDestroy(jac->tp_pmat);CHKERRQ(ierr);
          }
        }
      }
      ierr = PetscObjectQueryFunction((PetscObject)pc->pmat,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
      if (!f) {
        const char *type;
        ierr = PetscObjectGetType((PetscObject) pc->pmat,&type);CHKERRQ(ierr);
        SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"This matrix type, %s, does not support getting diagonal block", type);
      }
      ierr = (*f)(pc->pmat,&iscopy,scall,&pmat);CHKERRQ(ierr);
      /* make submatrix have same prefix as entire matrix */
      ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)pmat,pprefix);CHKERRQ(ierr);
      if (iscopy) {
        jac->tp_pmat = pmat;
      }
    } else {
      /* mat == pmat and the true-local block was already extracted above */
      pmat = mat;
    }
  }

  /* ------
     Setup code depends on the number of blocks
  */
  if (jac->n_local == 1) {
    ierr = PCSetUp_BJacobi_Singleblock(pc,mat,pmat);CHKERRQ(ierr);
  } else {
    ierr = PCSetUp_BJacobi_Multiblock(pc,mat,pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
184 
185 /* Default destroy, if it has never been setup */
186 #undef __FUNCT__
187 #define __FUNCT__ "PCDestroy_BJacobi"
188 static PetscErrorCode PCDestroy_BJacobi(PC pc)
189 {
190   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
191   PetscErrorCode ierr;
192 
193   PetscFunctionBegin;
194   ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
195   ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
196   ierr = PetscFree(pc->data);CHKERRQ(ierr);
197   PetscFunctionReturn(0);
198 }
199 
200 #undef __FUNCT__
201 #define __FUNCT__ "PCSetFromOptions_BJacobi"
202 
203 static PetscErrorCode PCSetFromOptions_BJacobi(PC pc)
204 {
205   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
206   PetscErrorCode ierr;
207   PetscInt       blocks;
208   PetscBool      flg;
209 
210   PetscFunctionBegin;
211   ierr = PetscOptionsHead("Block Jacobi options");CHKERRQ(ierr);
212     ierr = PetscOptionsInt("-pc_bjacobi_blocks","Total number of blocks","PCBJacobiSetTotalBlocks",jac->n,&blocks,&flg);CHKERRQ(ierr);
213     if (flg) {
214       ierr = PCBJacobiSetTotalBlocks(pc,blocks,PETSC_NULL);CHKERRQ(ierr);
215     }
216     flg  = PETSC_FALSE;
217     ierr = PetscOptionsBool("-pc_bjacobi_truelocal","Use the true matrix, not preconditioner matrix to define matrix vector product in sub-problems","PCBJacobiSetUseTrueLocal",flg,&flg,PETSC_NULL);CHKERRQ(ierr);
218     if (flg) {
219       ierr = PCBJacobiSetUseTrueLocal(pc);CHKERRQ(ierr);
220     }
221   ierr = PetscOptionsTail();CHKERRQ(ierr);
222   PetscFunctionReturn(0);
223 }
224 
#undef __FUNCT__
#define __FUNCT__ "PCView_BJacobi"
/*
   PCView_BJacobi - Prints the block layout and the sub-solver(s) to an
   ASCII or string viewer.
*/
static PetscErrorCode PCView_BJacobi(PC pc,PetscViewer viewer)
{
  PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscMPIInt          rank;
  PetscInt             i;
  PetscBool            iascii,isstring;
  PetscViewer          sviewer;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (jac->use_true_local) {
      ierr = PetscViewerASCIIPrintf(viewer,"  block Jacobi: using true local matrix, number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer,"  block Jacobi: number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
    if (jac->same_local_solves) {
      /* all blocks share the same solver configuration: print it only once */
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
      if (jac->ksp) {
        /* Get/RestoreSingleton are collective; only rank 0 actually prints */
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (!rank){
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      } else if (jac->psubcomm && !jac->psubcomm->color){
        /* multi-process blocks: the first subcommunicator prints its shared KSP.
           NOTE(review): the condition tests jac->psubcomm but the body uses
           mpjac->psubcomm — confirm both always refer to the same object */
        ierr = PetscViewerASCIIGetStdout(mpjac->psubcomm->comm,&sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(mpjac->ksp,sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
    } else {
      PetscInt n_global;
      /* solvers may differ per block: every process prints each of its blocks */
      ierr = MPI_Allreduce(&jac->n_local,&n_global,1,MPIU_INT,MPI_MAX,((PetscObject)pc)->comm);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D, first local block number = %D\n",
                   rank,jac->n_local,jac->first_local);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      /* loop to the global maximum block count so the collective
         Get/RestoreSingleton calls stay matched on every process */
      for (i=0; i<n_global; i++) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (i < jac->n_local) {
          ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr);
          ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blks=%D",jac->n);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for block Jacobi",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}
291 
292 /* -------------------------------------------------------------------------------------*/
293 
294 EXTERN_C_BEGIN
295 #undef __FUNCT__
296 #define __FUNCT__ "PCBJacobiSetUseTrueLocal_BJacobi"
297 PetscErrorCode  PCBJacobiSetUseTrueLocal_BJacobi(PC pc)
298 {
299   PC_BJacobi   *jac;
300 
301   PetscFunctionBegin;
302   jac                 = (PC_BJacobi*)pc->data;
303   jac->use_true_local = PETSC_TRUE;
304   PetscFunctionReturn(0);
305 }
306 EXTERN_C_END
307 
308 EXTERN_C_BEGIN
309 #undef __FUNCT__
310 #define __FUNCT__ "PCBJacobiGetSubKSP_BJacobi"
311 PetscErrorCode  PCBJacobiGetSubKSP_BJacobi(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
312 {
313   PC_BJacobi   *jac = (PC_BJacobi*)pc->data;;
314 
315   PetscFunctionBegin;
316   if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call KSPSetUp() or PCSetUp() first");
317 
318   if (n_local)     *n_local     = jac->n_local;
319   if (first_local) *first_local = jac->first_local;
320   *ksp                          = jac->ksp;
321   jac->same_local_solves        = PETSC_FALSE; /* Assume that local solves are now different;
322                                                   not necessarily true though!  This flag is
323                                                   used only for PCView_BJacobi() */
324   PetscFunctionReturn(0);
325 }
326 EXTERN_C_END
327 
328 EXTERN_C_BEGIN
329 #undef __FUNCT__
330 #define __FUNCT__ "PCBJacobiSetTotalBlocks_BJacobi"
331 PetscErrorCode  PCBJacobiSetTotalBlocks_BJacobi(PC pc,PetscInt blocks,PetscInt *lens)
332 {
333   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
334   PetscErrorCode ierr;
335 
336   PetscFunctionBegin;
337 
338   if (pc->setupcalled > 0 && jac->n!=blocks) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Cannot alter number of blocks after PCSetUp()/KSPSetUp() has been called");
339   jac->n = blocks;
340   if (!lens) {
341     jac->g_lens = 0;
342   } else {
343     ierr = PetscMalloc(blocks*sizeof(PetscInt),&jac->g_lens);CHKERRQ(ierr);
344     ierr = PetscLogObjectMemory(pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
345     ierr = PetscMemcpy(jac->g_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
346   }
347   PetscFunctionReturn(0);
348 }
349 EXTERN_C_END
350 
351 EXTERN_C_BEGIN
352 #undef __FUNCT__
353 #define __FUNCT__ "PCBJacobiGetTotalBlocks_BJacobi"
354 PetscErrorCode  PCBJacobiGetTotalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
355 {
356   PC_BJacobi *jac = (PC_BJacobi*) pc->data;
357 
358   PetscFunctionBegin;
359   *blocks = jac->n;
360   if (lens) *lens = jac->g_lens;
361   PetscFunctionReturn(0);
362 }
363 EXTERN_C_END
364 
365 EXTERN_C_BEGIN
366 #undef __FUNCT__
367 #define __FUNCT__ "PCBJacobiSetLocalBlocks_BJacobi"
368 PetscErrorCode  PCBJacobiSetLocalBlocks_BJacobi(PC pc,PetscInt blocks,const PetscInt lens[])
369 {
370   PC_BJacobi     *jac;
371   PetscErrorCode ierr;
372 
373   PetscFunctionBegin;
374   jac = (PC_BJacobi*)pc->data;
375 
376   jac->n_local = blocks;
377   if (!lens) {
378     jac->l_lens = 0;
379   } else {
380     ierr = PetscMalloc(blocks*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
381     ierr = PetscLogObjectMemory(pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
382     ierr = PetscMemcpy(jac->l_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
383   }
384   PetscFunctionReturn(0);
385 }
386 EXTERN_C_END
387 
388 EXTERN_C_BEGIN
389 #undef __FUNCT__
390 #define __FUNCT__ "PCBJacobiGetLocalBlocks_BJacobi"
391 PetscErrorCode  PCBJacobiGetLocalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
392 {
393   PC_BJacobi *jac = (PC_BJacobi*) pc->data;
394 
395   PetscFunctionBegin;
396   *blocks = jac->n_local;
397   if (lens) *lens = jac->l_lens;
398   PetscFunctionReturn(0);
399 }
400 EXTERN_C_END
401 
402 /* -------------------------------------------------------------------------------------*/
403 
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetUseTrueLocal"
/*@
   PCBJacobiSetUseTrueLocal - Sets a flag to indicate that the block
   problem is associated with the linear system matrix instead of the
   default (where it is associated with the preconditioning matrix).
   That is, if the local system is solved iteratively then it iterates
   on the block from the matrix using the block from the preconditioner
   as the preconditioner for the local block.

   Logically Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Options Database Key:
.  -pc_bjacobi_truelocal - Activates PCBJacobiSetUseTrueLocal()

   Notes:
   For the common case in which the preconditioning and linear
   system matrices are identical, this routine is unnecessary.

   Level: intermediate

.keywords:  block, Jacobi, set, true, local, flag

.seealso: PCSetOperators(), PCBJacobiSetLocalBlocks()
@*/
PetscErrorCode  PCBJacobiSetUseTrueLocal(PC pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* no-op if the PC is not of type PCBJACOBI (PetscTryMethod) */
  ierr = PetscTryMethod(pc,"PCBJacobiSetUseTrueLocal_C",(PC),(pc));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
441 
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetSubKSP"
/*@C
   PCBJacobiGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor, or PETSC_NULL
.  first_local - the global number of the first block on this processor, or PETSC_NULL
-  ksp - the array of KSP contexts

   Notes:
   After PCBJacobiGetSubKSP() the array of KSP contexts is not to be freed.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() or PCSetUp() before calling PCBJacobiGetSubKSP().

   Level: advanced

.keywords:  block, Jacobi, get, sub, KSP, context

.seealso: PCBJacobiSetTotalBlocks(), PCBJacobiSetLocalBlocks()
@*/
PetscErrorCode  PCBJacobiGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* errors (rather than silently ignoring) if the PC is not PCBJACOBI */
  ierr = PetscUseMethod(pc,"PCBJacobiGetSubKSP_C",(PC,PetscInt *,PetscInt *,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
481 
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetTotalBlocks"
/*@
   PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block
   Jacobi preconditioner.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  blocks - the number of blocks
-  lens - [optional] integer array containing the size of each block

   Options Database Key:
.  -pc_bjacobi_blocks <blocks> - Sets the number of global blocks

   Notes:
   Currently only a limited number of blocking configurations are supported.
   All processors sharing the PC must call this routine with the same data.

   The lens array, if provided, is copied; the caller retains ownership of it.

   Level: intermediate

.keywords:  set, number, Jacobi, global, total, blocks

.seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiSetLocalBlocks()
@*/
PetscErrorCode  PCBJacobiSetTotalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (blocks <= 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have positive blocks");
  /* no-op if the PC is not of type PCBJACOBI (PetscTryMethod) */
  ierr = PetscTryMethod(pc,"PCBJacobiSetTotalBlocks_C",(PC,PetscInt,const PetscInt[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
518 
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetTotalBlocks"
/*@C
   PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block
   Jacobi preconditioner.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output parameters:
+  blocks - the number of blocks
-  lens - integer array containing the size of each block

   Notes:
   The lens array points at the PC's internal storage and must not be freed
   by the caller.

   Level: intermediate

.keywords:  get, number, Jacobi, global, total, blocks

.seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiGetLocalBlocks()
@*/
PetscErrorCode  PCBJacobiGetTotalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID,1);
  PetscValidIntPointer(blocks,2);
  ierr = PetscUseMethod(pc,"PCBJacobiGetTotalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
550 
551 #undef __FUNCT__
552 #define __FUNCT__ "PCBJacobiSetLocalBlocks"
553 /*@
554    PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block
555    Jacobi preconditioner.
556 
557    Not Collective
558 
559    Input Parameters:
560 +  pc - the preconditioner context
561 .  blocks - the number of blocks
562 -  lens - [optional] integer array containing size of each block
563 
564    Note:
565    Currently only a limited number of blocking configurations are supported.
566 
567    Level: intermediate
568 
569 .keywords: PC, set, number, Jacobi, local, blocks
570 
571 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiSetTotalBlocks()
572 @*/
573 PetscErrorCode  PCBJacobiSetLocalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
574 {
575   PetscErrorCode ierr;
576 
577   PetscFunctionBegin;
578   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
579   if (blocks < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have nonegative blocks");
580   ierr = PetscTryMethod(pc,"PCBJacobiSetLocalBlocks_C",(PC,PetscInt,const PetscInt []),(pc,blocks,lens));CHKERRQ(ierr);
581   PetscFunctionReturn(0);
582 }
583 
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetLocalBlocks"
/*@C
   PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block
   Jacobi preconditioner.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  blocks - the number of blocks
-  lens - [optional] integer array containing size of each block

   Notes:
   The lens array points at the PC's internal storage and must not be freed
   by the caller.

   Level: intermediate

.keywords: PC, get, number, Jacobi, local, blocks

.seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiGetTotalBlocks()
@*/
PetscErrorCode  PCBJacobiGetLocalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID,1);
  PetscValidIntPointer(blocks,2);
  ierr = PetscUseMethod(pc,"PCBJacobiGetLocalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
616 
617 /* -----------------------------------------------------------------------------------*/
618 
619 /*MC
620    PCBJACOBI - Use block Jacobi preconditioning, each block is (approximately) solved with
621            its own KSP object.
622 
623    Options Database Keys:
624 .  -pc_bjacobi_truelocal - Activates PCBJacobiSetUseTrueLocal()
625 
626    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
627      than one processor. Defaults to one block per processor.
628 
     To set options on the solvers for each block append -sub_ to all the KSP and PC
        options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
631 
     To set the options on the solvers separately for each block call PCBJacobiGetSubKSP()
         and set the options directly on the resulting KSP object (you can access its PC
         with KSPGetPC())
635 
636    Level: beginner
637 
638    Concepts: block Jacobi
639 
640 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
641            PCASM, PCBJacobiSetUseTrueLocal(), PCBJacobiGetSubKSP(), PCBJacobiSetTotalBlocks(),
642            PCBJacobiSetLocalBlocks(), PCSetModifySubmatrices()
643 M*/
644 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_BJacobi"
/*
   PCCreate_BJacobi - Creates the block Jacobi context: installs the PC
   operations, initializes the block layout to "undetermined" (-1), and
   registers the PCBJacobiXXX_BJacobi methods that the public API wrappers
   look up by name.
*/
PetscErrorCode  PCCreate_BJacobi(PC pc)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PC_BJacobi     *jac;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_BJacobi,&jac);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
  /* apply/applytranspose/applyrichardson start out 0; presumably installed
     once setup chooses the block variant — confirm in PCSetUp_BJacobi_* */
  pc->ops->apply              = 0;
  pc->ops->applytranspose     = 0;
  pc->ops->setup              = PCSetUp_BJacobi;
  pc->ops->destroy            = PCDestroy_BJacobi;
  pc->ops->setfromoptions     = PCSetFromOptions_BJacobi;
  pc->ops->view               = PCView_BJacobi;
  pc->ops->applyrichardson    = 0;

  pc->data               = (void*)jac;
  jac->n                 = -1;   /* -1 == block counts not yet specified */
  jac->n_local           = -1;
  jac->first_local       = rank;
  jac->ksp               = 0;
  jac->use_true_local    = PETSC_FALSE;
  jac->same_local_solves = PETSC_TRUE;
  jac->g_lens            = 0;
  jac->l_lens            = 0;
  jac->tp_mat            = 0;
  jac->tp_pmat           = 0;
  jac->psubcomm          = 0;

  /* compose the type-specific methods the PCBJacobiXXX() wrappers query */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetUseTrueLocal_C",
                    "PCBJacobiSetUseTrueLocal_BJacobi",
                    PCBJacobiSetUseTrueLocal_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetSubKSP_C","PCBJacobiGetSubKSP_BJacobi",
                    PCBJacobiGetSubKSP_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetTotalBlocks_C","PCBJacobiSetTotalBlocks_BJacobi",
                    PCBJacobiSetTotalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetTotalBlocks_C","PCBJacobiGetTotalBlocks_BJacobi",
                    PCBJacobiGetTotalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetLocalBlocks_C","PCBJacobiSetLocalBlocks_BJacobi",
                    PCBJacobiSetLocalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetLocalBlocks_C","PCBJacobiGetLocalBlocks_BJacobi",
                    PCBJacobiGetLocalBlocks_BJacobi);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END
695 
696 /* --------------------------------------------------------------------------------------------*/
697 /*
698         These are for a single block per processor; works for AIJ, BAIJ; Seq and MPI
699 */
700 #undef __FUNCT__
701 #define __FUNCT__ "PCReset_BJacobi_Singleblock"
702 PetscErrorCode PCReset_BJacobi_Singleblock(PC pc)
703 {
704   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
705   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
706   PetscErrorCode         ierr;
707 
708   PetscFunctionBegin;
709   /*
710         If the on processor block had to be generated via a MatGetDiagonalBlock()
711      that creates a copy, this frees the space
712   */
713   if (jac->tp_mat) {
714     ierr = MatDestroy(jac->tp_mat);CHKERRQ(ierr);
715   }
716   if (jac->tp_pmat) {
717     ierr = MatDestroy(jac->tp_pmat);CHKERRQ(ierr);
718   }
719 
720   ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);
721   if (bjac->x) {ierr = VecDestroy(bjac->x);CHKERRQ(ierr);}
722   if (bjac->y) {ierr = VecDestroy(bjac->y);CHKERRQ(ierr);}
723   PetscFunctionReturn(0);
724 }
725 
726 #undef __FUNCT__
727 #define __FUNCT__ "PCDestroy_BJacobi_Singleblock"
728 PetscErrorCode PCDestroy_BJacobi_Singleblock(PC pc)
729 {
730   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
731   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
732   PetscErrorCode         ierr;
733 
734   PetscFunctionBegin;
735   ierr = PCReset_BJacobi_Singleblock(pc);CHKERRQ(ierr);
736   ierr = KSPDestroy(jac->ksp[0]);CHKERRQ(ierr);
737   ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
738   ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
739   ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
740   ierr = PetscFree(bjac);CHKERRQ(ierr);
741   ierr = PetscFree(pc->data);CHKERRQ(ierr);
742   PetscFunctionReturn(0);
743 }
744 
745 #undef __FUNCT__
746 #define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Singleblock"
747 PetscErrorCode PCSetUpOnBlocks_BJacobi_Singleblock(PC pc)
748 {
749   PetscErrorCode ierr;
750   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
751 
752   PetscFunctionBegin;
753   ierr = KSPSetUp(jac->ksp[0]);CHKERRQ(ierr);
754   PetscFunctionReturn(0);
755 }
756 
#undef __FUNCT__
#define __FUNCT__ "PCApply_BJacobi_Singleblock"
/*
   PCApply_BJacobi_Singleblock - Applies the preconditioner: approximately
   solves the single local block system with the block's KSP, reading from
   x and writing the result into y.
*/
PetscErrorCode PCApply_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *x_array,*y_array;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() is to avoid having to copy the
    y vector into the bjac->x vector. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  /* alias the parallel vectors' local storage into the sequential work vectors */
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
  ierr = KSPSolve(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
  /* detach the borrowed arrays before giving them back to x and y */
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
784 
#undef __FUNCT__
#define __FUNCT__ "PCApplySymmetricLeft_BJacobi_Singleblock"
/*
   PCApplySymmetricLeft_BJacobi_Singleblock - Applies only the symmetric-left
   part of the block's inner PC to x, writing the result into y.
*/
PetscErrorCode PCApplySymmetricLeft_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *x_array,*y_array;
  PC                     subpc;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() is to avoid having to copy the
    y vector into the bjac->x vector. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  /* alias the parallel vectors' local storage into the sequential work vectors */
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);

  /* apply the symmetric left portion of the inner PC operator */
  /* note this by-passes the inner KSP and its options completely */

  ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
  ierr = PCApplySymmetricLeft(subpc,bjac->x,bjac->y);CHKERRQ(ierr);
  /* detach the borrowed arrays before giving them back to x and y */
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);

  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
819 
820 #undef __FUNCT__
821 #define __FUNCT__ "PCApplySymmetricRight_BJacobi_Singleblock"
822 PetscErrorCode PCApplySymmetricRight_BJacobi_Singleblock(PC pc,Vec x,Vec y)
823 {
824   PetscErrorCode         ierr;
825   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
826   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
827   PetscScalar            *x_array,*y_array;
828   PC                     subpc;
829 
830   PetscFunctionBegin;
831   /*
832       The VecPlaceArray() is to avoid having to copy the
833     y vector into the bjac->x vector. The reason for
834     the bjac->x vector is that we need a sequential vector
835     for the sequential solve.
836   */
837   ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
838   ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
839   ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
840   ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
841 
842   /* apply the symmetric right portion of the inner PC operator */
843   /* note this by-passes the inner KSP and its options completely */
844 
845   ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
846   ierr = PCApplySymmetricRight(subpc,bjac->x,bjac->y);CHKERRQ(ierr);
847 
848   ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
849   ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
850   PetscFunctionReturn(0);
851 }
852 
853 #undef __FUNCT__
854 #define __FUNCT__ "PCApplyTranspose_BJacobi_Singleblock"
855 PetscErrorCode PCApplyTranspose_BJacobi_Singleblock(PC pc,Vec x,Vec y)
856 {
857   PetscErrorCode         ierr;
858   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
859   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
860   PetscScalar            *x_array,*y_array;
861 
862   PetscFunctionBegin;
863   /*
864       The VecPlaceArray() is to avoid having to copy the
865     y vector into the bjac->x vector. The reason for
866     the bjac->x vector is that we need a sequential vector
867     for the sequential solve.
868   */
869   ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
870   ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
871   ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
872   ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
873   ierr = KSPSolveTranspose(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
874   ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
875   ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
876   ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
877   ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
878   PetscFunctionReturn(0);
879 }
880 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_BJacobi_Singleblock"
/*
   PCSetUp_BJacobi_Singleblock - Sets up block Jacobi for exactly one block
   per process.  On the first call it creates the sub-KSP (default type
   KSPPREONLY, i.e. a direct application of the sub-PC with no Krylov method)
   and two sequential work vectors whose storage is supplied later via
   VecPlaceArray(); on subsequent calls it only refreshes the operators on
   the existing sub-KSP.

   Input Parameters:
.  pc   - the block Jacobi preconditioner context
.  mat  - the "true" local matrix, used when jac->use_true_local is set
.  pmat - the local preconditioning matrix
*/
static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC pc,Mat mat,Mat pmat)
{
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode         ierr;
  PetscInt               m;
  KSP                    ksp;
  Vec                    x,y;
  PC_BJacobi_Singleblock *bjac;
  PetscBool              wasSetup;

  PetscFunctionBegin;

  /* set default direct solver with no Krylov method */
  if (!pc->setupcalled) {
    const char *prefix;
    wasSetup = PETSC_FALSE;
    ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
    ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
    /* inherit the outer prefix and append "sub_" so options such as
       -sub_pc_type reach the block solver */
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
    /*
      The reason we need to generate these vectors is to serve
      as the right-hand side and solution vector for the solve on the
      block. We do not need to allocate space for the vectors since
      that is provided via VecPlaceArray() just before the call to
      KSPSolve() on the block.
    */
    ierr = MatGetSize(pmat,&m,&m);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&x);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&y);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,x);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,y);CHKERRQ(ierr);

    /* install the single-block implementations of the PC operations */
    pc->ops->reset               = PCReset_BJacobi_Singleblock;
    pc->ops->destroy             = PCDestroy_BJacobi_Singleblock;
    pc->ops->apply               = PCApply_BJacobi_Singleblock;
    pc->ops->applysymmetricleft  = PCApplySymmetricLeft_BJacobi_Singleblock;
    pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock;
    pc->ops->applytranspose      = PCApplyTranspose_BJacobi_Singleblock;
    pc->ops->setuponblocks       = PCSetUpOnBlocks_BJacobi_Singleblock;

    ierr = PetscNewLog(pc,PC_BJacobi_Singleblock,&bjac);CHKERRQ(ierr);
    bjac->x      = x;
    bjac->y      = y;

    /* jac->ksp is an array of length 1 in the single-block case */
    ierr = PetscMalloc(sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
    jac->ksp[0] = ksp;
    jac->data    = (void*)bjac;
  } else {
    wasSetup = PETSC_TRUE;
    ksp = jac->ksp[0];
    bjac = (PC_BJacobi_Singleblock *)jac->data;
  }
  /* use the "true" local matrix as the operator only when requested */
  if (jac->use_true_local) {
    ierr = KSPSetOperators(ksp,mat,pmat,pc->flag);CHKERRQ(ierr);
  }  else {
    ierr = KSPSetOperators(ksp,pmat,pmat,pc->flag);CHKERRQ(ierr);
  }
  /* process options only on the first setup so later setups do not
     clobber settings the user may have changed programmatically */
  if (!wasSetup && pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
949 
950 /* ---------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "PCReset_BJacobi_Multiblock"
/*
   PCReset_BJacobi_Multiblock - Frees everything built by
   PCSetUp_BJacobi_Multiblock (submatrices, work vectors, index sets, and
   the block-length arrays) while only *resetting* the sub-KSP objects,
   so the preconditioner can be set up again with new operators.
*/
PetscErrorCode PCReset_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscErrorCode        ierr;
  PetscInt              i;

  PetscFunctionBegin;
  /* bjac may be NULL if setup never ran; pmat/mat are arrays of n_local
     sequential submatrices extracted by MatGetSubMatrices() */
  if (bjac && bjac->pmat) {
    ierr = MatDestroyMatrices(jac->n_local,&bjac->pmat);CHKERRQ(ierr);
    if (jac->use_true_local) {
      ierr = MatDestroyMatrices(jac->n_local,&bjac->mat);CHKERRQ(ierr);
    }
  }

  /*
        If the on processor block had to be generated via a MatGetDiagonalBlock()
     that creates a copy, this frees the space
  */
  if (jac->tp_mat) {
    ierr = MatDestroy(jac->tp_mat);CHKERRQ(ierr);
  }
  if (jac->tp_pmat) {
    ierr = MatDestroy(jac->tp_pmat);CHKERRQ(ierr);
  }

  /* reset each block solver and destroy its work vectors and index set */
  for (i=0; i<jac->n_local; i++) {
    ierr = KSPReset(jac->ksp[i]);CHKERRQ(ierr);
    if (bjac && bjac->x) {
      ierr = VecDestroy(bjac->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(bjac->y[i]);CHKERRQ(ierr);
      ierr = ISDestroy(bjac->is[i]);CHKERRQ(ierr);
    }
  }
  /* free the per-block bookkeeping arrays; jac->data == bjac */
  if (bjac) {
    ierr = PetscFree2(bjac->x,bjac->y);CHKERRQ(ierr);
    ierr = PetscFree(bjac->starts);CHKERRQ(ierr);
    ierr = PetscFree(bjac->is);CHKERRQ(ierr);
  }
  ierr = PetscFree(jac->data);CHKERRQ(ierr);
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
997 
998 #undef __FUNCT__
999 #define __FUNCT__ "PCDestroy_BJacobi_Multiblock"
1000 PetscErrorCode PCDestroy_BJacobi_Multiblock(PC pc)
1001 {
1002   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
1003   PetscErrorCode        ierr;
1004   PetscInt              i;
1005 
1006   PetscFunctionBegin;
1007   ierr = PCReset_BJacobi_Multiblock(pc);CHKERRQ(ierr);
1008   for (i=0; i<jac->n_local; i++) {
1009     ierr = KSPDestroy(jac->ksp[i]);CHKERRQ(ierr);
1010   }
1011   ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
1012   ierr = PetscFree(pc->data);CHKERRQ(ierr);
1013   PetscFunctionReturn(0);
1014 }
1015 
1016 #undef __FUNCT__
1017 #define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Multiblock"
1018 PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiblock(PC pc)
1019 {
1020   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
1021   PetscErrorCode ierr;
1022   PetscInt       i,n_local = jac->n_local;
1023 
1024   PetscFunctionBegin;
1025   for (i=0; i<n_local; i++) {
1026     ierr = KSPSetUp(jac->ksp[i]);CHKERRQ(ierr);
1027   }
1028   PetscFunctionReturn(0);
1029 }
1030 
1031 /*
1032       Preconditioner for block Jacobi
1033 */
1034 #undef __FUNCT__
1035 #define __FUNCT__ "PCApply_BJacobi_Multiblock"
1036 PetscErrorCode PCApply_BJacobi_Multiblock(PC pc,Vec x,Vec y)
1037 {
1038   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
1039   PetscErrorCode        ierr;
1040   PetscInt              i,n_local = jac->n_local;
1041   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
1042   PetscScalar           *xin,*yin;
1043 
1044   PetscFunctionBegin;
1045   ierr = VecGetArray(x,&xin);CHKERRQ(ierr);
1046   ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
1047   for (i=0; i<n_local; i++) {
1048     /*
1049        To avoid copying the subvector from x into a workspace we instead
1050        make the workspace vector array point to the subpart of the array of
1051        the global vector.
1052     */
1053     ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
1054     ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);
1055 
1056     ierr = PetscLogEventBegin(PC_SetUpOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
1057     ierr = KSPSolve(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
1058     ierr = PetscLogEventEnd(PC_SetUpOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
1059 
1060     ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
1061     ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
1062   }
1063   ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr);
1064   ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
1065   PetscFunctionReturn(0);
1066 }
1067 
1068 /*
1069       Preconditioner for block Jacobi
1070 */
1071 #undef __FUNCT__
1072 #define __FUNCT__ "PCApplyTranspose_BJacobi_Multiblock"
1073 PetscErrorCode PCApplyTranspose_BJacobi_Multiblock(PC pc,Vec x,Vec y)
1074 {
1075   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
1076   PetscErrorCode        ierr;
1077   PetscInt              i,n_local = jac->n_local;
1078   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
1079   PetscScalar           *xin,*yin;
1080 
1081   PetscFunctionBegin;
1082   ierr = VecGetArray(x,&xin);CHKERRQ(ierr);
1083   ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
1084   for (i=0; i<n_local; i++) {
1085     /*
1086        To avoid copying the subvector from x into a workspace we instead
1087        make the workspace vector array point to the subpart of the array of
1088        the global vector.
1089     */
1090     ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
1091     ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);
1092 
1093     ierr = PetscLogEventBegin(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
1094     ierr = KSPSolveTranspose(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
1095     ierr = PetscLogEventEnd(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
1096   }
1097   ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr);
1098   ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
1099   PetscFunctionReturn(0);
1100 }
1101 
1102 #undef __FUNCT__
1103 #define __FUNCT__ "PCSetUp_BJacobi_Multiblock"
1104 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC pc,Mat mat,Mat pmat)
1105 {
1106   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
1107   PetscErrorCode         ierr;
1108   PetscInt               m,n_local,N,M,start,i;
1109   const char             *prefix,*pprefix,*mprefix;
1110   KSP                    ksp;
1111   Vec                    x,y;
1112   PC_BJacobi_Multiblock  *bjac = (PC_BJacobi_Multiblock*)jac->data;
1113   PC                     subpc;
1114   IS                     is;
1115   MatReuse               scall = MAT_REUSE_MATRIX;
1116 
1117   PetscFunctionBegin;
1118   ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
1119 
1120   n_local = jac->n_local;
1121 
1122   if (jac->use_true_local) {
1123     PetscBool  same;
1124     ierr = PetscTypeCompare((PetscObject)mat,((PetscObject)pmat)->type_name,&same);CHKERRQ(ierr);
1125     if (!same) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_INCOMP,"Matrices not of same type");
1126   }
1127 
1128   if (!pc->setupcalled) {
1129     scall                  = MAT_INITIAL_MATRIX;
1130     pc->ops->reset         = PCReset_BJacobi_Multiblock;
1131     pc->ops->destroy       = PCDestroy_BJacobi_Multiblock;
1132     pc->ops->apply         = PCApply_BJacobi_Multiblock;
1133     pc->ops->applytranspose= PCApplyTranspose_BJacobi_Multiblock;
1134     pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Multiblock;
1135 
1136     ierr = PetscNewLog(pc,PC_BJacobi_Multiblock,&bjac);CHKERRQ(ierr);
1137     ierr = PetscMalloc(n_local*sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
1138     ierr = PetscLogObjectMemory(pc,sizeof(n_local*sizeof(KSP)));CHKERRQ(ierr);
1139     ierr = PetscMalloc2(n_local,Vec,&bjac->x,n_local,Vec,&bjac->y);CHKERRQ(ierr);
1140     ierr = PetscMalloc(n_local*sizeof(PetscScalar),&bjac->starts);CHKERRQ(ierr);
1141     ierr = PetscLogObjectMemory(pc,sizeof(n_local*sizeof(PetscScalar)));CHKERRQ(ierr);
1142 
1143     jac->data    = (void*)bjac;
1144     ierr = PetscMalloc(n_local*sizeof(IS),&bjac->is);CHKERRQ(ierr);
1145     ierr = PetscLogObjectMemory(pc,sizeof(n_local*sizeof(IS)));CHKERRQ(ierr);
1146 
1147     start = 0;
1148     for (i=0; i<n_local; i++) {
1149       ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
1150       ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
1151       ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
1152       ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
1153       ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
1154       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
1155       ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
1156       ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
1157 
1158       m = jac->l_lens[i];
1159 
1160       /*
1161       The reason we need to generate these vectors is to serve
1162       as the right-hand side and solution vector for the solve on the
1163       block. We do not need to allocate space for the vectors since
1164       that is provided via VecPlaceArray() just before the call to
1165       KSPSolve() on the block.
1166 
1167       */
1168       ierr = VecCreateSeq(PETSC_COMM_SELF,m,&x);CHKERRQ(ierr);
1169       ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&y);CHKERRQ(ierr);
1170       ierr = PetscLogObjectParent(pc,x);CHKERRQ(ierr);
1171       ierr = PetscLogObjectParent(pc,y);CHKERRQ(ierr);
1172       bjac->x[i]      = x;
1173       bjac->y[i]      = y;
1174       bjac->starts[i] = start;
1175       jac->ksp[i]    = ksp;
1176 
1177       ierr = ISCreateStride(PETSC_COMM_SELF,m,start,1,&is);CHKERRQ(ierr);
1178       bjac->is[i] = is;
1179       ierr = PetscLogObjectParent(pc,is);CHKERRQ(ierr);
1180 
1181       start += m;
1182     }
1183   } else {
1184     bjac = (PC_BJacobi_Multiblock*)jac->data;
1185     /*
1186        Destroy the blocks from the previous iteration
1187     */
1188     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
1189       ierr = MatDestroyMatrices(n_local,&bjac->pmat);CHKERRQ(ierr);
1190       if (jac->use_true_local) {
1191         ierr = MatDestroyMatrices(n_local,&bjac->mat);CHKERRQ(ierr);
1192       }
1193       scall = MAT_INITIAL_MATRIX;
1194     }
1195   }
1196 
1197   ierr = MatGetSubMatrices(pmat,n_local,bjac->is,bjac->is,scall,&bjac->pmat);CHKERRQ(ierr);
1198   if (jac->use_true_local) {
1199     ierr = PetscObjectGetOptionsPrefix((PetscObject)mat,&mprefix);CHKERRQ(ierr);
1200     ierr = MatGetSubMatrices(mat,n_local,bjac->is,bjac->is,scall,&bjac->mat);CHKERRQ(ierr);
1201   }
1202   /* Return control to the user so that the submatrices can be modified (e.g., to apply
1203      different boundary conditions for the submatrices than for the global problem) */
1204   ierr = PCModifySubMatrices(pc,n_local,bjac->is,bjac->is,bjac->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
1205 
1206   ierr = PetscObjectGetOptionsPrefix((PetscObject)pmat,&pprefix);CHKERRQ(ierr);
1207   for (i=0; i<n_local; i++) {
1208     ierr = PetscLogObjectParent(pc,bjac->pmat[i]);CHKERRQ(ierr);
1209     ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->pmat[i],pprefix);CHKERRQ(ierr);
1210     if (jac->use_true_local) {
1211       ierr = PetscLogObjectParent(pc,bjac->mat[i]);CHKERRQ(ierr);
1212       ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->mat[i],mprefix);CHKERRQ(ierr);
1213       ierr = KSPSetOperators(jac->ksp[i],bjac->mat[i],bjac->pmat[i],pc->flag);CHKERRQ(ierr);
1214     } else {
1215       ierr = KSPSetOperators(jac->ksp[i],bjac->pmat[i],bjac->pmat[i],pc->flag);CHKERRQ(ierr);
1216     }
1217     if(pc->setfromoptionscalled){
1218       ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr);
1219     }
1220   }
1221   PetscFunctionReturn(0);
1222 }
1223 
1224 /* ---------------------------------------------------------------------------------------------*/
1225 /*
1226       These are for a single block with multiple processes;
1227 */
1228 #undef __FUNCT__
1229 #define __FUNCT__ "PCReset_BJacobi_Multiproc"
1230 static PetscErrorCode PCReset_BJacobi_Multiproc(PC pc)
1231 {
1232   PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
1233   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1234   PetscErrorCode       ierr;
1235 
1236   PetscFunctionBegin;
1237   if (mpjac->ysub){ierr = VecDestroy(mpjac->ysub);CHKERRQ(ierr);}
1238   if (mpjac->xsub){ierr = VecDestroy(mpjac->xsub);CHKERRQ(ierr);}
1239   if (mpjac->submats){ierr = MatDestroy(mpjac->submats);CHKERRQ(ierr);}
1240   if (mpjac->ksp){ierr = KSPReset(mpjac->ksp);CHKERRQ(ierr);}
1241   PetscFunctionReturn(0);
1242 }
1243 
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_BJacobi_Multiproc"
/*
   PCDestroy_BJacobi_Multiproc - Destroys the multiproc data structures:
   the shared sub-KSP, the subcommunicator object, and the PC-level data.

   NOTE(review): unlike PCDestroy_BJacobi_Multiblock this does not invoke
   the corresponding reset routine, so it appears to rely on PCReset having
   already destroyed xsub/ysub/submats -- confirm against the PCDestroy()
   call sequence.
*/
static PetscErrorCode PCDestroy_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;

  PetscFunctionBegin;
  if (mpjac->ksp){ierr = KSPDestroy(mpjac->ksp);CHKERRQ(ierr);}
  if (mpjac->psubcomm){ierr = PetscSubcommDestroy(mpjac->psubcomm);CHKERRQ(ierr);}

  /* mpjac aliases jac->data; freeing pc->data frees jac itself */
  ierr = PetscFree(mpjac);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1260 
1261 #undef __FUNCT__
1262 #define __FUNCT__ "PCApply_BJacobi_Multiproc"
1263 static PetscErrorCode PCApply_BJacobi_Multiproc(PC pc,Vec x,Vec y)
1264 {
1265   PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
1266   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1267   PetscErrorCode       ierr;
1268   PetscScalar          *xarray,*yarray;
1269 
1270   PetscFunctionBegin;
1271   /* place x's and y's local arrays into xsub and ysub */
1272   ierr = VecGetArray(x,&xarray);CHKERRQ(ierr);
1273   ierr = VecGetArray(y,&yarray);CHKERRQ(ierr);
1274   ierr = VecPlaceArray(mpjac->xsub,xarray);CHKERRQ(ierr);
1275   ierr = VecPlaceArray(mpjac->ysub,yarray);CHKERRQ(ierr);
1276 
1277   /* apply preconditioner on each matrix block */
1278   ierr = KSPSolve(mpjac->ksp,mpjac->xsub,mpjac->ysub);CHKERRQ(ierr);
1279 
1280   ierr = VecResetArray(mpjac->xsub);CHKERRQ(ierr);
1281   ierr = VecResetArray(mpjac->ysub);CHKERRQ(ierr);
1282   ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
1283   ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
1284   PetscFunctionReturn(0);
1285 }
1286 
extern PetscErrorCode MatGetMultiProcBlock_MPIAIJ(Mat,MPI_Comm,Mat*);
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_BJacobi_Multiproc"
/*
   PCSetUp_BJacobi_Multiproc - Sets up block Jacobi for the case where each
   of the jac->n blocks spans a subcommunicator (fewer blocks than ranks).
   On first use it builds the subcommunicators, extracts the per-subcomm
   matrix block, and creates one shared sub-KSP plus dummy work vectors
   whose storage is supplied later via VecPlaceArray().

   Currently only a single block per subcommunicator is supported.
*/
static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscInt             m,n;
  MPI_Comm             comm = ((PetscObject)pc)->comm,subcomm=0;
  const char           *prefix;

  PetscFunctionBegin;
  if (jac->n_local > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only a single block in a subcommunicator is supported");
  jac->n_local = 1; /* currently only a single block is supported for a subcommunicator */
  if (!pc->setupcalled) {
    ierr = PetscNewLog(pc,PC_BJacobi_Multiproc,&mpjac);CHKERRQ(ierr);
    jac->data = (void*)mpjac;

    /* initialize datastructure mpjac */
    if (!jac->psubcomm) {
      /* Create default contiguous subcommunicatiors if user does not provide them */
      ierr = PetscSubcommCreate(comm,&jac->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(jac->psubcomm,jac->n);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(jac->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
    }
    mpjac->psubcomm = jac->psubcomm;
    subcomm         = mpjac->psubcomm->comm;

    /* Get matrix blocks of pmat */
    /* NOTE(review): calls the MPIAIJ-specific routine directly, so pmat is
       presumably expected to be MPIAIJ -- confirm against callers */
    ierr = MatGetMultiProcBlock_MPIAIJ(pc->pmat,subcomm,&mpjac->submats);CHKERRQ(ierr);

    /* create a new PC that processors in each subcomm have copy of */
    ierr = KSPCreate(subcomm,&mpjac->ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)mpjac->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,mpjac->ksp);CHKERRQ(ierr);
    ierr = KSPSetOperators(mpjac->ksp,mpjac->submats,mpjac->submats,pc->flag);CHKERRQ(ierr);
    ierr = KSPGetPC(mpjac->ksp,&mpjac->pc);CHKERRQ(ierr);

    /* inherit the outer prefix plus "sub_" so -sub_* options reach the block solver */
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(mpjac->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(mpjac->ksp,"sub_");CHKERRQ(ierr);
    /*
      PetscMPIInt rank,subsize,subrank;
      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
      ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
      ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);

      ierr = MatGetLocalSize(mpjac->submats,&m,PETSC_NULL);CHKERRQ(ierr);
      ierr = MatGetSize(mpjac->submats,&n,PETSC_NULL);CHKERRQ(ierr);
      ierr = PetscSynchronizedPrintf(comm,"[%d], sub-size %d,sub-rank %d\n",rank,subsize,subrank);
      ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
    */

    /* create dummy vectors xsub and ysub */
    /* storage is never allocated here: arrays are lent by VecPlaceArray()
       in PCApply_BJacobi_Multiproc(); xsub is sized by the local column
       count n, ysub by the local row count m */
    ierr = MatGetLocalSize(mpjac->submats,&m,&n);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(subcomm,n,PETSC_DECIDE,PETSC_NULL,&mpjac->xsub);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(subcomm,m,PETSC_DECIDE,PETSC_NULL,&mpjac->ysub);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,mpjac->xsub);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,mpjac->ysub);CHKERRQ(ierr);

    /* install the multiproc implementations of the PC operations */
    pc->ops->reset   = PCReset_BJacobi_Multiproc;
    pc->ops->destroy = PCDestroy_BJacobi_Multiproc;
    pc->ops->apply   = PCApply_BJacobi_Multiproc;
  }

  /* on repeated setup with a changed nonzero structure, rebuild the block */
  if (pc->setupcalled && pc->flag == DIFFERENT_NONZERO_PATTERN) {
    /* destroy old matrix blocks, then get new matrix blocks */
    if (mpjac->submats) {
      ierr = MatDestroy(mpjac->submats);CHKERRQ(ierr);
      subcomm = mpjac->psubcomm->comm;
      ierr = MatGetMultiProcBlock_MPIAIJ(pc->pmat,subcomm,&mpjac->submats);CHKERRQ(ierr);
      ierr = KSPSetOperators(mpjac->ksp,mpjac->submats,mpjac->submats,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
    }
  }

  if (pc->setfromoptionscalled){
    ierr = KSPSetFromOptions(mpjac->ksp);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1369