xref: /petsc/src/ksp/pc/impls/bjacobi/bjacobi.c (revision 0e9bae810fdaeb60e2713eaa8ddb89f42e079fd1)
1 
2 /*
3    Defines a block Jacobi preconditioner.
4 */
5 #include <petsc-private/pcimpl.h>              /*I "petscpc.h" I*/
6 #include <../src/ksp/pc/impls/bjacobi/bjacobi.h>
7 
8 static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC,Mat,Mat);
9 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC,Mat,Mat);
10 static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC);
11 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_BJacobi"
/*
   PCSetUp_BJacobi - Determines the block layout for this process and then
   dispatches to the appropriate specialized setup routine:
     - multiproc:   fewer blocks than processes (a block spans several ranks),
     - singleblock: exactly one block on this process,
     - multiblock:  several blocks on this process.

   The layout is taken, in order of precedence, from
     - jac->n_local / jac->l_lens  (PCBJacobiSetLocalBlocks()),
     - jac->n / jac->g_lens        (PCBJacobiSetTotalBlocks()),
     - the default of one block per process.
   A value of -1 in jac->n / jac->n_local means "not set by the user".
*/
static PetscErrorCode PCSetUp_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  Mat            mat = pc->mat,pmat = pc->pmat;
  PetscErrorCode ierr,(*f)(Mat,Mat*);
  PetscInt       N,M,start,i,sum,end;
  PetscInt       bs,i_start=-1,i_end=-1;
  PetscMPIInt    rank,size;
  const char     *pprefix,*mprefix;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  /* M,N are the LOCAL dimensions of the preconditioning matrix */
  ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);

  /* Fewer blocks than processes: blocks are shared among subcommunicators */
  if (jac->n > 0 && jac->n < size){
    ierr = PCSetUp_BJacobi_Multiproc(pc);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* --------------------------------------------------------------------------
      Determines the number of blocks assigned to each processor
  -----------------------------------------------------------------------------*/

  /*   local block count  given */
  if (jac->n_local > 0 && jac->n < 0) {
    /* derive the global count as the sum of the local counts */
    ierr = MPI_Allreduce(&jac->n_local,&jac->n,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
    if (jac->l_lens) { /* check that user set these correctly */
      sum = 0;
      for (i=0; i<jac->n_local; i++) {
        /* each block length must be a multiple of the matrix block size */
        if (jac->l_lens[i]/bs*bs !=jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
        sum += jac->l_lens[i];
      }
      if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local lens set incorrectly");
    } else {
      /* no lengths given: split the local rows as evenly as possible,
         keeping every block length a multiple of bs */
      ierr = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
      }
    }
  } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */
    /* global blocks given: determine which ones are local */
    if (jac->g_lens) {
      /* check that g_lens has valid entries */
      for (i=0; i<jac->n; i++) {
        if (!jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Zero block not allowed");
        if (jac->g_lens[i]/bs*bs != jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
      }
      if (size == 1) {
        jac->n_local = jac->n;
        ierr         = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
        ierr         = PetscMemcpy(jac->l_lens,jac->g_lens,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
        /* check that user set these correctly */
        sum = 0;
        for (i=0; i<jac->n_local; i++) sum += jac->l_lens[i];
        if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Global lens set incorrectly");
      } else {
        ierr = MatGetOwnershipRange(pc->pmat,&start,&end);CHKERRQ(ierr);
        /* loop over blocks determining first one owned by me; a block
           boundary must coincide exactly with the ownership boundary */
        sum = 0;
        for (i=0; i<jac->n+1; i++) {
          if (sum == start) { i_start = i; goto start_1;}
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
 start_1:
        /* continue scanning for the block boundary matching the end of my rows */
        for (i=i_start; i<jac->n+1; i++) {
          if (sum == end) { i_end = i; goto end_1; }
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
 end_1:
        jac->n_local = i_end - i_start;
        ierr         = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
        ierr         = PetscMemcpy(jac->l_lens,jac->g_lens+i_start,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
      }
    } else { /* no global blocks given, determine then using default layout */
      /* distribute the jac->n blocks round-robin over the processes */
      jac->n_local = jac->n/size + ((jac->n % size) > rank);
      ierr         = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        jac->l_lens[i] = ((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i))*bs;
        if (!jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Too many blocks given");
      }
    }
  } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */
    /* default: one block per process covering all local rows */
    jac->n         = size;
    jac->n_local   = 1;
    ierr           = PetscMalloc(sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
    jac->l_lens[0] = M;
  } else { /* jac->n > 0 && jac->n_local > 0 */
    if (!jac->l_lens) {
      ierr = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
      }
    }
  }
  if (jac->n_local < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of blocks is less than number of processors");

  /* -------------------------
      Determines mat and pmat
  ---------------------------*/
  /* f is non-NULL when the matrix type can extract its diagonal block */
  ierr = PetscObjectQueryFunction((PetscObject)pc->mat,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (!f && size == 1) {
    /* sequential matrix without diagonal-block support: use it directly */
    mat  = pc->mat;
    pmat = pc->pmat;
  } else {
    if (jac->use_true_local) {
      /* use block from true matrix, not preconditioner matrix for local MatMult() */
      ierr = MatGetDiagonalBlock(pc->mat,&mat);CHKERRQ(ierr);
      /* make submatrix have same prefix as entire matrix */
      ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->mat,&mprefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)mat,mprefix);CHKERRQ(ierr);
    }
    if (pc->pmat != pc->mat || !jac->use_true_local) {
      ierr = MatGetDiagonalBlock(pc->pmat,&pmat);CHKERRQ(ierr);
      /* make submatrix have same prefix as entire matrix */
      ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)pmat,pprefix);CHKERRQ(ierr);
    } else {
      pmat = mat;
    }
  }

  /* ------
     Setup code depends on the number of blocks
  */
  if (jac->n_local == 1) {
    ierr = PCSetUp_BJacobi_Singleblock(pc,mat,pmat);CHKERRQ(ierr);
  } else {
    ierr = PCSetUp_BJacobi_Multiblock(pc,mat,pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
149 
150 /* Default destroy, if it has never been setup */
151 #undef __FUNCT__
152 #define __FUNCT__ "PCDestroy_BJacobi"
153 static PetscErrorCode PCDestroy_BJacobi(PC pc)
154 {
155   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
156   PetscErrorCode ierr;
157 
158   PetscFunctionBegin;
159   ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
160   ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
161   ierr = PetscFree(pc->data);CHKERRQ(ierr);
162   PetscFunctionReturn(0);
163 }
164 
165 #undef __FUNCT__
166 #define __FUNCT__ "PCSetFromOptions_BJacobi"
167 
168 static PetscErrorCode PCSetFromOptions_BJacobi(PC pc)
169 {
170   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
171   PetscErrorCode ierr;
172   PetscInt       blocks;
173   PetscBool      flg;
174 
175   PetscFunctionBegin;
176   ierr = PetscOptionsHead("Block Jacobi options");CHKERRQ(ierr);
177     ierr = PetscOptionsInt("-pc_bjacobi_blocks","Total number of blocks","PCBJacobiSetTotalBlocks",jac->n,&blocks,&flg);CHKERRQ(ierr);
178     if (flg) {
179       ierr = PCBJacobiSetTotalBlocks(pc,blocks,PETSC_NULL);CHKERRQ(ierr);
180     }
181     flg  = PETSC_FALSE;
182     ierr = PetscOptionsBool("-pc_bjacobi_truelocal","Use the true matrix, not preconditioner matrix to define matrix vector product in sub-problems","PCBJacobiSetUseTrueLocal",flg,&flg,PETSC_NULL);CHKERRQ(ierr);
183     if (flg) {
184       ierr = PCBJacobiSetUseTrueLocal(pc);CHKERRQ(ierr);
185     }
186   ierr = PetscOptionsTail();CHKERRQ(ierr);
187   PetscFunctionReturn(0);
188 }
189 
#undef __FUNCT__
#define __FUNCT__ "PCView_BJacobi"
/*
   PCView_BJacobi - Prints the block layout and, depending on whether all
   blocks share the same solver settings, either one representative sub-KSP
   or every local sub-KSP.  Supports ASCII and string viewers only.
*/
static PetscErrorCode PCView_BJacobi(PC pc,PetscViewer viewer)
{
  PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscMPIInt          rank;
  PetscInt             i;
  PetscBool            iascii,isstring;
  PetscViewer          sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (jac->use_true_local) {
      ierr = PetscViewerASCIIPrintf(viewer,"  block Jacobi: using true local matrix, number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer,"  block Jacobi: number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
    if (jac->same_local_solves) {
      /* all blocks use identical solver settings: show just one KSP */
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
      if (jac->ksp && !jac->psubcomm) {
        /* singleton viewer so only rank 0 prints the representative KSP */
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (!rank){
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      } else if (jac->psubcomm && !jac->psubcomm->color){
        /* multiproc case: the first subcommunicator prints its shared KSP */
        ierr = PetscViewerASCIIGetStdout(mpjac->psubcomm->comm,&sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(*(jac->ksp),sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
    } else {
      /* solvers may differ per block: print every local KSP on every rank */
      PetscInt n_global;
      /* every rank must loop the same number of times for the collective
         singleton calls, so use the maximum local block count */
      ierr = MPI_Allreduce(&jac->n_local,&n_global,1,MPIU_INT,MPI_MAX,((PetscObject)pc)->comm);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D, first local block number = %D\n",
                   rank,jac->n_local,jac->first_local);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      for (i=0; i<n_global; i++) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (i < jac->n_local) {
          ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr);
          ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blks=%D",jac->n);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for block Jacobi",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}
258 
259 /* -------------------------------------------------------------------------------------*/
260 
261 EXTERN_C_BEGIN
262 #undef __FUNCT__
263 #define __FUNCT__ "PCBJacobiSetUseTrueLocal_BJacobi"
264 PetscErrorCode  PCBJacobiSetUseTrueLocal_BJacobi(PC pc)
265 {
266   PC_BJacobi   *jac;
267 
268   PetscFunctionBegin;
269   jac                 = (PC_BJacobi*)pc->data;
270   jac->use_true_local = PETSC_TRUE;
271   PetscFunctionReturn(0);
272 }
273 EXTERN_C_END
274 
275 EXTERN_C_BEGIN
276 #undef __FUNCT__
277 #define __FUNCT__ "PCBJacobiGetSubKSP_BJacobi"
278 PetscErrorCode  PCBJacobiGetSubKSP_BJacobi(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
279 {
280   PC_BJacobi   *jac = (PC_BJacobi*)pc->data;;
281 
282   PetscFunctionBegin;
283   if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call KSPSetUp() or PCSetUp() first");
284 
285   if (n_local)     *n_local     = jac->n_local;
286   if (first_local) *first_local = jac->first_local;
287   *ksp                          = jac->ksp;
288   jac->same_local_solves        = PETSC_FALSE; /* Assume that local solves are now different;
289                                                   not necessarily true though!  This flag is
290                                                   used only for PCView_BJacobi() */
291   PetscFunctionReturn(0);
292 }
293 EXTERN_C_END
294 
295 EXTERN_C_BEGIN
296 #undef __FUNCT__
297 #define __FUNCT__ "PCBJacobiSetTotalBlocks_BJacobi"
298 PetscErrorCode  PCBJacobiSetTotalBlocks_BJacobi(PC pc,PetscInt blocks,PetscInt *lens)
299 {
300   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
301   PetscErrorCode ierr;
302 
303   PetscFunctionBegin;
304 
305   if (pc->setupcalled > 0 && jac->n!=blocks) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Cannot alter number of blocks after PCSetUp()/KSPSetUp() has been called");
306   jac->n = blocks;
307   if (!lens) {
308     jac->g_lens = 0;
309   } else {
310     ierr = PetscMalloc(blocks*sizeof(PetscInt),&jac->g_lens);CHKERRQ(ierr);
311     ierr = PetscLogObjectMemory(pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
312     ierr = PetscMemcpy(jac->g_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
313   }
314   PetscFunctionReturn(0);
315 }
316 EXTERN_C_END
317 
318 EXTERN_C_BEGIN
319 #undef __FUNCT__
320 #define __FUNCT__ "PCBJacobiGetTotalBlocks_BJacobi"
321 PetscErrorCode  PCBJacobiGetTotalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
322 {
323   PC_BJacobi *jac = (PC_BJacobi*) pc->data;
324 
325   PetscFunctionBegin;
326   *blocks = jac->n;
327   if (lens) *lens = jac->g_lens;
328   PetscFunctionReturn(0);
329 }
330 EXTERN_C_END
331 
332 EXTERN_C_BEGIN
333 #undef __FUNCT__
334 #define __FUNCT__ "PCBJacobiSetLocalBlocks_BJacobi"
335 PetscErrorCode  PCBJacobiSetLocalBlocks_BJacobi(PC pc,PetscInt blocks,const PetscInt lens[])
336 {
337   PC_BJacobi     *jac;
338   PetscErrorCode ierr;
339 
340   PetscFunctionBegin;
341   jac = (PC_BJacobi*)pc->data;
342 
343   jac->n_local = blocks;
344   if (!lens) {
345     jac->l_lens = 0;
346   } else {
347     ierr = PetscMalloc(blocks*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
348     ierr = PetscLogObjectMemory(pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
349     ierr = PetscMemcpy(jac->l_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
350   }
351   PetscFunctionReturn(0);
352 }
353 EXTERN_C_END
354 
355 EXTERN_C_BEGIN
356 #undef __FUNCT__
357 #define __FUNCT__ "PCBJacobiGetLocalBlocks_BJacobi"
358 PetscErrorCode  PCBJacobiGetLocalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
359 {
360   PC_BJacobi *jac = (PC_BJacobi*) pc->data;
361 
362   PetscFunctionBegin;
363   *blocks = jac->n_local;
364   if (lens) *lens = jac->l_lens;
365   PetscFunctionReturn(0);
366 }
367 EXTERN_C_END
368 
369 /* -------------------------------------------------------------------------------------*/
370 
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetUseTrueLocal"
/*@
   PCBJacobiSetUseTrueLocal - Sets a flag to indicate that the block
   problem is associated with the linear system matrix instead of the
   default (where it is associated with the preconditioning matrix).
   That is, if the local system is solved iteratively then it iterates
   on the block from the matrix using the block from the preconditioner
   as the preconditioner for the local block.

   Logically Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Options Database Key:
.  -pc_bjacobi_truelocal - Activates PCBJacobiSetUseTrueLocal()

   Notes:
   For the common case in which the preconditioning and linear
   system matrices are identical, this routine is unnecessary.

   Level: intermediate

.keywords:  block, Jacobi, set, true, local, flag

.seealso: PCSetOperators(), PCBJacobiSetLocalBlocks()
@*/
PetscErrorCode  PCBJacobiSetUseTrueLocal(PC pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCBJacobiSetUseTrueLocal_C",(PC),(pc));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
408 
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetSubKSP"
/*@C
   PCBJacobiGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor, or PETSC_NULL
.  first_local - the global number of the first block on this processor, or PETSC_NULL
-  ksp - the array of KSP contexts

   Notes:
   After PCBJacobiGetSubKSP() the array of KSP contexts is not to be freed.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() or PCSetUp() before calling PCBJacobiGetSubKSP().

   Fortran Usage: You must pass in a KSP array that is large enough to contain all the local KSPs.
      You can call PCBJacobiGetSubKSP(pc,nlocal,firstlocal,PETSC_NULL_OBJECT,ierr) to determine how large the
      KSP array must be.

   Level: advanced

.keywords:  block, Jacobi, get, sub, KSP, context

.seealso: PCBJacobiSetTotalBlocks(), PCBJacobiSetLocalBlocks()
@*/
PetscErrorCode  PCBJacobiGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCBJacobiGetSubKSP_C",(PC,PetscInt *,PetscInt *,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
452 
453 #undef __FUNCT__
454 #define __FUNCT__ "PCBJacobiSetTotalBlocks"
455 /*@
456    PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block
457    Jacobi preconditioner.
458 
459    Collective on PC
460 
461    Input Parameters:
462 +  pc - the preconditioner context
463 .  blocks - the number of blocks
464 -  lens - [optional] integer array containing the size of each block
465 
466    Options Database Key:
467 .  -pc_bjacobi_blocks <blocks> - Sets the number of global blocks
468 
469    Notes:
470    Currently only a limited number of blocking configurations are supported.
471    All processors sharing the PC must call this routine with the same data.
472 
473    Level: intermediate
474 
475 .keywords:  set, number, Jacobi, global, total, blocks
476 
477 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiSetLocalBlocks()
478 @*/
479 PetscErrorCode  PCBJacobiSetTotalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
480 {
481   PetscErrorCode ierr;
482 
483   PetscFunctionBegin;
484   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
485   if (blocks <= 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have positive blocks");
486   ierr = PetscTryMethod(pc,"PCBJacobiSetTotalBlocks_C",(PC,PetscInt,const PetscInt[]),(pc,blocks,lens));CHKERRQ(ierr);
487   PetscFunctionReturn(0);
488 }
489 
490 #undef __FUNCT__
491 #define __FUNCT__ "PCBJacobiGetTotalBlocks"
492 /*@C
493    PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block
494    Jacobi preconditioner.
495 
496    Not Collective
497 
498    Input Parameter:
499 .  pc - the preconditioner context
500 
501    Output parameters:
502 +  blocks - the number of blocks
503 -  lens - integer array containing the size of each block
504 
505    Level: intermediate
506 
507 .keywords:  get, number, Jacobi, global, total, blocks
508 
509 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiGetLocalBlocks()
510 @*/
511 PetscErrorCode  PCBJacobiGetTotalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
512 {
513   PetscErrorCode ierr;
514 
515   PetscFunctionBegin;
516   PetscValidHeaderSpecific(pc, PC_CLASSID,1);
517   PetscValidIntPointer(blocks,2);
518   ierr = PetscUseMethod(pc,"PCBJacobiGetTotalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
519   PetscFunctionReturn(0);
520 }
521 
522 #undef __FUNCT__
523 #define __FUNCT__ "PCBJacobiSetLocalBlocks"
524 /*@
525    PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block
526    Jacobi preconditioner.
527 
528    Not Collective
529 
530    Input Parameters:
531 +  pc - the preconditioner context
532 .  blocks - the number of blocks
533 -  lens - [optional] integer array containing size of each block
534 
535    Note:
536    Currently only a limited number of blocking configurations are supported.
537 
538    Level: intermediate
539 
540 .keywords: PC, set, number, Jacobi, local, blocks
541 
542 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiSetTotalBlocks()
543 @*/
544 PetscErrorCode  PCBJacobiSetLocalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
545 {
546   PetscErrorCode ierr;
547 
548   PetscFunctionBegin;
549   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
550   if (blocks < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have nonegative blocks");
551   ierr = PetscTryMethod(pc,"PCBJacobiSetLocalBlocks_C",(PC,PetscInt,const PetscInt []),(pc,blocks,lens));CHKERRQ(ierr);
552   PetscFunctionReturn(0);
553 }
554 
555 #undef __FUNCT__
556 #define __FUNCT__ "PCBJacobiGetLocalBlocks"
557 /*@C
558    PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block
559    Jacobi preconditioner.
560 
561    Not Collective
562 
563    Input Parameters:
564 +  pc - the preconditioner context
565 .  blocks - the number of blocks
566 -  lens - [optional] integer array containing size of each block
567 
568    Note:
569    Currently only a limited number of blocking configurations are supported.
570 
571    Level: intermediate
572 
573 .keywords: PC, get, number, Jacobi, local, blocks
574 
575 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiGetTotalBlocks()
576 @*/
577 PetscErrorCode  PCBJacobiGetLocalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
578 {
579   PetscErrorCode ierr;
580 
581   PetscFunctionBegin;
582   PetscValidHeaderSpecific(pc, PC_CLASSID,1);
583   PetscValidIntPointer(blocks,2);
584   ierr = PetscUseMethod(pc,"PCBJacobiGetLocalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
585   PetscFunctionReturn(0);
586 }
587 
588 /* -----------------------------------------------------------------------------------*/
589 
590 /*MC
591    PCBJACOBI - Use block Jacobi preconditioning, each block is (approximately) solved with
592            its own KSP object.
593 
594    Options Database Keys:
595 .  -pc_bjacobi_truelocal - Activates PCBJacobiSetUseTrueLocal()
596 
597    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
598      than one processor. Defaults to one block per processor.
599 
     To set options on the solvers for all blocks, append -sub_ to all the KSP and PC
        options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

     To set options on the solvers separately for each block, call PCBJacobiGetSubKSP()
         and set the options directly on the resulting KSP objects (you can access their PC
         contexts with KSPGetPC())
606 
607    Level: beginner
608 
609    Concepts: block Jacobi
610 
611    Developer Notes: This preconditioner does not currently work with CUDA/CUSP for a couple of reasons.
612        (1) It creates seq vectors as work vectors that should be cusp
613        (2) The use of VecPlaceArray() is not handled properly by CUSP (that is it will not know where
614            the ownership of the vector is so may use wrong values) even if it did know the ownership
615            it may induce extra copy ups and downs. Satish suggests a VecTransplantArray() to handle two
616            vectors sharing the same pointer and handling the CUSP side as well instead of VecGetArray()/VecPlaceArray().
617 
618 
619 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
620            PCASM, PCBJacobiSetUseTrueLocal(), PCBJacobiGetSubKSP(), PCBJacobiSetTotalBlocks(),
621            PCBJacobiSetLocalBlocks(), PCSetModifySubmatrices()
622 M*/
623 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_BJacobi"
/*
   PCCreate_BJacobi - Creates the block-Jacobi context, installs the
   function table, and registers the PCBJacobi* method implementations.
   The actual sub-solvers are created later in PCSetUp_BJacobi().
*/
PetscErrorCode  PCCreate_BJacobi(PC pc)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PC_BJacobi     *jac;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_BJacobi,&jac);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
  /* apply/applytranspose/applyrichardson are filled in during setup,
     once the single/multi-block variant is known */
  pc->ops->apply              = 0;
  pc->ops->applytranspose     = 0;
  pc->ops->setup              = PCSetUp_BJacobi;
  pc->ops->destroy            = PCDestroy_BJacobi;
  pc->ops->setfromoptions     = PCSetFromOptions_BJacobi;
  pc->ops->view               = PCView_BJacobi;
  pc->ops->applyrichardson    = 0;

  pc->data               = (void*)jac;
  jac->n                 = -1;      /* -1 == block counts not yet chosen */
  jac->n_local           = -1;
  jac->first_local       = rank;
  jac->ksp               = 0;
  jac->use_true_local    = PETSC_FALSE;
  jac->same_local_solves = PETSC_TRUE;
  jac->g_lens            = 0;
  jac->l_lens            = 0;
  jac->psubcomm          = 0;

  /* register the implementations behind the public PCBJacobi* interface */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetUseTrueLocal_C",
                    "PCBJacobiSetUseTrueLocal_BJacobi",
                    PCBJacobiSetUseTrueLocal_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetSubKSP_C","PCBJacobiGetSubKSP_BJacobi",
                    PCBJacobiGetSubKSP_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetTotalBlocks_C","PCBJacobiSetTotalBlocks_BJacobi",
                    PCBJacobiSetTotalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetTotalBlocks_C","PCBJacobiGetTotalBlocks_BJacobi",
                    PCBJacobiGetTotalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetLocalBlocks_C","PCBJacobiSetLocalBlocks_BJacobi",
                    PCBJacobiSetLocalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetLocalBlocks_C","PCBJacobiGetLocalBlocks_BJacobi",
                    PCBJacobiGetLocalBlocks_BJacobi);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END
672 
673 /* --------------------------------------------------------------------------------------------*/
674 /*
675         These are for a single block per processor; works for AIJ, BAIJ; Seq and MPI
676 */
677 #undef __FUNCT__
678 #define __FUNCT__ "PCReset_BJacobi_Singleblock"
679 PetscErrorCode PCReset_BJacobi_Singleblock(PC pc)
680 {
681   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
682   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
683   PetscErrorCode         ierr;
684 
685   PetscFunctionBegin;
686   ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);
687   ierr = VecDestroy(&bjac->x);CHKERRQ(ierr);
688   ierr = VecDestroy(&bjac->y);CHKERRQ(ierr);
689   PetscFunctionReturn(0);
690 }
691 
692 #undef __FUNCT__
693 #define __FUNCT__ "PCDestroy_BJacobi_Singleblock"
694 PetscErrorCode PCDestroy_BJacobi_Singleblock(PC pc)
695 {
696   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
697   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
698   PetscErrorCode         ierr;
699 
700   PetscFunctionBegin;
701   ierr = PCReset_BJacobi_Singleblock(pc);CHKERRQ(ierr);
702   ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr);
703   ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
704   ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
705   ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
706   ierr = PetscFree(bjac);CHKERRQ(ierr);
707   ierr = PetscFree(pc->data);CHKERRQ(ierr);
708   PetscFunctionReturn(0);
709 }
710 
711 #undef __FUNCT__
712 #define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Singleblock"
713 PetscErrorCode PCSetUpOnBlocks_BJacobi_Singleblock(PC pc)
714 {
715   PetscErrorCode ierr;
716   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
717 
718   PetscFunctionBegin;
719   ierr = KSPSetUp(jac->ksp[0]);CHKERRQ(ierr);
720   PetscFunctionReturn(0);
721 }
722 
#undef __FUNCT__
#define __FUNCT__ "PCApply_BJacobi_Singleblock"
/*
   PCApply_BJacobi_Singleblock - Applies the preconditioner (y = B^{-1} x)
   by solving the single local block with the inner KSP.  The parallel
   vectors' arrays are temporarily placed into the sequential work vectors
   bjac->x/bjac->y to avoid copies.
*/
PetscErrorCode PCApply_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *x_array,*y_array;
  PetscFunctionBegin;
  /*
      The VecPlaceArray() is to avoid having to copy the
    y vector into the bjac->x vector. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
  ierr = KSPSolve(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
  /* detach the borrowed arrays before handing them back to x and y */
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
749 
#undef __FUNCT__
#define __FUNCT__ "PCApplySymmetricLeft_BJacobi_Singleblock"
/*
   PCApplySymmetricLeft_BJacobi_Singleblock - Applies only the symmetric
   left factor of the inner PC to the single local block, bypassing the
   inner KSP entirely.
*/
PetscErrorCode PCApplySymmetricLeft_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *x_array,*y_array;
  PC                     subpc;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() is to avoid having to copy the
    y vector into the bjac->x vector. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
  /* apply the symmetric left portion of the inner PC operator */
  /* note this by-passes the inner KSP and its options completely */
  ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
  ierr = PCApplySymmetricLeft(subpc,bjac->x,bjac->y);CHKERRQ(ierr);
  /* detach the borrowed arrays before handing them back to x and y */
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
781 
782 #undef __FUNCT__
783 #define __FUNCT__ "PCApplySymmetricRight_BJacobi_Singleblock"
784 PetscErrorCode PCApplySymmetricRight_BJacobi_Singleblock(PC pc,Vec x,Vec y)
785 {
786   PetscErrorCode         ierr;
787   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
788   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
789   PetscScalar            *x_array,*y_array;
790   PC                     subpc;
791 
792   PetscFunctionBegin;
793   /*
794       The VecPlaceArray() is to avoid having to copy the
795     y vector into the bjac->x vector. The reason for
796     the bjac->x vector is that we need a sequential vector
797     for the sequential solve.
798   */
799   ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
800   ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
801   ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
802   ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
803 
804   /* apply the symmetric right portion of the inner PC operator */
805   /* note this by-passes the inner KSP and its options completely */
806 
807   ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
808   ierr = PCApplySymmetricRight(subpc,bjac->x,bjac->y);CHKERRQ(ierr);
809 
810   ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
811   ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
812   PetscFunctionReturn(0);
813 }
814 
815 #undef __FUNCT__
816 #define __FUNCT__ "PCApplyTranspose_BJacobi_Singleblock"
817 PetscErrorCode PCApplyTranspose_BJacobi_Singleblock(PC pc,Vec x,Vec y)
818 {
819   PetscErrorCode         ierr;
820   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
821   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
822   PetscScalar            *x_array,*y_array;
823 
824   PetscFunctionBegin;
825   /*
826       The VecPlaceArray() is to avoid having to copy the
827     y vector into the bjac->x vector. The reason for
828     the bjac->x vector is that we need a sequential vector
829     for the sequential solve.
830   */
831   ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
832   ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
833   ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
834   ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
835   ierr = KSPSolveTranspose(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
836   ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
837   ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
838   ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
839   ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
840   PetscFunctionReturn(0);
841 }
842 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_BJacobi_Singleblock"
/*
   PCSetUp_BJacobi_Singleblock - Sets up block Jacobi for the case of exactly
   one block per process.  On first setup it creates the single sequential
   KSP, installs the singleblock function table, and builds the wrapper
   vectors used to pass the local pieces of the parallel vectors into the
   sequential solve; on subsequent setups it only refreshes the operators.
*/
static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC pc,Mat mat,Mat pmat)
{
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode         ierr;
  PetscInt               m;                       /* local (block) size of pmat */
  KSP                    ksp;
  PC_BJacobi_Singleblock *bjac;
  PetscBool              wasSetup = PETSC_TRUE;   /* set FALSE only when the KSP is created here */

  PetscFunctionBegin;

  if (!pc->setupcalled) {
    const char *prefix;

    if (!jac->ksp) {
      /* first time through: create the sequential solver for the block */
      wasSetup = PETSC_FALSE;
      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      /* inner solver options are selected with the "-<pc_prefix>sub_" prefix */
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);

      /* install the singleblock variants of the PC operations */
      pc->ops->reset               = PCReset_BJacobi_Singleblock;
      pc->ops->destroy             = PCDestroy_BJacobi_Singleblock;
      pc->ops->apply               = PCApply_BJacobi_Singleblock;
      pc->ops->applysymmetricleft  = PCApplySymmetricLeft_BJacobi_Singleblock;
      pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock;
      pc->ops->applytranspose      = PCApplyTranspose_BJacobi_Singleblock;
      pc->ops->setuponblocks       = PCSetUpOnBlocks_BJacobi_Singleblock;

      /* jac->ksp is always an array (length 1 here) to match the multiblock code */
      ierr = PetscMalloc(sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
      jac->ksp[0] = ksp;

      ierr = PetscNewLog(pc,PC_BJacobi_Singleblock,&bjac);CHKERRQ(ierr);
      jac->data    = (void*)bjac;
    } else {
      /* KSP already exists (e.g. after PCReset); reuse it */
      ksp  = jac->ksp[0];
      bjac = (PC_BJacobi_Singleblock *)jac->data;
    }

    /*
      The reason we need to generate these vectors is to serve
      as the right-hand side and solution vector for the solve on the
      block. We do not need to allocate space for the vectors since
      that is provided via VecPlaceArray() just before the call to
      KSPSolve() on the block.
    */
    ierr = MatGetSize(pmat,&m,&m);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,PETSC_NULL,&bjac->x);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,PETSC_NULL,&bjac->y);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,bjac->x);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,bjac->y);CHKERRQ(ierr);
  } else {
    /* PC was fully set up before: everything already exists */
    ksp = jac->ksp[0];
    bjac = (PC_BJacobi_Singleblock *)jac->data;
  }
  /* with one block per process the local operator is the matrix itself */
  if (jac->use_true_local) {
    ierr = KSPSetOperators(ksp,mat,pmat,pc->flag);CHKERRQ(ierr);
  }  else {
    ierr = KSPSetOperators(ksp,pmat,pmat,pc->flag);CHKERRQ(ierr);
  }
  /* apply command-line options exactly once, after the KSP is first created */
  if (!wasSetup && pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
913 
914 /* ---------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "PCReset_BJacobi_Multiblock"
/*
   PCReset_BJacobi_Multiblock - Releases the per-block submatrices, work
   vectors and index sets, and resets (but does not destroy) the per-block
   KSP solvers, returning the PC to its pre-setup state.  Guards on bjac
   allow this to be called before the first setup completed.
*/
PetscErrorCode PCReset_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscErrorCode        ierr;
  PetscInt              i;

  PetscFunctionBegin;
  /* destroy the extracted diagonal submatrices; bjac->mat exists only when
     the true (non-preconditioner) matrix was also split into blocks */
  if (bjac && bjac->pmat) {
    ierr = MatDestroyMatrices(jac->n_local,&bjac->pmat);CHKERRQ(ierr);
    if (jac->use_true_local) {
      ierr = MatDestroyMatrices(jac->n_local,&bjac->mat);CHKERRQ(ierr);
    }
  }

  for (i=0; i<jac->n_local; i++) {
    ierr = KSPReset(jac->ksp[i]);CHKERRQ(ierr);
    /* work vectors and index sets are created together, so bjac->x being
       non-NULL implies y and is entries exist as well */
    if (bjac && bjac->x) {
      ierr = VecDestroy(&bjac->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&bjac->y[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&bjac->is[i]);CHKERRQ(ierr);
    }
  }
  /* user-provided block-length arrays are regenerated on the next setup */
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
944 
945 #undef __FUNCT__
946 #define __FUNCT__ "PCDestroy_BJacobi_Multiblock"
947 PetscErrorCode PCDestroy_BJacobi_Multiblock(PC pc)
948 {
949   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
950   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
951   PetscErrorCode        ierr;
952   PetscInt              i;
953 
954   PetscFunctionBegin;
955   ierr = PCReset_BJacobi_Multiblock(pc);CHKERRQ(ierr);
956   if (bjac) {
957     ierr = PetscFree2(bjac->x,bjac->y);CHKERRQ(ierr);
958     ierr = PetscFree(bjac->starts);CHKERRQ(ierr);
959     ierr = PetscFree(bjac->is);CHKERRQ(ierr);
960   }
961   ierr = PetscFree(jac->data);CHKERRQ(ierr);
962   for (i=0; i<jac->n_local; i++) {
963     ierr = KSPDestroy(&jac->ksp[i]);CHKERRQ(ierr);
964   }
965   ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
966   ierr = PetscFree(pc->data);CHKERRQ(ierr);
967   PetscFunctionReturn(0);
968 }
969 
970 #undef __FUNCT__
971 #define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Multiblock"
972 PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiblock(PC pc)
973 {
974   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
975   PetscErrorCode ierr;
976   PetscInt       i,n_local = jac->n_local;
977 
978   PetscFunctionBegin;
979   for (i=0; i<n_local; i++) {
980     ierr = KSPSetUp(jac->ksp[i]);CHKERRQ(ierr);
981   }
982   PetscFunctionReturn(0);
983 }
984 
985 /*
986       Preconditioner for block Jacobi
987 */
988 #undef __FUNCT__
989 #define __FUNCT__ "PCApply_BJacobi_Multiblock"
990 PetscErrorCode PCApply_BJacobi_Multiblock(PC pc,Vec x,Vec y)
991 {
992   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
993   PetscErrorCode        ierr;
994   PetscInt              i,n_local = jac->n_local;
995   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
996   PetscScalar           *xin,*yin;
997 
998   PetscFunctionBegin;
999   ierr = VecGetArray(x,&xin);CHKERRQ(ierr);
1000   ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
1001   for (i=0; i<n_local; i++) {
1002     /*
1003        To avoid copying the subvector from x into a workspace we instead
1004        make the workspace vector array point to the subpart of the array of
1005        the global vector.
1006     */
1007     ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
1008     ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);
1009 
1010     ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
1011     ierr = KSPSolve(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
1012     ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
1013 
1014     ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
1015     ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
1016   }
1017   ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr);
1018   ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
1019   PetscFunctionReturn(0);
1020 }
1021 
1022 /*
1023       Preconditioner for block Jacobi
1024 */
1025 #undef __FUNCT__
1026 #define __FUNCT__ "PCApplyTranspose_BJacobi_Multiblock"
1027 PetscErrorCode PCApplyTranspose_BJacobi_Multiblock(PC pc,Vec x,Vec y)
1028 {
1029   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
1030   PetscErrorCode        ierr;
1031   PetscInt              i,n_local = jac->n_local;
1032   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
1033   PetscScalar           *xin,*yin;
1034 
1035   PetscFunctionBegin;
1036   ierr = VecGetArray(x,&xin);CHKERRQ(ierr);
1037   ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
1038   for (i=0; i<n_local; i++) {
1039     /*
1040        To avoid copying the subvector from x into a workspace we instead
1041        make the workspace vector array point to the subpart of the array of
1042        the global vector.
1043     */
1044     ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
1045     ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);
1046 
1047     ierr = PetscLogEventBegin(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
1048     ierr = KSPSolveTranspose(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
1049     ierr = PetscLogEventEnd(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
1050 
1051     ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
1052     ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
1053   }
1054   ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr);
1055   ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
1056   PetscFunctionReturn(0);
1057 }
1058 
1059 #undef __FUNCT__
1060 #define __FUNCT__ "PCSetUp_BJacobi_Multiblock"
1061 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC pc,Mat mat,Mat pmat)
1062 {
1063   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
1064   PetscErrorCode         ierr;
1065   PetscInt               m,n_local,N,M,start,i;
1066   const char             *prefix,*pprefix,*mprefix;
1067   KSP                    ksp;
1068   Vec                    x,y;
1069   PC_BJacobi_Multiblock  *bjac = (PC_BJacobi_Multiblock*)jac->data;
1070   PC                     subpc;
1071   IS                     is;
1072   MatReuse               scall;
1073 
1074   PetscFunctionBegin;
1075   ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
1076 
1077   n_local = jac->n_local;
1078 
1079   if (jac->use_true_local) {
1080     PetscBool  same;
1081     ierr = PetscObjectTypeCompare((PetscObject)mat,((PetscObject)pmat)->type_name,&same);CHKERRQ(ierr);
1082     if (!same) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_INCOMP,"Matrices not of same type");
1083   }
1084 
1085   if (!pc->setupcalled) {
1086     scall                  = MAT_INITIAL_MATRIX;
1087 
1088     if (!jac->ksp) {
1089       pc->ops->reset         = PCReset_BJacobi_Multiblock;
1090       pc->ops->destroy       = PCDestroy_BJacobi_Multiblock;
1091       pc->ops->apply         = PCApply_BJacobi_Multiblock;
1092       pc->ops->applytranspose= PCApplyTranspose_BJacobi_Multiblock;
1093       pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Multiblock;
1094 
1095       ierr = PetscNewLog(pc,PC_BJacobi_Multiblock,&bjac);CHKERRQ(ierr);
1096       ierr = PetscMalloc(n_local*sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
1097       ierr = PetscLogObjectMemory(pc,sizeof(n_local*sizeof(KSP)));CHKERRQ(ierr);
1098       ierr = PetscMalloc2(n_local,Vec,&bjac->x,n_local,Vec,&bjac->y);CHKERRQ(ierr);
1099       ierr = PetscMalloc(n_local*sizeof(PetscScalar),&bjac->starts);CHKERRQ(ierr);
1100       ierr = PetscLogObjectMemory(pc,sizeof(n_local*sizeof(PetscScalar)));CHKERRQ(ierr);
1101 
1102       jac->data    = (void*)bjac;
1103       ierr = PetscMalloc(n_local*sizeof(IS),&bjac->is);CHKERRQ(ierr);
1104       ierr = PetscLogObjectMemory(pc,sizeof(n_local*sizeof(IS)));CHKERRQ(ierr);
1105 
1106       for (i=0; i<n_local; i++) {
1107         ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
1108         ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
1109         ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
1110         ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
1111         ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
1112         ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
1113         ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
1114         ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
1115         jac->ksp[i]    = ksp;
1116       }
1117     } else {
1118       bjac = (PC_BJacobi_Multiblock*)jac->data;
1119     }
1120 
1121     start = 0;
1122     for (i=0; i<n_local; i++) {
1123       m = jac->l_lens[i];
1124       /*
1125       The reason we need to generate these vectors is to serve
1126       as the right-hand side and solution vector for the solve on the
1127       block. We do not need to allocate space for the vectors since
1128       that is provided via VecPlaceArray() just before the call to
1129       KSPSolve() on the block.
1130 
1131       */
1132       ierr = VecCreateSeq(PETSC_COMM_SELF,m,&x);CHKERRQ(ierr);
1133       ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,PETSC_NULL,&y);CHKERRQ(ierr);
1134       ierr = PetscLogObjectParent(pc,x);CHKERRQ(ierr);
1135       ierr = PetscLogObjectParent(pc,y);CHKERRQ(ierr);
1136       bjac->x[i]      = x;
1137       bjac->y[i]      = y;
1138       bjac->starts[i] = start;
1139 
1140       ierr = ISCreateStride(PETSC_COMM_SELF,m,start,1,&is);CHKERRQ(ierr);
1141       bjac->is[i] = is;
1142       ierr = PetscLogObjectParent(pc,is);CHKERRQ(ierr);
1143 
1144       start += m;
1145     }
1146   } else {
1147     bjac = (PC_BJacobi_Multiblock*)jac->data;
1148     /*
1149        Destroy the blocks from the previous iteration
1150     */
1151     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
1152       ierr = MatDestroyMatrices(n_local,&bjac->pmat);CHKERRQ(ierr);
1153       if (jac->use_true_local) {
1154         ierr = MatDestroyMatrices(n_local,&bjac->mat);CHKERRQ(ierr);
1155       }
1156       scall = MAT_INITIAL_MATRIX;
1157     } else {
1158       scall = MAT_REUSE_MATRIX;
1159     }
1160   }
1161 
1162   ierr = MatGetSubMatrices(pmat,n_local,bjac->is,bjac->is,scall,&bjac->pmat);CHKERRQ(ierr);
1163   if (jac->use_true_local) {
1164     ierr = PetscObjectGetOptionsPrefix((PetscObject)mat,&mprefix);CHKERRQ(ierr);
1165     ierr = MatGetSubMatrices(mat,n_local,bjac->is,bjac->is,scall,&bjac->mat);CHKERRQ(ierr);
1166   }
1167   /* Return control to the user so that the submatrices can be modified (e.g., to apply
1168      different boundary conditions for the submatrices than for the global problem) */
1169   ierr = PCModifySubMatrices(pc,n_local,bjac->is,bjac->is,bjac->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
1170 
1171   ierr = PetscObjectGetOptionsPrefix((PetscObject)pmat,&pprefix);CHKERRQ(ierr);
1172   for (i=0; i<n_local; i++) {
1173     ierr = PetscLogObjectParent(pc,bjac->pmat[i]);CHKERRQ(ierr);
1174     ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->pmat[i],pprefix);CHKERRQ(ierr);
1175     if (jac->use_true_local) {
1176       ierr = PetscLogObjectParent(pc,bjac->mat[i]);CHKERRQ(ierr);
1177       ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->mat[i],mprefix);CHKERRQ(ierr);
1178       ierr = KSPSetOperators(jac->ksp[i],bjac->mat[i],bjac->pmat[i],pc->flag);CHKERRQ(ierr);
1179     } else {
1180       ierr = KSPSetOperators(jac->ksp[i],bjac->pmat[i],bjac->pmat[i],pc->flag);CHKERRQ(ierr);
1181     }
1182     if(pc->setfromoptionscalled){
1183       ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr);
1184     }
1185   }
1186   PetscFunctionReturn(0);
1187 }
1188 
1189 /* ---------------------------------------------------------------------------------------------*/
1190 /*
1191       These are for a single block with multiple processes;
1192 */
1193 #undef __FUNCT__
1194 #define __FUNCT__ "PCReset_BJacobi_Multiproc"
1195 static PetscErrorCode PCReset_BJacobi_Multiproc(PC pc)
1196 {
1197   PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
1198   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1199   PetscErrorCode       ierr;
1200 
1201   PetscFunctionBegin;
1202   ierr = VecDestroy(&mpjac->ysub);CHKERRQ(ierr);
1203   ierr = VecDestroy(&mpjac->xsub);CHKERRQ(ierr);
1204   ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);
1205   if (jac->ksp){ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);}
1206   PetscFunctionReturn(0);
1207 }
1208 
1209 #undef __FUNCT__
1210 #define __FUNCT__ "PCDestroy_BJacobi_Multiproc"
1211 static PetscErrorCode PCDestroy_BJacobi_Multiproc(PC pc)
1212 {
1213   PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
1214   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1215   PetscErrorCode       ierr;
1216 
1217   PetscFunctionBegin;
1218   ierr = PCReset_BJacobi_Multiproc(pc);CHKERRQ(ierr);
1219   ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr);
1220   ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
1221   ierr = PetscSubcommDestroy(&mpjac->psubcomm);CHKERRQ(ierr);
1222 
1223   ierr = PetscFree(mpjac);CHKERRQ(ierr);
1224   ierr = PetscFree(pc->data);CHKERRQ(ierr);
1225   PetscFunctionReturn(0);
1226 }
1227 
1228 #undef __FUNCT__
1229 #define __FUNCT__ "PCApply_BJacobi_Multiproc"
1230 static PetscErrorCode PCApply_BJacobi_Multiproc(PC pc,Vec x,Vec y)
1231 {
1232   PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
1233   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1234   PetscErrorCode       ierr;
1235   PetscScalar          *xarray,*yarray;
1236 
1237   PetscFunctionBegin;
1238   /* place x's and y's local arrays into xsub and ysub */
1239   ierr = VecGetArray(x,&xarray);CHKERRQ(ierr);
1240   ierr = VecGetArray(y,&yarray);CHKERRQ(ierr);
1241   ierr = VecPlaceArray(mpjac->xsub,xarray);CHKERRQ(ierr);
1242   ierr = VecPlaceArray(mpjac->ysub,yarray);CHKERRQ(ierr);
1243 
1244   /* apply preconditioner on each matrix block */
1245   ierr = PetscLogEventBegin(PC_ApplyOnMproc,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr);
1246   ierr = KSPSolve(jac->ksp[0],mpjac->xsub,mpjac->ysub);CHKERRQ(ierr);
1247   ierr = PetscLogEventEnd(PC_ApplyOnMproc,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr);
1248 
1249   ierr = VecResetArray(mpjac->xsub);CHKERRQ(ierr);
1250   ierr = VecResetArray(mpjac->ysub);CHKERRQ(ierr);
1251   ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
1252   ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
1253   PetscFunctionReturn(0);
1254 }
1255 
1256 extern PetscErrorCode MatGetMultiProcBlock_MPIAIJ(Mat,MPI_Comm,MatReuse,Mat*);
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_BJacobi_Multiproc"
/*
   PCSetUp_BJacobi_Multiproc - Sets up block Jacobi where the single block
   lives on a subcommunicator of several processes: splits the communicator
   into jac->n subcommunicators, extracts the matrix block owned by each
   subcommunicator, and creates one (parallel) KSP per subcommunicator.
*/
static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscInt             m,n;                       /* local sizes of the subcommunicator block */
  MPI_Comm             comm = ((PetscObject)pc)->comm,subcomm=0;
  const char           *prefix;
  PetscBool            wasSetup = PETSC_TRUE;     /* set FALSE only on the very first setup */

  PetscFunctionBegin;
  if (jac->n_local > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only a single block in a subcommunicator is supported");
  jac->n_local = 1; /* currently only a single block is supported for a subcommunicator */
  if (!pc->setupcalled) {
    wasSetup = PETSC_FALSE;
    ierr = PetscNewLog(pc,PC_BJacobi_Multiproc,&mpjac);CHKERRQ(ierr);
    jac->data = (void*)mpjac;

    /* initialize datastructure mpjac */
    if (!jac->psubcomm) {
      /* Create default contiguous subcommunicatiors if user does not provide them */
      ierr = PetscSubcommCreate(comm,&jac->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(jac->psubcomm,jac->n);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(jac->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
    }
    mpjac->psubcomm = jac->psubcomm;
    subcomm         = mpjac->psubcomm->comm;

    /* Get matrix blocks of pmat */
    /* NOTE(review): calls the MPIAIJ-specific routine directly (see extern
       above), so this path presumably expects pc->pmat to be MPIAIJ —
       confirm before using with other matrix types */
    ierr = MatGetMultiProcBlock_MPIAIJ(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr);

    /* create a new PC that processors in each subcomm have copy of */
    ierr = PetscMalloc(sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
    ierr = KSPCreate(subcomm,&jac->ksp[0]);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)jac->ksp[0],(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,jac->ksp[0]);CHKERRQ(ierr);
    ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats,pc->flag);CHKERRQ(ierr);
    ierr = KSPGetPC(jac->ksp[0],&mpjac->pc);CHKERRQ(ierr);

    /* inner solver options are selected with the "-<pc_prefix>sub_" prefix */
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(jac->ksp[0],prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(jac->ksp[0],"sub_");CHKERRQ(ierr);
    /* debugging aid, intentionally left disabled:
      PetscMPIInt rank,subsize,subrank;
      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
      ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
      ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);

      ierr = MatGetLocalSize(mpjac->submats,&m,PETSC_NULL);CHKERRQ(ierr);
      ierr = MatGetSize(mpjac->submats,&n,PETSC_NULL);CHKERRQ(ierr);
      ierr = PetscSynchronizedPrintf(comm,"[%d], sub-size %d,sub-rank %d\n",rank,subsize,subrank);
      ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
    */

    /* create dummy vectors xsub and ysub; their arrays are supplied via
       VecPlaceArray() on each apply */
    ierr = MatGetLocalSize(mpjac->submats,&m,&n);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(subcomm,1,n,PETSC_DECIDE,PETSC_NULL,&mpjac->xsub);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(subcomm,1,m,PETSC_DECIDE,PETSC_NULL,&mpjac->ysub);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,mpjac->xsub);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,mpjac->ysub);CHKERRQ(ierr);

    /* install the multiproc variants of the PC operations */
    pc->ops->reset   = PCReset_BJacobi_Multiproc;
    pc->ops->destroy = PCDestroy_BJacobi_Multiproc;
    pc->ops->apply   = PCApply_BJacobi_Multiproc;
  } else { /* pc->setupcalled */
    subcomm = mpjac->psubcomm->comm;
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrix blocks, then get new matrix blocks */
      if (mpjac->submats){ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);}
      ierr = MatGetMultiProcBlock_MPIAIJ(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr);
    } else {
      /* same nonzero pattern: refill the existing block in place */
      ierr = MatGetMultiProcBlock_MPIAIJ(pc->pmat,subcomm,MAT_REUSE_MATRIX,&mpjac->submats);CHKERRQ(ierr);
    }
    ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats,pc->flag);CHKERRQ(ierr);
  }

  /* apply command-line options exactly once, after the KSP is first created */
  if (!wasSetup && pc->setfromoptionscalled){
    ierr = KSPSetFromOptions(jac->ksp[0]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1341