xref: /petsc/src/ksp/pc/impls/bjacobi/bjacobi.c (revision 0337bfe0b9dcc77abc5d44df0b7f57cdcdf2ff74)
1 /*
2    Defines a block Jacobi preconditioner.
3 */
4 
5 #include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /*I "petscpc.h" I*/
6 
7 static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC, Mat, Mat);
8 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC, Mat, Mat);
9 static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC);
10 
/*
   PCSetUp_BJacobi - Determines the block partitioning (how many blocks and the size of
   each block on this MPI process) and then dispatches to the setup routine for the
   appropriate case: one block per process, several blocks per process, or one block
   shared by several processes.

   Notes:
   jac->n (total block count) and jac->n_local (local block count) are each either
   user-supplied (> 0) or unset (< 0); every combination is handled below.
*/
static PetscErrorCode PCSetUp_BJacobi(PC pc)
{
  PC_BJacobi *jac = (PC_BJacobi *)pc->data;
  Mat         mat = pc->mat, pmat = pc->pmat;
  PetscBool   hasop;
  PetscInt    N, M, start, i, sum, end;
  PetscInt    bs, i_start = -1, i_end = -1;
  PetscMPIInt rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
  PetscCall(MatGetLocalSize(pc->pmat, &M, &N));
  PetscCall(MatGetBlockSize(pc->pmat, &bs));

  /* fewer blocks than processes: each block is shared by several processes */
  if (jac->n > 0 && jac->n < size) {
    PetscCall(PCSetUp_BJacobi_Multiproc(pc));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /*    Determines the number of blocks assigned to each processor */
  /*   local block count  given */
  if (jac->n_local > 0 && jac->n < 0) {
    PetscCallMPI(MPIU_Allreduce(&jac->n_local, &jac->n, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
    if (jac->l_lens) { /* check that user set these correctly */
      sum = 0;
      for (i = 0; i < jac->n_local; i++) {
        /* each block size must be a multiple of the matrix block size */
        PetscCheck(jac->l_lens[i] / bs * bs == jac->l_lens[i], PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat blocksize doesn't match block Jacobi layout");
        sum += jac->l_lens[i];
      }
      PetscCheck(sum == M, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Local lens set incorrectly");
    } else {
      /* no sizes given: distribute the local rows (in units of bs) as evenly as possible */
      PetscCall(PetscMalloc1(jac->n_local, &jac->l_lens));
      for (i = 0; i < jac->n_local; i++) jac->l_lens[i] = bs * ((M / bs) / jac->n_local + (((M / bs) % jac->n_local) > i));
    }
  } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */
    /* global blocks given: determine which ones are local */
    if (jac->g_lens) {
      /* check that g_lens has valid entries */
      for (i = 0; i < jac->n; i++) {
        PetscCheck(jac->g_lens[i], PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Zero block not allowed");
        PetscCheck(jac->g_lens[i] / bs * bs == jac->g_lens[i], PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat blocksize doesn't match block Jacobi layout");
      }
      if (size == 1) {
        jac->n_local = jac->n;
        PetscCall(PetscMalloc1(jac->n_local, &jac->l_lens));
        PetscCall(PetscArraycpy(jac->l_lens, jac->g_lens, jac->n_local));
        /* check that user set these correctly */
        sum = 0;
        for (i = 0; i < jac->n_local; i++) sum += jac->l_lens[i];
        PetscCheck(sum == M, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Global lens set incorrectly");
      } else {
        PetscCall(MatGetOwnershipRange(pc->pmat, &start, &end));
        /* loop over blocks determining first one owned by me */
        sum = 0;
        for (i = 0; i < jac->n + 1; i++) {
          if (sum == start) {
            i_start = i;
            goto start_1;
          }
          if (i < jac->n) sum += jac->g_lens[i];
        }
        /* no block boundary coincides with this process's first row */
        SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
      start_1:
        for (i = i_start; i < jac->n + 1; i++) {
          if (sum == end) {
            i_end = i;
            goto end_1;
          }
          if (i < jac->n) sum += jac->g_lens[i];
        }
        /* no block boundary coincides with this process's last row */
        SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
      end_1:
        jac->n_local = i_end - i_start;
        PetscCall(PetscMalloc1(jac->n_local, &jac->l_lens));
        PetscCall(PetscArraycpy(jac->l_lens, jac->g_lens + i_start, jac->n_local));
      }
    } else { /* no global blocks given, determine then using default layout */
      jac->n_local = jac->n / size + ((jac->n % size) > rank);
      PetscCall(PetscMalloc1(jac->n_local, &jac->l_lens));
      for (i = 0; i < jac->n_local; i++) {
        jac->l_lens[i] = ((M / bs) / jac->n_local + (((M / bs) % jac->n_local) > i)) * bs;
        /* a zero-size block means more blocks were requested than there are rows */
        PetscCheck(jac->l_lens[i], PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Too many blocks given");
      }
    }
  } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */
    /* default: exactly one block per process, covering all local rows */
    jac->n       = size;
    jac->n_local = 1;
    PetscCall(PetscMalloc1(1, &jac->l_lens));
    jac->l_lens[0] = M;
  } else { /* jac->n > 0 && jac->n_local > 0 */
    if (!jac->l_lens) {
      PetscCall(PetscMalloc1(jac->n_local, &jac->l_lens));
      for (i = 0; i < jac->n_local; i++) jac->l_lens[i] = bs * ((M / bs) / jac->n_local + (((M / bs) % jac->n_local) > i));
    }
  }
  PetscCheck(jac->n_local >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of blocks is less than number of processors");

  /*    Determines mat and pmat */
  PetscCall(MatHasOperation(pc->mat, MATOP_GET_DIAGONAL_BLOCK, &hasop));
  if (!hasop && size == 1) {
    /* sequential and no diagonal-block operation: use the matrices directly */
    mat  = pc->mat;
    pmat = pc->pmat;
  } else {
    if (pc->useAmat) {
      /* use block from Amat matrix, not Pmat for local MatMult() */
      PetscCall(MatGetDiagonalBlock(pc->mat, &mat));
    }
    if (pc->pmat != pc->mat || !pc->useAmat) {
      PetscCall(MatGetDiagonalBlock(pc->pmat, &pmat));
    } else pmat = mat;
  }

  /*
     Setup code depends on the number of blocks
  */
  if (jac->n_local == 1) {
    PetscCall(PCSetUp_BJacobi_Singleblock(pc, mat, pmat));
  } else {
    PetscCall(PCSetUp_BJacobi_Multiblock(pc, mat, pmat));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
134 
135 /* Default destroy, if it has never been setup */
136 static PetscErrorCode PCDestroy_BJacobi(PC pc)
137 {
138   PC_BJacobi *jac = (PC_BJacobi *)pc->data;
139 
140   PetscFunctionBegin;
141   PetscCall(PetscFree(jac->g_lens));
142   PetscCall(PetscFree(jac->l_lens));
143   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiGetSubKSP_C", NULL));
144   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiSetTotalBlocks_C", NULL));
145   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiGetTotalBlocks_C", NULL));
146   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiSetLocalBlocks_C", NULL));
147   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiGetLocalBlocks_C", NULL));
148   PetscCall(PetscFree(pc->data));
149   PetscFunctionReturn(PETSC_SUCCESS);
150 }
151 
152 static PetscErrorCode PCSetFromOptions_BJacobi(PC pc, PetscOptionItems PetscOptionsObject)
153 {
154   PC_BJacobi *jac = (PC_BJacobi *)pc->data;
155   PetscInt    blocks, i;
156   PetscBool   flg;
157 
158   PetscFunctionBegin;
159   PetscOptionsHeadBegin(PetscOptionsObject, "Block Jacobi options");
160   PetscCall(PetscOptionsInt("-pc_bjacobi_blocks", "Total number of blocks", "PCBJacobiSetTotalBlocks", jac->n, &blocks, &flg));
161   if (flg) PetscCall(PCBJacobiSetTotalBlocks(pc, blocks, NULL));
162   PetscCall(PetscOptionsInt("-pc_bjacobi_local_blocks", "Local number of blocks", "PCBJacobiSetLocalBlocks", jac->n_local, &blocks, &flg));
163   if (flg) PetscCall(PCBJacobiSetLocalBlocks(pc, blocks, NULL));
164   if (jac->ksp) {
165     /* The sub-KSP has already been set up (e.g., PCSetUp_BJacobi_Singleblock), but KSPSetFromOptions was not called
166      * unless we had already been called. */
167     for (i = 0; i < jac->n_local; i++) PetscCall(KSPSetFromOptions(jac->ksp[i]));
168   }
169   PetscOptionsHeadEnd();
170   PetscFunctionReturn(PETSC_SUCCESS);
171 }
172 
173 #include <petscdraw.h>
/*
   PCView_BJacobi - Prints the block layout and (depending on the viewer format)
   either the first block's solver on rank 0 or every local block solver; also
   supports string and draw viewers.
*/
static PetscErrorCode PCView_BJacobi(PC pc, PetscViewer viewer)
{
  PC_BJacobi           *jac   = (PC_BJacobi *)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc *)jac->data;
  PetscMPIInt           rank;
  PetscInt              i;
  PetscBool             iascii, isstring, isdraw;
  PetscViewer           sviewer;
  PetscViewerFormat     format;
  const char           *prefix;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat local matrix, number of blocks = %" PetscInt_FMT "\n", jac->n));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  number of blocks = %" PetscInt_FMT "\n", jac->n));
    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
      /* summary mode: only rank 0 shows its first block's solver */
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for first block is in the following KSP and PC objects on rank 0:\n"));
      PetscCall(PCGetOptionsPrefix(pc, &prefix));
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n", prefix ? prefix : ""));
      if (jac->ksp && !jac->psubcomm) {
        /* one or more blocks per process */
        PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
        if (rank == 0) {
          PetscCall(PetscViewerASCIIPushTab(sviewer));
          PetscCall(KSPView(jac->ksp[0], sviewer));
          PetscCall(PetscViewerASCIIPopTab(sviewer));
        }
        PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
        /*  extra call needed because of the two calls to PetscViewerASCIIPushSynchronized() in PetscViewerGetSubViewer() */
        PetscCall(PetscViewerASCIIPopSynchronized(viewer));
      } else if (mpjac && jac->ksp && mpjac->psubcomm) {
        /* one block shared by several processes: view on the subcommunicator */
        PetscCall(PetscViewerGetSubViewer(viewer, mpjac->psubcomm->child, &sviewer));
        if (!mpjac->psubcomm->color) {
          PetscCall(PetscViewerASCIIPushTab(sviewer));
          PetscCall(KSPView(*jac->ksp, sviewer));
          PetscCall(PetscViewerASCIIPopTab(sviewer));
        }
        PetscCall(PetscViewerRestoreSubViewer(viewer, mpjac->psubcomm->child, &sviewer));
        /*  extra call needed because of the two calls to PetscViewerASCIIPushSynchronized() in PetscViewerGetSubViewer() */
        PetscCall(PetscViewerASCIIPopSynchronized(viewer));
      }
    } else {
      /* detail mode: every rank prints all of its local block solvers */
      PetscInt n_global;
      PetscCallMPI(MPIU_Allreduce(&jac->n_local, &n_global, 1, MPIU_INT, MPI_MAX, PetscObjectComm((PetscObject)pc)));
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for each block is in the following KSP and PC objects:\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] number of local blocks = %" PetscInt_FMT ", first local block number = %" PetscInt_FMT "\n", rank, jac->n_local, jac->first_local));
      for (i = 0; i < jac->n_local; i++) {
        PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] local block number %" PetscInt_FMT "\n", rank, i));
        PetscCall(KSPView(jac->ksp[i], sviewer));
        PetscCall(PetscViewerASCIIPrintf(sviewer, "- - - - - - - - - - - - - - - - - -\n"));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    }
  } else if (isstring) {
    PetscCall(PetscViewerStringSPrintf(viewer, " blks=%" PetscInt_FMT, jac->n));
    PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
    if (jac->ksp) PetscCall(KSPView(jac->ksp[0], sviewer));
    PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x, y, bottom, h;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    PetscCall(PetscSNPrintf(str, 25, "Number blocks %" PetscInt_FMT, jac->n));
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    bottom = y - h;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom));
    /* warning the communicator on the viewer is different than the one on the ksp in parallel */
    if (jac->ksp) PetscCall(KSPView(jac->ksp[0], viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
258 
259 static PetscErrorCode PCBJacobiGetSubKSP_BJacobi(PC pc, PetscInt *n_local, PetscInt *first_local, KSP **ksp)
260 {
261   PC_BJacobi *jac = (PC_BJacobi *)pc->data;
262 
263   PetscFunctionBegin;
264   PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() first");
265 
266   if (n_local) *n_local = jac->n_local;
267   if (first_local) *first_local = jac->first_local;
268   if (ksp) *ksp = jac->ksp;
269   PetscFunctionReturn(PETSC_SUCCESS);
270 }
271 
272 static PetscErrorCode PCBJacobiSetTotalBlocks_BJacobi(PC pc, PetscInt blocks, const PetscInt *lens)
273 {
274   PC_BJacobi *jac = (PC_BJacobi *)pc->data;
275 
276   PetscFunctionBegin;
277   PetscCheck(!pc->setupcalled || jac->n == blocks, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Cannot alter number of blocks after PCSetUp()/KSPSetUp() has been called");
278   jac->n = blocks;
279   if (!lens) jac->g_lens = NULL;
280   else {
281     PetscCall(PetscMalloc1(blocks, &jac->g_lens));
282     PetscCall(PetscArraycpy(jac->g_lens, lens, blocks));
283   }
284   PetscFunctionReturn(PETSC_SUCCESS);
285 }
286 
287 static PetscErrorCode PCBJacobiGetTotalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
288 {
289   PC_BJacobi *jac = (PC_BJacobi *)pc->data;
290 
291   PetscFunctionBegin;
292   *blocks = jac->n;
293   if (lens) *lens = jac->g_lens;
294   PetscFunctionReturn(PETSC_SUCCESS);
295 }
296 
297 static PetscErrorCode PCBJacobiSetLocalBlocks_BJacobi(PC pc, PetscInt blocks, const PetscInt lens[])
298 {
299   PC_BJacobi *jac;
300 
301   PetscFunctionBegin;
302   jac = (PC_BJacobi *)pc->data;
303 
304   jac->n_local = blocks;
305   if (!lens) jac->l_lens = NULL;
306   else {
307     PetscCall(PetscMalloc1(blocks, &jac->l_lens));
308     PetscCall(PetscArraycpy(jac->l_lens, lens, blocks));
309   }
310   PetscFunctionReturn(PETSC_SUCCESS);
311 }
312 
313 static PetscErrorCode PCBJacobiGetLocalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
314 {
315   PC_BJacobi *jac = (PC_BJacobi *)pc->data;
316 
317   PetscFunctionBegin;
318   *blocks = jac->n_local;
319   if (lens) *lens = jac->l_lens;
320   PetscFunctionReturn(PETSC_SUCCESS);
321 }
322 
/*@C
  PCBJacobiGetSubKSP - Gets the local `KSP` contexts for all blocks on
  this processor.

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ n_local     - the number of blocks on this processor, or NULL
. first_local - the global number of the first block on this processor, or NULL
- ksp         - the array of KSP contexts; the array has length `n_local`

  Level: advanced

  Notes:
  After `PCBJacobiGetSubKSP()` the array of `KSP` contexts is not to be freed.

  Currently for some matrix implementations only 1 block per processor
  is supported.

  You must call `KSPSetUp()` or `PCSetUp()` before calling `PCBJacobiGetSubKSP()`.

.seealso: [](ch_ksp), `PCBJACOBI`, `PCASM`, `PCASMGetSubKSP()`
@*/
PetscErrorCode PCBJacobiGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCBJacobiGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
356 
/*@
  PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block
  Jacobi preconditioner.

  Collective

  Input Parameters:
+ pc     - the preconditioner context
. blocks - the number of blocks
- lens   - [optional] integer array of length `blocks` containing the size of each block, or NULL for an equal-size default

  Options Database Key:
. -pc_bjacobi_blocks <blocks> - Sets the number of global blocks

  Level: intermediate

  Note:
  Currently only a limited number of blocking configurations are supported.
  All processors sharing the `PC` must call this routine with the same data.

.seealso: [](ch_ksp), `PCBJACOBI`, `PCSetUseAmat()`, `PCBJacobiSetLocalBlocks()`
@*/
PetscErrorCode PCBJacobiSetTotalBlocks(PC pc, PetscInt blocks, const PetscInt lens[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCheck(blocks > 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Must have positive blocks");
  PetscTryMethod(pc, "PCBJacobiSetTotalBlocks_C", (PC, PetscInt, const PetscInt[]), (pc, blocks, lens));
  PetscFunctionReturn(PETSC_SUCCESS);
}
387 
/*@C
  PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block
  Jacobi, `PCBJACOBI`, preconditioner.

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ blocks - the number of blocks
- lens   - [optional] integer array containing the size of each block, or NULL if the sizes were never set

  Level: intermediate

.seealso: [](ch_ksp), `PCBJACOBI`, `PCSetUseAmat()`, `PCBJacobiGetLocalBlocks()`
@*/
PetscErrorCode PCBJacobiGetTotalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(blocks, 2);
  PetscUseMethod(pc, "PCBJacobiGetTotalBlocks_C", (PC, PetscInt *, const PetscInt *[]), (pc, blocks, lens));
  PetscFunctionReturn(PETSC_SUCCESS);
}
413 
414 /*@
415   PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block
416   Jacobi, `PCBJACOBI`,  preconditioner.
417 
418   Not Collective
419 
420   Input Parameters:
421 + pc     - the preconditioner context
422 . blocks - the number of blocks
423 - lens   - [optional] integer array containing size of each block
424 
425   Options Database Key:
426 . -pc_bjacobi_local_blocks <blocks> - Sets the number of local blocks
427 
428   Level: intermediate
429 
430   Note:
431   Currently only a limited number of blocking configurations are supported.
432 
433 .seealso: [](ch_ksp), `PCBJACOBI`, `PCSetUseAmat()`, `PCBJacobiSetTotalBlocks()`
434 @*/
435 PetscErrorCode PCBJacobiSetLocalBlocks(PC pc, PetscInt blocks, const PetscInt lens[])
436 {
437   PetscFunctionBegin;
438   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
439   PetscCheck(blocks >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have nonegative blocks");
440   PetscTryMethod(pc, "PCBJacobiSetLocalBlocks_C", (PC, PetscInt, const PetscInt[]), (pc, blocks, lens));
441   PetscFunctionReturn(PETSC_SUCCESS);
442 }
443 
/*@C
  PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block
  Jacobi, `PCBJACOBI`, preconditioner.

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ blocks - the number of blocks
- lens   - [optional] integer array containing size of each block, or NULL if the sizes were never set

  Level: intermediate

  Note:
  Currently only a limited number of blocking configurations are supported.

.seealso: [](ch_ksp), `PCBJACOBI`, `PCSetUseAmat()`, `PCBJacobiGetTotalBlocks()`
@*/
PetscErrorCode PCBJacobiGetLocalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(blocks, 2);
  PetscUseMethod(pc, "PCBJacobiGetLocalBlocks_C", (PC, PetscInt *, const PetscInt *[]), (pc, blocks, lens));
  PetscFunctionReturn(PETSC_SUCCESS);
}
470 
471 /*MC
472    PCBJACOBI - Use block Jacobi preconditioning, each block is (approximately) solved with
473            its own `KSP` object.
474 
475    Options Database Keys:
476 +  -pc_use_amat - use Amat to apply block of operator in inner Krylov method
477 -  -pc_bjacobi_blocks <n> - use n total blocks
478 
479    Level: beginner
480 
481    Notes:
482     See `PCJACOBI` for diagonal Jacobi, `PCVPBJACOBI` for variable point block, and `PCPBJACOBI` for fixed size point block
483 
484     Each processor can have one or more blocks, or a single block can be shared by several processes. Defaults to one block per processor.
485 
486      To set options on the solvers for each block append -sub_ to all the `KSP` and `PC`
487         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
488 
489      To set the options on the solvers separate for each block call `PCBJacobiGetSubKSP()`
490          and set the options directly on the resulting `KSP` object (you can access its `PC`
491          `KSPGetPC()`)
492 
493      For GPU-based vectors (`VECCUDA`, `VECViennaCL`) it is recommended to use exactly one block per MPI process for best
494          performance.  Different block partitioning may lead to additional data transfers
495          between host and GPU that lead to degraded performance.
496 
     When multiple processes share a single block, each block encompasses exactly all the unknowns owned by its set of processes.
498 
499 .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCType`,
500           `PCASM`, `PCSetUseAmat()`, `PCGetUseAmat()`, `PCBJacobiGetSubKSP()`, `PCBJacobiSetTotalBlocks()`,
501           `PCBJacobiSetLocalBlocks()`, `PCSetModifySubMatrices()`, `PCJACOBI`, `PCVPBJACOBI`, `PCPBJACOBI`
502 M*/
503 
/*
   PCCreate_BJacobi - Creates the PCBJACOBI context: allocates the PC_BJacobi data,
   installs the generic function table (the apply routines are filled in later by the
   specific PCSetUp_BJacobi_* variants), and registers the set/get methods.
*/
PETSC_EXTERN PetscErrorCode PCCreate_BJacobi(PC pc)
{
  PetscMPIInt rank;
  PC_BJacobi *jac;

  PetscFunctionBegin;
  PetscCall(PetscNew(&jac));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));

  /* apply routines stay NULL until PCSetUp_BJacobi() chooses the single/multi-block variant */
  pc->ops->apply             = NULL;
  pc->ops->matapply          = NULL;
  pc->ops->applytranspose    = NULL;
  pc->ops->matapplytranspose = NULL;
  pc->ops->setup             = PCSetUp_BJacobi;
  pc->ops->destroy           = PCDestroy_BJacobi;
  pc->ops->setfromoptions    = PCSetFromOptions_BJacobi;
  pc->ops->view              = PCView_BJacobi;
  pc->ops->applyrichardson   = NULL;

  pc->data         = (void *)jac;
  jac->n           = -1;   /* block counts < 0 mean "not yet set by the user" */
  jac->n_local     = -1;
  jac->first_local = rank; /* correct for the default of one block per process */
  jac->ksp         = NULL;
  jac->g_lens      = NULL;
  jac->l_lens      = NULL;
  jac->psubcomm    = NULL;

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiGetSubKSP_C", PCBJacobiGetSubKSP_BJacobi));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiSetTotalBlocks_C", PCBJacobiSetTotalBlocks_BJacobi));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiGetTotalBlocks_C", PCBJacobiGetTotalBlocks_BJacobi));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiSetLocalBlocks_C", PCBJacobiSetLocalBlocks_BJacobi));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiGetLocalBlocks_C", PCBJacobiGetLocalBlocks_BJacobi));
  PetscFunctionReturn(PETSC_SUCCESS);
}
539 
540 /*
541         These are for a single block per processor; works for AIJ, BAIJ; Seq and MPI
542 */
/* Resets the single-block data (sub-KSP state and work vectors) without destroying the KSP itself */
static PetscErrorCode PCReset_BJacobi_Singleblock(PC pc)
{
  PC_BJacobi             *jac  = (PC_BJacobi *)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock *)jac->data;

  PetscFunctionBegin;
  PetscCall(KSPReset(jac->ksp[0]));
  /* sequential work vectors created in PCSetUp_BJacobi_Singleblock() */
  PetscCall(VecDestroy(&bjac->x));
  PetscCall(VecDestroy(&bjac->y));
  PetscFunctionReturn(PETSC_SUCCESS);
}
554 
/*
   Destroys the single-block variant: resets first, then destroys the sub-KSP and the
   single-block data, and finally delegates to PCDestroy_BJacobi() for the shared
   PC_BJacobi context (which frees pc->data, so bjac must be freed before that call).
*/
static PetscErrorCode PCDestroy_BJacobi_Singleblock(PC pc)
{
  PC_BJacobi             *jac  = (PC_BJacobi *)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock *)jac->data;

  PetscFunctionBegin;
  PetscCall(PCReset_BJacobi_Singleblock(pc));
  PetscCall(KSPDestroy(&jac->ksp[0]));
  PetscCall(PetscFree(jac->ksp));
  PetscCall(PetscFree(bjac));
  PetscCall(PCDestroy_BJacobi(pc));
  PetscFunctionReturn(PETSC_SUCCESS);
}
568 
569 static PetscErrorCode PCSetUpOnBlocks_BJacobi_Singleblock(PC pc)
570 {
571   PC_BJacobi        *jac    = (PC_BJacobi *)pc->data;
572   KSP                subksp = jac->ksp[0];
573   KSPConvergedReason reason;
574 
575   PetscFunctionBegin;
576   PetscCall(KSPSetUp(subksp));
577   PetscCall(KSPGetConvergedReason(subksp, &reason));
578   if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
579   PetscFunctionReturn(PETSC_SUCCESS);
580 }
581 
/*
   PCApply_BJacobi_Singleblock - Applies the preconditioner by solving the single local
   block with the sub-KSP; x and y are accessed through the sequential work vectors so
   no data is copied.
*/
static PetscErrorCode PCApply_BJacobi_Singleblock(PC pc, Vec x, Vec y)
{
  PC_BJacobi             *jac  = (PC_BJacobi *)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock *)jac->data;

  PetscFunctionBegin;
  /* expose the local parts of x/y as the sequential vectors the sub-KSP expects */
  PetscCall(VecGetLocalVectorRead(x, bjac->x));
  PetscCall(VecGetLocalVector(y, bjac->y));
  /* Since the inner KSP matrix may point directly to the diagonal block of an MPI matrix the inner
     matrix may change even if the outer KSP/PC has not updated the preconditioner, this will trigger a rebuild
     of the inner preconditioner automatically unless we pass down the outer preconditioners reuse flag.*/
  PetscCall(KSPSetReusePreconditioner(jac->ksp[0], pc->reusepreconditioner));
  PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, jac->ksp[0], bjac->x, bjac->y, 0));
  PetscCall(KSPSolve(jac->ksp[0], bjac->x, bjac->y));
  PetscCall(KSPCheckSolve(jac->ksp[0], pc, bjac->y));
  PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, jac->ksp[0], bjac->x, bjac->y, 0));
  PetscCall(VecRestoreLocalVectorRead(x, bjac->x));
  PetscCall(VecRestoreLocalVector(y, bjac->y));
  PetscFunctionReturn(PETSC_SUCCESS);
}
602 
/*
   PCMatApply_BJacobi_Singleblock_Private - Applies the single-block preconditioner to
   all columns of the dense matrix X at once (forward or transpose solve, selected by
   the transpose flag), storing the result in Y.
*/
static PetscErrorCode PCMatApply_BJacobi_Singleblock_Private(PC pc, Mat X, Mat Y, PetscBool transpose)
{
  PC_BJacobi *jac = (PC_BJacobi *)pc->data;
  Mat         sX, sY;

  PetscFunctionBegin;
  /* Since the inner KSP matrix may point directly to the diagonal block of an MPI matrix the inner
     matrix may change even if the outer KSP/PC has not updated the preconditioner, this will trigger a rebuild
     of the inner preconditioner automatically unless we pass down the outer preconditioners reuse flag.*/
  PetscCall(KSPSetReusePreconditioner(jac->ksp[0], pc->reusepreconditioner));
  /* the sub-KSP is sequential, so operate on the local dense matrices */
  PetscCall(MatDenseGetLocalMatrix(X, &sX));
  PetscCall(MatDenseGetLocalMatrix(Y, &sY));
  if (!transpose) {
    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, jac->ksp[0], sX, sY, 0));
    PetscCall(KSPMatSolve(jac->ksp[0], sX, sY));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, jac->ksp[0], sX, sY, 0));
  } else {
    PetscCall(PetscLogEventBegin(PC_ApplyTransposeOnBlocks, jac->ksp[0], sX, sY, 0));
    PetscCall(KSPMatSolveTranspose(jac->ksp[0], sX, sY));
    PetscCall(PetscLogEventEnd(PC_ApplyTransposeOnBlocks, jac->ksp[0], sX, sY, 0));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
626 
/* Forward multi-right-hand-side apply; thin wrapper over the shared private routine */
static PetscErrorCode PCMatApply_BJacobi_Singleblock(PC pc, Mat X, Mat Y)
{
  PetscFunctionBegin;
  PetscCall(PCMatApply_BJacobi_Singleblock_Private(pc, X, Y, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
633 
/* Transpose multi-right-hand-side apply; thin wrapper over the shared private routine */
static PetscErrorCode PCMatApplyTranspose_BJacobi_Singleblock(PC pc, Mat X, Mat Y)
{
  PetscFunctionBegin;
  PetscCall(PCMatApply_BJacobi_Singleblock_Private(pc, X, Y, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
640 
/*
   PCApplySymmetricLeft_BJacobi_Singleblock - Applies the symmetric-left half of the
   inner block's PC directly (bypassing the sub-KSP), aliasing x/y through the
   sequential work vectors.
*/
static PetscErrorCode PCApplySymmetricLeft_BJacobi_Singleblock(PC pc, Vec x, Vec y)
{
  PC_BJacobi             *jac  = (PC_BJacobi *)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock *)jac->data;
  PetscScalar            *y_array;
  const PetscScalar      *x_array;
  PC                      subpc;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() is to avoid having to copy the
    y vector into the bjac->x vector. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  PetscCall(VecGetArrayRead(x, &x_array));
  PetscCall(VecGetArray(y, &y_array));
  PetscCall(VecPlaceArray(bjac->x, x_array));
  PetscCall(VecPlaceArray(bjac->y, y_array));
  /* apply the symmetric left portion of the inner PC operator */
  /* note this bypasses the inner KSP and its options completely */
  PetscCall(KSPGetPC(jac->ksp[0], &subpc));
  PetscCall(PCApplySymmetricLeft(subpc, bjac->x, bjac->y));
  /* detach the borrowed arrays before returning them to x and y */
  PetscCall(VecResetArray(bjac->x));
  PetscCall(VecResetArray(bjac->y));
  PetscCall(VecRestoreArrayRead(x, &x_array));
  PetscCall(VecRestoreArray(y, &y_array));
  PetscFunctionReturn(PETSC_SUCCESS);
}
670 
/*
   PCApplySymmetricRight_BJacobi_Singleblock - Applies the symmetric-right half of the
   inner block's PC directly (bypassing the sub-KSP), aliasing x/y through the
   sequential work vectors.
*/
static PetscErrorCode PCApplySymmetricRight_BJacobi_Singleblock(PC pc, Vec x, Vec y)
{
  PC_BJacobi             *jac  = (PC_BJacobi *)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock *)jac->data;
  PetscScalar            *y_array;
  const PetscScalar      *x_array;
  PC                      subpc;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() is to avoid having to copy the
    y vector into the bjac->x vector. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  PetscCall(VecGetArrayRead(x, &x_array));
  PetscCall(VecGetArray(y, &y_array));
  PetscCall(VecPlaceArray(bjac->x, x_array));
  PetscCall(VecPlaceArray(bjac->y, y_array));

  /* apply the symmetric right portion of the inner PC operator */
  /* note this bypasses the inner KSP and its options completely */

  PetscCall(KSPGetPC(jac->ksp[0], &subpc));
  PetscCall(PCApplySymmetricRight(subpc, bjac->x, bjac->y));

  /* detach the borrowed arrays before returning them to x and y */
  PetscCall(VecResetArray(bjac->x));
  PetscCall(VecResetArray(bjac->y));
  PetscCall(VecRestoreArrayRead(x, &x_array));
  PetscCall(VecRestoreArray(y, &y_array));
  PetscFunctionReturn(PETSC_SUCCESS);
}
703 
/*
   PCApplyTranspose_BJacobi_Singleblock - Applies the transpose of the preconditioner by
   a transpose solve with the single block's sub-KSP, aliasing x/y through the
   sequential work vectors.
*/
static PetscErrorCode PCApplyTranspose_BJacobi_Singleblock(PC pc, Vec x, Vec y)
{
  PC_BJacobi             *jac  = (PC_BJacobi *)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock *)jac->data;
  PetscScalar            *y_array;
  const PetscScalar      *x_array;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() is to avoid having to copy the
    y vector into the bjac->x vector. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  PetscCall(VecGetArrayRead(x, &x_array));
  PetscCall(VecGetArray(y, &y_array));
  PetscCall(VecPlaceArray(bjac->x, x_array));
  PetscCall(VecPlaceArray(bjac->y, y_array));
  PetscCall(PetscLogEventBegin(PC_ApplyTransposeOnBlocks, jac->ksp[0], bjac->x, bjac->y, 0));
  PetscCall(KSPSolveTranspose(jac->ksp[0], bjac->x, bjac->y));
  PetscCall(KSPCheckSolve(jac->ksp[0], pc, bjac->y));
  PetscCall(PetscLogEventEnd(PC_ApplyTransposeOnBlocks, jac->ksp[0], bjac->x, bjac->y, 0));
  /* detach the borrowed arrays before returning them to x and y */
  PetscCall(VecResetArray(bjac->x));
  PetscCall(VecResetArray(bjac->y));
  PetscCall(VecRestoreArrayRead(x, &x_array));
  PetscCall(VecRestoreArray(y, &y_array));
  PetscFunctionReturn(PETSC_SUCCESS);
}
732 
/*
  PCSetUp_BJacobi_Singleblock - Sets up block Jacobi for the case of exactly one
  block per MPI process. On first setup it creates the sequential sub-KSP (prefix
  "sub_"), installs the Singleblock function table, and creates array-less work
  vectors; on later setups it only refreshes the operators on the sub-KSP.
*/
static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC pc, Mat mat, Mat pmat)
{
  PC_BJacobi             *jac = (PC_BJacobi *)pc->data;
  PetscInt                m;
  KSP                     ksp;
  PC_BJacobi_Singleblock *bjac;
  PetscBool               wasSetup = PETSC_TRUE;
  VecType                 vectype;
  const char             *prefix;

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    if (!jac->ksp) {
      PetscInt nestlevel;

      /* first-ever setup: remember so that KSPSetFromOptions() is called below */
      wasSetup = PETSC_FALSE;

      PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
      PetscCall(PCGetKSPNestLevel(pc, &nestlevel));
      PetscCall(KSPSetNestLevel(ksp, nestlevel + 1));
      PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
      PetscCall(KSPSetType(ksp, KSPPREONLY));
      PetscCall(PCGetOptionsPrefix(pc, &prefix));
      PetscCall(KSPSetOptionsPrefix(ksp, prefix));
      PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));

      /* install the single-block implementations of the PC operations */
      pc->ops->reset               = PCReset_BJacobi_Singleblock;
      pc->ops->destroy             = PCDestroy_BJacobi_Singleblock;
      pc->ops->apply               = PCApply_BJacobi_Singleblock;
      pc->ops->matapply            = PCMatApply_BJacobi_Singleblock;
      pc->ops->matapplytranspose   = PCMatApplyTranspose_BJacobi_Singleblock;
      pc->ops->applysymmetricleft  = PCApplySymmetricLeft_BJacobi_Singleblock;
      pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock;
      pc->ops->applytranspose      = PCApplyTranspose_BJacobi_Singleblock;
      pc->ops->setuponblocks       = PCSetUpOnBlocks_BJacobi_Singleblock;

      PetscCall(PetscMalloc1(1, &jac->ksp));
      jac->ksp[0] = ksp;

      PetscCall(PetscNew(&bjac));
      jac->data = (void *)bjac;
    } else {
      /* a reset left the sub-KSP in place; reuse it */
      ksp  = jac->ksp[0];
      bjac = (PC_BJacobi_Singleblock *)jac->data;
    }

    /*
      The reason we need to generate these vectors is to serve
      as the right-hand side and solution vector for the solve on the
      block. We do not need to allocate space for the vectors since
      that is provided via VecPlaceArray() just before the call to
      KSPSolve() on the block.
    */
    PetscCall(MatGetSize(pmat, &m, &m));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, m, NULL, &bjac->x));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, m, NULL, &bjac->y));
    /* match the vector type (e.g. GPU vectors) to the matrix's preferred type */
    PetscCall(MatGetVecType(pmat, &vectype));
    PetscCall(VecSetType(bjac->x, vectype));
    PetscCall(VecSetType(bjac->y, vectype));
  } else {
    ksp  = jac->ksp[0];
    bjac = (PC_BJacobi_Singleblock *)jac->data;
  }
  PetscCall(KSPGetOptionsPrefix(ksp, &prefix));
  if (pc->useAmat) {
    PetscCall(KSPSetOperators(ksp, mat, pmat));
    PetscCall(MatSetOptionsPrefix(mat, prefix));
  } else {
    PetscCall(KSPSetOperators(ksp, pmat, pmat));
  }
  PetscCall(MatSetOptionsPrefix(pmat, prefix));
  if (!wasSetup && pc->setfromoptionscalled) {
    /* If PCSetFromOptions_BJacobi is called later, KSPSetFromOptions will be called at that time. */
    PetscCall(KSPSetFromOptions(ksp));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
811 
/*
  PCReset_BJacobi_Multiblock - Releases the per-block submatrices, work vectors,
  index sets, and block-length arrays so the preconditioner can be set up again.
  The sub-KSPs themselves are only reset, not destroyed.
*/
static PetscErrorCode PCReset_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi            *jac  = (PC_BJacobi *)pc->data;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock *)jac->data;
  PetscInt               i;

  PetscFunctionBegin;
  if (bjac && bjac->pmat) {
    PetscCall(MatDestroyMatrices(jac->n_local, &bjac->pmat));
    /* submatrices of the true operator exist only when pc->useAmat is set */
    if (pc->useAmat) PetscCall(MatDestroyMatrices(jac->n_local, &bjac->mat));
  }

  for (i = 0; i < jac->n_local; i++) {
    PetscCall(KSPReset(jac->ksp[i]));
    if (bjac && bjac->x) {
      PetscCall(VecDestroy(&bjac->x[i]));
      PetscCall(VecDestroy(&bjac->y[i]));
      PetscCall(ISDestroy(&bjac->is[i]));
    }
  }
  PetscCall(PetscFree(jac->l_lens));
  PetscCall(PetscFree(jac->g_lens));
  PetscFunctionReturn(PETSC_SUCCESS);
}
836 
/*
  PCDestroy_BJacobi_Multiblock - Fully tears down the multiblock data: first resets
  (freeing submatrices/vectors/index sets), then frees the holder arrays, the
  sub-KSPs, and finally delegates to the common PCDestroy_BJacobi().
*/
static PetscErrorCode PCDestroy_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi            *jac  = (PC_BJacobi *)pc->data;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock *)jac->data;
  PetscInt               i;

  PetscFunctionBegin;
  PetscCall(PCReset_BJacobi_Multiblock(pc));
  if (bjac) {
    PetscCall(PetscFree2(bjac->x, bjac->y));
    PetscCall(PetscFree(bjac->starts));
    PetscCall(PetscFree(bjac->is));
  }
  PetscCall(PetscFree(jac->data));
  for (i = 0; i < jac->n_local; i++) PetscCall(KSPDestroy(&jac->ksp[i]));
  PetscCall(PetscFree(jac->ksp));
  PetscCall(PCDestroy_BJacobi(pc));
  PetscFunctionReturn(PETSC_SUCCESS);
}
856 
857 static PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiblock(PC pc)
858 {
859   PC_BJacobi        *jac = (PC_BJacobi *)pc->data;
860   PetscInt           i, n_local = jac->n_local;
861   KSPConvergedReason reason;
862 
863   PetscFunctionBegin;
864   for (i = 0; i < n_local; i++) {
865     PetscCall(KSPSetUp(jac->ksp[i]));
866     PetscCall(KSPGetConvergedReason(jac->ksp[i], &reason));
867     if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
868   }
869   PetscFunctionReturn(PETSC_SUCCESS);
870 }
871 
/*
  PCApply_BJacobi_Multiblock - Applies block Jacobi when each MPI process owns
  several blocks: one sequential KSPSolve() per block, operating in place on the
  corresponding slice of the local arrays of x and y.
*/
static PetscErrorCode PCApply_BJacobi_Multiblock(PC pc, Vec x, Vec y)
{
  PC_BJacobi            *jac = (PC_BJacobi *)pc->data;
  PetscInt               i, n_local = jac->n_local;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock *)jac->data;
  PetscScalar           *yin;
  const PetscScalar     *xin;

  PetscFunctionBegin;
  PetscCall(VecGetArrayRead(x, &xin));
  PetscCall(VecGetArray(y, &yin));
  for (i = 0; i < n_local; i++) {
    /*
       To avoid copying the subvector from x into a workspace we instead
       make the workspace vector array point to the subpart of the array of
       the global vector.
    */
    PetscCall(VecPlaceArray(bjac->x[i], xin + bjac->starts[i]));
    PetscCall(VecPlaceArray(bjac->y[i], yin + bjac->starts[i]));

    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));
    PetscCall(KSPSolve(jac->ksp[i], bjac->x[i], bjac->y[i]));
    PetscCall(KSPCheckSolve(jac->ksp[i], pc, bjac->y[i]));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));

    /* detach the borrowed slices before moving to the next block */
    PetscCall(VecResetArray(bjac->x[i]));
    PetscCall(VecResetArray(bjac->y[i]));
  }
  PetscCall(VecRestoreArrayRead(x, &xin));
  PetscCall(VecRestoreArray(y, &yin));
  PetscFunctionReturn(PETSC_SUCCESS);
}
904 
/*
  PCApplySymmetricLeft_BJacobi_Multiblock - Applies the symmetric-left factor of each
  block's inner PC to the corresponding slice of x, writing into y. Bypasses the
  block KSPs entirely (and therefore their options).
*/
static PetscErrorCode PCApplySymmetricLeft_BJacobi_Multiblock(PC pc, Vec x, Vec y)
{
  PC_BJacobi            *jac = (PC_BJacobi *)pc->data;
  PetscInt               i, n_local = jac->n_local;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock *)jac->data;
  PetscScalar           *yin;
  const PetscScalar     *xin;
  PC                     subpc;

  PetscFunctionBegin;
  PetscCall(VecGetArrayRead(x, &xin));
  PetscCall(VecGetArray(y, &yin));
  for (i = 0; i < n_local; i++) {
    /*
       To avoid copying the subvector from x into a workspace we instead
       make the workspace vector array point to the subpart of the array of
       the global vector.
    */
    PetscCall(VecPlaceArray(bjac->x[i], xin + bjac->starts[i]));
    PetscCall(VecPlaceArray(bjac->y[i], yin + bjac->starts[i]));

    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));
    /* apply the symmetric left portion of the inner PC operator */
    /* note this bypasses the inner KSP and its options completely */
    PetscCall(KSPGetPC(jac->ksp[i], &subpc));
    PetscCall(PCApplySymmetricLeft(subpc, bjac->x[i], bjac->y[i]));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));

    PetscCall(VecResetArray(bjac->x[i]));
    PetscCall(VecResetArray(bjac->y[i]));
  }
  PetscCall(VecRestoreArrayRead(x, &xin));
  PetscCall(VecRestoreArray(y, &yin));
  PetscFunctionReturn(PETSC_SUCCESS);
}
940 
/*
  PCApplySymmetricRight_BJacobi_Multiblock - Applies the symmetric-right factor of
  each block's inner PC to the corresponding slice of x, writing into y. Bypasses
  the block KSPs entirely (and therefore their options).
*/
static PetscErrorCode PCApplySymmetricRight_BJacobi_Multiblock(PC pc, Vec x, Vec y)
{
  PC_BJacobi            *jac = (PC_BJacobi *)pc->data;
  PetscInt               i, n_local = jac->n_local;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock *)jac->data;
  PetscScalar           *yin;
  const PetscScalar     *xin;
  PC                     subpc;

  PetscFunctionBegin;
  PetscCall(VecGetArrayRead(x, &xin));
  PetscCall(VecGetArray(y, &yin));
  for (i = 0; i < n_local; i++) {
    /*
       To avoid copying the subvector from x into a workspace we instead
       make the workspace vector array point to the subpart of the array of
       the global vector.
    */
    PetscCall(VecPlaceArray(bjac->x[i], xin + bjac->starts[i]));
    PetscCall(VecPlaceArray(bjac->y[i], yin + bjac->starts[i]));

    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));
    /* apply the symmetric right portion of the inner PC operator */
    /* note this bypasses the inner KSP and its options completely */
    PetscCall(KSPGetPC(jac->ksp[i], &subpc));
    PetscCall(PCApplySymmetricRight(subpc, bjac->x[i], bjac->y[i]));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));

    PetscCall(VecResetArray(bjac->x[i]));
    PetscCall(VecResetArray(bjac->y[i]));
  }
  PetscCall(VecRestoreArrayRead(x, &xin));
  PetscCall(VecRestoreArray(y, &yin));
  PetscFunctionReturn(PETSC_SUCCESS);
}
976 
/*
  PCApplyTranspose_BJacobi_Multiblock - Applies the transpose of block Jacobi when
  each MPI process owns several blocks: one KSPSolveTranspose() per block on the
  corresponding slice of the local arrays of x and y.
*/
static PetscErrorCode PCApplyTranspose_BJacobi_Multiblock(PC pc, Vec x, Vec y)
{
  PC_BJacobi            *jac = (PC_BJacobi *)pc->data;
  PetscInt               i, n_local = jac->n_local;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock *)jac->data;
  PetscScalar           *yin;
  const PetscScalar     *xin;

  PetscFunctionBegin;
  PetscCall(VecGetArrayRead(x, &xin));
  PetscCall(VecGetArray(y, &yin));
  for (i = 0; i < n_local; i++) {
    /*
       To avoid copying the subvector from x into a workspace we instead
       make the workspace vector array point to the subpart of the array of
       the global vector.
    */
    PetscCall(VecPlaceArray(bjac->x[i], xin + bjac->starts[i]));
    PetscCall(VecPlaceArray(bjac->y[i], yin + bjac->starts[i]));

    PetscCall(PetscLogEventBegin(PC_ApplyTransposeOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));
    PetscCall(KSPSolveTranspose(jac->ksp[i], bjac->x[i], bjac->y[i]));
    PetscCall(KSPCheckSolve(jac->ksp[i], pc, bjac->y[i]));
    PetscCall(PetscLogEventEnd(PC_ApplyTransposeOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));

    PetscCall(VecResetArray(bjac->x[i]));
    PetscCall(VecResetArray(bjac->y[i]));
  }
  PetscCall(VecRestoreArrayRead(x, &xin));
  PetscCall(VecRestoreArray(y, &yin));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1009 
1010 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC pc, Mat mat, Mat pmat)
1011 {
1012   PC_BJacobi            *jac = (PC_BJacobi *)pc->data;
1013   PetscInt               m, n_local, N, M, start, i;
1014   const char            *prefix;
1015   KSP                    ksp;
1016   Vec                    x, y;
1017   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock *)jac->data;
1018   PC                     subpc;
1019   IS                     is;
1020   MatReuse               scall;
1021   VecType                vectype;
1022   MatNullSpace          *nullsp_mat = NULL, *nullsp_pmat = NULL;
1023 
1024   PetscFunctionBegin;
1025   PetscCall(MatGetLocalSize(pc->pmat, &M, &N));
1026 
1027   n_local = jac->n_local;
1028 
1029   if (pc->useAmat) {
1030     PetscBool same;
1031     PetscCall(PetscObjectTypeCompare((PetscObject)mat, ((PetscObject)pmat)->type_name, &same));
1032     PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Matrices not of same type");
1033   }
1034 
1035   if (!pc->setupcalled) {
1036     PetscInt nestlevel;
1037 
1038     scall = MAT_INITIAL_MATRIX;
1039 
1040     if (!jac->ksp) {
1041       pc->ops->reset               = PCReset_BJacobi_Multiblock;
1042       pc->ops->destroy             = PCDestroy_BJacobi_Multiblock;
1043       pc->ops->apply               = PCApply_BJacobi_Multiblock;
1044       pc->ops->matapply            = NULL;
1045       pc->ops->matapplytranspose   = NULL;
1046       pc->ops->applysymmetricleft  = PCApplySymmetricLeft_BJacobi_Multiblock;
1047       pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Multiblock;
1048       pc->ops->applytranspose      = PCApplyTranspose_BJacobi_Multiblock;
1049       pc->ops->setuponblocks       = PCSetUpOnBlocks_BJacobi_Multiblock;
1050 
1051       PetscCall(PetscNew(&bjac));
1052       PetscCall(PetscMalloc1(n_local, &jac->ksp));
1053       PetscCall(PetscMalloc2(n_local, &bjac->x, n_local, &bjac->y));
1054       PetscCall(PetscMalloc1(n_local, &bjac->starts));
1055 
1056       jac->data = (void *)bjac;
1057       PetscCall(PetscMalloc1(n_local, &bjac->is));
1058 
1059       for (i = 0; i < n_local; i++) {
1060         PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
1061         PetscCall(PCGetKSPNestLevel(pc, &nestlevel));
1062         PetscCall(KSPSetNestLevel(ksp, nestlevel + 1));
1063         PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
1064         PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
1065         PetscCall(KSPSetType(ksp, KSPPREONLY));
1066         PetscCall(KSPGetPC(ksp, &subpc));
1067         PetscCall(PCGetOptionsPrefix(pc, &prefix));
1068         PetscCall(KSPSetOptionsPrefix(ksp, prefix));
1069         PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));
1070 
1071         jac->ksp[i] = ksp;
1072       }
1073     } else {
1074       bjac = (PC_BJacobi_Multiblock *)jac->data;
1075     }
1076 
1077     start = 0;
1078     PetscCall(MatGetVecType(pmat, &vectype));
1079     for (i = 0; i < n_local; i++) {
1080       m = jac->l_lens[i];
1081       /*
1082       The reason we need to generate these vectors is to serve
1083       as the right-hand side and solution vector for the solve on the
1084       block. We do not need to allocate space for the vectors since
1085       that is provided via VecPlaceArray() just before the call to
1086       KSPSolve() on the block.
1087 
1088       */
1089       PetscCall(VecCreateSeq(PETSC_COMM_SELF, m, &x));
1090       PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, m, NULL, &y));
1091       PetscCall(VecSetType(x, vectype));
1092       PetscCall(VecSetType(y, vectype));
1093 
1094       bjac->x[i]      = x;
1095       bjac->y[i]      = y;
1096       bjac->starts[i] = start;
1097 
1098       PetscCall(ISCreateStride(PETSC_COMM_SELF, m, start, 1, &is));
1099       bjac->is[i] = is;
1100 
1101       start += m;
1102     }
1103   } else {
1104     bjac = (PC_BJacobi_Multiblock *)jac->data;
1105     /*
1106        Destroy the blocks from the previous iteration
1107     */
1108     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
1109       PetscCall(MatGetNullSpaces(n_local, bjac->pmat, &nullsp_pmat));
1110       PetscCall(MatDestroyMatrices(n_local, &bjac->pmat));
1111       if (pc->useAmat) {
1112         PetscCall(MatGetNullSpaces(n_local, bjac->mat, &nullsp_mat));
1113         PetscCall(MatDestroyMatrices(n_local, &bjac->mat));
1114       }
1115       scall = MAT_INITIAL_MATRIX;
1116     } else scall = MAT_REUSE_MATRIX;
1117   }
1118 
1119   PetscCall(MatCreateSubMatrices(pmat, n_local, bjac->is, bjac->is, scall, &bjac->pmat));
1120   if (nullsp_pmat) PetscCall(MatRestoreNullSpaces(n_local, bjac->pmat, &nullsp_pmat));
1121   if (pc->useAmat) {
1122     PetscCall(MatCreateSubMatrices(mat, n_local, bjac->is, bjac->is, scall, &bjac->mat));
1123     if (nullsp_mat) PetscCall(MatRestoreNullSpaces(n_local, bjac->mat, &nullsp_mat));
1124   }
1125   /* Return control to the user so that the submatrices can be modified (e.g., to apply
1126      different boundary conditions for the submatrices than for the global problem) */
1127   PetscCall(PCModifySubMatrices(pc, n_local, bjac->is, bjac->is, bjac->pmat, pc->modifysubmatricesP));
1128 
1129   for (i = 0; i < n_local; i++) {
1130     PetscCall(KSPGetOptionsPrefix(jac->ksp[i], &prefix));
1131     if (pc->useAmat) {
1132       PetscCall(KSPSetOperators(jac->ksp[i], bjac->mat[i], bjac->pmat[i]));
1133       PetscCall(MatSetOptionsPrefix(bjac->mat[i], prefix));
1134     } else {
1135       PetscCall(KSPSetOperators(jac->ksp[i], bjac->pmat[i], bjac->pmat[i]));
1136     }
1137     PetscCall(MatSetOptionsPrefix(bjac->pmat[i], prefix));
1138     if (pc->setfromoptionscalled) PetscCall(KSPSetFromOptions(jac->ksp[i]));
1139   }
1140   PetscFunctionReturn(PETSC_SUCCESS);
1141 }
1142 
1143 /*
1144       These are for a single block with multiple processes
1145 */
1146 static PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiproc(PC pc)
1147 {
1148   PC_BJacobi        *jac    = (PC_BJacobi *)pc->data;
1149   KSP                subksp = jac->ksp[0];
1150   KSPConvergedReason reason;
1151 
1152   PetscFunctionBegin;
1153   PetscCall(KSPSetUp(subksp));
1154   PetscCall(KSPGetConvergedReason(subksp, &reason));
1155   if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
1156   PetscFunctionReturn(PETSC_SUCCESS);
1157 }
1158 
1159 static PetscErrorCode PCReset_BJacobi_Multiproc(PC pc)
1160 {
1161   PC_BJacobi           *jac   = (PC_BJacobi *)pc->data;
1162   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc *)jac->data;
1163 
1164   PetscFunctionBegin;
1165   PetscCall(VecDestroy(&mpjac->ysub));
1166   PetscCall(VecDestroy(&mpjac->xsub));
1167   PetscCall(MatDestroy(&mpjac->submats));
1168   if (jac->ksp) PetscCall(KSPReset(jac->ksp[0]));
1169   PetscFunctionReturn(PETSC_SUCCESS);
1170 }
1171 
/*
  PCDestroy_BJacobi_Multiproc - Fully tears down the multiproc data: resets first,
  then destroys the shared-block KSP, the subcommunicator, and the multiproc
  context, and finally delegates to the common PCDestroy_BJacobi().
*/
static PetscErrorCode PCDestroy_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac   = (PC_BJacobi *)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc *)jac->data;

  PetscFunctionBegin;
  PetscCall(PCReset_BJacobi_Multiproc(pc));
  PetscCall(KSPDestroy(&jac->ksp[0]));
  PetscCall(PetscFree(jac->ksp));
  PetscCall(PetscSubcommDestroy(&mpjac->psubcomm));

  PetscCall(PetscFree(mpjac));
  PetscCall(PCDestroy_BJacobi(pc));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1187 
/*
  PCApply_BJacobi_Multiproc - Applies block Jacobi when one block is shared by a
  subcommunicator of processes: the local arrays of x and y are lent to the
  subcommunicator vectors and a single parallel KSPSolve() is performed on the block.
*/
static PetscErrorCode PCApply_BJacobi_Multiproc(PC pc, Vec x, Vec y)
{
  PC_BJacobi           *jac   = (PC_BJacobi *)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc *)jac->data;
  PetscScalar          *yarray;
  const PetscScalar    *xarray;
  KSPConvergedReason    reason;

  PetscFunctionBegin;
  /* place x's and y's local arrays into xsub and ysub */
  PetscCall(VecGetArrayRead(x, &xarray));
  PetscCall(VecGetArray(y, &yarray));
  PetscCall(VecPlaceArray(mpjac->xsub, xarray));
  PetscCall(VecPlaceArray(mpjac->ysub, yarray));

  /* apply preconditioner on each matrix block */
  PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, jac->ksp[0], mpjac->xsub, mpjac->ysub, 0));
  PetscCall(KSPSolve(jac->ksp[0], mpjac->xsub, mpjac->ysub));
  PetscCall(KSPCheckSolve(jac->ksp[0], pc, mpjac->ysub));
  PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, jac->ksp[0], mpjac->xsub, mpjac->ysub, 0));
  /* propagate an inner-PC failure to the outer PC */
  PetscCall(KSPGetConvergedReason(jac->ksp[0], &reason));
  if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;

  PetscCall(VecResetArray(mpjac->xsub));
  PetscCall(VecResetArray(mpjac->ysub));
  PetscCall(VecRestoreArrayRead(x, &xarray));
  PetscCall(VecRestoreArray(y, &yarray));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1217 
/*
  PCMatApply_BJacobi_Multiproc - Multi-right-hand-side apply for the shared-block
  case: wraps the dense arrays of X and Y in matrices sX/sY living on the block
  KSP's subcommunicator (preserving the original leading dimensions) and performs
  one KSPMatSolve() on the block.
*/
static PetscErrorCode PCMatApply_BJacobi_Multiproc(PC pc, Mat X, Mat Y)
{
  PC_BJacobi        *jac = (PC_BJacobi *)pc->data;
  KSPConvergedReason reason;
  Mat                sX, sY;
  const PetscScalar *x;
  PetscScalar       *y;
  PetscInt           m, N, lda, ldb;

  PetscFunctionBegin;
  /* apply preconditioner on each matrix block */
  PetscCall(MatGetLocalSize(X, &m, NULL));
  PetscCall(MatGetSize(X, NULL, &N));
  PetscCall(MatDenseGetLDA(X, &lda));
  PetscCall(MatDenseGetLDA(Y, &ldb));
  PetscCall(MatDenseGetArrayRead(X, &x));
  PetscCall(MatDenseGetArrayWrite(Y, &y));
  /* sX and sY borrow the arrays of X and Y; no data is copied */
  PetscCall(MatCreateDense(PetscObjectComm((PetscObject)jac->ksp[0]), m, PETSC_DECIDE, PETSC_DECIDE, N, (PetscScalar *)x, &sX));
  PetscCall(MatCreateDense(PetscObjectComm((PetscObject)jac->ksp[0]), m, PETSC_DECIDE, PETSC_DECIDE, N, y, &sY));
  PetscCall(MatDenseSetLDA(sX, lda));
  PetscCall(MatDenseSetLDA(sY, ldb));
  PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, jac->ksp[0], X, Y, 0));
  PetscCall(KSPMatSolve(jac->ksp[0], sX, sY));
  PetscCall(KSPCheckSolve(jac->ksp[0], pc, NULL));
  PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, jac->ksp[0], X, Y, 0));
  PetscCall(MatDestroy(&sY));
  PetscCall(MatDestroy(&sX));
  PetscCall(MatDenseRestoreArrayWrite(Y, &y));
  PetscCall(MatDenseRestoreArrayRead(X, &x));
  /* propagate an inner-PC failure to the outer PC */
  PetscCall(KSPGetConvergedReason(jac->ksp[0], &reason));
  if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1251 
/*
  PCSetUp_BJacobi_Multiproc - Sets up block Jacobi when there are fewer blocks than
  MPI processes, so each block is shared by a subcommunicator of processes. On
  first setup it creates the subcommunicator, extracts the block matrix, creates
  the shared parallel KSP (prefix "sub_") and array-less work vectors; later
  setups only refresh the block matrix and operators.
*/
static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac   = (PC_BJacobi *)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc *)jac->data;
  PetscInt              m, n;
  MPI_Comm              comm, subcomm = 0;
  const char           *prefix;
  PetscBool             wasSetup = PETSC_TRUE;
  VecType               vectype;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  PetscCheck(jac->n_local <= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Only a single block in a subcommunicator is supported");
  jac->n_local = 1; /* currently only a single block is supported for a subcommunicator */
  if (!pc->setupcalled) {
    PetscInt nestlevel;

    wasSetup = PETSC_FALSE;
    PetscCall(PetscNew(&mpjac));
    jac->data = (void *)mpjac;

    /* initialize datastructure mpjac */
    if (!jac->psubcomm) {
      /* Create default contiguous subcommunicators if user does not provide them */
      PetscCall(PetscSubcommCreate(comm, &jac->psubcomm));
      PetscCall(PetscSubcommSetNumber(jac->psubcomm, jac->n));
      PetscCall(PetscSubcommSetType(jac->psubcomm, PETSC_SUBCOMM_CONTIGUOUS));
    }
    mpjac->psubcomm = jac->psubcomm;
    subcomm         = PetscSubcommChild(mpjac->psubcomm);

    /* Get matrix blocks of pmat */
    PetscCall(MatGetMultiProcBlock(pc->pmat, subcomm, MAT_INITIAL_MATRIX, &mpjac->submats));

    /* create a new PC that processors in each subcomm have copy of */
    PetscCall(PetscMalloc1(1, &jac->ksp));
    PetscCall(KSPCreate(subcomm, &jac->ksp[0]));
    PetscCall(PCGetKSPNestLevel(pc, &nestlevel));
    PetscCall(KSPSetNestLevel(jac->ksp[0], nestlevel + 1));
    PetscCall(KSPSetErrorIfNotConverged(jac->ksp[0], pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->ksp[0], (PetscObject)pc, 1));
    PetscCall(KSPSetOperators(jac->ksp[0], mpjac->submats, mpjac->submats));
    PetscCall(KSPGetPC(jac->ksp[0], &mpjac->pc));

    PetscCall(PCGetOptionsPrefix(pc, &prefix));
    PetscCall(KSPSetOptionsPrefix(jac->ksp[0], prefix));
    PetscCall(KSPAppendOptionsPrefix(jac->ksp[0], "sub_"));
    PetscCall(KSPGetOptionsPrefix(jac->ksp[0], &prefix));
    PetscCall(MatSetOptionsPrefix(mpjac->submats, prefix));

    /* create dummy vectors xsub and ysub; their arrays are attached via VecPlaceArray() in the apply routines */
    PetscCall(MatGetLocalSize(mpjac->submats, &m, &n));
    PetscCall(VecCreateMPIWithArray(subcomm, 1, n, PETSC_DECIDE, NULL, &mpjac->xsub));
    PetscCall(VecCreateMPIWithArray(subcomm, 1, m, PETSC_DECIDE, NULL, &mpjac->ysub));
    PetscCall(MatGetVecType(mpjac->submats, &vectype));
    PetscCall(VecSetType(mpjac->xsub, vectype));
    PetscCall(VecSetType(mpjac->ysub, vectype));

    /* install the multiproc implementations of the PC operations */
    pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Multiproc;
    pc->ops->reset         = PCReset_BJacobi_Multiproc;
    pc->ops->destroy       = PCDestroy_BJacobi_Multiproc;
    pc->ops->apply         = PCApply_BJacobi_Multiproc;
    pc->ops->matapply      = PCMatApply_BJacobi_Multiproc;
  } else { /* pc->setupcalled */
    subcomm = PetscSubcommChild(mpjac->psubcomm);
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrix blocks, then get new matrix blocks */
      if (mpjac->submats) PetscCall(MatDestroy(&mpjac->submats));
      PetscCall(MatGetMultiProcBlock(pc->pmat, subcomm, MAT_INITIAL_MATRIX, &mpjac->submats));
    } else {
      PetscCall(MatGetMultiProcBlock(pc->pmat, subcomm, MAT_REUSE_MATRIX, &mpjac->submats));
    }
    PetscCall(KSPSetOperators(jac->ksp[0], mpjac->submats, mpjac->submats));
  }

  if (!wasSetup && pc->setfromoptionscalled) PetscCall(KSPSetFromOptions(jac->ksp[0]));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1330