xref: /petsc/src/ksp/pc/impls/bjacobi/bjacobi.c (revision 2ae2db358559b5b4e2d021d6074243931b3b092d)
1 /*
2    Defines a block Jacobi preconditioner.
3 */
4 
5 #include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /*I "petscpc.h" I*/
6 
7 static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC, Mat, Mat);
8 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC, Mat, Mat);
9 static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC);
10 
/*
   Sets up the block Jacobi preconditioner: determines how many blocks live on
   each rank (from the user-supplied global/local counts and lengths, or a
   default of one block per rank), extracts the local diagonal block(s) of
   Amat/Pmat, and dispatches to the single-block, multi-block, or
   multi-process setup routine.
*/
static PetscErrorCode PCSetUp_BJacobi(PC pc)
{
  PC_BJacobi *jac = (PC_BJacobi *)pc->data;
  Mat         mat = pc->mat, pmat = pc->pmat;
  PetscBool   hasop;
  PetscInt    N, M, start, i, sum, end;
  PetscInt    bs, i_start = -1, i_end = -1;
  PetscMPIInt rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
  PetscCall(MatGetLocalSize(pc->pmat, &M, &N));
  PetscCall(MatGetBlockSize(pc->pmat, &bs));

  /* fewer blocks than ranks: each block is shared by several processes */
  if (jac->n > 0 && jac->n < size) {
    PetscCall(PCSetUp_BJacobi_Multiproc(pc));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /*    Determines the number of blocks assigned to each processor */
  /*   local block count  given */
  if (jac->n_local > 0 && jac->n < 0) {
    PetscCallMPI(MPIU_Allreduce(&jac->n_local, &jac->n, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
    if (jac->l_lens) { /* check that user set these correctly */
      sum = 0;
      for (i = 0; i < jac->n_local; i++) {
        /* every block length must be a multiple of the matrix block size */
        PetscCheck(jac->l_lens[i] / bs * bs == jac->l_lens[i], PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat blocksize doesn't match block Jacobi layout");
        sum += jac->l_lens[i];
      }
      PetscCheck(sum == M, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Local lens set incorrectly");
    } else {
      /* no lengths supplied: split the local rows as evenly as possible in units of bs */
      PetscCall(PetscMalloc1(jac->n_local, &jac->l_lens));
      for (i = 0; i < jac->n_local; i++) jac->l_lens[i] = bs * ((M / bs) / jac->n_local + (((M / bs) % jac->n_local) > i));
    }
  } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */
    /* global blocks given: determine which ones are local */
    if (jac->g_lens) {
      /* check that g_lens has valid entries */
      for (i = 0; i < jac->n; i++) {
        PetscCheck(jac->g_lens[i], PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Zero block not allowed");
        PetscCheck(jac->g_lens[i] / bs * bs == jac->g_lens[i], PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat blocksize doesn't match block Jacobi layout");
      }
      if (size == 1) {
        jac->n_local = jac->n;
        PetscCall(PetscMalloc1(jac->n_local, &jac->l_lens));
        PetscCall(PetscArraycpy(jac->l_lens, jac->g_lens, jac->n_local));
        /* check that user set these correctly */
        sum = 0;
        for (i = 0; i < jac->n_local; i++) sum += jac->l_lens[i];
        PetscCheck(sum == M, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Global lens set incorrectly");
      } else {
        PetscCall(MatGetOwnershipRange(pc->pmat, &start, &end));
        /* loop over blocks determining first one owned by me; a block boundary
           must coincide exactly with the start of this rank's ownership range */
        sum = 0;
        for (i = 0; i < jac->n + 1; i++) {
          if (sum == start) {
            i_start = i;
            goto start_1;
          }
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
      start_1:
        /* continue scanning for the first block boundary at the end of this rank's range */
        for (i = i_start; i < jac->n + 1; i++) {
          if (sum == end) {
            i_end = i;
            goto end_1;
          }
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
      end_1:
        jac->n_local = i_end - i_start;
        PetscCall(PetscMalloc1(jac->n_local, &jac->l_lens));
        PetscCall(PetscArraycpy(jac->l_lens, jac->g_lens + i_start, jac->n_local));
      }
    } else { /* no global block lengths given, determine them using default layout */
      jac->n_local = jac->n / size + ((jac->n % size) > rank);
      PetscCall(PetscMalloc1(jac->n_local, &jac->l_lens));
      for (i = 0; i < jac->n_local; i++) {
        jac->l_lens[i] = ((M / bs) / jac->n_local + (((M / bs) % jac->n_local) > i)) * bs;
        /* a zero-length block means more blocks than block-rows on this rank */
        PetscCheck(jac->l_lens[i], PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Too many blocks given");
      }
    }
  } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */
    jac->n       = size;
    jac->n_local = 1;
    PetscCall(PetscMalloc1(1, &jac->l_lens));
    jac->l_lens[0] = M;
  } else { /* jac->n > 0 && jac->n_local > 0 */
    if (!jac->l_lens) {
      PetscCall(PetscMalloc1(jac->n_local, &jac->l_lens));
      for (i = 0; i < jac->n_local; i++) jac->l_lens[i] = bs * ((M / bs) / jac->n_local + (((M / bs) % jac->n_local) > i));
    }
  }
  PetscCheck(jac->n_local >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of blocks is less than number of processors");

  /*    Determines mat and pmat */
  PetscCall(MatHasOperation(pc->mat, MATOP_GET_DIAGONAL_BLOCK, &hasop));
  if (!hasop && size == 1) {
    /* sequential matrix without a diagonal-block routine: use the matrices as they are */
    mat  = pc->mat;
    pmat = pc->pmat;
  } else {
    if (pc->useAmat) {
      /* use block from Amat matrix, not Pmat for local MatMult() */
      PetscCall(MatGetDiagonalBlock(pc->mat, &mat));
    }
    if (pc->pmat != pc->mat || !pc->useAmat) {
      PetscCall(MatGetDiagonalBlock(pc->pmat, &pmat));
    } else pmat = mat;
  }

  /*
     Setup code depends on the number of blocks
  */
  if (jac->n_local == 1) {
    PetscCall(PCSetUp_BJacobi_Singleblock(pc, mat, pmat));
  } else {
    PetscCall(PCSetUp_BJacobi_Multiblock(pc, mat, pmat));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
134 
135 /* Default destroy, if it has never been setup */
136 static PetscErrorCode PCDestroy_BJacobi(PC pc)
137 {
138   PC_BJacobi *jac = (PC_BJacobi *)pc->data;
139 
140   PetscFunctionBegin;
141   PetscCall(PetscFree(jac->g_lens));
142   PetscCall(PetscFree(jac->l_lens));
143   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiGetSubKSP_C", NULL));
144   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiSetTotalBlocks_C", NULL));
145   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiGetTotalBlocks_C", NULL));
146   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiSetLocalBlocks_C", NULL));
147   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiGetLocalBlocks_C", NULL));
148   PetscCall(PetscFree(pc->data));
149   PetscFunctionReturn(PETSC_SUCCESS);
150 }
151 
152 static PetscErrorCode PCSetFromOptions_BJacobi(PC pc, PetscOptionItems PetscOptionsObject)
153 {
154   PC_BJacobi *jac = (PC_BJacobi *)pc->data;
155   PetscInt    blocks, i;
156   PetscBool   flg;
157 
158   PetscFunctionBegin;
159   PetscOptionsHeadBegin(PetscOptionsObject, "Block Jacobi options");
160   PetscCall(PetscOptionsInt("-pc_bjacobi_blocks", "Total number of blocks", "PCBJacobiSetTotalBlocks", jac->n, &blocks, &flg));
161   if (flg) PetscCall(PCBJacobiSetTotalBlocks(pc, blocks, NULL));
162   PetscCall(PetscOptionsInt("-pc_bjacobi_local_blocks", "Local number of blocks", "PCBJacobiSetLocalBlocks", jac->n_local, &blocks, &flg));
163   if (flg) PetscCall(PCBJacobiSetLocalBlocks(pc, blocks, NULL));
164   if (jac->ksp) {
165     /* The sub-KSP has already been set up (e.g., PCSetUp_BJacobi_Singleblock), but KSPSetFromOptions was not called
166      * unless we had already been called. */
167     for (i = 0; i < jac->n_local; i++) PetscCall(KSPSetFromOptions(jac->ksp[i]));
168   }
169   PetscOptionsHeadEnd();
170   PetscFunctionReturn(PETSC_SUCCESS);
171 }
172 
173 #include <petscdraw.h>
/*
   Viewer for PCBJACOBI.  For ASCII viewers it prints a summary (rank 0's
   first sub-KSP) or, with format PETSC_VIEWER_ASCII_INFO_DETAIL, every
   local block on every rank; string and draw viewers get a compact form.
*/
static PetscErrorCode PCView_BJacobi(PC pc, PetscViewer viewer)
{
  PC_BJacobi           *jac   = (PC_BJacobi *)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc *)jac->data; /* only meaningful for the multiproc variant; checked before use */
  PetscMPIInt           rank;
  PetscInt              i;
  PetscBool             iascii, isstring, isdraw;
  PetscViewer           sviewer;
  PetscViewerFormat     format;
  const char           *prefix;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat local matrix, number of blocks = %" PetscInt_FMT "\n", jac->n));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  number of blocks = %" PetscInt_FMT "\n", jac->n));
    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
      /* summary mode: only rank 0 shows its first sub-KSP */
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for first block is in the following KSP and PC objects on rank 0:\n"));
      PetscCall(PCGetOptionsPrefix(pc, &prefix));
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n", prefix ? prefix : ""));
      if (jac->ksp && !jac->psubcomm) {
        PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
        if (rank == 0) {
          PetscCall(PetscViewerASCIIPushTab(sviewer));
          PetscCall(KSPView(jac->ksp[0], sviewer));
          PetscCall(PetscViewerASCIIPopTab(sviewer));
        }
        PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
        /*  extra call needed because of the two calls to PetscViewerASCIIPushSynchronized() in PetscViewerGetSubViewer() */
        PetscCall(PetscViewerASCIIPopSynchronized(viewer));
      } else if (mpjac && jac->ksp && mpjac->psubcomm) {
        /* multiproc variant: the color-0 ranks of each subcommunicator print the shared sub-KSP */
        PetscCall(PetscViewerGetSubViewer(viewer, mpjac->psubcomm->child, &sviewer));
        if (!mpjac->psubcomm->color) {
          PetscCall(PetscViewerASCIIPushTab(sviewer));
          PetscCall(KSPView(*jac->ksp, sviewer));
          PetscCall(PetscViewerASCIIPopTab(sviewer));
        }
        PetscCall(PetscViewerRestoreSubViewer(viewer, mpjac->psubcomm->child, &sviewer));
        /*  extra call needed because of the two calls to PetscViewerASCIIPushSynchronized() in PetscViewerGetSubViewer() */
        PetscCall(PetscViewerASCIIPopSynchronized(viewer));
      }
    } else {
      PetscInt n_global;
      /* detail mode: every rank prints all of its local blocks in rank order */
      PetscCallMPI(MPIU_Allreduce(&jac->n_local, &n_global, 1, MPIU_INT, MPI_MAX, PetscObjectComm((PetscObject)pc)));
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for each block is in the following KSP and PC objects:\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] number of local blocks = %" PetscInt_FMT ", first local block number = %" PetscInt_FMT "\n", rank, jac->n_local, jac->first_local));
      for (i = 0; i < jac->n_local; i++) {
        PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] local block number %" PetscInt_FMT "\n", rank, i));
        PetscCall(KSPView(jac->ksp[i], sviewer));
        PetscCall(PetscViewerASCIIPrintf(sviewer, "- - - - - - - - - - - - - - - - - -\n"));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    }
  } else if (isstring) {
    PetscCall(PetscViewerStringSPrintf(viewer, " blks=%" PetscInt_FMT, jac->n));
    PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
    if (jac->ksp) PetscCall(KSPView(jac->ksp[0], sviewer));
    PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x, y, bottom, h;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    PetscCall(PetscSNPrintf(str, 25, "Number blocks %" PetscInt_FMT, jac->n));
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    bottom = y - h;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom));
    /* warning: the communicator on viewer is different than on ksp in parallel */
    if (jac->ksp) PetscCall(KSPView(jac->ksp[0], viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
258 
259 static PetscErrorCode PCBJacobiGetSubKSP_BJacobi(PC pc, PetscInt *n_local, PetscInt *first_local, KSP **ksp)
260 {
261   PC_BJacobi *jac = (PC_BJacobi *)pc->data;
262 
263   PetscFunctionBegin;
264   PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() first");
265 
266   if (n_local) *n_local = jac->n_local;
267   if (first_local) *first_local = jac->first_local;
268   if (ksp) *ksp = jac->ksp;
269   PetscFunctionReturn(PETSC_SUCCESS);
270 }
271 
272 static PetscErrorCode PCBJacobiSetTotalBlocks_BJacobi(PC pc, PetscInt blocks, const PetscInt *lens)
273 {
274   PC_BJacobi *jac = (PC_BJacobi *)pc->data;
275 
276   PetscFunctionBegin;
277   PetscCheck(!pc->setupcalled || jac->n == blocks, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Cannot alter number of blocks after PCSetUp()/KSPSetUp() has been called");
278   jac->n = blocks;
279   if (!lens) jac->g_lens = NULL;
280   else {
281     PetscCall(PetscMalloc1(blocks, &jac->g_lens));
282     PetscCall(PetscArraycpy(jac->g_lens, lens, blocks));
283   }
284   PetscFunctionReturn(PETSC_SUCCESS);
285 }
286 
287 static PetscErrorCode PCBJacobiGetTotalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
288 {
289   PC_BJacobi *jac = (PC_BJacobi *)pc->data;
290 
291   PetscFunctionBegin;
292   *blocks = jac->n;
293   if (lens) *lens = jac->g_lens;
294   PetscFunctionReturn(PETSC_SUCCESS);
295 }
296 
297 static PetscErrorCode PCBJacobiSetLocalBlocks_BJacobi(PC pc, PetscInt blocks, const PetscInt lens[])
298 {
299   PC_BJacobi *jac;
300 
301   PetscFunctionBegin;
302   jac = (PC_BJacobi *)pc->data;
303 
304   jac->n_local = blocks;
305   if (!lens) jac->l_lens = NULL;
306   else {
307     PetscCall(PetscMalloc1(blocks, &jac->l_lens));
308     PetscCall(PetscArraycpy(jac->l_lens, lens, blocks));
309   }
310   PetscFunctionReturn(PETSC_SUCCESS);
311 }
312 
313 static PetscErrorCode PCBJacobiGetLocalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
314 {
315   PC_BJacobi *jac = (PC_BJacobi *)pc->data;
316 
317   PetscFunctionBegin;
318   *blocks = jac->n_local;
319   if (lens) *lens = jac->l_lens;
320   PetscFunctionReturn(PETSC_SUCCESS);
321 }
322 
323 /*@C
324   PCBJacobiGetSubKSP - Gets the local `KSP` contexts for all blocks on
325   this processor.
326 
327   Not Collective
328 
329   Input Parameter:
330 . pc - the preconditioner context
331 
332   Output Parameters:
333 + n_local     - the number of blocks on this processor, or NULL
334 . first_local - the global number of the first block on this processor, or NULL
335 - ksp         - the array of KSP contexts
336 
337   Level: advanced
338 
339   Notes:
340   After `PCBJacobiGetSubKSP()` the array of `KSP` contexts is not to be freed.
341 
342   Currently for some matrix implementations only 1 block per processor
343   is supported.
344 
345   You must call `KSPSetUp()` or `PCSetUp()` before calling `PCBJacobiGetSubKSP()`.
346 
347   Fortran Note:
348   Call `PCBJacobiRestoreSubKSP()` when you no longer need access to the array of `KSP`
349 
350 .seealso: [](ch_ksp), `PCBJACOBI`, `PCASM`, `PCASMGetSubKSP()`
351 @*/
PetscErrorCode PCBJacobiGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* dispatch to the type-specific implementation; errors if the PC type
     does not compose "PCBJacobiGetSubKSP_C" */
  PetscUseMethod(pc, "PCBJacobiGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
359 
360 /*@
361   PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block
362   Jacobi preconditioner.
363 
364   Collective
365 
366   Input Parameters:
367 + pc     - the preconditioner context
368 . blocks - the number of blocks
369 - lens   - [optional] integer array containing the size of each block
370 
371   Options Database Key:
372 . -pc_bjacobi_blocks <blocks> - Sets the number of global blocks
373 
374   Level: intermediate
375 
376   Note:
377   Currently only a limited number of blocking configurations are supported.
378   All processors sharing the `PC` must call this routine with the same data.
379 
380 .seealso: [](ch_ksp), `PCBJACOBI`, `PCSetUseAmat()`, `PCBJacobiSetLocalBlocks()`
381 @*/
PetscErrorCode PCBJacobiSetTotalBlocks(PC pc, PetscInt blocks, const PetscInt lens[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCheck(blocks > 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Must have positive blocks");
  /* PetscTryMethod: silently a no-op when pc is not of type PCBJACOBI */
  PetscTryMethod(pc, "PCBJacobiSetTotalBlocks_C", (PC, PetscInt, const PetscInt[]), (pc, blocks, lens));
  PetscFunctionReturn(PETSC_SUCCESS);
}
390 
391 /*@C
392   PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block
393   Jacobi, `PCBJACOBI`, preconditioner.
394 
395   Not Collective
396 
397   Input Parameter:
398 . pc - the preconditioner context
399 
400   Output Parameters:
401 + blocks - the number of blocks
402 - lens   - integer array containing the size of each block
403 
404   Level: intermediate
405 
406 .seealso: [](ch_ksp), `PCBJACOBI`, `PCSetUseAmat()`, `PCBJacobiGetLocalBlocks()`
407 @*/
PetscErrorCode PCBJacobiGetTotalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(blocks, 2); /* blocks is mandatory; lens may be NULL */
  PetscUseMethod(pc, "PCBJacobiGetTotalBlocks_C", (PC, PetscInt *, const PetscInt *[]), (pc, blocks, lens));
  PetscFunctionReturn(PETSC_SUCCESS);
}
416 
417 /*@
418   PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block
419   Jacobi, `PCBJACOBI`,  preconditioner.
420 
421   Not Collective
422 
423   Input Parameters:
424 + pc     - the preconditioner context
425 . blocks - the number of blocks
426 - lens   - [optional] integer array containing size of each block
427 
428   Options Database Key:
429 . -pc_bjacobi_local_blocks <blocks> - Sets the number of local blocks
430 
431   Level: intermediate
432 
433   Note:
434   Currently only a limited number of blocking configurations are supported.
435 
436 .seealso: [](ch_ksp), `PCBJACOBI`, `PCSetUseAmat()`, `PCBJacobiSetTotalBlocks()`
437 @*/
438 PetscErrorCode PCBJacobiSetLocalBlocks(PC pc, PetscInt blocks, const PetscInt lens[])
439 {
440   PetscFunctionBegin;
441   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
442   PetscCheck(blocks >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have nonegative blocks");
443   PetscTryMethod(pc, "PCBJacobiSetLocalBlocks_C", (PC, PetscInt, const PetscInt[]), (pc, blocks, lens));
444   PetscFunctionReturn(PETSC_SUCCESS);
445 }
446 
447 /*@C
448   PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block
449   Jacobi, `PCBJACOBI`, preconditioner.
450 
451   Not Collective
452 
453   Input Parameters:
454 + pc     - the preconditioner context
455 . blocks - the number of blocks
456 - lens   - [optional] integer array containing size of each block
457 
458   Level: intermediate
459 
460   Note:
461   Currently only a limited number of blocking configurations are supported.
462 
463 .seealso: [](ch_ksp), `PCBJACOBI`, `PCSetUseAmat()`, `PCBJacobiGetTotalBlocks()`
464 @*/
PetscErrorCode PCBJacobiGetLocalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(blocks, 2); /* blocks is mandatory; lens may be NULL */
  PetscUseMethod(pc, "PCBJacobiGetLocalBlocks_C", (PC, PetscInt *, const PetscInt *[]), (pc, blocks, lens));
  PetscFunctionReturn(PETSC_SUCCESS);
}
473 
474 /*MC
475    PCBJACOBI - Use block Jacobi preconditioning, each block is (approximately) solved with
476            its own `KSP` object.
477 
478    Options Database Keys:
479 +  -pc_use_amat - use Amat to apply block of operator in inner Krylov method
480 -  -pc_bjacobi_blocks <n> - use n total blocks
481 
482    Level: beginner
483 
484    Notes:
485     See `PCJACOBI` for diagonal Jacobi, `PCVPBJACOBI` for variable point block, and `PCPBJACOBI` for fixed size point block
486 
487     Each processor can have one or more blocks, or a single block can be shared by several processes. Defaults to one block per processor.
488 
489      To set options on the solvers for each block append -sub_ to all the `KSP` and `PC`
490         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
491 
492      To set the options on the solvers separate for each block call `PCBJacobiGetSubKSP()`
493          and set the options directly on the resulting `KSP` object (you can access its `PC`
494          `KSPGetPC()`)
495 
496      For GPU-based vectors (`VECCUDA`, `VECViennaCL`) it is recommended to use exactly one block per MPI process for best
497          performance.  Different block partitioning may lead to additional data transfers
498          between host and GPU that lead to degraded performance.
499 
     When multiple processes share a single block, each block encompasses exactly all the unknowns owned by its set of processes.
501 
502 .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCType`,
503           `PCASM`, `PCSetUseAmat()`, `PCGetUseAmat()`, `PCBJacobiGetSubKSP()`, `PCBJacobiSetTotalBlocks()`,
504           `PCBJacobiSetLocalBlocks()`, `PCSetModifySubMatrices()`, `PCJACOBI`, `PCVPBJACOBI`, `PCPBJACOBI`
505 M*/
506 
/*
   Constructor for PCBJACOBI: allocates the PC_BJacobi context, installs the
   type's operation table, and composes the user-callable set/get methods.
*/
PETSC_EXTERN PetscErrorCode PCCreate_BJacobi(PC pc)
{
  PetscMPIInt rank;
  PC_BJacobi *jac;

  PetscFunctionBegin;
  PetscCall(PetscNew(&jac));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));

  /* apply/matapply/applytranspose ops are installed later by the setup code,
     once the single-block/multi-block/multiproc variant is known */
  pc->ops->apply             = NULL;
  pc->ops->matapply          = NULL;
  pc->ops->applytranspose    = NULL;
  pc->ops->matapplytranspose = NULL;
  pc->ops->setup             = PCSetUp_BJacobi;
  pc->ops->destroy           = PCDestroy_BJacobi;
  pc->ops->setfromoptions    = PCSetFromOptions_BJacobi;
  pc->ops->view              = PCView_BJacobi;
  pc->ops->applyrichardson   = NULL;

  pc->data         = (void *)jac;
  jac->n           = -1;   /* -1 means "not yet set"; PCSetUp_BJacobi picks defaults */
  jac->n_local     = -1;
  jac->first_local = rank; /* default of one block per rank: first block index equals rank */
  jac->ksp         = NULL;
  jac->g_lens      = NULL;
  jac->l_lens      = NULL;
  jac->psubcomm    = NULL;

  /* compose the type-specific methods dispatched to by the public API */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiGetSubKSP_C", PCBJacobiGetSubKSP_BJacobi));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiSetTotalBlocks_C", PCBJacobiSetTotalBlocks_BJacobi));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiGetTotalBlocks_C", PCBJacobiGetTotalBlocks_BJacobi));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiSetLocalBlocks_C", PCBJacobiSetLocalBlocks_BJacobi));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiGetLocalBlocks_C", PCBJacobiGetLocalBlocks_BJacobi));
  PetscFunctionReturn(PETSC_SUCCESS);
}
542 
543 /*
544         These are for a single block per processor; works for AIJ, BAIJ; Seq and MPI
545 */
546 static PetscErrorCode PCReset_BJacobi_Singleblock(PC pc)
547 {
548   PC_BJacobi             *jac  = (PC_BJacobi *)pc->data;
549   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock *)jac->data;
550 
551   PetscFunctionBegin;
552   PetscCall(KSPReset(jac->ksp[0]));
553   PetscCall(VecDestroy(&bjac->x));
554   PetscCall(VecDestroy(&bjac->y));
555   PetscFunctionReturn(PETSC_SUCCESS);
556 }
557 
558 static PetscErrorCode PCDestroy_BJacobi_Singleblock(PC pc)
559 {
560   PC_BJacobi             *jac  = (PC_BJacobi *)pc->data;
561   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock *)jac->data;
562 
563   PetscFunctionBegin;
564   PetscCall(PCReset_BJacobi_Singleblock(pc));
565   PetscCall(KSPDestroy(&jac->ksp[0]));
566   PetscCall(PetscFree(jac->ksp));
567   PetscCall(PetscFree(bjac));
568   PetscCall(PCDestroy_BJacobi(pc));
569   PetscFunctionReturn(PETSC_SUCCESS);
570 }
571 
572 static PetscErrorCode PCSetUpOnBlocks_BJacobi_Singleblock(PC pc)
573 {
574   PC_BJacobi        *jac    = (PC_BJacobi *)pc->data;
575   KSP                subksp = jac->ksp[0];
576   KSPConvergedReason reason;
577 
578   PetscFunctionBegin;
579   PetscCall(KSPSetUp(subksp));
580   PetscCall(KSPGetConvergedReason(subksp, &reason));
581   if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
582   PetscFunctionReturn(PETSC_SUCCESS);
583 }
584 
/*
   Applies the single-block preconditioner: exposes the (possibly parallel)
   vectors x and y as sequential local vectors and solves the local diagonal
   block with the sub-KSP.
*/
static PetscErrorCode PCApply_BJacobi_Singleblock(PC pc, Vec x, Vec y)
{
  PC_BJacobi             *jac  = (PC_BJacobi *)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock *)jac->data;

  PetscFunctionBegin;
  /* borrow the local representations; must be restored before returning */
  PetscCall(VecGetLocalVectorRead(x, bjac->x));
  PetscCall(VecGetLocalVector(y, bjac->y));
  /* Since the inner KSP matrix may point directly to the diagonal block of an MPI matrix the inner
     matrix may change even if the outer KSP/PC has not updated the preconditioner, this will trigger a rebuild
     of the inner preconditioner automatically unless we pass down the outer preconditioners reuse flag.*/
  PetscCall(KSPSetReusePreconditioner(jac->ksp[0], pc->reusepreconditioner));
  PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, jac->ksp[0], bjac->x, bjac->y, 0));
  PetscCall(KSPSolve(jac->ksp[0], bjac->x, bjac->y));
  PetscCall(KSPCheckSolve(jac->ksp[0], pc, bjac->y));
  PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, jac->ksp[0], bjac->x, bjac->y, 0));
  PetscCall(VecRestoreLocalVectorRead(x, bjac->x));
  PetscCall(VecRestoreLocalVector(y, bjac->y));
  PetscFunctionReturn(PETSC_SUCCESS);
}
605 
606 static PetscErrorCode PCMatApply_BJacobi_Singleblock_Private(PC pc, Mat X, Mat Y, PetscBool transpose)
607 {
608   PC_BJacobi *jac = (PC_BJacobi *)pc->data;
609   Mat         sX, sY;
610 
611   PetscFunctionBegin;
612   /* Since the inner KSP matrix may point directly to the diagonal block of an MPI matrix the inner
613      matrix may change even if the outer KSP/PC has not updated the preconditioner, this will trigger a rebuild
614      of the inner preconditioner automatically unless we pass down the outer preconditioners reuse flag.*/
615   PetscCall(KSPSetReusePreconditioner(jac->ksp[0], pc->reusepreconditioner));
616   PetscCall(MatDenseGetLocalMatrix(X, &sX));
617   PetscCall(MatDenseGetLocalMatrix(Y, &sY));
618   if (!transpose) {
619     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, jac->ksp[0], sX, sY, 0));
620     PetscCall(KSPMatSolve(jac->ksp[0], sX, sY));
621     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, jac->ksp[0], sX, sY, 0));
622   } else {
623     PetscCall(PetscLogEventBegin(PC_ApplyTransposeOnBlocks, jac->ksp[0], sX, sY, 0));
624     PetscCall(KSPMatSolveTranspose(jac->ksp[0], sX, sY));
625     PetscCall(PetscLogEventEnd(PC_ApplyTransposeOnBlocks, jac->ksp[0], sX, sY, 0));
626   }
627   PetscFunctionReturn(PETSC_SUCCESS);
628 }
629 
/* Non-transposed multi-RHS apply: forwards to the shared private routine. */
static PetscErrorCode PCMatApply_BJacobi_Singleblock(PC pc, Mat X, Mat Y)
{
  PetscFunctionBegin;
  PetscCall(PCMatApply_BJacobi_Singleblock_Private(pc, X, Y, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
636 
/* Transposed multi-RHS apply: forwards to the shared private routine. */
static PetscErrorCode PCMatApplyTranspose_BJacobi_Singleblock(PC pc, Mat X, Mat Y)
{
  PetscFunctionBegin;
  PetscCall(PCMatApply_BJacobi_Singleblock_Private(pc, X, Y, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
643 
/*
   Applies only the symmetric-left portion of the inner PC on the local
   block.  Goes straight to the sub-PC, bypassing the sub-KSP entirely.
*/
static PetscErrorCode PCApplySymmetricLeft_BJacobi_Singleblock(PC pc, Vec x, Vec y)
{
  PC_BJacobi             *jac  = (PC_BJacobi *)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock *)jac->data;
  PetscScalar            *y_array;
  const PetscScalar      *x_array;
  PC                      subpc;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() is to avoid having to copy the
    y vector into the bjac->x vector. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  PetscCall(VecGetArrayRead(x, &x_array));
  PetscCall(VecGetArray(y, &y_array));
  PetscCall(VecPlaceArray(bjac->x, x_array));
  PetscCall(VecPlaceArray(bjac->y, y_array));
  /* apply the symmetric left portion of the inner PC operator */
  /* note this bypasses the inner KSP and its options completely */
  PetscCall(KSPGetPC(jac->ksp[0], &subpc));
  PetscCall(PCApplySymmetricLeft(subpc, bjac->x, bjac->y));
  /* reset/restore in reverse order of place/get */
  PetscCall(VecResetArray(bjac->x));
  PetscCall(VecResetArray(bjac->y));
  PetscCall(VecRestoreArrayRead(x, &x_array));
  PetscCall(VecRestoreArray(y, &y_array));
  PetscFunctionReturn(PETSC_SUCCESS);
}
673 
/*
   Applies only the symmetric-right portion of the inner PC on the local
   block.  Goes straight to the sub-PC, bypassing the sub-KSP entirely.
*/
static PetscErrorCode PCApplySymmetricRight_BJacobi_Singleblock(PC pc, Vec x, Vec y)
{
  PC_BJacobi             *jac  = (PC_BJacobi *)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock *)jac->data;
  PetscScalar            *y_array;
  const PetscScalar      *x_array;
  PC                      subpc;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() is to avoid having to copy the
    y vector into the bjac->x vector. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  PetscCall(VecGetArrayRead(x, &x_array));
  PetscCall(VecGetArray(y, &y_array));
  PetscCall(VecPlaceArray(bjac->x, x_array));
  PetscCall(VecPlaceArray(bjac->y, y_array));

  /* apply the symmetric right portion of the inner PC operator */
  /* note this bypasses the inner KSP and its options completely */

  PetscCall(KSPGetPC(jac->ksp[0], &subpc));
  PetscCall(PCApplySymmetricRight(subpc, bjac->x, bjac->y));

  /* reset/restore in reverse order of place/get */
  PetscCall(VecResetArray(bjac->x));
  PetscCall(VecResetArray(bjac->y));
  PetscCall(VecRestoreArrayRead(x, &x_array));
  PetscCall(VecRestoreArray(y, &y_array));
  PetscFunctionReturn(PETSC_SUCCESS);
}
706 
/*
   Transposed apply for the single-block variant: wraps the arrays of x and y
   in the sequential work vectors and runs the transposed sub-KSP solve.
*/
static PetscErrorCode PCApplyTranspose_BJacobi_Singleblock(PC pc, Vec x, Vec y)
{
  PC_BJacobi             *jac  = (PC_BJacobi *)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock *)jac->data;
  PetscScalar            *y_array;
  const PetscScalar      *x_array;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() is to avoid having to copy the
    y vector into the bjac->x vector. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  PetscCall(VecGetArrayRead(x, &x_array));
  PetscCall(VecGetArray(y, &y_array));
  PetscCall(VecPlaceArray(bjac->x, x_array));
  PetscCall(VecPlaceArray(bjac->y, y_array));
  PetscCall(PetscLogEventBegin(PC_ApplyTransposeOnBlocks, jac->ksp[0], bjac->x, bjac->y, 0));
  PetscCall(KSPSolveTranspose(jac->ksp[0], bjac->x, bjac->y));
  PetscCall(KSPCheckSolve(jac->ksp[0], pc, bjac->y));
  PetscCall(PetscLogEventEnd(PC_ApplyTransposeOnBlocks, jac->ksp[0], bjac->x, bjac->y, 0));
  /* reset/restore in reverse order of place/get */
  PetscCall(VecResetArray(bjac->x));
  PetscCall(VecResetArray(bjac->y));
  PetscCall(VecRestoreArrayRead(x, &x_array));
  PetscCall(VecRestoreArray(y, &y_array));
  PetscFunctionReturn(PETSC_SUCCESS);
}
735 
/*
   PCSetUp_BJacobi_Singleblock - Sets up block Jacobi for the case of exactly one
   block per MPI process: a single sequential sub-KSP (default type KSPPREONLY,
   options prefix "<pc prefix>sub_") that solves with the local block of pmat
   (or mat when pc->useAmat is set).

   The work vectors bjac->x and bjac->y are created with a NULL array; their
   storage is supplied on each application via VecPlaceArray(), so no vector
   storage is allocated here.
*/
static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC pc, Mat mat, Mat pmat)
{
  PC_BJacobi             *jac = (PC_BJacobi *)pc->data;
  PetscInt                m;
  KSP                     ksp;
  PC_BJacobi_Singleblock *bjac;
  PetscBool               wasSetup = PETSC_TRUE;
  VecType                 vectype;
  const char             *prefix;

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    if (!jac->ksp) {
      PetscInt nestlevel;

      wasSetup = PETSC_FALSE; /* remember to run KSPSetFromOptions() on the new sub-KSP below */

      /* create the single sequential sub-solver */
      PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
      PetscCall(PCGetKSPNestLevel(pc, &nestlevel));
      PetscCall(KSPSetNestLevel(ksp, nestlevel + 1));
      PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
      PetscCall(KSPSetType(ksp, KSPPREONLY));
      PetscCall(PCGetOptionsPrefix(pc, &prefix));
      PetscCall(KSPSetOptionsPrefix(ksp, prefix));
      PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));

      /* install the single-block implementations of the PC operations */
      pc->ops->reset               = PCReset_BJacobi_Singleblock;
      pc->ops->destroy             = PCDestroy_BJacobi_Singleblock;
      pc->ops->apply               = PCApply_BJacobi_Singleblock;
      pc->ops->matapply            = PCMatApply_BJacobi_Singleblock;
      pc->ops->matapplytranspose   = PCMatApplyTranspose_BJacobi_Singleblock;
      pc->ops->applysymmetricleft  = PCApplySymmetricLeft_BJacobi_Singleblock;
      pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock;
      pc->ops->applytranspose      = PCApplyTranspose_BJacobi_Singleblock;
      pc->ops->setuponblocks       = PCSetUpOnBlocks_BJacobi_Singleblock;

      PetscCall(PetscMalloc1(1, &jac->ksp));
      jac->ksp[0] = ksp;

      PetscCall(PetscNew(&bjac));
      jac->data = (void *)bjac;
    } else {
      /* sub-KSP already exists (e.g. created by PCBJacobiGetSubKSP()); reuse it */
      ksp  = jac->ksp[0];
      bjac = (PC_BJacobi_Singleblock *)jac->data;
    }

    /*
      The reason we need to generate these vectors is to serve
      as the right-hand side and solution vector for the solve on the
      block. We do not need to allocate space for the vectors since
      that is provided via VecPlaceArray() just before the call to
      KSPSolve() on the block.
    */
    PetscCall(MatGetSize(pmat, &m, &m));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, m, NULL, &bjac->x));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, m, NULL, &bjac->y));
    /* match the vector type to the matrix (e.g. GPU-backed matrices) */
    PetscCall(MatGetVecType(pmat, &vectype));
    PetscCall(VecSetType(bjac->x, vectype));
    PetscCall(VecSetType(bjac->y, vectype));
  } else {
    ksp  = jac->ksp[0];
    bjac = (PC_BJacobi_Singleblock *)jac->data;
  }
  /* hand the (possibly updated) operators to the sub-KSP */
  PetscCall(KSPGetOptionsPrefix(ksp, &prefix));
  if (pc->useAmat) {
    PetscCall(KSPSetOperators(ksp, mat, pmat));
    PetscCall(MatSetOptionsPrefix(mat, prefix));
  } else {
    PetscCall(KSPSetOperators(ksp, pmat, pmat));
  }
  PetscCall(MatSetOptionsPrefix(pmat, prefix));
  if (!wasSetup && pc->setfromoptionscalled) {
    /* If PCSetFromOptions_BJacobi is called later, KSPSetFromOptions will be called at that time. */
    PetscCall(KSPSetFromOptions(ksp));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
814 
815 static PetscErrorCode PCReset_BJacobi_Multiblock(PC pc)
816 {
817   PC_BJacobi            *jac  = (PC_BJacobi *)pc->data;
818   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock *)jac->data;
819   PetscInt               i;
820 
821   PetscFunctionBegin;
822   if (bjac && bjac->pmat) {
823     PetscCall(MatDestroyMatrices(jac->n_local, &bjac->pmat));
824     if (pc->useAmat) PetscCall(MatDestroyMatrices(jac->n_local, &bjac->mat));
825   }
826 
827   for (i = 0; i < jac->n_local; i++) {
828     PetscCall(KSPReset(jac->ksp[i]));
829     if (bjac && bjac->x) {
830       PetscCall(VecDestroy(&bjac->x[i]));
831       PetscCall(VecDestroy(&bjac->y[i]));
832       PetscCall(ISDestroy(&bjac->is[i]));
833     }
834   }
835   PetscCall(PetscFree(jac->l_lens));
836   PetscCall(PetscFree(jac->g_lens));
837   PetscFunctionReturn(PETSC_SUCCESS);
838 }
839 
/*
   PCDestroy_BJacobi_Multiblock - Tears down the multiblock data in order:
   reset first (destroys submatrices, work vectors, index sets), then free the
   containing arrays, the implementation context, and the block KSPs, and
   finally delegate to the shared PCDestroy_BJacobi().
*/
static PetscErrorCode PCDestroy_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi            *jac  = (PC_BJacobi *)pc->data;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock *)jac->data;
  PetscInt               i;

  PetscFunctionBegin;
  PetscCall(PCReset_BJacobi_Multiblock(pc));
  if (bjac) { /* bjac may be NULL if setup never ran */
    PetscCall(PetscFree2(bjac->x, bjac->y));
    PetscCall(PetscFree(bjac->starts));
    PetscCall(PetscFree(bjac->is));
  }
  PetscCall(PetscFree(jac->data));
  for (i = 0; i < jac->n_local; i++) PetscCall(KSPDestroy(&jac->ksp[i]));
  PetscCall(PetscFree(jac->ksp));
  PetscCall(PCDestroy_BJacobi(pc));
  PetscFunctionReturn(PETSC_SUCCESS);
}
859 
860 static PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiblock(PC pc)
861 {
862   PC_BJacobi        *jac = (PC_BJacobi *)pc->data;
863   PetscInt           i, n_local = jac->n_local;
864   KSPConvergedReason reason;
865 
866   PetscFunctionBegin;
867   for (i = 0; i < n_local; i++) {
868     PetscCall(KSPSetUp(jac->ksp[i]));
869     PetscCall(KSPGetConvergedReason(jac->ksp[i], &reason));
870     if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
871   }
872   PetscFunctionReturn(PETSC_SUCCESS);
873 }
874 
875 static PetscErrorCode PCApply_BJacobi_Multiblock(PC pc, Vec x, Vec y)
876 {
877   PC_BJacobi            *jac = (PC_BJacobi *)pc->data;
878   PetscInt               i, n_local = jac->n_local;
879   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock *)jac->data;
880   PetscScalar           *yin;
881   const PetscScalar     *xin;
882 
883   PetscFunctionBegin;
884   PetscCall(VecGetArrayRead(x, &xin));
885   PetscCall(VecGetArray(y, &yin));
886   for (i = 0; i < n_local; i++) {
887     /*
888        To avoid copying the subvector from x into a workspace we instead
889        make the workspace vector array point to the subpart of the array of
890        the global vector.
891     */
892     PetscCall(VecPlaceArray(bjac->x[i], xin + bjac->starts[i]));
893     PetscCall(VecPlaceArray(bjac->y[i], yin + bjac->starts[i]));
894 
895     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));
896     PetscCall(KSPSolve(jac->ksp[i], bjac->x[i], bjac->y[i]));
897     PetscCall(KSPCheckSolve(jac->ksp[i], pc, bjac->y[i]));
898     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));
899 
900     PetscCall(VecResetArray(bjac->x[i]));
901     PetscCall(VecResetArray(bjac->y[i]));
902   }
903   PetscCall(VecRestoreArrayRead(x, &xin));
904   PetscCall(VecRestoreArray(y, &yin));
905   PetscFunctionReturn(PETSC_SUCCESS);
906 }
907 
908 static PetscErrorCode PCApplySymmetricLeft_BJacobi_Multiblock(PC pc, Vec x, Vec y)
909 {
910   PC_BJacobi            *jac = (PC_BJacobi *)pc->data;
911   PetscInt               i, n_local = jac->n_local;
912   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock *)jac->data;
913   PetscScalar           *yin;
914   const PetscScalar     *xin;
915   PC                     subpc;
916 
917   PetscFunctionBegin;
918   PetscCall(VecGetArrayRead(x, &xin));
919   PetscCall(VecGetArray(y, &yin));
920   for (i = 0; i < n_local; i++) {
921     /*
922        To avoid copying the subvector from x into a workspace we instead
923        make the workspace vector array point to the subpart of the array of
924        the global vector.
925     */
926     PetscCall(VecPlaceArray(bjac->x[i], xin + bjac->starts[i]));
927     PetscCall(VecPlaceArray(bjac->y[i], yin + bjac->starts[i]));
928 
929     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));
930     /* apply the symmetric left portion of the inner PC operator */
931     /* note this bypasses the inner KSP and its options completely */
932     PetscCall(KSPGetPC(jac->ksp[i], &subpc));
933     PetscCall(PCApplySymmetricLeft(subpc, bjac->x[i], bjac->y[i]));
934     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));
935 
936     PetscCall(VecResetArray(bjac->x[i]));
937     PetscCall(VecResetArray(bjac->y[i]));
938   }
939   PetscCall(VecRestoreArrayRead(x, &xin));
940   PetscCall(VecRestoreArray(y, &yin));
941   PetscFunctionReturn(PETSC_SUCCESS);
942 }
943 
/*
   PCApplySymmetricRight_BJacobi_Multiblock - Applies only the symmetric right
   portion of each block's inner PC; the inner KSP is bypassed entirely.
*/
static PetscErrorCode PCApplySymmetricRight_BJacobi_Multiblock(PC pc, Vec x, Vec y)
{
  PC_BJacobi            *jac = (PC_BJacobi *)pc->data;
  PetscInt               i, n_local = jac->n_local;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock *)jac->data;
  PetscScalar           *yin;
  const PetscScalar     *xin;
  PC                     subpc;

  PetscFunctionBegin;
  PetscCall(VecGetArrayRead(x, &xin));
  PetscCall(VecGetArray(y, &yin));
  for (i = 0; i < n_local; i++) {
    /*
       To avoid copying the subvector from x into a workspace we instead
       make the workspace vector array point to the subpart of the array of
       the global vector.
    */
    PetscCall(VecPlaceArray(bjac->x[i], xin + bjac->starts[i]));
    PetscCall(VecPlaceArray(bjac->y[i], yin + bjac->starts[i]));

    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));
    /* apply the symmetric right portion of the inner PC operator */
    /* note this bypasses the inner KSP and its options completely */
    PetscCall(KSPGetPC(jac->ksp[i], &subpc));
    PetscCall(PCApplySymmetricRight(subpc, bjac->x[i], bjac->y[i]));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));

    PetscCall(VecResetArray(bjac->x[i]));
    PetscCall(VecResetArray(bjac->y[i]));
  }
  PetscCall(VecRestoreArrayRead(x, &xin));
  PetscCall(VecRestoreArray(y, &yin));
  PetscFunctionReturn(PETSC_SUCCESS);
}
979 
980 static PetscErrorCode PCApplyTranspose_BJacobi_Multiblock(PC pc, Vec x, Vec y)
981 {
982   PC_BJacobi            *jac = (PC_BJacobi *)pc->data;
983   PetscInt               i, n_local = jac->n_local;
984   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock *)jac->data;
985   PetscScalar           *yin;
986   const PetscScalar     *xin;
987 
988   PetscFunctionBegin;
989   PetscCall(VecGetArrayRead(x, &xin));
990   PetscCall(VecGetArray(y, &yin));
991   for (i = 0; i < n_local; i++) {
992     /*
993        To avoid copying the subvector from x into a workspace we instead
994        make the workspace vector array point to the subpart of the array of
995        the global vector.
996     */
997     PetscCall(VecPlaceArray(bjac->x[i], xin + bjac->starts[i]));
998     PetscCall(VecPlaceArray(bjac->y[i], yin + bjac->starts[i]));
999 
1000     PetscCall(PetscLogEventBegin(PC_ApplyTransposeOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));
1001     PetscCall(KSPSolveTranspose(jac->ksp[i], bjac->x[i], bjac->y[i]));
1002     PetscCall(KSPCheckSolve(jac->ksp[i], pc, bjac->y[i]));
1003     PetscCall(PetscLogEventEnd(PC_ApplyTransposeOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));
1004 
1005     PetscCall(VecResetArray(bjac->x[i]));
1006     PetscCall(VecResetArray(bjac->y[i]));
1007   }
1008   PetscCall(VecRestoreArrayRead(x, &xin));
1009   PetscCall(VecRestoreArray(y, &yin));
1010   PetscFunctionReturn(PETSC_SUCCESS);
1011 }
1012 
1013 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC pc, Mat mat, Mat pmat)
1014 {
1015   PC_BJacobi            *jac = (PC_BJacobi *)pc->data;
1016   PetscInt               m, n_local, N, M, start, i;
1017   const char            *prefix;
1018   KSP                    ksp;
1019   Vec                    x, y;
1020   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock *)jac->data;
1021   PC                     subpc;
1022   IS                     is;
1023   MatReuse               scall;
1024   VecType                vectype;
1025   MatNullSpace          *nullsp_mat = NULL, *nullsp_pmat = NULL;
1026 
1027   PetscFunctionBegin;
1028   PetscCall(MatGetLocalSize(pc->pmat, &M, &N));
1029 
1030   n_local = jac->n_local;
1031 
1032   if (pc->useAmat) {
1033     PetscBool same;
1034     PetscCall(PetscObjectTypeCompare((PetscObject)mat, ((PetscObject)pmat)->type_name, &same));
1035     PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Matrices not of same type");
1036   }
1037 
1038   if (!pc->setupcalled) {
1039     PetscInt nestlevel;
1040 
1041     scall = MAT_INITIAL_MATRIX;
1042 
1043     if (!jac->ksp) {
1044       pc->ops->reset               = PCReset_BJacobi_Multiblock;
1045       pc->ops->destroy             = PCDestroy_BJacobi_Multiblock;
1046       pc->ops->apply               = PCApply_BJacobi_Multiblock;
1047       pc->ops->matapply            = NULL;
1048       pc->ops->matapplytranspose   = NULL;
1049       pc->ops->applysymmetricleft  = PCApplySymmetricLeft_BJacobi_Multiblock;
1050       pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Multiblock;
1051       pc->ops->applytranspose      = PCApplyTranspose_BJacobi_Multiblock;
1052       pc->ops->setuponblocks       = PCSetUpOnBlocks_BJacobi_Multiblock;
1053 
1054       PetscCall(PetscNew(&bjac));
1055       PetscCall(PetscMalloc1(n_local, &jac->ksp));
1056       PetscCall(PetscMalloc2(n_local, &bjac->x, n_local, &bjac->y));
1057       PetscCall(PetscMalloc1(n_local, &bjac->starts));
1058 
1059       jac->data = (void *)bjac;
1060       PetscCall(PetscMalloc1(n_local, &bjac->is));
1061 
1062       for (i = 0; i < n_local; i++) {
1063         PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
1064         PetscCall(PCGetKSPNestLevel(pc, &nestlevel));
1065         PetscCall(KSPSetNestLevel(ksp, nestlevel + 1));
1066         PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
1067         PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
1068         PetscCall(KSPSetType(ksp, KSPPREONLY));
1069         PetscCall(KSPGetPC(ksp, &subpc));
1070         PetscCall(PCGetOptionsPrefix(pc, &prefix));
1071         PetscCall(KSPSetOptionsPrefix(ksp, prefix));
1072         PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));
1073 
1074         jac->ksp[i] = ksp;
1075       }
1076     } else {
1077       bjac = (PC_BJacobi_Multiblock *)jac->data;
1078     }
1079 
1080     start = 0;
1081     PetscCall(MatGetVecType(pmat, &vectype));
1082     for (i = 0; i < n_local; i++) {
1083       m = jac->l_lens[i];
1084       /*
1085       The reason we need to generate these vectors is to serve
1086       as the right-hand side and solution vector for the solve on the
1087       block. We do not need to allocate space for the vectors since
1088       that is provided via VecPlaceArray() just before the call to
1089       KSPSolve() on the block.
1090 
1091       */
1092       PetscCall(VecCreateSeq(PETSC_COMM_SELF, m, &x));
1093       PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, m, NULL, &y));
1094       PetscCall(VecSetType(x, vectype));
1095       PetscCall(VecSetType(y, vectype));
1096 
1097       bjac->x[i]      = x;
1098       bjac->y[i]      = y;
1099       bjac->starts[i] = start;
1100 
1101       PetscCall(ISCreateStride(PETSC_COMM_SELF, m, start, 1, &is));
1102       bjac->is[i] = is;
1103 
1104       start += m;
1105     }
1106   } else {
1107     bjac = (PC_BJacobi_Multiblock *)jac->data;
1108     /*
1109        Destroy the blocks from the previous iteration
1110     */
1111     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
1112       PetscCall(MatGetNullSpaces(n_local, bjac->pmat, &nullsp_pmat));
1113       PetscCall(MatDestroyMatrices(n_local, &bjac->pmat));
1114       if (pc->useAmat) {
1115         PetscCall(MatGetNullSpaces(n_local, bjac->mat, &nullsp_mat));
1116         PetscCall(MatDestroyMatrices(n_local, &bjac->mat));
1117       }
1118       scall = MAT_INITIAL_MATRIX;
1119     } else scall = MAT_REUSE_MATRIX;
1120   }
1121 
1122   PetscCall(MatCreateSubMatrices(pmat, n_local, bjac->is, bjac->is, scall, &bjac->pmat));
1123   if (nullsp_pmat) PetscCall(MatRestoreNullSpaces(n_local, bjac->pmat, &nullsp_pmat));
1124   if (pc->useAmat) {
1125     PetscCall(MatCreateSubMatrices(mat, n_local, bjac->is, bjac->is, scall, &bjac->mat));
1126     if (nullsp_mat) PetscCall(MatRestoreNullSpaces(n_local, bjac->mat, &nullsp_mat));
1127   }
1128   /* Return control to the user so that the submatrices can be modified (e.g., to apply
1129      different boundary conditions for the submatrices than for the global problem) */
1130   PetscCall(PCModifySubMatrices(pc, n_local, bjac->is, bjac->is, bjac->pmat, pc->modifysubmatricesP));
1131 
1132   for (i = 0; i < n_local; i++) {
1133     PetscCall(KSPGetOptionsPrefix(jac->ksp[i], &prefix));
1134     if (pc->useAmat) {
1135       PetscCall(KSPSetOperators(jac->ksp[i], bjac->mat[i], bjac->pmat[i]));
1136       PetscCall(MatSetOptionsPrefix(bjac->mat[i], prefix));
1137     } else {
1138       PetscCall(KSPSetOperators(jac->ksp[i], bjac->pmat[i], bjac->pmat[i]));
1139     }
1140     PetscCall(MatSetOptionsPrefix(bjac->pmat[i], prefix));
1141     if (pc->setfromoptionscalled) PetscCall(KSPSetFromOptions(jac->ksp[i]));
1142   }
1143   PetscFunctionReturn(PETSC_SUCCESS);
1144 }
1145 
1146 /*
1147       These are for a single block with multiple processes
1148 */
1149 static PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiproc(PC pc)
1150 {
1151   PC_BJacobi        *jac    = (PC_BJacobi *)pc->data;
1152   KSP                subksp = jac->ksp[0];
1153   KSPConvergedReason reason;
1154 
1155   PetscFunctionBegin;
1156   PetscCall(KSPSetUp(subksp));
1157   PetscCall(KSPGetConvergedReason(subksp, &reason));
1158   if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
1159   PetscFunctionReturn(PETSC_SUCCESS);
1160 }
1161 
1162 static PetscErrorCode PCReset_BJacobi_Multiproc(PC pc)
1163 {
1164   PC_BJacobi           *jac   = (PC_BJacobi *)pc->data;
1165   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc *)jac->data;
1166 
1167   PetscFunctionBegin;
1168   PetscCall(VecDestroy(&mpjac->ysub));
1169   PetscCall(VecDestroy(&mpjac->xsub));
1170   PetscCall(MatDestroy(&mpjac->submats));
1171   if (jac->ksp) PetscCall(KSPReset(jac->ksp[0]));
1172   PetscFunctionReturn(PETSC_SUCCESS);
1173 }
1174 
/*
   PCDestroy_BJacobi_Multiproc - Tears down the multi-process block data in
   order: reset first (work vectors and submatrix), then the single sub-KSP,
   the subcommunicator, and the implementation context, finally delegating to
   the shared PCDestroy_BJacobi().
*/
static PetscErrorCode PCDestroy_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac   = (PC_BJacobi *)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc *)jac->data;

  PetscFunctionBegin;
  PetscCall(PCReset_BJacobi_Multiproc(pc));
  PetscCall(KSPDestroy(&jac->ksp[0]));
  PetscCall(PetscFree(jac->ksp));
  PetscCall(PetscSubcommDestroy(&mpjac->psubcomm));

  PetscCall(PetscFree(mpjac));
  PetscCall(PCDestroy_BJacobi(pc));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1190 
1191 static PetscErrorCode PCApply_BJacobi_Multiproc(PC pc, Vec x, Vec y)
1192 {
1193   PC_BJacobi           *jac   = (PC_BJacobi *)pc->data;
1194   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc *)jac->data;
1195   PetscScalar          *yarray;
1196   const PetscScalar    *xarray;
1197   KSPConvergedReason    reason;
1198 
1199   PetscFunctionBegin;
1200   /* place x's and y's local arrays into xsub and ysub */
1201   PetscCall(VecGetArrayRead(x, &xarray));
1202   PetscCall(VecGetArray(y, &yarray));
1203   PetscCall(VecPlaceArray(mpjac->xsub, xarray));
1204   PetscCall(VecPlaceArray(mpjac->ysub, yarray));
1205 
1206   /* apply preconditioner on each matrix block */
1207   PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, jac->ksp[0], mpjac->xsub, mpjac->ysub, 0));
1208   PetscCall(KSPSolve(jac->ksp[0], mpjac->xsub, mpjac->ysub));
1209   PetscCall(KSPCheckSolve(jac->ksp[0], pc, mpjac->ysub));
1210   PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, jac->ksp[0], mpjac->xsub, mpjac->ysub, 0));
1211   PetscCall(KSPGetConvergedReason(jac->ksp[0], &reason));
1212   if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
1213 
1214   PetscCall(VecResetArray(mpjac->xsub));
1215   PetscCall(VecResetArray(mpjac->ysub));
1216   PetscCall(VecRestoreArrayRead(x, &xarray));
1217   PetscCall(VecRestoreArray(y, &yarray));
1218   PetscFunctionReturn(PETSC_SUCCESS);
1219 }
1220 
1221 static PetscErrorCode PCMatApply_BJacobi_Multiproc(PC pc, Mat X, Mat Y)
1222 {
1223   PC_BJacobi        *jac = (PC_BJacobi *)pc->data;
1224   KSPConvergedReason reason;
1225   Mat                sX, sY;
1226   const PetscScalar *x;
1227   PetscScalar       *y;
1228   PetscInt           m, N, lda, ldb;
1229 
1230   PetscFunctionBegin;
1231   /* apply preconditioner on each matrix block */
1232   PetscCall(MatGetLocalSize(X, &m, NULL));
1233   PetscCall(MatGetSize(X, NULL, &N));
1234   PetscCall(MatDenseGetLDA(X, &lda));
1235   PetscCall(MatDenseGetLDA(Y, &ldb));
1236   PetscCall(MatDenseGetArrayRead(X, &x));
1237   PetscCall(MatDenseGetArrayWrite(Y, &y));
1238   PetscCall(MatCreateDense(PetscObjectComm((PetscObject)jac->ksp[0]), m, PETSC_DECIDE, PETSC_DECIDE, N, (PetscScalar *)x, &sX));
1239   PetscCall(MatCreateDense(PetscObjectComm((PetscObject)jac->ksp[0]), m, PETSC_DECIDE, PETSC_DECIDE, N, y, &sY));
1240   PetscCall(MatDenseSetLDA(sX, lda));
1241   PetscCall(MatDenseSetLDA(sY, ldb));
1242   PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, jac->ksp[0], X, Y, 0));
1243   PetscCall(KSPMatSolve(jac->ksp[0], sX, sY));
1244   PetscCall(KSPCheckSolve(jac->ksp[0], pc, NULL));
1245   PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, jac->ksp[0], X, Y, 0));
1246   PetscCall(MatDestroy(&sY));
1247   PetscCall(MatDestroy(&sX));
1248   PetscCall(MatDenseRestoreArrayWrite(Y, &y));
1249   PetscCall(MatDenseRestoreArrayRead(X, &x));
1250   PetscCall(KSPGetConvergedReason(jac->ksp[0], &reason));
1251   if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
1252   PetscFunctionReturn(PETSC_SUCCESS);
1253 }
1254 
/*
   PCSetUp_BJacobi_Multiproc - Sets up block Jacobi for fewer blocks than MPI
   processes, so one block spans a subcommunicator.  On first setup it builds
   the subcommunicators (contiguous by default), extracts the multi-process
   matrix block, and creates a single sub-KSP (prefix "<pc prefix>sub_") on the
   child communicator; on later setups it only refreshes the matrix block.

   The work vectors xsub/ysub are created with a NULL array; storage is
   supplied per application via VecPlaceArray() in PCApply_BJacobi_Multiproc().
*/
static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac   = (PC_BJacobi *)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc *)jac->data;
  PetscInt              m, n;
  MPI_Comm              comm, subcomm = 0;
  const char           *prefix;
  PetscBool             wasSetup = PETSC_TRUE;
  VecType               vectype;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  PetscCheck(jac->n_local <= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Only a single block in a subcommunicator is supported");
  jac->n_local = 1; /* currently only a single block is supported for a subcommunicator */
  if (!pc->setupcalled) {
    PetscInt nestlevel;

    wasSetup = PETSC_FALSE; /* remember to run KSPSetFromOptions() on the new sub-KSP below */
    PetscCall(PetscNew(&mpjac));
    jac->data = (void *)mpjac;

    /* initialize datastructure mpjac */
    if (!jac->psubcomm) {
      /* Create default contiguous subcommunicators if user does not provide them */
      PetscCall(PetscSubcommCreate(comm, &jac->psubcomm));
      PetscCall(PetscSubcommSetNumber(jac->psubcomm, jac->n));
      PetscCall(PetscSubcommSetType(jac->psubcomm, PETSC_SUBCOMM_CONTIGUOUS));
    }
    mpjac->psubcomm = jac->psubcomm;
    subcomm         = PetscSubcommChild(mpjac->psubcomm);

    /* Get matrix blocks of pmat */
    PetscCall(MatGetMultiProcBlock(pc->pmat, subcomm, MAT_INITIAL_MATRIX, &mpjac->submats));

    /* create a new PC that processors in each subcomm have copy of */
    PetscCall(PetscMalloc1(1, &jac->ksp));
    PetscCall(KSPCreate(subcomm, &jac->ksp[0]));
    PetscCall(PCGetKSPNestLevel(pc, &nestlevel));
    PetscCall(KSPSetNestLevel(jac->ksp[0], nestlevel + 1));
    PetscCall(KSPSetErrorIfNotConverged(jac->ksp[0], pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->ksp[0], (PetscObject)pc, 1));
    PetscCall(KSPSetOperators(jac->ksp[0], mpjac->submats, mpjac->submats));
    PetscCall(KSPGetPC(jac->ksp[0], &mpjac->pc));

    /* sub-KSP (and its matrix) takes the "<pc prefix>sub_" options prefix */
    PetscCall(PCGetOptionsPrefix(pc, &prefix));
    PetscCall(KSPSetOptionsPrefix(jac->ksp[0], prefix));
    PetscCall(KSPAppendOptionsPrefix(jac->ksp[0], "sub_"));
    PetscCall(KSPGetOptionsPrefix(jac->ksp[0], &prefix));
    PetscCall(MatSetOptionsPrefix(mpjac->submats, prefix));

    /* create dummy vectors xsub and ysub */
    PetscCall(MatGetLocalSize(mpjac->submats, &m, &n));
    PetscCall(VecCreateMPIWithArray(subcomm, 1, n, PETSC_DECIDE, NULL, &mpjac->xsub));
    PetscCall(VecCreateMPIWithArray(subcomm, 1, m, PETSC_DECIDE, NULL, &mpjac->ysub));
    /* match the vector type to the matrix (e.g. GPU-backed matrices) */
    PetscCall(MatGetVecType(mpjac->submats, &vectype));
    PetscCall(VecSetType(mpjac->xsub, vectype));
    PetscCall(VecSetType(mpjac->ysub, vectype));

    /* install the multi-process implementations of the PC operations */
    pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Multiproc;
    pc->ops->reset         = PCReset_BJacobi_Multiproc;
    pc->ops->destroy       = PCDestroy_BJacobi_Multiproc;
    pc->ops->apply         = PCApply_BJacobi_Multiproc;
    pc->ops->matapply      = PCMatApply_BJacobi_Multiproc;
  } else { /* pc->setupcalled */
    subcomm = PetscSubcommChild(mpjac->psubcomm);
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrix blocks, then get new matrix blocks */
      if (mpjac->submats) PetscCall(MatDestroy(&mpjac->submats));
      PetscCall(MatGetMultiProcBlock(pc->pmat, subcomm, MAT_INITIAL_MATRIX, &mpjac->submats));
    } else {
      PetscCall(MatGetMultiProcBlock(pc->pmat, subcomm, MAT_REUSE_MATRIX, &mpjac->submats));
    }
    PetscCall(KSPSetOperators(jac->ksp[0], mpjac->submats, mpjac->submats));
  }

  if (!wasSetup && pc->setfromoptionscalled) PetscCall(KSPSetFromOptions(jac->ksp[0]));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1333