xref: /petsc/src/ksp/pc/impls/fieldsplit/fieldsplit.c (revision be39004295a98ced168607de69334a8b204d0588)
1 #include <petsc/private/pcimpl.h>  /*I "petscpc.h" I*/
2 #include <petsc/private/kspimpl.h> /*  This is needed to provide the appropriate PETSC_EXTERN for KSP_Solve_FS ....*/
3 #include <petscdm.h>
4 #include <petscdevice.h>
5 #if PetscDefined(HAVE_CUDA)
6   #include <petscdevice_cuda.h>
7 #endif
8 #if PetscDefined(HAVE_HIP)
9   #include <petscdevice_hip.h>
10 #endif
11 
12 const char *const PCFieldSplitSchurPreTypes[]  = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL};
13 const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL};
14 
15 PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4;
16 
typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
/* One node per split: carries the split's sub-solver, index sets, and work vectors.
   Nodes form a doubly-linked list rooted at PC_FieldSplit.head. */
struct _PC_FieldSplitLink {
  KSP               ksp;                 /* Solver for this split's block */
  Vec               x, y, z;             /* Work vectors in this split's subspace */
  char             *splitname;           /* Name of the split (used for options prefixes and viewing) */
  PetscInt          nfields;             /* Number of entries in fields[] / fields_col[] */
  PetscInt         *fields, *fields_col; /* Row/column field indices composing this split (NULL when defined by an IS) */
  VecScatter        sctx;                /* Scatter context for this split; presumably global <-> split transfer — confirm in the apply routines */
  IS                is, is_col;          /* Row and column index sets defining the split */
  PC_FieldSplitLink next, previous;      /* Linked-list pointers */
  PetscLogEvent     event;               /* Logging event for this split's solve */

  /* Used only when setting coordinates with PCSetCoordinates */
  PetscInt   dim;    /* Spatial dimension of the coordinates */
  PetscInt   ndofs;  /* Number of local degrees of freedom with coordinates */
  PetscReal *coords; /* Coordinate array (ndofs * dim entries, presumably interlaced — TODO confirm) */
};
34 
/* Private data for PCFIELDSPLIT: global split bookkeeping plus state for the
   Schur-complement and Golub-Kahan bidiagonalization (GKB) variants. */
typedef struct {
  PCCompositeType type;         /* How the splits are combined (additive, multiplicative, Schur, GKB, ...) */
  PetscBool       defaultsplit; /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */
  PetscBool       splitdefined; /* Flag is set after the splits have been defined, to prevent more splits from being added */
  PetscInt        bs;           /* Block size for IS and Mat structures */
  PetscInt        nsplits;      /* Number of field divisions defined */
  Vec            *x, *y, w1, w2; /* Per-split work vectors (x, y) and whole-space work vectors (w1, w2) */
  Mat            *mat;    /* The diagonal block for each split */
  Mat            *pmat;   /* The preconditioning diagonal block for each split */
  Mat            *Afield; /* The rows of the matrix associated with each split */
  PetscBool       issetup; /* Set once PCSetUp_FieldSplit has built the index sets/submatrices */

  /* Only used when Schur complement preconditioning is used */
  Mat                       B;          /* The (0,1) block */
  Mat                       C;          /* The (1,0) block */
  Mat                       schur;      /* The Schur complement S = A11 - A10 A00^{-1} A01, the KSP here, kspinner, is H_1 in [El08] */
  Mat                       schurp;     /* Assembled approximation to S built by MatSchurComplement to be used as a matrix for constructing the preconditioner when solving with S */
  Mat                       schur_user; /* User-provided matrix for constructing the preconditioner for the Schur complement */
  PCFieldSplitSchurPreType  schurpre;   /* Determines which matrix is used for the Schur complement */
  PCFieldSplitSchurFactType schurfactorization; /* Which approximate block factorization (DIAG/LOWER/UPPER/FULL) is applied */
  KSP                       kspschur;   /* The solver for S */
  KSP                       kspupper;   /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */
  PetscScalar               schurscale; /* Scaling factor for the Schur complement solution with DIAG factorization */

  /* Only used when Golub-Kahan bidiagonalization preconditioning is used */
  Mat          H;           /* The modified matrix H = A00 + nu*A01*A01'              */
  PetscReal    gkbtol;      /* Stopping tolerance for lower bound estimate            */
  PetscInt     gkbdelay;    /* The delay window for the stopping criterion            */
  PetscReal    gkbnu;       /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */
  PetscInt     gkbmaxit;    /* Maximum number of iterations for outer loop            */
  PetscBool    gkbmonitor;  /* Monitor for gkb iterations and the lower bound error   */
  PetscViewer  gkbviewer;   /* Viewer context for gkbmonitor                          */
  Vec          u, v, d, Hu; /* Work vectors for the GKB algorithm                     */
  PetscScalar *vecz;        /* Contains intermediate values, eg for lower bound       */

  PC_FieldSplitLink head;             /* First split in the linked list of splits */
  PetscBool         isrestrict;       /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */
  PetscBool         suboptionsset;    /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */
  PetscBool         dm_splits;        /* Whether to use DMCreateFieldDecomposition() whenever possible */
  PetscBool         diag_use_amat;    /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         detect;           /* Whether to form 2-way split by finding zero diagonal entries */
  PetscBool         coordinates_set;  /* Whether PCSetCoordinates has been called */
} PC_FieldSplit;
79 
80 /*
81     Note:
82     there is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit instead of
83    inside PC_FieldSplitLink, just historical. If you want to be able to add new fields after already using the
84    PC you could change this.
85 */
86 
87 /* This helper is so that setting a user-provided matrix is orthogonal to choosing to use it.  This way the
88 * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices. */
89 static Mat FieldSplitSchurPre(PC_FieldSplit *jac)
90 {
91   switch (jac->schurpre) {
92   case PC_FIELDSPLIT_SCHUR_PRE_SELF:
93     return jac->schur;
94   case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
95     return jac->schurp;
96   case PC_FIELDSPLIT_SCHUR_PRE_A11:
97     return jac->pmat[1];
98   case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */
99   case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */
100   default:
101     return jac->schur_user ? jac->schur_user : jac->pmat[1];
102   }
103 }
104 
105 #include <petscdraw.h>
/* Viewer for the non-Schur, non-GKB composite variants: prints the composition type,
   the fields (or IS) defining each split, and each split's KSP. Supports ASCII and draw viewers. */
static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for each split is in the following KSP objects:\n"));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        /* Split defined by explicit field indices: print them comma-separated on one line */
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); /* suppress tabbing so the list stays on one line */
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      PetscCall(KSPView(ilink->ksp, viewer));
      ilink = ilink->next;
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    /* NOTE(review): ilink was advanced to the end of the list by the ASCII branch; this
       assumes a viewer is never both ASCII and draw (PetscObjectTypeCompare makes these
       mutually exclusive for a single viewer type) — confirm if viewer types can overlap. */
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* Lay the nsplits sub-KSP drawings side by side, centered on the current point */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
163 
/* Viewer for the Schur-complement variant: prints the factorization type, which matrix is
   used to precondition S, the split definitions, and the A00/upper/Schur KSPs. */
static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer)
{
  PC_FieldSplit             *jac = (PC_FieldSplit *)pc->data;
  PetscBool                  iascii, isdraw;
  PetscInt                   i, j;
  PC_FieldSplitLink          ilink = jac->head;
  MatSchurComplementAinvType atype;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    /* Report where the preconditioner for S comes from; mirrors the logic of FieldSplitSchurPre() */
    switch (jac->schurpre) {
    case PC_FIELDSPLIT_SCHUR_PRE_SELF:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from S itself\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
      if (jac->schur) { /* jac->schur may not exist yet before setup; then print nothing for this case */
        PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype));
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's "))));
      }
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_A11:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_FULL:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from the exact Schur complement\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_USER:
      if (jac->schur_user) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from user provided matrix\n"));
      } else {
        /* USER requested but no matrix supplied: falls back to A11, matching FieldSplitSchurPre() */
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      }
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre);
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Split info:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      ilink = ilink->next;
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->head) {
      PetscCall(KSPView(jac->head->ksp, viewer));
    } else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    PetscCall(PetscViewerASCIIPopTab(viewer));
    /* A distinct kspupper exists only for the UPPER/FULL factorizations when configured */
    if (jac->head && jac->kspupper != jac->head->ksp) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->kspschur) {
      PetscCall(KSPView(jac->kspschur, viewer));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    }
    PetscCall(PetscViewerASCIIPopTab(viewer));
    PetscCall(PetscViewerASCIIPopTab(viewer));
  } else if (isdraw && jac->head) {
    PetscDraw draw;
    PetscReal x, y, w, wd, h;
    PetscInt  cnt = 2; /* number of KSPs to draw: A00 and Schur, plus upper when distinct */
    char      str[32];

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (jac->kspupper != jac->head->ksp) cnt++;
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (cnt + 1);

    PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) {
      /* Same USER-without-matrix fallback as the ASCII branch: display A11 */
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11]));
    } else {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre]));
    }
    PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    x = x - wd * (cnt - 1) / 2.0;

    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->head->ksp, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
    if (jac->kspupper != jac->head->ksp) {
      x += wd;
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(jac->kspupper, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
    }
    x += wd;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->kspschur, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
287 
/* Viewer for the Golub-Kahan bidiagonalization variant: prints GKB parameters and the
   solver for H = A00 + nu*A01*A01'. Only the first split's solver is shown in ASCII. */
static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));

    PetscCall(PetscViewerASCIIPrintf(viewer, "  Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for H = A00 + nu*A01*A01' matrix:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));

    if (ilink->fields) {
      /* Split 0 defined by explicit field indices: list them comma-separated on one line */
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields "));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      for (j = 0; j < ilink->nfields; j++) {
        if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
        PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
      }
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n"));
    }
    PetscCall(KSPView(ilink->ksp, viewer));

    PetscCall(PetscViewerASCIIPopTab(viewer));
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* Lay the nsplits sub-KSP drawings side by side, centered on the current point */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
348 
349 /* Precondition: jac->bs is set to a meaningful value or MATNEST */
350 static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc)
351 {
352   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
353   PetscInt       bs, i, nfields, *ifields, nfields_col, *ifields_col;
354   PetscBool      flg, flg_col, mnest;
355   char           optionname[128], splitname[8], optionname_col[128];
356 
357   PetscFunctionBegin;
358   PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &mnest));
359   if (mnest) {
360     PetscCall(MatNestGetSize(pc->pmat, &bs, NULL));
361   } else {
362     bs = jac->bs;
363   }
364   PetscCall(PetscMalloc2(bs, &ifields, bs, &ifields_col));
365   for (i = 0, flg = PETSC_TRUE;; i++) {
366     PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
367     PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
368     PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i));
369     nfields     = bs;
370     nfields_col = bs;
371     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
372     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col));
373     if (!flg) break;
374     else if (flg && !flg_col) {
375       PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
376       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields));
377     } else {
378       PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
379       PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match");
380       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col));
381     }
382   }
383   if (i > 0) {
384     /* Makes command-line setting of splits take precedence over setting them in code.
385        Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would
386        create new splits, which would probably not be what the user wanted. */
387     jac->splitdefined = PETSC_TRUE;
388   }
389   PetscCall(PetscFree2(ifields, ifields_col));
390   PetscFunctionReturn(PETSC_SUCCESS);
391 }
392 
/* Establish the splits when the user has not fully defined them: tries, in order,
   a DM field decomposition, zero-diagonal detection, off-block-diagonal coupling
   detection, options-database splits, and finally one split per field/block.
   If exactly one split was given, its complement becomes the second split.
   Postcondition: at least two splits exist (checked at the end). */
static PetscErrorCode PCFieldSplitSetDefaults(PC pc)
{
  PC_FieldSplit    *jac                = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink              = jac->head;
  PetscBool         fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE;
  PetscInt          i;

  PetscFunctionBegin;
  /*
   Kinda messy, but at least this now uses DMCreateFieldDecomposition().
   Should probably be rewritten.
   */
  if (!ilink) { /* no splits defined yet */
    PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL));
    if (pc->dm && jac->dm_splits && !jac->detect && !coupling) {
      /* Case 1: a DM is attached and allowed to define the decomposition */
      PetscInt  numFields, f, i, j;
      char    **fieldNames;
      IS       *fields;
      DM       *dms;
      DM        subdm[128];
      PetscBool flg;

      PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms));
      /* Allow the user to prescribe the splits */
      for (i = 0, flg = PETSC_TRUE;; i++) {
        PetscInt ifields[128];
        IS       compField;
        char     optionname[128], splitname[8];
        PetscInt nfields = numFields;

        /* NOTE(review): PetscOptionsGetIntArray may fill up to nfields(=numFields) entries of
           ifields[128] BEFORE the numFields<=128 check below runs — confirm numFields can
           never exceed 128 here, otherwise the check should precede the option query. */
        PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
        PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
        if (!flg) break;
        PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields);
        PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i]));
        if (nfields == 1) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField));
        } else {
          PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
          PetscCall(PCFieldSplitSetIS(pc, splitname, compField));
        }
        PetscCall(ISDestroy(&compField));
        /* The names/ISs of fields consumed by this split are no longer needed */
        for (j = 0; j < nfields; ++j) {
          f = ifields[j];
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      }
      if (i == 0) {
        /* No user-prescribed splits: one split per DM field */
        for (f = 0; f < numFields; ++f) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f]));
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      } else {
        /* Replace the per-field DMs with the sub-DMs built above for the i user splits */
        for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j));
        PetscCall(PetscFree(dms));
        PetscCall(PetscMalloc1(i, &dms));
        for (j = 0; j < i; ++j) dms[j] = subdm[j];
      }
      PetscCall(PetscFree(fieldNames));
      PetscCall(PetscFree(fields));
      if (dms) {
        PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n"));
        /* Hand each sub-DM (inactive) to the matching split's KSP so it can use DM info */
        for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) {
          const char *prefix;
          PetscCall(PetscObjectGetOptionsPrefix((PetscObject)ilink->ksp, &prefix));
          PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dms[i], prefix));
          PetscCall(KSPSetDM(ilink->ksp, dms[i]));
          PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE));
          PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0));
          PetscCall(DMDestroy(&dms[i]));
        }
        PetscCall(PetscFree(dms));
      }
    } else {
      if (jac->bs <= 0) {
        /* Block size not set by the user: take it from the preconditioning matrix */
        if (pc->pmat) {
          PetscCall(MatGetBlockSize(pc->pmat, &jac->bs));
        } else jac->bs = 1;
      }

      if (jac->detect) {
        /* Case 2: 2-way split from zero diagonal entries (saddle-point detection) */
        IS       zerodiags, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->diag_use_amat) {
          PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags));
        } else {
          PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags));
        }
        PetscCall(ISComplement(zerodiags, nmin, nmax, &rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags));
        PetscCall(ISDestroy(&zerodiags));
        PetscCall(ISDestroy(&rest));
      } else if (coupling) {
        /* Case 3: 2-way split from entries outside the block diagonal */
        IS       coupling, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->offdiag_use_amat) {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling));
        } else {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling));
        }
        PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest));
        PetscCall(ISSetIdentity(rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", coupling));
        PetscCall(ISDestroy(&coupling));
        PetscCall(ISDestroy(&rest));
      } else {
        PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL));
        if (!fieldsplit_default) {
          /* Allow user to set fields from command line,  if bs was known at the time of PCSetFromOptions_FieldSplit()
           then it is set there. This is not ideal because we should only have options set in XXSetFromOptions(). */
          PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
          if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
        }
        if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) {
          /* Case 4: default splitting — one split per field (or per nest block) */
          Mat       M = pc->pmat;
          PetscBool isnest;
          PetscInt  nf;

          PetscCall(PetscInfo(pc, "Using default splitting of fields\n"));
          PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest));
          if (!isnest) {
            M = pc->mat;
            PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest));
          }
          if (!isnest) nf = jac->bs;
          else PetscCall(MatNestGetSize(M, &nf, NULL));
          for (i = 0; i < nf; i++) {
            char splitname[8];

            PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
            PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i));
          }
          jac->defaultsplit = PETSC_TRUE;
        }
      }
    }
  } else if (jac->nsplits == 1) {
    /* Exactly one split supplied: create the second as its complement */
    IS       is2;
    PetscInt nmin, nmax;

    PetscCheck(ilink->is, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()");
    PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
    PetscCall(ISComplement(ilink->is, nmin, nmax, &is2));
    PetscCall(PCFieldSplitSetIS(pc, "1", is2));
    PetscCall(ISDestroy(&is2));
  }

  PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits);
  PetscFunctionReturn(PETSC_SUCCESS);
}
551 
552 static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu)
553 {
554   Mat       BT, T;
555   PetscReal nrmT, nrmB;
556 
557   PetscFunctionBegin;
558   PetscCall(MatHermitianTranspose(C, MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */
559   PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN));
560   PetscCall(MatNorm(T, NORM_1, &nrmT));
561   PetscCall(MatNorm(B, NORM_1, &nrmB));
562   PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/hermitian, GKB is not applicable.");
563 
564   /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */
565   /* setting N := 1/nu*I in [Ar13].                                                 */
566   PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT));
567   PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_CURRENT, H)); /* H = A01*A01'          */
568   PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN));        /* H = A00 + nu*A01*A01' */
569 
570   PetscCall(MatDestroy(&BT));
571   PetscCall(MatDestroy(&T));
572   PetscFunctionReturn(PETSC_SUCCESS);
573 }
574 
575 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *option[], const char *value[], PetscBool *flg);
576 
577 static PetscErrorCode PCSetUp_FieldSplit(PC pc)
578 {
579   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
580   PC_FieldSplitLink ilink;
581   PetscInt          i, nsplit;
582   PetscBool         sorted, sorted_col, matnest = PETSC_FALSE;
583 
584   PetscFunctionBegin;
585   pc->failedreason = PC_NOERROR;
586   PetscCall(PCFieldSplitSetDefaults(pc));
587   nsplit = jac->nsplits;
588   ilink  = jac->head;
589   if (pc->pmat) PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
590 
591   /* get the matrices for each split */
592   if (!jac->issetup) {
593     PetscInt rstart, rend, nslots, bs;
594 
595     jac->issetup = PETSC_TRUE;
596 
597     /* This is done here instead of in PCFieldSplitSetFields() because may not have matrix at that point */
598     if (jac->defaultsplit || !ilink->is) {
599       if (jac->bs <= 0) jac->bs = nsplit;
600     }
601 
602     /*  MatCreateSubMatrix() for [S]BAIJ matrices can only work if the indices include entire blocks of the matrix */
603     PetscCall(MatGetBlockSize(pc->pmat, &bs));
604     if (bs > 1 && (jac->bs <= bs || jac->bs % bs)) {
605       PetscBool blk;
606 
607       PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &blk, MATBAIJ, MATSBAIJ, MATSEQBAIJ, MATSEQSBAIJ, MATMPIBAIJ, MATMPISBAIJ, NULL));
608       PetscCheck(!blk, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Cannot use MATBAIJ with PCFIELDSPLIT and currently set matrix and PC blocksizes");
609     }
610 
611     if (!matnest) { /* use the matrix blocksize and stride IS to determine the index sets that define the submatrices */
612       bs = jac->bs;
613       PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend));
614       nslots = (rend - rstart) / bs;
615       for (i = 0; i < nsplit; i++) {
616         if (jac->defaultsplit) {
617           PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is));
618           PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
619         } else if (!ilink->is) {
620           if (ilink->nfields > 1) {
621             PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col;
622 
623             PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii));
624             PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj));
625             for (j = 0; j < nslots; j++) {
626               for (k = 0; k < nfields; k++) {
627                 ii[nfields * j + k] = rstart + bs * j + fields[k];
628                 jj[nfields * j + k] = rstart + bs * j + fields_col[k];
629               }
630             }
631             PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is));
632             PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col));
633             PetscCall(ISSetBlockSize(ilink->is, nfields));
634             PetscCall(ISSetBlockSize(ilink->is_col, nfields));
635           } else {
636             PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is));
637             PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col));
638           }
639         }
640         PetscCall(ISSorted(ilink->is, &sorted));
641         if (ilink->is_col) PetscCall(ISSorted(ilink->is_col, &sorted_col));
642         PetscCheck(sorted && sorted_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Fields must be sorted when creating split");
643         ilink = ilink->next;
644       }
645     } else { /* use the IS that define the MATNEST to determine the index sets that define the submatrices */
646       IS      *rowis, *colis, *ises = NULL;
647       PetscInt mis, nis;
648 
649       PetscCall(MatNestGetSize(pc->pmat, &mis, &nis));
650       PetscCall(PetscMalloc2(mis, &rowis, nis, &colis));
651       PetscCall(MatNestGetISs(pc->pmat, rowis, colis));
652       if (!jac->defaultsplit) PetscCall(PetscMalloc1(mis, &ises));
653 
654       for (i = 0; i < nsplit; i++) {
655         if (jac->defaultsplit) {
656           PetscCall(ISDuplicate(rowis[i], &ilink->is));
657           PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
658         } else if (!ilink->is) {
659           if (ilink->nfields > 1) {
660             for (PetscInt j = 0; j < ilink->nfields; j++) ises[j] = rowis[ilink->fields[j]];
661             PetscCall(ISConcatenate(PetscObjectComm((PetscObject)pc), ilink->nfields, ises, &ilink->is));
662           } else {
663             PetscCall(ISDuplicate(rowis[ilink->fields[0]], &ilink->is));
664           }
665           PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
666         }
667         ilink = ilink->next;
668       }
669       PetscCall(PetscFree2(rowis, colis));
670       PetscCall(PetscFree(ises));
671     }
672   }
673 
674   ilink = jac->head;
675   if (!jac->pmat) {
676     Vec xtmp;
677 
678     PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL));
679     PetscCall(PetscMalloc1(nsplit, &jac->pmat));
680     PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y));
681     for (i = 0; i < nsplit; i++) {
682       MatNullSpace sp;
683 
684       /* Check for matrix attached to IS */
685       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i]));
686       if (jac->pmat[i]) {
687         PetscCall(PetscObjectReference((PetscObject)jac->pmat[i]));
688         if (jac->type == PC_COMPOSITE_SCHUR) {
689           jac->schur_user = jac->pmat[i];
690 
691           PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
692         }
693       } else {
694         const char *prefix;
695         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i]));
696         PetscCall(MatGetOptionsPrefix(jac->pmat[i], &prefix));
697         if (!prefix) {
698           PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix));
699           PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix));
700         }
701         PetscCall(MatSetFromOptions(jac->pmat[i]));
702         PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view"));
703       }
704       /* create work vectors for each split */
705       PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i]));
706       ilink->x = jac->x[i];
707       ilink->y = jac->y[i];
708       ilink->z = NULL;
709       /* compute scatter contexts needed by multiplicative versions and non-default splits */
710       PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx));
711       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp));
712       if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp));
713       ilink = ilink->next;
714     }
715     PetscCall(VecDestroy(&xtmp));
716   } else {
717     MatReuse      scall;
718     MatNullSpace *nullsp = NULL;
719 
720     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
721       PetscCall(MatGetNullSpaces(nsplit, jac->pmat, &nullsp));
722       for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i]));
723       scall = MAT_INITIAL_MATRIX;
724     } else scall = MAT_REUSE_MATRIX;
725 
726     for (i = 0; i < nsplit; i++) {
727       Mat pmat;
728 
729       /* Check for matrix attached to IS */
730       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat));
731       if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i]));
732       ilink = ilink->next;
733     }
734     if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->pmat, &nullsp));
735   }
736   if (jac->diag_use_amat) {
737     ilink = jac->head;
738     if (!jac->mat) {
739       PetscCall(PetscMalloc1(nsplit, &jac->mat));
740       for (i = 0; i < nsplit; i++) {
741         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i]));
742         ilink = ilink->next;
743       }
744     } else {
745       MatReuse      scall;
746       MatNullSpace *nullsp = NULL;
747 
748       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
749         PetscCall(MatGetNullSpaces(nsplit, jac->mat, &nullsp));
750         for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i]));
751         scall = MAT_INITIAL_MATRIX;
752       } else scall = MAT_REUSE_MATRIX;
753 
754       for (i = 0; i < nsplit; i++) {
755         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i]));
756         ilink = ilink->next;
757       }
758       if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->mat, &nullsp));
759     }
760   } else {
761     jac->mat = jac->pmat;
762   }
763 
764   /* Check for null space attached to IS */
765   ilink = jac->head;
766   for (i = 0; i < nsplit; i++) {
767     MatNullSpace sp;
768 
769     PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp));
770     if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp));
771     ilink = ilink->next;
772   }
773 
774   if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) {
775     /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */
776     /* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? */
777     ilink = jac->head;
778     if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) {
779       /* special case need where Afield[0] is not needed and only certain columns of Afield[1] are needed since update is only on those rows of the solution */
780       if (!jac->Afield) {
781         PetscCall(PetscCalloc1(nsplit, &jac->Afield));
782         if (jac->offdiag_use_amat) {
783           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
784         } else {
785           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
786         }
787       } else {
788         MatReuse scall;
789 
790         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
791           PetscCall(MatDestroy(&jac->Afield[1]));
792           scall = MAT_INITIAL_MATRIX;
793         } else scall = MAT_REUSE_MATRIX;
794 
795         if (jac->offdiag_use_amat) {
796           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
797         } else {
798           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
799         }
800       }
801     } else {
802       if (!jac->Afield) {
803         PetscCall(PetscMalloc1(nsplit, &jac->Afield));
804         for (i = 0; i < nsplit; i++) {
805           if (jac->offdiag_use_amat) {
806             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
807           } else {
808             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
809           }
810           ilink = ilink->next;
811         }
812       } else {
813         MatReuse scall;
814         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
815           for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i]));
816           scall = MAT_INITIAL_MATRIX;
817         } else scall = MAT_REUSE_MATRIX;
818 
819         for (i = 0; i < nsplit; i++) {
820           if (jac->offdiag_use_amat) {
821             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i]));
822           } else {
823             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, &jac->Afield[i]));
824           }
825           ilink = ilink->next;
826         }
827       }
828     }
829   }
830 
831   if (jac->type == PC_COMPOSITE_SCHUR) {
832     IS          ccis;
833     PetscBool   isset, isspd = PETSC_FALSE, issym = PETSC_FALSE, flg;
834     PetscInt    rstart, rend;
835     char        lscname[256];
836     PetscObject LSC_L;
837 
838     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields");
839 
840     /* If pc->mat is SPD, don't scale by -1 the Schur complement */
841     PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd));
842     if (jac->schurscale == (PetscScalar)-1.0) jac->schurscale = (isset && isspd) ? 1.0 : -1.0;
843     PetscCall(MatIsSymmetricKnown(pc->pmat, &isset, &issym));
844 
845     /* When extracting off-diagonal submatrices, we take complements from this range */
846     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
847     PetscCall(PetscObjectTypeCompareAny(jac->offdiag_use_amat ? (PetscObject)pc->mat : (PetscObject)pc->pmat, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
848 
849     if (jac->schur) {
850       KSP      kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper;
851       MatReuse scall;
852 
853       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
854         scall = MAT_INITIAL_MATRIX;
855         PetscCall(MatDestroy(&jac->B));
856         PetscCall(MatDestroy(&jac->C));
857       } else scall = MAT_REUSE_MATRIX;
858 
859       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
860       ilink = jac->head;
861       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
862       if (jac->offdiag_use_amat) {
863         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->B));
864       } else {
865         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->B));
866       }
867       PetscCall(ISDestroy(&ccis));
868       if (!flg) {
869         ilink = ilink->next;
870         PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
871         if (jac->offdiag_use_amat) {
872           PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->C));
873         } else {
874           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->C));
875         }
876         PetscCall(ISDestroy(&ccis));
877       } else {
878         PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &isset, &flg));
879         if (isset && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
880         else PetscCall(MatCreateTranspose(jac->B, &jac->C));
881       }
882       PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
883       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) {
884         PetscCall(MatDestroy(&jac->schurp));
885         PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
886       } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL && jac->kspupper != jac->head->ksp) {
887         PetscCall(MatDestroy(&jac->schur_user));
888         PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
889       }
890       if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0]));
891       if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0]));
892       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
893     } else {
894       const char  *Dprefix;
895       char         schurprefix[256], schurmatprefix[256];
896       char         schurtestoption[256];
897       MatNullSpace sp;
898       KSP          kspt;
899 
900       /* extract the A01 and A10 matrices */
901       ilink = jac->head;
902       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
903       if (jac->offdiag_use_amat) {
904         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
905       } else {
906         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
907       }
908       PetscCall(ISDestroy(&ccis));
909       ilink = ilink->next;
910       if (!flg) {
911         PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
912         if (jac->offdiag_use_amat) {
913           PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
914         } else {
915           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
916         }
917         PetscCall(ISDestroy(&ccis));
918       } else {
919         PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &isset, &flg));
920         if (isset && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
921         else PetscCall(MatCreateTranspose(jac->B, &jac->C));
922       }
923       /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */
924       PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur));
925       PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT));
926       PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
927       PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
928       PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix));
929       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt));
930       PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix));
931 
932       /* Note: this is not true in general */
933       PetscCall(MatGetNullSpace(jac->mat[1], &sp));
934       if (sp) PetscCall(MatSetNullSpace(jac->schur, sp));
935 
936       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname));
937       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
938       if (flg) {
939         DM  dmInner;
940         KSP kspInner;
941         PC  pcInner;
942 
943         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
944         PetscCall(KSPReset(kspInner));
945         PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0]));
946         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
947         /* Indent this deeper to emphasize the "inner" nature of this solver. */
948         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2));
949         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2));
950         PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix));
951 
952         /* Set DM for new solver */
953         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
954         PetscCall(KSPSetDM(kspInner, dmInner));
955         PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE));
956 
957         /* Defaults to PCKSP as preconditioner */
958         PetscCall(KSPGetPC(kspInner, &pcInner));
959         PetscCall(PCSetType(pcInner, PCKSP));
960         PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp));
961       } else {
962         /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or
963           * PCFieldSplitSetIS_FieldSplit. We don't want KSPPREONLY because it makes the Schur complement inexact,
964           * preventing Schur complement reduction to be an accurate solve. Usually when an iterative solver is used for
965           * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make
966           * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used
967           * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. */
968         PetscCall(KSPSetType(jac->head->ksp, KSPGMRES));
969         PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp));
970       }
971       PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0]));
972       PetscCall(KSPSetFromOptions(jac->head->ksp));
973       PetscCall(MatSetFromOptions(jac->schur));
974 
975       PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg));
976       if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */
977         KSP kspInner;
978         PC  pcInner;
979 
980         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
981         PetscCall(KSPGetPC(kspInner, &pcInner));
982         PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg));
983         if (flg) {
984           KSP ksp;
985 
986           PetscCall(PCKSPGetKSP(pcInner, &ksp));
987           if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE));
988         }
989       }
990       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname));
991       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
992       if (flg) {
993         DM dmInner;
994 
995         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
996         PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper));
997         PetscCall(KSPSetNestLevel(jac->kspupper, pc->kspnestlevel));
998         PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure));
999         PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix));
1000         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1));
1001         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1));
1002         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
1003         PetscCall(KSPSetDM(jac->kspupper, dmInner));
1004         PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE));
1005         PetscCall(KSPSetFromOptions(jac->kspupper));
1006         PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0]));
1007         PetscCall(VecDuplicate(jac->head->x, &jac->head->z));
1008       } else {
1009         jac->kspupper = jac->head->ksp;
1010         PetscCall(PetscObjectReference((PetscObject)jac->head->ksp));
1011       }
1012 
1013       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
1014       PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur));
1015       PetscCall(KSPSetNestLevel(jac->kspschur, pc->kspnestlevel));
1016       PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure));
1017       PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1));
1018       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) {
1019         PC pcschur;
1020         PetscCall(KSPGetPC(jac->kspschur, &pcschur));
1021         PetscCall(PCSetType(pcschur, PCNONE));
1022         /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */
1023       } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) {
1024         if (jac->schurfactorization != PC_FIELDSPLIT_SCHUR_FACT_FULL || jac->kspupper != jac->head->ksp) PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
1025       }
1026       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
1027       PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix));
1028       PetscCall(KSPSetOptionsPrefix(jac->kspschur, Dprefix));
1029       /* propagate DM */
1030       {
1031         DM sdm;
1032         PetscCall(KSPGetDM(jac->head->next->ksp, &sdm));
1033         if (sdm) {
1034           PetscCall(KSPSetDM(jac->kspschur, sdm));
1035           PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE));
1036         }
1037       }
1038       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
1039       /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */
1040       PetscCall(KSPSetFromOptions(jac->kspschur));
1041     }
1042     PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY));
1043     PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY));
1044     if (issym) PetscCall(MatSetOption(jac->schur, MAT_SYMMETRIC, PETSC_TRUE));
1045     if (isspd) PetscCall(MatSetOption(jac->schur, MAT_SPD, PETSC_TRUE));
1046 
1047     /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */
1048     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname));
1049     PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, &LSC_L));
1050     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, &LSC_L));
1051     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", LSC_L));
1052     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname));
1053     PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, &LSC_L));
1054     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, &LSC_L));
1055     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", LSC_L));
1056   } else if (jac->type == PC_COMPOSITE_GKB) {
1057     IS       ccis;
1058     PetscInt rstart, rend;
1059 
1060     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields");
1061 
1062     ilink = jac->head;
1063 
1064     /* When extracting off-diagonal submatrices, we take complements from this range */
1065     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
1066 
1067     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1068     if (jac->offdiag_use_amat) {
1069       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1070     } else {
1071       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1072     }
1073     PetscCall(ISDestroy(&ccis));
1074     /* Create work vectors for GKB algorithm */
1075     PetscCall(VecDuplicate(ilink->x, &jac->u));
1076     PetscCall(VecDuplicate(ilink->x, &jac->Hu));
1077     PetscCall(VecDuplicate(ilink->x, &jac->w2));
1078     ilink = ilink->next;
1079     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1080     if (jac->offdiag_use_amat) {
1081       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1082     } else {
1083       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1084     }
1085     PetscCall(ISDestroy(&ccis));
1086     /* Create work vectors for GKB algorithm */
1087     PetscCall(VecDuplicate(ilink->x, &jac->v));
1088     PetscCall(VecDuplicate(ilink->x, &jac->d));
1089     PetscCall(VecDuplicate(ilink->x, &jac->w1));
1090     PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu));
1091     PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz));
1092 
1093     ilink = jac->head;
1094     PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H));
1095     if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1096     /* Create gkb_monitor context */
1097     if (jac->gkbmonitor) {
1098       PetscInt tablevel;
1099       PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer));
1100       PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII));
1101       PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel));
1102       PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel));
1103       PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1));
1104     }
1105   } else {
1106     /* set up the individual splits' PCs */
1107     i     = 0;
1108     ilink = jac->head;
1109     while (ilink) {
1110       PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i]));
1111       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
1112       if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1113       i++;
1114       ilink = ilink->next;
1115     }
1116   }
1117 
1118   /* Set coordinates to the sub PC objects whenever these are set */
1119   if (jac->coordinates_set) {
1120     PC pc_coords;
1121     if (jac->type == PC_COMPOSITE_SCHUR) {
1122       // Head is first block.
1123       PetscCall(KSPGetPC(jac->head->ksp, &pc_coords));
1124       PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords));
1125       // Second one is Schur block, but its KSP object is in kspschur.
1126       PetscCall(KSPGetPC(jac->kspschur, &pc_coords));
1127       PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords));
1128     } else if (jac->type == PC_COMPOSITE_GKB) {
1129       PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB Fieldpslit preconditioner\n"));
1130     } else {
1131       ilink = jac->head;
1132       while (ilink) {
1133         PetscCall(KSPGetPC(ilink->ksp, &pc_coords));
1134         PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords));
1135         ilink = ilink->next;
1136       }
1137     }
1138   }
1139 
1140   jac->suboptionsset = PETSC_TRUE;
1141   PetscFunctionReturn(PETSC_SUCCESS);
1142 }
1143 
/*
   PCSetUpOnBlocks_FieldSplit_Schur - Runs setup on the inner solvers of the
   Schur-complement field split: the upper solver (for a FULL factorization with
   a distinct upper KSP), the A00 solver, and the Schur solver. When the Schur
   preconditioner is PRE_FULL and the upper solver is shared with A00, it instead
   pre-allocates a dense workspace matrix "AinvB" (one extra column beyond the
   width of B) and composes it onto jac->schur for later use.
*/
static PetscErrorCode PCSetUpOnBlocks_FieldSplit_Schur(PC pc)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head; /* link for the A00 (first) split */
  KSP               kspA = ilinkA->ksp, kspUpper = jac->kspupper;

  PetscFunctionBegin;
  if (jac->schurfactorization == PC_FIELDSPLIT_SCHUR_FACT_FULL && kspUpper != kspA) {
    PetscCall(KSPSetUp(kspUpper));
    PetscCall(KSPSetUpOnBlocks(kspUpper));
  }
  PetscCall(KSPSetUp(kspA));
  PetscCall(KSPSetUpOnBlocks(kspA));
  if (jac->schurpre != PC_FIELDSPLIT_SCHUR_PRE_FULL) {
    PetscCall(KSPSetUp(jac->kspschur));
    PetscCall(KSPSetUpOnBlocks(jac->kspschur));
  } else if (kspUpper == kspA) {
    Mat          A;
    PetscInt     m, M, N;
    VecType      vtype;
    PetscMemType mtype;
    PetscScalar *array;

    PetscCall(MatGetSize(jac->B, &M, &N));
    PetscCall(MatGetLocalSize(jac->B, &m, NULL));
    PetscCall(MatGetVecType(jac->B, &vtype));
    /* Probe the memory type (host/CUDA/HIP) of the split's work vector so the
       workspace below is allocated in the same memory space */
    PetscCall(VecGetArrayAndMemType(ilinkA->x, &array, &mtype));
    PetscCall(VecRestoreArrayAndMemType(ilinkA->x, &array));
    /* Raw allocation of m x (N+1) scalars; ownership passes to the dense matrix
       created below (presumably freed when "AinvB" is destroyed — the destroy
       path is not visible here) */
    if (PetscMemTypeHost(mtype) || (!PetscDefined(HAVE_CUDA) && !PetscDefined(HAVE_HIP))) PetscCall(PetscMalloc1(m * (N + 1), &array));
#if PetscDefined(HAVE_CUDA)
    else if (PetscMemTypeCUDA(mtype)) PetscCallCUDA(cudaMalloc((void **)&array, sizeof(PetscScalar) * m * (N + 1)));
#endif
#if PetscDefined(HAVE_HIP)
    else if (PetscMemTypeHIP(mtype)) PetscCallHIP(hipMalloc((void **)&array, sizeof(PetscScalar) * m * (N + 1)));
#endif
    PetscCall(MatCreateDenseFromVecType(PetscObjectComm((PetscObject)jac->schur), vtype, m, PETSC_DECIDE, M, N + 1, -1, array, &A)); // number of columns of the Schur complement plus one
    /* Stash the workspace on the Schur matrix; composing takes a reference, so
       the local handle can be destroyed immediately */
    PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", (PetscObject)A));
    PetscCall(MatDestroy(&A));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1185 
1186 static PetscErrorCode PCSetUpOnBlocks_FieldSplit(PC pc)
1187 {
1188   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1189   PC_FieldSplitLink ilink = jac->head;
1190 
1191   PetscFunctionBegin;
1192   while (ilink) {
1193     PetscCall(KSPSetUp(ilink->ksp));
1194     PetscCall(KSPSetUpOnBlocks(ilink->ksp));
1195     ilink = ilink->next;
1196   }
1197   PetscFunctionReturn(PETSC_SUCCESS);
1198 }
1199 
1200 static PetscErrorCode PCSetUpOnBlocks_FieldSplit_GKB(PC pc)
1201 {
1202   PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
1203   PC_FieldSplitLink ilinkA = jac->head;
1204   KSP               ksp    = ilinkA->ksp;
1205 
1206   PetscFunctionBegin;
1207   PetscCall(KSPSetUp(ksp));
1208   PetscCall(KSPSetUpOnBlocks(ksp));
1209   PetscFunctionReturn(PETSC_SUCCESS);
1210 }
1211 
/* Apply the Schur-complement block preconditioner y = M^{-1} x, where M is one of
   the four block factorizations (DIAG, LOWER, UPPER, FULL) of [A00 A01; A10 A11]
   selected by jac->schurfactorization.

   Notes for the reader:
   - ilinkA is the first split (the A00 block), ilinkD the second (the block whose
     preconditioner is the Schur complement S).
   - VecScatterBegin/End pairs are deliberately interleaved with solves so that
     scatter communication overlaps with local computation; do not reorder them.
   - PetscObjectIncrementTabLevel around the Schur solve only adjusts how the
     inner KSP's monitor output is indented in ASCII viewers. */
static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
  KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;
  Mat               AinvB = NULL;
  PetscInt          N, P;

  PetscFunctionBegin;
  switch (jac->schurfactorization) {
  case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
    /* [A00 0; 0 -S], positive definite, suitable for MINRES */
    /* gather both sub-vectors from x; the D-scatter is finished later so its
       communication overlaps the A00 solve */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    /* y0 = A00^{-1} x0 */
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    /* y1 = schurscale * S^{-1} x1 (schurscale is typically -1 for the -S block) */
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScale(ilinkD->y, jac->schurscale));
    /* scatter both sub-solutions back into y */
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* [A00 0; A10 S], suitable for left preconditioning */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    /* y0 = A00^{-1} x0 */
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    /* rhs for the Schur solve: x1 - A10 y0 (jac->C holds the A10 block) */
    PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    /* y1 = S^{-1} (x1 - A10 y0) */
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* [A00 A01; 0 S], suitable for right preconditioning */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    /* y1 = S^{-1} x1 */
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* rhs for the A00 solve: x0 - A01 y1 (jac->B holds the A01 block) */
    PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
    PetscCall(VecScale(ilinkA->x, -1.));
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    /* y0 = A00^{-1} (x0 - A01 y1) */
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */
    PetscCall(MatGetSize(jac->B, NULL, &P));
    N = P;
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
    if (kspUpper == kspA) {
      /* "AinvB" is an optional dense workspace composed on jac->schur holding
         A00^{-1} A01 plus one extra column used to piggy-back the current rhs,
         so the lower solve and the explicit Schur operator share one batched solve */
      PetscCall(PetscObjectQuery((PetscObject)jac->schur, "AinvB", (PetscObject *)&AinvB));
      if (AinvB) {
        PetscCall(MatGetSize(AinvB, NULL, &N));
        if (N > P) { // first time PCApply_FieldSplit_Schur() is called
          PetscMemType mtype;
          Vec          c = NULL;
          PetscScalar *array;
          PetscInt     m, M;

          PetscCall(MatGetSize(jac->B, &M, NULL));
          PetscCall(MatGetLocalSize(jac->B, &m, NULL));
          /* wrap the extra (last) column of AinvB in a Vec of matching memory type
             (host, CUDA, or HIP) without copying the data */
          PetscCall(MatDenseGetArrayAndMemType(AinvB, &array, &mtype));
          if (PetscMemTypeHost(mtype) || (!PetscDefined(HAVE_CUDA) && !PetscDefined(HAVE_HIP))) PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c));
#if PetscDefined(HAVE_CUDA)
          else if (PetscMemTypeCUDA(mtype)) PetscCall(VecCreateMPICUDAWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c));
#endif
#if PetscDefined(HAVE_HIP)
          else if (PetscMemTypeHIP(mtype)) PetscCall(VecCreateMPIHIPWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c));
#endif
          PetscCall(MatDenseRestoreArrayAndMemType(AinvB, &array));
          /* stash the rhs in the extra column so the explicit-operator computation
             solves it along with the columns of A01 */
          PetscCall(VecCopy(ilinkA->x, c));
          PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
          PetscCall(KSPSetOperators(jac->kspschur, jac->schur, jac->schur_user));
          PetscCall(VecCopy(c, ilinkA->y)); // retrieve the solution as the last column of the composed Mat
          PetscCall(VecDestroy(&c));
        }
      }
    }
    /* when the AinvB fast path above already produced ilinkA->y, N > P and the
       explicit lower solve is skipped */
    if (N == P) PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
    /* rhs for the Schur solve: x1 - A10 A00^{-1} x0 */
    PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.0));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

    /* back-substitution for the upper factor: y0 -= A00^{-1} A01 y1 */
    if (kspUpper == kspA) {
      if (!AinvB) {
        PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y));
        PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
        PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
        PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
        PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
        PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      } else PetscCall(MatMultAdd(AinvB, ilinkD->y, ilinkA->y, ilinkA->y)); /* reuse the precomputed A00^{-1} A01 */
    } else {
      /* distinct upper solver: y0 = A00^{-1} x0' - kspUpper^{-1} (A01 y1) */
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
      PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
      PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z));
      PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z));
      PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
      PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
    }
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1368 
/* Apply the transpose of the Schur-complement block preconditioner, y = M^{-T} x.
   Mirrors PCApply_FieldSplit_Schur with every KSPSolve replaced by
   KSPSolveTranspose and the roles of the off-diagonal blocks swapped
   (A01 <-> A10, so LOWER and UPPER cases trade places, and MatMult becomes
   MatMultTranspose). The scatter begin/end interleaving is intentional — it
   overlaps communication with the solves; do not reorder. */
static PetscErrorCode PCApplyTranspose_FieldSplit_Schur(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
  KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;

  PetscFunctionBegin;
  switch (jac->schurfactorization) {
  case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
    /* [A00 0; 0 -S], positive definite, suitable for MINRES */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    /* y0 = A00^{-T} x0 */
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    /* y1 = schurscale * S^{-T} x1 */
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScale(ilinkD->y, jac->schurscale));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* transpose of the upper factorization acts like a lower factorization:
       solve with A00^T first, then the Schur complement */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    /* rhs for the transposed Schur solve: x1 - A01^T y0 */
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* transpose of the lower factorization acts like an upper factorization:
       solve with the Schur complement first, then A00^T */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* rhs for the A00^T solve: x0 - A10^T y1 */
    PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
    PetscCall(VecScale(ilinkA->x, -1.));
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* transpose of the full factorization: upper^T, then Schur^T, then lower^T */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspUpper, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.0));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

    if (kspLower == kspA) {
      /* lower factor shares the A00 solver: fold A10^T y1 into the rhs and solve once */
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->y));
      PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    } else {
      /* distinct lower solver: two transposed solves, then combine */
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
      PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(KSPSolveTranspose(kspLower, ilinkA->x, ilinkA->z));
      PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->z));
      PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
    }
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1487 
/* Solve on one split and accumulate into the global result: yy(is) += ksp^{-1} xx(is).
   Written as one short-circuiting || chain of PetscErrorCode-returning calls so the
   whole sequence can be wrapped in a single PetscCall(); evaluation stops at the
   first failing call. NOTE: relies on a variable named `pc` being in scope at the
   expansion site (used by KSPCheckSolve). */
#define FieldSplitSplitSolveAdd(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \
                    KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE)))
1492 
/* Apply the (non-Schur, non-GKB) fieldsplit preconditioner y = M^{-1} x for the
   ADDITIVE (block Jacobi), MULTIPLICATIVE (block Gauss-Seidel), and
   SYMMETRIC_MULTIPLICATIVE compositions. */
static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          cnt, bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    PetscBool matnest;

    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
    if (jac->defaultsplit && !matnest) {
      /* strided default split: all fields interleaved with block size jac->bs,
         so a single VecStrideGatherAll/ScatterAll moves every split at once */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* general additive: scatter, solve, and accumulate each split independently */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) {
    /* two-split Gauss-Seidel specialization: avoids the w1/w2 work vectors */
    PetscCall(VecSet(y, 0.0));
    /* solve on first block for first block variables */
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));

    /* compute the residual only onto second block variables using first block variables */
    PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x));
    ilink = ilink->next;
    PetscCall(VecScale(ilink->x, -1.0));
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));

    /* solve on second block variables */
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
    /* general Gauss-Seidel sweep; jac->Afield[cnt] maps the current global
       iterate y onto split cnt's residual contribution */
    if (!jac->w1) {
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
    cnt = 1;
    /* forward sweep over the remaining splits */
    while (ilink->next) {
      ilink = ilink->next;
      /* compute the residual only over the part of the vector needed */
      PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x));
      PetscCall(VecScale(ilink->x, -1.0));
      PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
      PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
      PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    }
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* backward sweep; cnt -= 2 skips re-solving the last split */
      cnt -= 2;
      while (ilink->previous) {
        ilink = ilink->previous;
        /* compute the residual only over the part of the vector needed */
        PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x));
        PetscCall(VecScale(ilink->x, -1.0));
        PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      }
    }
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1593 
/* Apply the Golub-Kahan bidiagonalization (GKB) preconditioner for a 2x2
   saddle-point block system [A B; B' 0] (notation of [Ar13], see the PCFIELDSPLIT
   GKB documentation). The first split supplies the inner solver for the (possibly
   augmented) matrix H = A + nu*B*B'; the iteration builds bidiagonal vectors
   u (primal) and v/d (dual) and accumulates the solution in ilinkA->y / ilinkD->y.
   Convergence is monitored with a delayed lower-bound estimate on the energy norm
   of the error built from the last gkbdelay values of the scalar z. */
static PetscErrorCode PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
  KSP               ksp = ilinkA->ksp;
  Vec               u, v, Hu, d, work1, work2;
  PetscScalar       alpha, z, nrmz2, *vecz;
  PetscReal         lowbnd, nu, beta;
  PetscInt          j, iterGKB;

  PetscFunctionBegin;
  /* gather both sub-vectors of the input */
  PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));

  /* local aliases for the work vectors allocated at setup */
  u     = jac->u;
  v     = jac->v;
  Hu    = jac->Hu;
  d     = jac->d;
  work1 = jac->w1;
  work2 = jac->w2;
  vecz  = jac->vecz;

  /* Change RHS to comply with matrix regularization H = A + nu*B*B' */
  /* Add q = q + nu*B*b */
  if (jac->gkbnu) {
    nu = jac->gkbnu;
    PetscCall(VecScale(ilinkD->x, jac->gkbnu));
    PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */
  } else {
    /* Situation when no augmented Lagrangian is used. Then we set inner  */
    /* matrix N = I in [Ar13], and thus nu = 1.                           */
    nu = 1;
  }

  /* Transform rhs from [q,tilde{b}] to [0,b] */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y));
  PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1));
  PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x        */

  /* First step of algorithm */
  PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c)*/
  KSPCheckDot(ksp, beta);
  beta = PetscSqrtReal(nu) * beta;
  PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c      */
  PetscCall(MatMult(jac->B, v, work2));          /* u = H^{-1}*B*v      */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(KSPSolve(ksp, work2, u));
  PetscCall(KSPCheckSolve(ksp, pc, u));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u      */
  PetscCall(VecDot(Hu, u, &alpha));
  KSPCheckDot(ksp, alpha);
  PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
  alpha = PetscSqrtReal(PetscAbsScalar(alpha));
  PetscCall(VecScale(u, 1.0 / alpha));
  PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* d = v/alpha         */

  z       = beta / alpha;
  vecz[1] = z;

  /* Computation of first iterate x(1) and p(1) */
  PetscCall(VecAXPY(ilinkA->y, z, u));
  PetscCall(VecCopy(d, ilinkD->y));
  PetscCall(VecScale(ilinkD->y, -z));

  iterGKB = 1;
  /* start above the tolerance so the loop runs at least once */
  lowbnd  = 2 * jac->gkbtol;
  if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));

  while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) {
    iterGKB += 1;
    PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */
    PetscCall(VecAXPBY(v, nu, -alpha, work1));
    PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = sqrt(nu)*v'*v      */
    beta = beta / PetscSqrtReal(nu);
    PetscCall(VecScale(v, 1.0 / beta));
    PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */
    PetscCall(MatMult(jac->H, u, Hu));
    PetscCall(VecAXPY(work2, -beta, Hu));
    PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(KSPSolve(ksp, work2, u));
    PetscCall(KSPCheckSolve(ksp, pc, u));
    PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u            */
    PetscCall(VecDot(Hu, u, &alpha));
    KSPCheckDot(ksp, alpha);
    PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
    alpha = PetscSqrtReal(PetscAbsScalar(alpha));
    PetscCall(VecScale(u, 1.0 / alpha));

    z       = -beta / alpha * z; /* z <- beta/alpha*z     */
    vecz[0] = z;

    /* Computation of new iterate x(i+1) and p(i+1) */
    PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */
    PetscCall(VecAXPY(ilinkA->y, z, u));                   /* r = r + z*u          */
    PetscCall(VecAXPY(ilinkD->y, -z, d));                  /* p = p - z*d          */
    PetscCall(MatMult(jac->H, ilinkA->y, Hu));             /* ||u||_H = u'*H*u     */
    PetscCall(VecDot(Hu, ilinkA->y, &nrmz2));

    /* Compute Lower Bound estimate: relative size of the last gkbdelay z-values
       in the H-norm of the current iterate (only meaningful once the history
       window is full) */
    if (iterGKB > jac->gkbdelay) {
      lowbnd = 0.0;
      for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]);
      lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2));
    }

    /* shift the z-history window by one (vecz[0] will be overwritten next pass) */
    for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2];
    if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));
  }

  /* scatter both sub-solutions back into the global output vector */
  PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1716 
/* Transposed counterpart of FieldSplitSplitSolveAdd: yy(is) += ksp^{-T} xx(is).
   Uses ilink->y as the gathered input and ilink->x as the solve output (the
   reverse of the forward macro). Same || short-circuit structure so it can be
   wrapped in PetscCall(); relies on `pc` being in scope at the expansion site. */
#define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \
                    KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE)))
1721 
/*
  PCApplyTranspose_FieldSplit - Applies the transpose of the fieldsplit preconditioner, y = M^{-T} x.

  For PC_COMPOSITE_ADDITIVE each split is solved independently with KSPSolveTranspose() and the
  results combined.  For the multiplicative variants the splits are visited in REVERSE order
  relative to the forward apply, since transposing a product of operators reverses their order;
  the symmetric-multiplicative variant performs a forward sweep followed by a backward sweep.
*/
static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    PetscBool matnest;

    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
    if (jac->defaultsplit && !matnest) {
      /* Default strided splits on a non-nest matrix: gather all strided pieces at once,
         solve each split, then scatter all results back */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* General IS-defined splits: each split scatters, solves transposed, and adds into y */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else {
    /* Multiplicative variants need two work vectors for the running residual x - A^T y */
    if (!jac->w1) {
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* Forward sweep over the splits ... */
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->next) {
        ilink = ilink->next;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); /* w2 = x - A^T y (current residual) */
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
      /* ... then a backward sweep */
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    } else {
      /* Plain multiplicative: the transpose visits the splits last-to-first */
      while (ilink->next) { /* get to last entry in linked list */
        ilink = ilink->next;
      }
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1789 
1790 static PetscErrorCode PCReset_FieldSplit(PC pc)
1791 {
1792   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1793   PC_FieldSplitLink ilink = jac->head, next;
1794 
1795   PetscFunctionBegin;
1796   while (ilink) {
1797     PetscCall(KSPDestroy(&ilink->ksp));
1798     PetscCall(VecDestroy(&ilink->x));
1799     PetscCall(VecDestroy(&ilink->y));
1800     PetscCall(VecDestroy(&ilink->z));
1801     PetscCall(VecScatterDestroy(&ilink->sctx));
1802     PetscCall(ISDestroy(&ilink->is));
1803     PetscCall(ISDestroy(&ilink->is_col));
1804     PetscCall(PetscFree(ilink->splitname));
1805     PetscCall(PetscFree(ilink->fields));
1806     PetscCall(PetscFree(ilink->fields_col));
1807     next = ilink->next;
1808     PetscCall(PetscFree(ilink));
1809     ilink = next;
1810   }
1811   jac->head = NULL;
1812   PetscCall(PetscFree2(jac->x, jac->y));
1813   if (jac->mat && jac->mat != jac->pmat) {
1814     PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat));
1815   } else if (jac->mat) {
1816     jac->mat = NULL;
1817   }
1818   if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat));
1819   if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield));
1820   jac->nsplits = 0;
1821   PetscCall(VecDestroy(&jac->w1));
1822   PetscCall(VecDestroy(&jac->w2));
1823   if (jac->schur) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", NULL));
1824   PetscCall(MatDestroy(&jac->schur));
1825   PetscCall(MatDestroy(&jac->schurp));
1826   PetscCall(MatDestroy(&jac->schur_user));
1827   PetscCall(KSPDestroy(&jac->kspschur));
1828   PetscCall(KSPDestroy(&jac->kspupper));
1829   PetscCall(MatDestroy(&jac->B));
1830   PetscCall(MatDestroy(&jac->C));
1831   PetscCall(MatDestroy(&jac->H));
1832   PetscCall(VecDestroy(&jac->u));
1833   PetscCall(VecDestroy(&jac->v));
1834   PetscCall(VecDestroy(&jac->Hu));
1835   PetscCall(VecDestroy(&jac->d));
1836   PetscCall(PetscFree(jac->vecz));
1837   PetscCall(PetscViewerDestroy(&jac->gkbviewer));
1838   jac->isrestrict = PETSC_FALSE;
1839   PetscFunctionReturn(PETSC_SUCCESS);
1840 }
1841 
/*
  PCDestroy_FieldSplit - Destroys the PCFIELDSPLIT context and removes every composed method
  implementation so a subsequent PCSetType() on the same PC starts from a clean slate.
*/
static PetscErrorCode PCDestroy_FieldSplit(PC pc)
{
  PetscFunctionBegin;
  PetscCall(PCReset_FieldSplit(pc)); /* releases all split/sub-solver data first */
  PetscCall(PetscFree(pc->data));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1865 
/*
  PCSetFromOptions_FieldSplit - Processes the -pc_fieldsplit_* options database entries.

  Handles the generic options first, then the composite-type specific ones (Schur or GKB),
  and finally — once the solver hierarchy exists — re-runs KSPSetFromOptions() on the
  sub-solvers so later option changes are picked up (see the long comment below).
*/
static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems PetscOptionsObject)
{
  PetscInt        bs;
  PetscBool       flg;
  PC_FieldSplit  *jac = (PC_FieldSplit *)pc->data;
  PCCompositeType ctype;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options");
  PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL));
  PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg));
  if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs));
  /* The use-Amat flags default to the PC-level useAmat setting, overridable per flag below */
  jac->diag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL));
  jac->offdiag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL));
  PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL));
  PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */
  PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg));
  if (flg) PetscCall(PCFieldSplitSetType(pc, ctype));
  /* Only setup fields once */
  if ((jac->bs > 0) && (jac->nsplits == 0)) {
    /* only allow user to set fields from command line.
       otherwise user can set them in PCFieldSplitSetDefaults() */
    PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
    if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
  }
  if (jac->type == PC_COMPOSITE_SCHUR) {
    /* Accept the deprecated spelling first so the current -pc_fieldsplit_schur_fact_type wins */
    PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg));
    if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n"));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL));
    PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL));
  } else if (jac->type == PC_COMPOSITE_GKB) {
    PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitSetGKBTol", jac->gkbtol, &jac->gkbtol, NULL));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitSetGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL));
    PetscCall(PetscOptionsBoundedReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitSetGKBNu", jac->gkbnu, &jac->gkbnu, NULL, 0.0));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", "Maximum allowed number of iterations", "PCFieldSplitSetGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL));
    PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL));
  }
  /*
    In the initial call to this routine the sub-solver data structures do not exist so we cannot call KSPSetFromOptions() on them yet.
    But after the initial setup of ALL the layers of sub-solvers is completed we do want to call KSPSetFromOptions() on the sub-solvers every time it
    is called on the outer solver in case changes were made in the options database

    But even after PCSetUp_FieldSplit() is called all the options inside the inner levels of sub-solvers may still not have been set thus we only call the KSPSetFromOptions()
    if we know that the entire stack of sub-solvers below this have been complete instantiated, we check this by seeing if any solver iterations are complete.
    Without this extra check test p2p1fetidp_olof_full and others fail with incorrect matrix types.

    There could be a negative side effect of calling the KSPSetFromOptions() below.

    If one captured the PetscObjectState of the options database one could skip these calls if the database has not changed from the previous call
  */
  if (jac->issetup) {
    PC_FieldSplitLink ilink = jac->head;
    if (jac->type == PC_COMPOSITE_SCHUR) {
      if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper));
      if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur));
    }
    while (ilink) {
      if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp));
      ilink = ilink->next;
    }
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
1933 
1934 static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
1935 {
1936   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
1937   PC_FieldSplitLink ilink, next = jac->head;
1938   char              prefix[128];
1939   PetscInt          i;
1940   PetscLogEvent     nse;
1941 
1942   PetscFunctionBegin;
1943   if (jac->splitdefined) {
1944     PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
1945     PetscFunctionReturn(PETSC_SUCCESS);
1946   }
1947   for (i = 0; i < n; i++) { PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]); }
1948   PetscCall(PetscNew(&ilink));
1949   if (splitname) {
1950     PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
1951   } else {
1952     PetscCall(PetscMalloc1(3, &ilink->splitname));
1953     PetscCall(PetscSNPrintf(ilink->splitname, 2, "%" PetscInt_FMT, jac->nsplits));
1954   }
1955   PetscCall(PetscMPIIntCast(jac->nsplits, &nse));
1956   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + nse : KSP_Solve_FS_0 + 4; /* Splits greater than 4 logged in 4th split */
1957   PetscCall(PetscMalloc1(n, &ilink->fields));
1958   PetscCall(PetscArraycpy(ilink->fields, fields, n));
1959   PetscCall(PetscMalloc1(n, &ilink->fields_col));
1960   PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n));
1961 
1962   ilink->nfields = n;
1963   ilink->next    = NULL;
1964   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
1965   PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
1966   PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
1967   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
1968   PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
1969 
1970   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
1971   PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
1972 
1973   if (!next) {
1974     jac->head       = ilink;
1975     ilink->previous = NULL;
1976   } else {
1977     while (next->next) next = next->next;
1978     next->next      = ilink;
1979     ilink->previous = next;
1980   }
1981   jac->nsplits++;
1982   PetscFunctionReturn(PETSC_SUCCESS);
1983 }
1984 
1985 static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1986 {
1987   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1988 
1989   PetscFunctionBegin;
1990   *subksp = NULL;
1991   if (n) *n = 0;
1992   if (jac->type == PC_COMPOSITE_SCHUR) {
1993     PetscInt nn;
1994 
1995     PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()");
1996     PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits);
1997     nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 1 : 0);
1998     PetscCall(PetscMalloc1(nn, subksp));
1999     (*subksp)[0] = jac->head->ksp;
2000     (*subksp)[1] = jac->kspschur;
2001     if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper;
2002     if (n) *n = nn;
2003   }
2004   PetscFunctionReturn(PETSC_SUCCESS);
2005 }
2006 
/*
  PCFieldSplitGetSubKSP_FieldSplit_Schur - GetSubKSP variant for the Schur composite type:
  slot 0 is the inner (0,0)-block KSP taken from the Schur complement matrix, slot 1 the
  Schur complement KSP.  The caller owns the returned array.
*/
static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()");
  PetscCall(PetscMalloc1(jac->nsplits, subksp));
  /* Fills slot 0 with the KSP used inside the Schur complement for the (0,0) block */
  PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp));

  (*subksp)[1] = jac->kspschur;
  if (n) *n = jac->nsplits;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2020 
2021 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
2022 {
2023   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2024   PetscInt          cnt   = 0;
2025   PC_FieldSplitLink ilink = jac->head;
2026 
2027   PetscFunctionBegin;
2028   PetscCall(PetscMalloc1(jac->nsplits, subksp));
2029   while (ilink) {
2030     (*subksp)[cnt++] = ilink->ksp;
2031     ilink            = ilink->next;
2032   }
2033   PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits);
2034   if (n) *n = jac->nsplits;
2035   PetscFunctionReturn(PETSC_SUCCESS);
2036 }
2037 
2038 /*@
2039   PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`.
2040 
2041   Input Parameters:
2042 + pc  - the preconditioner context
2043 - isy - the index set that defines the indices to which the fieldsplit is to be restricted
2044 
2045   Level: advanced
2046 
2047   Developer Notes:
2048   It seems the resulting `IS`s will not cover the entire space, so
2049   how can they define a convergent preconditioner? Needs explaining.
2050 
2051 .seealso: [](sec_block_matrices), `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
2052 @*/
PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(isy, IS_CLASSID, 2);
  /* Silently a no-op when pc is not a PCFIELDSPLIT (PetscTryMethod only calls composed methods) */
  PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2061 
/*
  PCFieldSplitRestrictIS_FieldSplit - Implementation of PCFieldSplitRestrictIS(); replaces each
  split's IS by its embedding into isy (shifted back to a global numbering) and recursively
  restricts any nested PCFIELDSPLIT sub-preconditioners.
*/
static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;
  PetscInt          localsize, size, sizez, i;
  const PetscInt   *ind, *indz;
  PetscInt         *indc, *indcz;
  PetscBool         flg;

  PetscFunctionBegin;
  PetscCall(ISGetLocalSize(isy, &localsize));
  /* Exclusive prefix sum: size = global starting offset of this rank's portion of isy */
  PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy)));
  size -= localsize;
  while (ilink) {
    IS isrl, isr;
    PC subpc;
    /* Positions of this split's entries within isy (numbered locally within isy) */
    PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl));
    PetscCall(ISGetLocalSize(isrl, &localsize));
    PetscCall(PetscMalloc1(localsize, &indc));
    PetscCall(ISGetIndices(isrl, &ind));
    PetscCall(PetscArraycpy(indc, ind, localsize));
    PetscCall(ISRestoreIndices(isrl, &ind));
    PetscCall(ISDestroy(&isrl));
    for (i = 0; i < localsize; i++) *(indc + i) += size; /* shift to global numbering */
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr));
    /* isr replaces both ilink->is and ilink->is_col: take one reference per slot,
       then drop the creation reference below */
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is));
    ilink->is = isr;
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is_col));
    ilink->is_col = isr;
    PetscCall(ISDestroy(&isr));
    PetscCall(KSPGetPC(ilink->ksp, &subpc));
    PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg));
    if (flg) {
      /* Nested fieldsplit: restrict its splits by the embedding of isy into this split */
      IS       iszl, isz;
      MPI_Comm comm;
      PetscCall(ISGetLocalSize(ilink->is, &localsize));
      comm = PetscObjectComm((PetscObject)ilink->is);
      PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl));
      PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm));
      sizez -= localsize;
      PetscCall(ISGetLocalSize(iszl, &localsize));
      PetscCall(PetscMalloc1(localsize, &indcz));
      PetscCall(ISGetIndices(iszl, &indz));
      PetscCall(PetscArraycpy(indcz, indz, localsize));
      PetscCall(ISRestoreIndices(iszl, &indz));
      PetscCall(ISDestroy(&iszl));
      for (i = 0; i < localsize; i++) *(indcz + i) += sizez;
      PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz));
      PetscCall(PCFieldSplitRestrictIS(subpc, isz));
      PetscCall(ISDestroy(&isz));
    }
    next  = ilink->next;
    ilink = next;
  }
  jac->isrestrict = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2121 
/*
  PCFieldSplitSetIS_FieldSplit - Implementation of PCFieldSplitSetIS(); appends a new split,
  defined by an explicit index set, to the tail of the linked list of splits.  splitname may
  be NULL, in which case the split number is used.
*/
static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink, next = jac->head;
  char              prefix[128];
  PetscLogEvent     nse;

  PetscFunctionBegin;
  if (jac->splitdefined) {
    PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(PetscNew(&ilink));
  if (splitname) {
    PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
  } else {
    /* Default name is the split number */
    PetscCall(PetscMalloc1(8, &ilink->splitname));
    PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits));
  }
  /* NOTE(review): nse is declared PetscLogEvent but receives a PetscMPIIntCast() result; both are
     int-valued so this acts as a checked narrowing of nsplits — confirm against the typedefs */
  PetscCall(PetscMPIIntCast(jac->nsplits, &nse));
  ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + nse : KSP_Solve_FS_0 + 4; /* Splits greater than 4 logged in 4th split */
  /* The same IS serves as both row and column index set; take a reference for each slot */
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is));
  ilink->is = is;
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is_col));
  ilink->is_col = is;
  ilink->next   = NULL;
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
  PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
  PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
  PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));

  /* Sub-KSP options live under <pc prefix>fieldsplit_<splitname>_ */
  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
  PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));

  /* Append to the tail of the doubly linked list of splits */
  if (!next) {
    jac->head       = ilink;
    ilink->previous = NULL;
  } else {
    while (next->next) next = next->next;
    next->next      = ilink;
    ilink->previous = next;
  }
  jac->nsplits++;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2170 
2171 /*@
2172   PCFieldSplitSetFields - Sets the fields that define one particular split in `PCFIELDSPLIT`
2173 
2174   Logically Collective
2175 
2176   Input Parameters:
2177 + pc         - the preconditioner context
2178 . splitname  - name of this split, if `NULL` the number of the split is used
2179 . n          - the number of fields in this split
2180 . fields     - the fields in this split
2181 - fields_col - generally the same as `fields`, if it does not match `fields` then the submatrix that is solved for this set of fields comes from an off-diagonal block
2182                of the matrix and `fields_col` provides the column indices for that block
2183 
2184   Options Database Key:
2185 . -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split
2186 
2187   Level: intermediate
2188 
2189   Notes:
2190   Use `PCFieldSplitSetIS()` to set a  general set of indices as a split.
2191 
2192   If the matrix used to construct the preconditioner is `MATNEST` then field i refers to the `is_row[i]` `IS` passed to `MatCreateNest()`.
2193 
2194   If the matrix used to construct the preconditioner is not `MATNEST` then
2195   `PCFieldSplitSetFields()` is for defining fields as strided blocks (based on the block size provided to the matrix with `MatSetBlockSize()` or
2196   to the `PC` with `PCFieldSplitSetBlockSize()`). For example, if the block
2197   size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean
2198   0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x23x56x8.. x12x45x78x....
2199   where the numbered entries indicate what is in the split.
2200 
2201   This function is called once per split (it creates a new split each time).  Solve options
2202   for this split will be available under the prefix `-fieldsplit_SPLITNAME_`.
2203 
2204   `PCFieldSplitSetIS()` does not support having a `fields_col` different from `fields`
2205 
2206   Developer Notes:
2207   This routine does not actually create the `IS` representing the split, that is delayed
2208   until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be
2209   available when this routine is called.
2210 
2211 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()`,
2212           `MatSetBlockSize()`, `MatCreateNest()`
2213 @*/
2214 PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt fields[], const PetscInt fields_col[])
2215 {
2216   PetscFunctionBegin;
2217   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2218   PetscAssertPointer(splitname, 2);
2219   PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname);
2220   PetscAssertPointer(fields, 4);
2221   PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col));
2222   PetscFunctionReturn(PETSC_SUCCESS);
2223 }
2224 
2225 /*@
2226   PCFieldSplitSetDiagUseAmat - set flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2227   the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2228 
2229   Logically Collective
2230 
2231   Input Parameters:
2232 + pc  - the preconditioner object
2233 - flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2234 
2235   Options Database Key:
2236 . -pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks
2237 
2238   Level: intermediate
2239 
2240 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT`
2241 @*/
2242 PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg)
2243 {
2244   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2245   PetscBool      isfs;
2246 
2247   PetscFunctionBegin;
2248   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2249   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2250   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2251   jac->diag_use_amat = flg;
2252   PetscFunctionReturn(PETSC_SUCCESS);
2253 }
2254 
2255 /*@
2256   PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2257   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2258 
2259   Logically Collective
2260 
2261   Input Parameter:
2262 . pc - the preconditioner object
2263 
2264   Output Parameter:
2265 . flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2266 
2267   Level: intermediate
2268 
2269 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT`
2270 @*/
2271 PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg)
2272 {
2273   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2274   PetscBool      isfs;
2275 
2276   PetscFunctionBegin;
2277   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2278   PetscAssertPointer(flg, 2);
2279   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2280   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2281   *flg = jac->diag_use_amat;
2282   PetscFunctionReturn(PETSC_SUCCESS);
2283 }
2284 
2285 /*@
2286   PCFieldSplitSetOffDiagUseAmat - set flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2287   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2288 
2289   Logically Collective
2290 
2291   Input Parameters:
2292 + pc  - the preconditioner object
2293 - flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2294 
2295   Options Database Key:
2296 . -pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks
2297 
2298   Level: intermediate
2299 
2300 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT`
2301 @*/
2302 PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg)
2303 {
2304   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2305   PetscBool      isfs;
2306 
2307   PetscFunctionBegin;
2308   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2309   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2310   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2311   jac->offdiag_use_amat = flg;
2312   PetscFunctionReturn(PETSC_SUCCESS);
2313 }
2314 
2315 /*@
2316   PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2317   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2318 
2319   Logically Collective
2320 
2321   Input Parameter:
2322 . pc - the preconditioner object
2323 
2324   Output Parameter:
2325 . flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2326 
2327   Level: intermediate
2328 
2329 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT`
2330 @*/
2331 PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg)
2332 {
2333   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2334   PetscBool      isfs;
2335 
2336   PetscFunctionBegin;
2337   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2338   PetscAssertPointer(flg, 2);
2339   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2340   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2341   *flg = jac->offdiag_use_amat;
2342   PetscFunctionReturn(PETSC_SUCCESS);
2343 }
2344 
2345 /*@
2346   PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT`
2347 
2348   Logically Collective
2349 
2350   Input Parameters:
2351 + pc        - the preconditioner context
2352 . splitname - name of this split, if `NULL` the number of the split is used
2353 - is        - the index set that defines the elements in this split
2354 
2355   Level: intermediate
2356 
2357   Notes:
2358   Use `PCFieldSplitSetFields()`, for splits defined by strided `IS` based on the matrix block size or the `is_rows[]` passed into `MATNEST`
2359 
2360   This function is called once per split (it creates a new split each time).  Solve options
2361   for this split will be available under the prefix -fieldsplit_SPLITNAME_.
2362 
2363 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetFields()`
2364 @*/
PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* splitname may be NULL, in which case the implementation names the split by its number */
  if (splitname) PetscAssertPointer(splitname, 2);
  PetscValidHeaderSpecific(is, IS_CLASSID, 3);
  /* Dispatch to the type-specific PCFieldSplitSetIS_C; Try (not Use) so a no-op on other PC types is not an error */
  PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2374 
2375 /*@
2376   PCFieldSplitGetIS - Retrieves the elements for a split as an `IS`
2377 
2378   Logically Collective
2379 
2380   Input Parameters:
2381 + pc        - the preconditioner context
2382 - splitname - name of this split
2383 
2384   Output Parameter:
2385 . is - the index set that defines the elements in this split, or `NULL` if the split is not found
2386 
2387   Level: intermediate
2388 
2389 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`, `PCFieldSplitGetISByIndex()`
2390 @*/
2391 PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is)
2392 {
2393   PetscFunctionBegin;
2394   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2395   PetscAssertPointer(splitname, 2);
2396   PetscAssertPointer(is, 3);
2397   {
2398     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2399     PC_FieldSplitLink ilink = jac->head;
2400     PetscBool         found;
2401 
2402     *is = NULL;
2403     while (ilink) {
2404       PetscCall(PetscStrcmp(ilink->splitname, splitname, &found));
2405       if (found) {
2406         *is = ilink->is;
2407         break;
2408       }
2409       ilink = ilink->next;
2410     }
2411   }
2412   PetscFunctionReturn(PETSC_SUCCESS);
2413 }
2414 
2415 /*@
2416   PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS`
2417 
2418   Logically Collective
2419 
2420   Input Parameters:
2421 + pc    - the preconditioner context
2422 - index - index of this split
2423 
2424   Output Parameter:
2425 . is - the index set that defines the elements in this split
2426 
2427   Level: intermediate
2428 
.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`
2431 @*/
2432 PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is)
2433 {
2434   PetscFunctionBegin;
2435   PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index);
2436   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2437   PetscAssertPointer(is, 3);
2438   {
2439     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2440     PC_FieldSplitLink ilink = jac->head;
2441     PetscInt          i     = 0;
2442     PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits);
2443 
2444     while (i < index) {
2445       ilink = ilink->next;
2446       ++i;
2447     }
2448     PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is));
2449   }
2450   PetscFunctionReturn(PETSC_SUCCESS);
2451 }
2452 
2453 /*@
2454   PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the
2455   fieldsplit preconditioner when calling `PCFieldSplitSetFields()`. If not set the matrix block size is used.
2456 
2457   Logically Collective
2458 
2459   Input Parameters:
2460 + pc - the preconditioner context
2461 - bs - the block size
2462 
2463   Level: intermediate
2464 
2465   Note:
2466   If the matrix is a `MATNEST` then the `is_rows[]` passed to `MatCreateNest()` determines the fields.
2467 
2468 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
2469 @*/
PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* bs must be the same on all MPI processes since it defines the field layout collectively */
  PetscValidLogicalCollectiveInt(pc, bs, 2);
  /* Try (not Use): silently a no-op when pc is not a PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2478 
2479 /*@C
2480   PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits
2481 
2482   Collective
2483 
2484   Input Parameter:
2485 . pc - the preconditioner context
2486 
2487   Output Parameters:
2488 + n      - the number of splits
2489 - subksp - the array of `KSP` contexts
2490 
2491   Level: advanced
2492 
2493   Notes:
2494   After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2495   (not the `KSP`, just the array that contains them).
2496 
2497   You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`.
2498 
2499   If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the
2500   Schur complement and the `KSP` object used to iterate over the Schur complement.
2501   To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`.
2502 
2503   If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the
2504   inner linear system defined by the matrix H in each loop.
2505 
2506   Fortran Note:
2507   You must pass in a `KSP` array that is large enough to contain all the `KSP`s.
2508   You can call `PCFieldSplitGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
2509   `KSP` array must be.
2510 
2511   Developer Notes:
2512   There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2513 
2514   The Fortran interface could be modernized to return directly the array of values.
2515 
2516 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()`
2517 @*/
PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* n is optional; callers interested only in the KSP array may pass NULL */
  if (n) PetscAssertPointer(n, 2);
  /* Use (not Try): it is an error to query sub-KSPs of a PC that is not a PCFIELDSPLIT */
  PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2526 
2527 /*@C
2528   PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT`
2529 
2530   Collective
2531 
2532   Input Parameter:
2533 . pc - the preconditioner context
2534 
2535   Output Parameters:
2536 + n      - the number of splits
2537 - subksp - the array of `KSP` contexts
2538 
2539   Level: advanced
2540 
2541   Notes:
2542   After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2543   (not the `KSP` just the array that contains them).
2544 
2545   You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`.
2546 
2547   If the fieldsplit type is of type `PC_COMPOSITE_SCHUR`, it returns (in order)
2548 +  1  - the `KSP` used for the (1,1) block
2549 .  2  - the `KSP` used for the Schur complement (not the one used for the interior Schur solver)
2550 -  3  - the `KSP` used for the (1,1) block in the upper triangular factor (if different from that of the (1,1) block).
2551 
2552   It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`.
2553 
2554   Fortran Note:
2555   You must pass in a `KSP` array that is large enough to contain all the local `KSP`s.
2556   You can call `PCFieldSplitSchurGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
2557   `KSP` array must be.
2558 
2559   Developer Notes:
2560   There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2561 
2562   Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged?
2563 
2564   The Fortran interface should be modernized to return directly the array of values.
2565 
2566 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()`
2567 @*/
PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* n is optional; callers interested only in the KSP array may pass NULL */
  if (n) PetscAssertPointer(n, 2);
  /* Use (not Try): it is an error to query Schur sub-KSPs of a PC that is not a PCFIELDSPLIT */
  PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2576 
2577 /*@
2578   PCFieldSplitSetSchurPre -  Indicates from what operator the preconditioner is constructed for the Schur complement.
2579   The default is the A11 matrix.
2580 
2581   Collective
2582 
2583   Input Parameters:
2584 + pc    - the preconditioner context
2585 . ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default),
2586               `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`,
2587               `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL`
2588 - pre   - matrix to use for preconditioning, or `NULL`
2589 
2590   Options Database Keys:
2591 + -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`. See notes for meaning of various arguments
2592 - -fieldsplit_1_pc_type <pctype>                               - the preconditioner algorithm that is used to construct the preconditioner from the operator
2593 
2594   Level: intermediate
2595 
2596   Notes:
2597   If ptype is
2598 +     a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner
2599   matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix
2600 .     self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix:
2601   The only preconditioners that currently work with this symbolic representation matrix object are `PCLSC` and `PCHPDDM`
2602 .     user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument
2603   to this function).
2604 .     selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation $ Sp = A11 - A10 inv(diag(A00)) A01 $
2605   This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be
2606   lumped before extracting the diagonal using the additional option `-fieldsplit_1_mat_schur_complement_ainv_type lump`
2607 -     full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation
2608   computed internally by `PCFIELDSPLIT` (this is expensive)
2609   useful mostly as a test that the Schur complement approach can work for your problem
2610 
2611   When solving a saddle point problem, where the A11 block is identically zero, using `a11` as the ptype only makes sense
2612   with the additional option `-fieldsplit_1_pc_type none`. Usually for saddle point problems one would use a `ptype` of `self` and
2613   `-fieldsplit_1_pc_type lsc` which uses the least squares commutator to compute a preconditioner for the Schur complement.
2614 
2615   Developer Note:
2616   The name of this function and the option `-pc_fieldsplit_schur_precondition` are inconsistent; precondition should be used everywhere.
2617 
2618 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`,
2619           `MatSchurComplementSetAinvType()`, `PCLSC`, `PCFieldSplitSetSchurFactType()`
2620 @*/
PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Try (not Use): silently a no-op when pc is not a Schur-type PCFIELDSPLIT;
     pre is only consulted by the implementation when ptype is PC_FIELDSPLIT_SCHUR_PRE_USER */
  PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2628 
/* Deprecated name kept for backward compatibility: forwards directly to PCFieldSplitSetSchurPre() */
PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  return PCFieldSplitSetSchurPre(pc, ptype, pre);
} /* Deprecated name */
2633 
2634 /*@
2635   PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be
2636   preconditioned.  See `PCFieldSplitSetSchurPre()` for details.
2637 
2638   Logically Collective
2639 
2640   Input Parameter:
2641 . pc - the preconditioner context
2642 
2643   Output Parameters:
2644 + ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`
2645 - pre   - matrix to use for preconditioning (with `PC_FIELDSPLIT_SCHUR_PRE_USER`), or `NULL`
2646 
2647   Level: intermediate
2648 
2649 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC`
2650 @*/
PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Use (not Try): it is an error to query Schur settings of a PC that is not a Schur-type PCFIELDSPLIT */
  PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2658 
2659 /*@
2660   PCFieldSplitSchurGetS -  extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately
2661 
2662   Not Collective
2663 
2664   Input Parameter:
2665 . pc - the preconditioner context
2666 
2667   Output Parameter:
2668 . S - the Schur complement matrix
2669 
2670   Level: advanced
2671 
2672   Note:
2673   This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`.
2674 
2675 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`,
2676           `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()`
2677 @*/
2678 PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S)
2679 {
2680   const char    *t;
2681   PetscBool      isfs;
2682   PC_FieldSplit *jac;
2683 
2684   PetscFunctionBegin;
2685   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2686   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2687   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2688   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2689   jac = (PC_FieldSplit *)pc->data;
2690   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2691   if (S) *S = jac->schur;
2692   PetscFunctionReturn(PETSC_SUCCESS);
2693 }
2694 
2695 /*@
2696   PCFieldSplitSchurRestoreS -  returns the `MATSCHURCOMPLEMENT` matrix used by this `PC`
2697 
2698   Not Collective
2699 
2700   Input Parameters:
2701 + pc - the preconditioner context
2702 - S  - the Schur complement matrix
2703 
2704   Level: advanced
2705 
2706 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MatSchurComplement`, `PCFieldSplitSchurGetS()`
2707 @*/
2708 PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S)
2709 {
2710   const char    *t;
2711   PetscBool      isfs;
2712   PC_FieldSplit *jac;
2713 
2714   PetscFunctionBegin;
2715   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2716   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2717   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2718   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2719   jac = (PC_FieldSplit *)pc->data;
2720   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2721   PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten");
2722   PetscFunctionReturn(PETSC_SUCCESS);
2723 }
2724 
2725 static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2726 {
2727   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2728 
2729   PetscFunctionBegin;
2730   jac->schurpre = ptype;
2731   if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) {
2732     PetscCall(MatDestroy(&jac->schur_user));
2733     jac->schur_user = pre;
2734     PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
2735   }
2736   PetscFunctionReturn(PETSC_SUCCESS);
2737 }
2738 
2739 static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
2740 {
2741   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2742 
2743   PetscFunctionBegin;
2744   if (ptype) *ptype = jac->schurpre;
2745   if (pre) *pre = jac->schur_user;
2746   PetscFunctionReturn(PETSC_SUCCESS);
2747 }
2748 
2749 /*@
2750   PCFieldSplitSetSchurFactType -  sets which blocks of the approximate block factorization to retain in the preconditioner {cite}`murphy2000note` and {cite}`ipsen2001note`
2751 
2752   Collective
2753 
2754   Input Parameters:
2755 + pc    - the preconditioner context
2756 - ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default
2757 
2758   Options Database Key:
2759 . -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is `full`
2760 
2761   Level: intermediate
2762 
2763   Notes:
2764   The `full` factorization is
2765 
2766   ```{math}
2767   \left(\begin{array}{cc} A & B \\
2768   C & E \\
2769   \end{array}\right) =
2770   \left(\begin{array}{cc} I & 0 \\
2771   C A^{-1} & I \\
2772   \end{array}\right)
2773   \left(\begin{array}{cc} A & 0 \\
2774   0 & S \\
2775   \end{array}\right)
2776   \left(\begin{array}{cc} I & A^{-1}B \\
2777   0 & I \\
2778   \end{array}\right) = L D U,
2779   ```
2780 
2781   where $ S = E - C A^{-1} B $. In practice, the full factorization is applied via block triangular solves with the grouping $L(DU)$. `upper` uses $DU$, `lower` uses $LD$,
2782   and `diag` is the diagonal part with the sign of $S$ flipped (because this makes the preconditioner positive definite for many formulations,
  thus allowing the use of `KSPMINRES`). Sign flipping of $S$ can be turned off with `PCFieldSplitSetSchurScale()`.
2784 
2785   If $A$ and $S$ are solved exactly
2786 +  1 - `full` factorization is a direct solver.
2787 .  2 - The preconditioned operator with `lower` or `upper` has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations.
2788 -  3 - With `diag`, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations.
2789 
2790   If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner
2791   application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice.
2792 
2793   For symmetric problems in which $A$ is positive definite and $S$ is negative definite, `diag` can be used with `KSPMINRES`.
2794 
2795   A flexible method like `KSPFGMRES` or `KSPGCR`, [](sec_flexibleksp), must be used if the fieldsplit preconditioner is nonlinear (e.g., a few iterations of a Krylov method is used to solve with $A$ or $S$).
2796 
2797 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`,
2798           [](sec_flexibleksp), `PCFieldSplitSetSchurPre()`
2799 @*/
PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Try (not Use): silently a no-op when pc is not a Schur-type PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2807 
2808 static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype)
2809 {
2810   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2811 
2812   PetscFunctionBegin;
2813   jac->schurfactorization = ftype;
2814   PetscFunctionReturn(PETSC_SUCCESS);
2815 }
2816 
2817 /*@
2818   PCFieldSplitSetSchurScale -  Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`.
2819 
2820   Collective
2821 
2822   Input Parameters:
2823 + pc    - the preconditioner context
2824 - scale - scaling factor for the Schur complement
2825 
2826   Options Database Key:
2827 . -pc_fieldsplit_schur_scale <scale> - default is -1.0
2828 
2829   Level: intermediate
2830 
2831 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurFactType()`
2832 @*/
PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* scale must be identical on all MPI processes of the PC's communicator */
  PetscValidLogicalCollectiveScalar(pc, scale, 2);
  /* Try (not Use): silently a no-op when pc is not a Schur-type PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2841 
2842 static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale)
2843 {
2844   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2845 
2846   PetscFunctionBegin;
2847   jac->schurscale = scale;
2848   PetscFunctionReturn(PETSC_SUCCESS);
2849 }
2850 
2851 /*@C
2852   PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement
2853 
2854   Collective
2855 
2856   Input Parameter:
2857 . pc - the preconditioner context
2858 
2859   Output Parameters:
2860 + A00 - the (0,0) block
2861 . A01 - the (0,1) block
2862 . A10 - the (1,0) block
2863 - A11 - the (1,1) block
2864 
2865   Level: advanced
2866 
2867   Note:
2868   Use `NULL` for any unneeded output arguments
2869 
2870 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()`
2871 @*/
2872 PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11)
2873 {
2874   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2875 
2876   PetscFunctionBegin;
2877   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2878   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach.");
2879   if (A00) *A00 = jac->pmat[0];
2880   if (A01) *A01 = jac->B;
2881   if (A10) *A10 = jac->C;
2882   if (A11) *A11 = jac->pmat[1];
2883   PetscFunctionReturn(PETSC_SUCCESS);
2884 }
2885 
2886 /*@
2887   PCFieldSplitSetGKBTol -  Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2888 
2889   Collective
2890 
2891   Input Parameters:
2892 + pc        - the preconditioner context
2893 - tolerance - the solver tolerance
2894 
2895   Options Database Key:
2896 . -pc_fieldsplit_gkb_tol <tolerance> - default is 1e-5
2897 
2898   Level: intermediate
2899 
2900   Note:
2901   The generalized GKB algorithm {cite}`arioli2013` uses a lower bound estimate of the error in energy norm as stopping criterion.
2902   It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than
2903   this estimate, the stopping criterion is satisfactory in practical cases.
2904 
2905 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()`
2906 @*/
PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* tolerance must be identical on all MPI processes of the PC's communicator */
  PetscValidLogicalCollectiveReal(pc, tolerance, 2);
  /* Try (not Use): silently a no-op when pc is not a GKB-type PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2915 
2916 static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance)
2917 {
2918   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2919 
2920   PetscFunctionBegin;
2921   jac->gkbtol = tolerance;
2922   PetscFunctionReturn(PETSC_SUCCESS);
2923 }
2924 
2925 /*@
2926   PCFieldSplitSetGKBMaxit -  Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2927 
2928   Collective
2929 
2930   Input Parameters:
2931 + pc    - the preconditioner context
2932 - maxit - the maximum number of iterations
2933 
2934   Options Database Key:
2935 . -pc_fieldsplit_gkb_maxit <maxit> - default is 100
2936 
2937   Level: intermediate
2938 
2939 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()`
2940 @*/
PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* maxit must be identical on all MPI processes of the PC's communicator */
  PetscValidLogicalCollectiveInt(pc, maxit, 2);
  /* Try (not Use): silently a no-op when pc is not a GKB-type PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2949 
2950 static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit)
2951 {
2952   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2953 
2954   PetscFunctionBegin;
2955   jac->gkbmaxit = maxit;
2956   PetscFunctionReturn(PETSC_SUCCESS);
2957 }
2958 
2959 /*@
2960   PCFieldSplitSetGKBDelay -  Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization {cite}`arioli2013` in `PCFIELDSPLIT`
2961   preconditioner.
2962 
2963   Collective
2964 
2965   Input Parameters:
2966 + pc    - the preconditioner context
2967 - delay - the delay window in the lower bound estimate
2968 
2969   Options Database Key:
2970 . -pc_fieldsplit_gkb_delay <delay> - default is 5
2971 
2972   Level: intermediate
2973 
2974   Notes:
2975   The algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. The lower bound of the error $ ||u-u^k||_H $
2976   is expressed as a truncated sum. The error at iteration k can only be measured at iteration (k + `delay`), and thus the algorithm needs
2977   at least (`delay` + 1) iterations to stop.
2978 
2979   For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to {cite}`arioli2013`
2980 
2981 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2982 @*/
PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* delay must be identical on all MPI processes of the PC's communicator */
  PetscValidLogicalCollectiveInt(pc, delay, 2);
  /* Try (not Use): silently a no-op when pc is not a GKB-type PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2991 
2992 static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay)
2993 {
2994   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2995 
2996   PetscFunctionBegin;
2997   jac->gkbdelay = delay;
2998   PetscFunctionReturn(PETSC_SUCCESS);
2999 }
3000 
3001 /*@
3002   PCFieldSplitSetGKBNu -  Sets the scalar value nu >= 0 in the transformation H = A00 + nu*A01*A01' of the (1,1) block in the
3003   Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
3004 
3005   Collective
3006 
3007   Input Parameters:
3008 + pc - the preconditioner context
3009 - nu - the shift parameter
3010 
3011   Options Database Key:
3012 . -pc_fieldsplit_gkb_nu <nu> - default is 1
3013 
3014   Level: intermediate
3015 
3016   Notes:
3017   This shift is in general done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by choosing `nu` sufficiently large. However,
3018   if `nu` is chosen too large, the matrix H might be badly conditioned and the solution of the linear system $Hx = b$ in the inner loop becomes difficult. It is therefore
3019   necessary to find a good balance in between the convergence of the inner and outer loop.
3020 
3021   For `nu` = 0, no shift is done. In this case A00 has to be positive definite. The matrix N in {cite}`arioli2013` is then chosen as identity.
3022 
3023 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
3024 @*/
PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* nu must be identical on all MPI processes of the PC's communicator */
  PetscValidLogicalCollectiveReal(pc, nu, 2);
  /* Try (not Use): silently a no-op when pc is not a GKB-type PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3033 
3034 static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu)
3035 {
3036   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3037 
3038   PetscFunctionBegin;
3039   jac->gkbnu = nu;
3040   PetscFunctionReturn(PETSC_SUCCESS);
3041 }
3042 
3043 static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type)
3044 {
3045   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3046 
3047   PetscFunctionBegin;
3048   jac->type = type;
3049   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
3050   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
3051   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
3052   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
3053   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
3054   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
3055   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
3056   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
3057   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
3058 
3059   if (type == PC_COMPOSITE_SCHUR) {
3060     pc->ops->apply          = PCApply_FieldSplit_Schur;
3061     pc->ops->applytranspose = PCApplyTranspose_FieldSplit_Schur;
3062     pc->ops->view           = PCView_FieldSplit_Schur;
3063     pc->ops->setuponblocks  = PCSetUpOnBlocks_FieldSplit_Schur;
3064 
3065     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur));
3066     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit));
3067     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit));
3068     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit));
3069     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit));
3070   } else if (type == PC_COMPOSITE_GKB) {
3071     pc->ops->apply          = PCApply_FieldSplit_GKB;
3072     pc->ops->applytranspose = NULL;
3073     pc->ops->view           = PCView_FieldSplit_GKB;
3074     pc->ops->setuponblocks  = PCSetUpOnBlocks_FieldSplit_GKB;
3075 
3076     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
3077     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit));
3078     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit));
3079     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit));
3080     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit));
3081   } else {
3082     pc->ops->apply          = PCApply_FieldSplit;
3083     pc->ops->applytranspose = PCApplyTranspose_FieldSplit;
3084     pc->ops->view           = PCView_FieldSplit;
3085     pc->ops->setuponblocks  = PCSetUpOnBlocks_FieldSplit;
3086 
3087     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
3088   }
3089   PetscFunctionReturn(PETSC_SUCCESS);
3090 }
3091 
/* Set the block size used to define strided fields; called through the
   PCFieldSplitSetBlockSize_C composed method. */
static PetscErrorCode PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* The block size must be at least 1 */
  PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" PetscInt_FMT, bs);
  /* Once set (jac->bs > 0) the block size cannot be changed to a different value */
  PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs);
  jac->bs = bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3102 
3103 static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
3104 {
3105   PC_FieldSplit    *jac           = (PC_FieldSplit *)pc->data;
3106   PC_FieldSplitLink ilink_current = jac->head;
3107   IS                is_owned;
3108 
3109   PetscFunctionBegin;
3110   jac->coordinates_set = PETSC_TRUE; // Internal flag
3111   PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, NULL));
3112 
3113   while (ilink_current) {
3114     // For each IS, embed it to get local coords indces
3115     IS              is_coords;
3116     PetscInt        ndofs_block;
3117     const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block
3118 
3119     // Setting drop to true for safety. It should make no difference.
3120     PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords));
3121     PetscCall(ISGetLocalSize(is_coords, &ndofs_block));
3122     PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration));
3123 
3124     // Allocate coordinates vector and set it directly
3125     PetscCall(PetscMalloc1(ndofs_block * dim, &ilink_current->coords));
3126     for (PetscInt dof = 0; dof < ndofs_block; ++dof) {
3127       for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d];
3128     }
3129     ilink_current->dim   = dim;
3130     ilink_current->ndofs = ndofs_block;
3131     PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration));
3132     PetscCall(ISDestroy(&is_coords));
3133     ilink_current = ilink_current->next;
3134   }
3135   PetscCall(ISDestroy(&is_owned));
3136   PetscFunctionReturn(PETSC_SUCCESS);
3137 }
3138 
/*@
  PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc   - the preconditioner context
- type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`,
         `PC_COMPOSITE_GKB`

  Options Database Key:
. -pc_fieldsplit_type <one of multiplicative, additive, symmetric_multiplicative, special, schur, gkb> - Sets fieldsplit preconditioner type

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
          `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`, `PCFieldSplitSetSchurFactType()`
@*/
PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Dispatch to PCFieldSplitSetType_FieldSplit() if composed; no-op otherwise */
  PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3164 
3165 /*@
3166   PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
3167 
3168   Not collective
3169 
3170   Input Parameter:
3171 . pc - the preconditioner context
3172 
3173   Output Parameter:
3174 . type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
3175 
3176   Level: intermediate
3177 
3178 .seealso: [](sec_block_matrices), `PC`, `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
3179           `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
3180 @*/
3181 PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type)
3182 {
3183   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3184 
3185   PetscFunctionBegin;
3186   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3187   PetscAssertPointer(type, 2);
3188   *type = jac->type;
3189   PetscFunctionReturn(PETSC_SUCCESS);
3190 }
3191 
3192 /*@
3193   PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3194 
3195   Logically Collective
3196 
3197   Input Parameters:
3198 + pc  - the preconditioner context
3199 - flg - boolean indicating whether to use field splits defined by the `DM`
3200 
3201   Options Database Key:
3202 . -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM`
3203 
3204   Level: intermediate
3205 
3206   Developer Note:
3207   The name should be `PCFieldSplitSetUseDMSplits()`, similar change to options database
3208 
3209 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
3210 @*/
3211 PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg)
3212 {
3213   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3214   PetscBool      isfs;
3215 
3216   PetscFunctionBegin;
3217   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3218   PetscValidLogicalCollectiveBool(pc, flg, 2);
3219   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3220   if (isfs) jac->dm_splits = flg;
3221   PetscFunctionReturn(PETSC_SUCCESS);
3222 }
3223 
3224 /*@
3225   PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3226 
3227   Logically Collective
3228 
3229   Input Parameter:
3230 . pc - the preconditioner context
3231 
3232   Output Parameter:
3233 . flg - boolean indicating whether to use field splits defined by the `DM`
3234 
3235   Level: intermediate
3236 
3237   Developer Note:
3238   The name should be `PCFieldSplitGetUseDMSplits()`
3239 
3240 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
3241 @*/
3242 PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg)
3243 {
3244   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3245   PetscBool      isfs;
3246 
3247   PetscFunctionBegin;
3248   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3249   PetscAssertPointer(flg, 2);
3250   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3251   if (isfs) {
3252     if (flg) *flg = jac->dm_splits;
3253   }
3254   PetscFunctionReturn(PETSC_SUCCESS);
3255 }
3256 
3257 /*@
3258   PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3259 
3260   Logically Collective
3261 
3262   Input Parameter:
3263 . pc - the preconditioner context
3264 
3265   Output Parameter:
3266 . flg - boolean indicating whether to detect fields or not
3267 
3268   Level: intermediate
3269 
3270 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()`
3271 @*/
3272 PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg)
3273 {
3274   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3275 
3276   PetscFunctionBegin;
3277   *flg = jac->detect;
3278   PetscFunctionReturn(PETSC_SUCCESS);
3279 }
3280 
3281 /*@
3282   PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3283 
3284   Logically Collective
3285 
3286   Input Parameter:
3287 . pc - the preconditioner context
3288 
3289   Output Parameter:
3290 . flg - boolean indicating whether to detect fields or not
3291 
3292   Options Database Key:
3293 . -pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point
3294 
3295   Level: intermediate
3296 
3297   Note:
3298   Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`).
3299 
3300 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`
3301 @*/
3302 PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg)
3303 {
3304   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3305 
3306   PetscFunctionBegin;
3307   jac->detect = flg;
3308   if (jac->detect) {
3309     PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
3310     PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
3311   }
3312   PetscFunctionReturn(PETSC_SUCCESS);
3313 }
3314 
3315 /*MC
3316   PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual
3317   collections of variables (that may overlap) called fields or splits. Each field often represents a different continuum variable
3318   represented on a grid, such as velocity, pressure, or temperature.
3319   In the literature these are sometimes called block preconditioners; but should not be confused with `PCBJACOBI`.
3320   See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details.
3321 
3322   Options Database Keys:
3323 +   -pc_fieldsplit_%d_fields <a,b,..>                                                - indicates the fields to be used in the `%d`'th split
3324 .   -pc_fieldsplit_default                                                           - automatically add any fields to additional splits that have not
3325                                                                                        been supplied explicitly by `-pc_fieldsplit_%d_fields`
3326 .   -pc_fieldsplit_block_size <bs>                                                   - size of block that defines fields (i.e. there are bs fields)
3327                                                                                        when the matrix is not of `MatType` `MATNEST`
3328 .   -pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting
3329 .   -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full>                     - default is `a11`; see `PCFieldSplitSetSchurPre()`
3330 .   -pc_fieldsplit_schur_fact_type <diag,lower,upper,full>                           - set factorization type when using `-pc_fieldsplit_type schur`;
3331                                                                                        see `PCFieldSplitSetSchurFactType()`
3332 .   -pc_fieldsplit_dm_splits <true,false> (default is true)                          - Whether to use `DMCreateFieldDecomposition()` for splits
3333 -   -pc_fieldsplit_detect_saddle_point                                               - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver
3334 
3335   Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` .
  The options prefix for the inner solver when using the Golub-Kahan bidiagonalization preconditioner is `-fieldsplit_0_`
3337   For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields.
3338 
3339   To set options on the solvers for all blocks, prepend `-fieldsplit_` to all the `PC`
3340   options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1`.
3341 
3342   To set the options on the solvers separate for each block call `PCFieldSplitGetSubKSP()`
3343   and set the options directly on the resulting `KSP` object
3344 
3345   Level: intermediate
3346 
3347   Notes:
3348   Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries or with a `MATNEST` and `PCFieldSplitSetIS()`
3349   to define a split by an arbitrary collection of entries.
3350 
3351   If no splits are set, the default is used. If a `DM` is associated with the `PC` and it supports
3352   `DMCreateFieldDecomposition()`, then that is used for the default. Otherwise if the matrix is not `MATNEST`, the splits are defined by entries strided by bs,
3353   beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`,
3354   if this is not called the block size defaults to the blocksize of the second matrix passed
3355   to `KSPSetOperators()`/`PCSetOperators()`.
3356 
3357   For the Schur complement preconditioner if
3358   ```{math}
3359     J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right]
3360   ```
3361 
3362   the preconditioner using `full` factorization is logically
3363   ```{math}
3364     \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) A_{01} \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{ksp}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right]
  ```
3366   where the action of $\text{ksp}(A_{00})$ is applied using the `KSP` solver with prefix `-fieldsplit_0_`.  $S$ is the Schur complement
3367   ```{math}
3368      S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01}
3369   ```
3370   which is usually dense and not stored explicitly.  The action of $\text{ksp}(S)$ is computed using the `KSP` solver with prefix `-fieldsplit_splitname_` (where `splitname`
3371   was given in providing the SECOND split or 1 if not given). Accordingly, if using `PCFieldSplitGetSubKSP()`, the array of sub-`KSP` contexts will hold two `KSP`s: at its
3372   0th index, the `KSP` associated with `-fieldsplit_0_`, and at its 1st index, the `KSP` corresponding to `-fieldsplit_1_`.
3373   By default, $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$.
3374 
3375   The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above,
3376   `diag` gives
3377   ```{math}
3378     \left[\begin{array}{cc} \text{ksp}(A_{00}) & 0 \\  0 & -\text{ksp}(S) \end{array}\right]
3379   ```
3380   Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$  so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip
3381   can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of
3382   ```{math}
3383     \left[\begin{array}{cc} A_{00} & 0 \\  A_{10} & S \end{array}\right]
3384   ```
3385   where the inverses of $A_{00}$ and $S$ are applied using `KSP`s. The upper factorization is the inverse of
3386   ```{math}
3387     \left[\begin{array}{cc} A_{00} & A_{01} \\  0 & S \end{array}\right]
3388   ```
3389   where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s.
3390 
3391   If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS`
3392   is used automatically for a second submatrix.
3393 
3394   The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1.
3395   Generally it should be used with the `MATAIJ` or `MATNEST` `MatType`
3396 
3397   The forms of these preconditioners are closely related, if not identical, to forms derived as "Distributive Iterations", see,
3398   for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`wesseling2009`.
3399   One can also use `PCFIELDSPLIT` inside a smoother resulting in "Distributive Smoothers".
3400 
3401   See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`.
3402 
3403   The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the
3404   residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables.
3405 
3406   The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape
3407   ```{math}
3408     \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right]
3409   ```
  with $A_{00}$ positive semi-definite. The implementation follows {cite}`arioli2013`. Therein, we choose $N := 1/\nu * I$ and the $(1,1)$-block of the matrix is modified to $H = A_{00} + \nu A_{01} A_{01}'$.
3411   A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`.
3412 
3413   Some `PCFIELDSPLIT` variants are called physics-based preconditioners, since the preconditioner takes into account the underlying physics of the
3414   problem. But this nomenclature is not well-defined.
3415 
3416   Developer Note:
3417   The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their
3418   user API.
3419 
3420 .seealso: [](sec_block_matrices), `PC`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`,
3421           `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`,
3422           `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`,
3423           `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()`
3424 M*/
3425 
3426 PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc)
3427 {
3428   PC_FieldSplit *jac;
3429 
3430   PetscFunctionBegin;
3431   PetscCall(PetscNew(&jac));
3432 
3433   jac->bs                 = -1;
3434   jac->type               = PC_COMPOSITE_MULTIPLICATIVE;
3435   jac->schurpre           = PC_FIELDSPLIT_SCHUR_PRE_USER; /* Try user preconditioner first, fall back on diagonal */
3436   jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL;
3437   jac->schurscale         = -1.0;
3438   jac->dm_splits          = PETSC_TRUE;
3439   jac->gkbtol             = 1e-5;
3440   jac->gkbdelay           = 5;
3441   jac->gkbnu              = 1;
3442   jac->gkbmaxit           = 100;
3443 
3444   pc->data = (void *)jac;
3445 
3446   pc->ops->setup           = PCSetUp_FieldSplit;
3447   pc->ops->reset           = PCReset_FieldSplit;
3448   pc->ops->destroy         = PCDestroy_FieldSplit;
3449   pc->ops->setfromoptions  = PCSetFromOptions_FieldSplit;
3450   pc->ops->applyrichardson = NULL;
3451 
3452   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit));
3453   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit));
3454   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit));
3455   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit));
3456   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit));
3457   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit));
3458   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit));
3459 
3460   /* Initialize function pointers */
3461   PetscCall(PCFieldSplitSetType(pc, jac->type));
3462   PetscFunctionReturn(PETSC_SUCCESS);
3463 }
3464