xref: /petsc/src/ksp/pc/impls/fieldsplit/fieldsplit.c (revision 66c9fbdd036b1e887ebf0d2bef6dbdcafd086d45)
1 #include <petsc/private/pcimpl.h>  /*I "petscpc.h" I*/
2 #include <petsc/private/kspimpl.h> /*  This is needed to provide the appropriate PETSC_EXTERN for KSP_Solve_FS ....*/
3 #include <petscdm.h>
4 
/* String tables for the Schur preconditioner/factorization enums. The trailing three
   entries follow the PETSc PetscEnum convention: the enum type name, the common value
   prefix, and a NULL sentinel, so these arrays can be used with PetscOptionsEnum(). */
const char *const PCFieldSplitSchurPreTypes[]  = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL};
const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL};

/* Logging events for the inner KSP solves on the individual splits/factors */
PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4;
9 
/* One node of the linked list of splits; each split owns its own sub-solver and index sets */
typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
struct _PC_FieldSplitLink {
  KSP               ksp;                 /* solver for this split's diagonal block */
  Vec               x, y, z;            /* work vectors in this split's sub-space */
  char             *splitname;          /* name used in option prefixes and when viewing */
  PetscInt          nfields;            /* number of fields making up this split */
  PetscInt         *fields, *fields_col; /* row/column field indices (used when the split is defined by fields rather than an IS) */
  VecScatter        sctx;               /* scatter between the full-space vector and this split's sub-vector */
  IS                is, is_col;         /* row and column index sets selecting this split */
  PC_FieldSplitLink next, previous;     /* doubly-linked list of splits */
  PetscLogEvent     event;              /* logging event for this split's solve */

  /* Used only when setting coordinates with PCSetCoordinates */
  PetscInt   dim;
  PetscInt   ndofs;
  PetscReal *coords;
};
27 
/* Private context for PCFIELDSPLIT: the list of splits plus state for the
   Schur-complement and Golub-Kahan-bidiagonalization variants */
typedef struct {
  PCCompositeType type;         /* How the split solves are combined (additive, multiplicative, Schur, GKB, ...) */
  PetscBool       defaultsplit; /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */
  PetscBool       splitdefined; /* Flag is set after the splits have been defined, to prevent more splits from being added */
  PetscInt        bs;           /* Block size for IS and Mat structures */
  PetscInt        nsplits;      /* Number of field divisions defined */
  Vec            *x, *y, w1, w2; /* Per-split work vectors (x, y) and whole-space work vectors (w1, w2) */
  Mat            *mat;    /* The diagonal block for each split */
  Mat            *pmat;   /* The preconditioning diagonal block for each split */
  Mat            *Afield; /* The rows of the matrix associated with each split */
  PetscBool       issetup; /* Set once PCSetUp_FieldSplit() has built the index sets and sub-matrices */

  /* Only used when Schur complement preconditioning is used */
  Mat                       B;          /* The (0,1) block */
  Mat                       C;          /* The (1,0) block */
  Mat                       schur;      /* The Schur complement S = A11 - A10 A00^{-1} A01, the KSP here, kspinner, is H_1 in [El08] */
  Mat                       schurp;     /* Assembled approximation to S built by MatSchurComplement to be used as a preconditioning matrix when solving with S */
  Mat                       schur_user; /* User-provided preconditioning matrix for the Schur complement */
  PCFieldSplitSchurPreType  schurpre;   /* Determines which preconditioning matrix is used for the Schur complement */
  PCFieldSplitSchurFactType schurfactorization; /* Which part of the block factorization (diag/lower/upper/full) is applied */
  KSP                       kspschur;   /* The solver for S */
  KSP                       kspupper;   /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */
  PetscScalar               schurscale; /* Scaling factor for the Schur complement solution with DIAG factorization */

  /* Only used when Golub-Kahan bidiagonalization preconditioning is used */
  Mat          H;           /* The modified matrix H = A00 + nu*A01*A01'              */
  PetscReal    gkbtol;      /* Stopping tolerance for lower bound estimate            */
  PetscInt     gkbdelay;    /* The delay window for the stopping criterion            */
  PetscReal    gkbnu;       /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */
  PetscInt     gkbmaxit;    /* Maximum number of iterations for outer loop            */
  PetscBool    gkbmonitor;  /* Monitor for gkb iterations and the lower bound error   */
  PetscViewer  gkbviewer;   /* Viewer context for gkbmonitor                          */
  Vec          u, v, d, Hu; /* Work vectors for the GKB algorithm                     */
  PetscScalar *vecz;        /* Contains intermediate values, eg for lower bound       */

  PC_FieldSplitLink head;   /* First split in the linked list of splits */
  PetscBool         isrestrict;       /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */
  PetscBool         suboptionsset;    /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */
  PetscBool         dm_splits;        /* Whether to use DMCreateFieldDecomposition() whenever possible */
  PetscBool         diag_use_amat;    /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         detect;           /* Whether to form 2-way split by finding zero diagonal entries */
  PetscBool         coordinates_set;  /* Whether PCSetCoordinates has been called */
} PC_FieldSplit;
72 
/*
    Note:
    There is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit rather than
    inside PC_FieldSplitLink; it is just historical. If you want to be able to add new fields after the
    PC has already been used, you could change this.
*/
79 
80 /* This helper is so that setting a user-provided preconditioning matrix is orthogonal to choosing to use it.  This way the
81 * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices. */
82 static Mat FieldSplitSchurPre(PC_FieldSplit *jac)
83 {
84   switch (jac->schurpre) {
85   case PC_FIELDSPLIT_SCHUR_PRE_SELF:
86     return jac->schur;
87   case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
88     return jac->schurp;
89   case PC_FIELDSPLIT_SCHUR_PRE_A11:
90     return jac->pmat[1];
91   case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */
92   case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */
93   default:
94     return jac->schur_user ? jac->schur_user : jac->pmat[1];
95   }
96 }
97 
98 #include <petscdraw.h>
/* Viewer for PCFIELDSPLIT with additive/multiplicative composition: reports the
   composition type, number of splits and block size, then views each split's KSP.
   Handles ASCII and draw viewers; other viewer types are silently ignored. */
static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for each split is in the following KSP objects:\n"));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        /* disable tabbing so the comma-separated field list prints on a single line */
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      PetscCall(KSPView(ilink->ksp, viewer));
      ilink = ilink->next;
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    /* NOTE(review): ilink is not reset to jac->head before this loop; this relies on a
       viewer never being both ASCII and draw at once — confirm */
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* lay the split solvers out side by side, centered about the current draw point */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(0);
}
156 
/* Viewer for PCFIELDSPLIT with Schur-complement composition: reports the factorization
   type, which matrix preconditions the Schur complement, the field makeup of each split,
   and views the A00, (optional) upper-A00 and Schur solvers. Handles ASCII and draw viewers. */
static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer)
{
  PC_FieldSplit             *jac = (PC_FieldSplit *)pc->data;
  PetscBool                  iascii, isdraw;
  PetscInt                   i, j;
  PC_FieldSplitLink          ilink = jac->head;
  MatSchurComplementAinvType atype;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    /* Report which matrix is used to precondition the Schur complement (see FieldSplitSchurPre()) */
    switch (jac->schurpre) {
    case PC_FIELDSPLIT_SCHUR_PRE_SELF:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from S itself\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
      PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype));
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's "))));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_A11:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_FULL:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from the exact Schur complement\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_USER:
      /* USER falls back to A11 when no matrix was actually provided */
      if (jac->schur_user) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from user provided matrix\n"));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      }
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre);
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Split info:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        /* disable tabbing so the comma-separated field list prints on a single line */
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      ilink = ilink->next;
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->head) {
      PetscCall(KSPView(jac->head->ksp, viewer));
    } else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    PetscCall(PetscViewerASCIIPopTab(viewer));
    /* a distinct upper solver only exists when the user requested one; otherwise the A00 solver is reused */
    if (jac->head && jac->kspupper != jac->head->ksp) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor \n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01 \n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->kspschur) {
      PetscCall(KSPView(jac->kspschur, viewer));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    }
    PetscCall(PetscViewerASCIIPopTab(viewer));
    PetscCall(PetscViewerASCIIPopTab(viewer));
  } else if (isdraw && jac->head) {
    PetscDraw draw;
    PetscReal x, y, w, wd, h;
    PetscInt  cnt = 2; /* boxes to lay out: A00 solver + Schur solver (+ upper solver if distinct) */
    char      str[32];

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (jac->kspupper != jac->head->ksp) cnt++;
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (cnt + 1);

    PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    /* label reflects the A11 fallback when USER was requested but no matrix supplied */
    if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11]));
    } else {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre]));
    }
    PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    x = x - wd * (cnt - 1) / 2.0;

    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->head->ksp, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
    if (jac->kspupper != jac->head->ksp) {
      x += wd;
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(jac->kspupper, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
    }
    x += wd;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->kspschur, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
  }
  PetscFunctionReturn(0);
}
278 
/* Viewer for PCFIELDSPLIT with Golub-Kahan-bidiagonalization composition: reports the
   GKB tolerance/delay/maxit parameters and views the solver for the augmented matrix
   H = A00 + nu*A01*A01'. Handles ASCII and draw viewers. */
static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));

    PetscCall(PetscViewerASCIIPrintf(viewer, "  Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for H = A00 + nu*A01*A01' matrix:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));

    /* NOTE(review): only the first split (split 0, the H solver) is described here;
       ilink is dereferenced without a NULL guard, so this assumes the splits have
       been defined before viewing — confirm */
    if (ilink->fields) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields "));
      /* disable tabbing so the comma-separated field list prints on a single line */
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      for (j = 0; j < ilink->nfields; j++) {
        if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
        PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
      }
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n"));
    }
    PetscCall(KSPView(ilink->ksp, viewer));

    PetscCall(PetscViewerASCIIPopTab(viewer));
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* lay the split solvers out side by side, centered about the current draw point */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(0);
}
339 
340 /* Precondition: jac->bs is set to a meaningful value */
341 static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc)
342 {
343   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
344   PetscInt       i, nfields, *ifields, nfields_col, *ifields_col;
345   PetscBool      flg, flg_col;
346   char           optionname[128], splitname[8], optionname_col[128];
347 
348   PetscFunctionBegin;
349   PetscCall(PetscMalloc1(jac->bs, &ifields));
350   PetscCall(PetscMalloc1(jac->bs, &ifields_col));
351   for (i = 0, flg = PETSC_TRUE;; i++) {
352     PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
353     PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
354     PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i));
355     nfields     = jac->bs;
356     nfields_col = jac->bs;
357     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
358     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col));
359     if (!flg) break;
360     else if (flg && !flg_col) {
361       PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
362       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields));
363     } else {
364       PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
365       PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match");
366       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col));
367     }
368   }
369   if (i > 0) {
370     /* Makes command-line setting of splits take precedence over setting them in code.
371        Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would
372        create new splits, which would probably not be what the user wanted. */
373     jac->splitdefined = PETSC_TRUE;
374   }
375   PetscCall(PetscFree(ifields));
376   PetscCall(PetscFree(ifields_col));
377   PetscFunctionReturn(0);
378 }
379 
/* Ensure at least two splits exist before setup. If no splits were provided, derive them
   from (in order of precedence): the DM's field decomposition, zero-diagonal detection
   (-pc_fieldsplit_detect_coupling / jac->detect), off-block-diagonal coupling detection,
   runtime -pc_fieldsplit_<k>_fields options, a MATNEST structure, or one split per
   block-size component. If exactly one split was given, its complement becomes split "1". */
static PetscErrorCode PCFieldSplitSetDefaults(PC pc)
{
  PC_FieldSplit    *jac                = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink              = jac->head;
  PetscBool         fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE;
  PetscInt          i;

  PetscFunctionBegin;
  /*
   Kinda messy, but at least this now uses DMCreateFieldDecomposition().
   Should probably be rewritten.
   */
  if (!ilink) {
    PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL));
    if (pc->dm && jac->dm_splits && !jac->detect && !coupling) {
      /* Case 1: a DM is attached and allowed to define the splits */
      PetscInt  numFields, f, i, j; /* note: shadows the outer i */
      char    **fieldNames;
      IS       *fields;
      DM       *dms;
      DM        subdm[128];
      PetscBool flg;

      PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms));
      /* Allow the user to prescribe the splits */
      for (i = 0, flg = PETSC_TRUE;; i++) {
        PetscInt ifields[128];
        IS       compField;
        char     optionname[128], splitname[8];
        PetscInt nfields = numFields;

        PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
        /* NOTE(review): PetscOptionsGetIntArray can write up to nfields (= numFields)
           entries into ifields[128] before the 128-field bound is checked below —
           confirm numFields <= 128 is guaranteed upstream */
        PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
        if (!flg) break;
        PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields);
        PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i]));
        if (nfields == 1) {
          /* single-field split keeps the DM's field name */
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField));
        } else {
          /* multi-field split is named by its index */
          PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
          PetscCall(PCFieldSplitSetIS(pc, splitname, compField));
        }
        PetscCall(ISDestroy(&compField));
        /* release the per-field names/ISs consumed by this split */
        for (j = 0; j < nfields; ++j) {
          f = ifields[j];
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      }
      if (i == 0) {
        /* no options given: one split per DM field */
        for (f = 0; f < numFields; ++f) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f]));
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      } else {
        /* options defined i splits: replace the DM-provided sub-DMs with the ones built above */
        for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j));
        PetscCall(PetscFree(dms));
        PetscCall(PetscMalloc1(i, &dms));
        for (j = 0; j < i; ++j) dms[j] = subdm[j];
      }
      PetscCall(PetscFree(fieldNames));
      PetscCall(PetscFree(fields));
      if (dms) {
        PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n"));
        /* attach each sub-DM (with the split's option prefix and operator callbacks) to its KSP */
        for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) {
          const char *prefix;
          PetscCall(PetscObjectGetOptionsPrefix((PetscObject)(ilink->ksp), &prefix));
          PetscCall(PetscObjectSetOptionsPrefix((PetscObject)(dms[i]), prefix));
          PetscCall(KSPSetDM(ilink->ksp, dms[i]));
          PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE));
          {
            PetscErrorCode (*func)(KSP, Mat, Mat, void *);
            void *ctx;

            /* propagate the parent DM's ComputeOperators callback to the sub-DM */
            PetscCall(DMKSPGetComputeOperators(pc->dm, &func, &ctx));
            PetscCall(DMKSPSetComputeOperators(dms[i], func, ctx));
          }
          PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0));
          PetscCall(DMDestroy(&dms[i]));
        }
        PetscCall(PetscFree(dms));
      }
    } else {
      /* Case 2: no (usable) DM — derive the splits from the matrix */
      if (jac->bs <= 0) {
        if (pc->pmat) {
          PetscCall(MatGetBlockSize(pc->pmat, &jac->bs));
        } else jac->bs = 1;
      }

      if (jac->detect) {
        /* 2a: split into rows with zero diagonal entries and the rest */
        IS       zerodiags, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->diag_use_amat) {
          PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags));
        } else {
          PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags));
        }
        PetscCall(ISComplement(zerodiags, nmin, nmax, &rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags));
        PetscCall(ISDestroy(&zerodiags));
        PetscCall(ISDestroy(&rest));
      } else if (coupling) {
        /* 2b: split into rows with off-block-diagonal entries and all rows */
        IS       coupling, rest; /* shadows the outer PetscBool coupling */
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->offdiag_use_amat) {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling));
        } else {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling));
        }
        PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest));
        PetscCall(ISSetIdentity(rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", coupling));
        PetscCall(ISDestroy(&coupling));
        PetscCall(ISDestroy(&rest));
      } else {
        PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL));
        if (!fieldsplit_default) {
          /* Allow user to set fields from command line,  if bs was known at the time of PCSetFromOptions_FieldSplit()
           then it is set there. This is not ideal because we should only have options set in XXSetFromOptions(). */
          PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
          if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
        }
        if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) {
          /* 2c: fall back to the matrix structure (MATNEST blocks or one split per component) */
          Mat       M = pc->pmat;
          PetscBool isnest;

          PetscCall(PetscInfo(pc, "Using default splitting of fields\n"));
          PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest));
          if (!isnest) {
            M = pc->mat;
            PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest));
          }
          if (isnest) {
            /* one split per nest block row */
            IS      *fields;
            PetscInt nf;

            PetscCall(MatNestGetSize(M, &nf, NULL));
            PetscCall(PetscMalloc1(nf, &fields));
            PetscCall(MatNestGetISs(M, fields, NULL));
            for (i = 0; i < nf; i++) PetscCall(PCFieldSplitSetIS(pc, NULL, fields[i]));
            PetscCall(PetscFree(fields));
          } else {
            /* one split per component of the block size */
            for (i = 0; i < jac->bs; i++) {
              char splitname[8];
              PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
              PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i));
            }
            jac->defaultsplit = PETSC_TRUE;
          }
        }
      }
    }
  } else if (jac->nsplits == 1) {
    /* exactly one split was supplied: make its complement the second split */
    if (ilink->is) {
      IS       is2;
      PetscInt nmin, nmax;

      PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
      PetscCall(ISComplement(ilink->is, nmin, nmax, &is2));
      PetscCall(PCFieldSplitSetIS(pc, "1", is2));
      PetscCall(ISDestroy(&is2));
    } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()");
  }

  PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits);
  PetscFunctionReturn(0);
}
553 
554 static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu)
555 {
556   Mat       BT, T;
557   PetscReal nrmT, nrmB;
558 
559   PetscFunctionBegin;
560   PetscCall(MatHermitianTranspose(C, MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */
561   PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN));
562   PetscCall(MatNorm(T, NORM_1, &nrmT));
563   PetscCall(MatNorm(B, NORM_1, &nrmB));
564   PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/hermitian, GKB is not applicable.");
565 
566   /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */
567   /* setting N := 1/nu*I in [Ar13].                                                 */
568   PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT));
569   PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_DEFAULT, H)); /* H = A01*A01'          */
570   PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN));        /* H = A00 + nu*A01*A01' */
571 
572   PetscCall(MatDestroy(&BT));
573   PetscCall(MatDestroy(&T));
574   PetscFunctionReturn(0);
575 }
576 
577 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *value[], PetscBool *flg);
578 
579 static PetscErrorCode PCSetUp_FieldSplit(PC pc)
580 {
581   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
582   PC_FieldSplitLink ilink;
583   PetscInt          i, nsplit;
584   PetscBool         sorted, sorted_col;
585 
586   PetscFunctionBegin;
587   pc->failedreason = PC_NOERROR;
588   PetscCall(PCFieldSplitSetDefaults(pc));
589   nsplit = jac->nsplits;
590   ilink  = jac->head;
591 
592   /* get the matrices for each split */
593   if (!jac->issetup) {
594     PetscInt rstart, rend, nslots, bs;
595 
596     jac->issetup = PETSC_TRUE;
597 
598     /* This is done here instead of in PCFieldSplitSetFields() because may not have matrix at that point */
599     if (jac->defaultsplit || !ilink->is) {
600       if (jac->bs <= 0) jac->bs = nsplit;
601     }
602 
603     /*  MatCreateSubMatrix() for [S]BAIJ matrices can only work if the indices include entire blocks of the matrix */
604     PetscCall(MatGetBlockSize(pc->pmat, &bs));
605     if (bs > 1 && (jac->bs <= bs || jac->bs % bs)) {
606       PetscBool blk;
607 
608       PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &blk, MATBAIJ, MATSBAIJ, MATSEQBAIJ, MATSEQSBAIJ, MATMPIBAIJ, MATMPISBAIJ, NULL));
609       PetscCheck(!blk, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Cannot use MATBAIJ with PCFIELDSPLIT and currently set matrix and PC blocksizes");
610     }
611 
612     bs = jac->bs;
613     PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend));
614     nslots = (rend - rstart) / bs;
615     for (i = 0; i < nsplit; i++) {
616       if (jac->defaultsplit) {
617         PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is));
618         PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
619       } else if (!ilink->is) {
620         if (ilink->nfields > 1) {
621           PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col;
622           PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii));
623           PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj));
624           for (j = 0; j < nslots; j++) {
625             for (k = 0; k < nfields; k++) {
626               ii[nfields * j + k] = rstart + bs * j + fields[k];
627               jj[nfields * j + k] = rstart + bs * j + fields_col[k];
628             }
629           }
630           PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is));
631           PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col));
632           PetscCall(ISSetBlockSize(ilink->is, nfields));
633           PetscCall(ISSetBlockSize(ilink->is_col, nfields));
634         } else {
635           PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is));
636           PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col));
637         }
638       }
639       PetscCall(ISSorted(ilink->is, &sorted));
640       if (ilink->is_col) PetscCall(ISSorted(ilink->is_col, &sorted_col));
641       PetscCheck(sorted && sorted_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Fields must be sorted when creating split");
642       ilink = ilink->next;
643     }
644   }
645 
646   ilink = jac->head;
647   if (!jac->pmat) {
648     Vec xtmp;
649 
650     PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL));
651     PetscCall(PetscMalloc1(nsplit, &jac->pmat));
652     PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y));
653     for (i = 0; i < nsplit; i++) {
654       MatNullSpace sp;
655 
656       /* Check for preconditioning matrix attached to IS */
657       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i]));
658       if (jac->pmat[i]) {
659         PetscCall(PetscObjectReference((PetscObject)jac->pmat[i]));
660         if (jac->type == PC_COMPOSITE_SCHUR) {
661           jac->schur_user = jac->pmat[i];
662 
663           PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
664         }
665       } else {
666         const char *prefix;
667         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i]));
668         PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix));
669         PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix));
670         PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view"));
671       }
672       /* create work vectors for each split */
673       PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i]));
674       ilink->x = jac->x[i];
675       ilink->y = jac->y[i];
676       ilink->z = NULL;
677       /* compute scatter contexts needed by multiplicative versions and non-default splits */
678       PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx));
679       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp));
680       if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp));
681       ilink = ilink->next;
682     }
683     PetscCall(VecDestroy(&xtmp));
684   } else {
685     MatReuse scall;
686     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
687       for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i]));
688       scall = MAT_INITIAL_MATRIX;
689     } else scall = MAT_REUSE_MATRIX;
690 
691     for (i = 0; i < nsplit; i++) {
692       Mat pmat;
693 
694       /* Check for preconditioning matrix attached to IS */
695       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat));
696       if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i]));
697       ilink = ilink->next;
698     }
699   }
700   if (jac->diag_use_amat) {
701     ilink = jac->head;
702     if (!jac->mat) {
703       PetscCall(PetscMalloc1(nsplit, &jac->mat));
704       for (i = 0; i < nsplit; i++) {
705         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i]));
706         ilink = ilink->next;
707       }
708     } else {
709       MatReuse scall;
710       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
711         for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i]));
712         scall = MAT_INITIAL_MATRIX;
713       } else scall = MAT_REUSE_MATRIX;
714 
715       for (i = 0; i < nsplit; i++) {
716         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i]));
717         ilink = ilink->next;
718       }
719     }
720   } else {
721     jac->mat = jac->pmat;
722   }
723 
724   /* Check for null space attached to IS */
725   ilink = jac->head;
726   for (i = 0; i < nsplit; i++) {
727     MatNullSpace sp;
728 
729     PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp));
730     if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp));
731     ilink = ilink->next;
732   }
733 
734   if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) {
735     /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */
736     /* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? */
737     ilink = jac->head;
738     if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) {
739       /* special case need where Afield[0] is not needed and only certain columns of Afield[1] are needed since update is only on those rows of the solution */
740       if (!jac->Afield) {
741         PetscCall(PetscCalloc1(nsplit, &jac->Afield));
742         if (jac->offdiag_use_amat) {
743           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
744         } else {
745           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
746         }
747       } else {
748         MatReuse scall;
749 
750         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
751           PetscCall(MatDestroy(&jac->Afield[1]));
752           scall = MAT_INITIAL_MATRIX;
753         } else scall = MAT_REUSE_MATRIX;
754 
755         if (jac->offdiag_use_amat) {
756           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
757         } else {
758           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
759         }
760       }
761     } else {
762       if (!jac->Afield) {
763         PetscCall(PetscMalloc1(nsplit, &jac->Afield));
764         for (i = 0; i < nsplit; i++) {
765           if (jac->offdiag_use_amat) {
766             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
767           } else {
768             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
769           }
770           ilink = ilink->next;
771         }
772       } else {
773         MatReuse scall;
774         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
775           for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i]));
776           scall = MAT_INITIAL_MATRIX;
777         } else scall = MAT_REUSE_MATRIX;
778 
779         for (i = 0; i < nsplit; i++) {
780           if (jac->offdiag_use_amat) {
781             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i]));
782           } else {
783             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, &jac->Afield[i]));
784           }
785           ilink = ilink->next;
786         }
787       }
788     }
789   }
790 
791   if (jac->type == PC_COMPOSITE_SCHUR) {
792     IS          ccis;
793     PetscBool   isset, isspd;
794     PetscInt    rstart, rend;
795     char        lscname[256];
796     PetscObject LSC_L;
797 
798     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields");
799 
800     /* If pc->mat is SPD, don't scale by -1 the Schur complement */
801     if (jac->schurscale == (PetscScalar)-1.0) {
802       PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd));
803       jac->schurscale = (isset && isspd) ? 1.0 : -1.0;
804     }
805 
806     /* When extracting off-diagonal submatrices, we take complements from this range */
807     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
808 
809     if (jac->schur) {
810       KSP      kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper;
811       MatReuse scall;
812 
813       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
814         scall = MAT_INITIAL_MATRIX;
815         PetscCall(MatDestroy(&jac->B));
816         PetscCall(MatDestroy(&jac->C));
817       } else scall = MAT_REUSE_MATRIX;
818 
819       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
820       ilink = jac->head;
821       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
822       if (jac->offdiag_use_amat) {
823         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->B));
824       } else {
825         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->B));
826       }
827       PetscCall(ISDestroy(&ccis));
828       ilink = ilink->next;
829       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
830       if (jac->offdiag_use_amat) {
831         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->C));
832       } else {
833         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->C));
834       }
835       PetscCall(ISDestroy(&ccis));
836       PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
837       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) {
838         PetscCall(MatDestroy(&jac->schurp));
839         PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
840       }
841       if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0]));
842       if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0]));
843       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
844     } else {
845       const char  *Dprefix;
846       char         schurprefix[256], schurmatprefix[256];
847       char         schurtestoption[256];
848       MatNullSpace sp;
849       PetscBool    flg;
850       KSP          kspt;
851 
852       /* extract the A01 and A10 matrices */
853       ilink = jac->head;
854       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
855       if (jac->offdiag_use_amat) {
856         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
857       } else {
858         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
859       }
860       PetscCall(ISDestroy(&ccis));
861       ilink = ilink->next;
862       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
863       if (jac->offdiag_use_amat) {
864         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
865       } else {
866         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
867       }
868       PetscCall(ISDestroy(&ccis));
869 
870       /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */
871       PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur));
872       PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT));
873       PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
874       PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
875       PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix));
876       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt));
877       PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix));
878 
879       /* Note: this is not true in general */
880       PetscCall(MatGetNullSpace(jac->mat[1], &sp));
881       if (sp) PetscCall(MatSetNullSpace(jac->schur, sp));
882 
883       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname));
884       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, &flg));
885       if (flg) {
886         DM  dmInner;
887         KSP kspInner;
888         PC  pcInner;
889 
890         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
891         PetscCall(KSPReset(kspInner));
892         PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0]));
893         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
894         /* Indent this deeper to emphasize the "inner" nature of this solver. */
895         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2));
896         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2));
897         PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix));
898 
899         /* Set DM for new solver */
900         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
901         PetscCall(KSPSetDM(kspInner, dmInner));
902         PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE));
903 
904         /* Defaults to PCKSP as preconditioner */
905         PetscCall(KSPGetPC(kspInner, &pcInner));
906         PetscCall(PCSetType(pcInner, PCKSP));
907         PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp));
908       } else {
909         /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or
910           * PCFieldSplitSetIS_FieldSplit. We don't want KSPPREONLY because it makes the Schur complement inexact,
911           * preventing Schur complement reduction to be an accurate solve. Usually when an iterative solver is used for
912           * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make
913           * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used
914           * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. */
915         PetscCall(KSPSetType(jac->head->ksp, KSPGMRES));
916         PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp));
917       }
918       PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0]));
919       PetscCall(KSPSetFromOptions(jac->head->ksp));
920       PetscCall(MatSetFromOptions(jac->schur));
921 
922       PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg));
923       if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */
924         KSP kspInner;
925         PC  pcInner;
926 
927         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
928         PetscCall(KSPGetPC(kspInner, &pcInner));
929         PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg));
930         if (flg) {
931           KSP ksp;
932 
933           PetscCall(PCKSPGetKSP(pcInner, &ksp));
934           if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE));
935         }
936       }
937       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname));
938       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, &flg));
939       if (flg) {
940         DM dmInner;
941 
942         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
943         PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper));
944         PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure));
945         PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix));
946         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1));
947         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1));
948         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
949         PetscCall(KSPSetDM(jac->kspupper, dmInner));
950         PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE));
951         PetscCall(KSPSetFromOptions(jac->kspupper));
952         PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0]));
953         PetscCall(VecDuplicate(jac->head->x, &jac->head->z));
954       } else {
955         jac->kspupper = jac->head->ksp;
956         PetscCall(PetscObjectReference((PetscObject)jac->head->ksp));
957       }
958 
959       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
960       PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur));
961       PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure));
962       PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1));
963       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) {
964         PC pcschur;
965         PetscCall(KSPGetPC(jac->kspschur, &pcschur));
966         PetscCall(PCSetType(pcschur, PCNONE));
967         /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */
968       } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) {
969         PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
970       }
971       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
972       PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix));
973       PetscCall(KSPSetOptionsPrefix(jac->kspschur, Dprefix));
974       /* propagate DM */
975       {
976         DM sdm;
977         PetscCall(KSPGetDM(jac->head->next->ksp, &sdm));
978         if (sdm) {
979           PetscCall(KSPSetDM(jac->kspschur, sdm));
980           PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE));
981         }
982       }
983       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
984       /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */
985       PetscCall(KSPSetFromOptions(jac->kspschur));
986     }
987     PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY));
988     PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY));
989 
990     /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */
991     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname));
992     PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
993     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
994     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", (PetscObject)LSC_L));
995     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname));
996     PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
997     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
998     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", (PetscObject)LSC_L));
999   } else if (jac->type == PC_COMPOSITE_GKB) {
1000     IS       ccis;
1001     PetscInt rstart, rend;
1002 
1003     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields");
1004 
1005     ilink = jac->head;
1006 
1007     /* When extracting off-diagonal submatrices, we take complements from this range */
1008     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
1009 
1010     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1011     if (jac->offdiag_use_amat) {
1012       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1013     } else {
1014       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1015     }
1016     PetscCall(ISDestroy(&ccis));
1017     /* Create work vectors for GKB algorithm */
1018     PetscCall(VecDuplicate(ilink->x, &jac->u));
1019     PetscCall(VecDuplicate(ilink->x, &jac->Hu));
1020     PetscCall(VecDuplicate(ilink->x, &jac->w2));
1021     ilink = ilink->next;
1022     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1023     if (jac->offdiag_use_amat) {
1024       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1025     } else {
1026       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1027     }
1028     PetscCall(ISDestroy(&ccis));
1029     /* Create work vectors for GKB algorithm */
1030     PetscCall(VecDuplicate(ilink->x, &jac->v));
1031     PetscCall(VecDuplicate(ilink->x, &jac->d));
1032     PetscCall(VecDuplicate(ilink->x, &jac->w1));
1033     PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu));
1034     PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz));
1035 
1036     ilink = jac->head;
1037     PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H));
1038     if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1039     /* Create gkb_monitor context */
1040     if (jac->gkbmonitor) {
1041       PetscInt tablevel;
1042       PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer));
1043       PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII));
1044       PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel));
1045       PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel));
1046       PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1));
1047     }
1048   } else {
1049     /* set up the individual splits' PCs */
1050     i     = 0;
1051     ilink = jac->head;
1052     while (ilink) {
1053       PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i]));
1054       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
1055       if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1056       i++;
1057       ilink = ilink->next;
1058     }
1059   }
1060 
1061   /* Set coordinates to the sub PC objects whenever these are set */
1062   if (jac->coordinates_set) {
1063     PC pc_coords;
1064     if (jac->type == PC_COMPOSITE_SCHUR) {
1065       // Head is first block.
1066       PetscCall(KSPGetPC(jac->head->ksp, &pc_coords));
1067       PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords));
1068       // Second one is Schur block, but its KSP object is in kspschur.
1069       PetscCall(KSPGetPC(jac->kspschur, &pc_coords));
1070       PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords));
1071     } else if (jac->type == PC_COMPOSITE_GKB) {
1072       PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB Fieldpslit preconditioner"));
1073     } else {
1074       ilink = jac->head;
1075       while (ilink) {
1076         PetscCall(KSPGetPC(ilink->ksp, &pc_coords));
1077         PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords));
1078         ilink = ilink->next;
1079       }
1080     }
1081   }
1082 
1083   jac->suboptionsset = PETSC_TRUE;
1084   PetscFunctionReturn(0);
1085 }
1086 
/* FieldSplitSplitSolveAdd - scatter the global vector xx into split ilink's work
   vector x, solve with the split's KSP (bracketed by event logging and followed by
   a convergence check), then scatter-ADD the split solution back into the global
   vector yy.

   Written as a chain of || so that the first call returning a nonzero error code
   short-circuits the remaining calls; the macro's value is that error code (0 on
   success), allowing callers to wrap it in PetscCall(). Note `pc` is taken from
   the enclosing scope of the call site. */
#define FieldSplitSplitSolveAdd(ilink, xx, yy) \
  (VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \
   KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \
   VecScatterEnd(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE))
1091 
/*
   PCApply_FieldSplit_Schur - Applies the block-factored Schur-complement
   preconditioner y = M^{-1} x for the 2x2 split [A00 A01; A10 A11], using the
   factorization selected by jac->schurfactorization (DIAG, LOWER, UPPER, FULL).

   Input Parameters:
+  pc - the PCFIELDSPLIT preconditioner of type PC_COMPOSITE_SCHUR
-  x  - the global input vector

   Output Parameter:
.  y  - the preconditioned global vector

   Notes:
   The VecScatterBegin/VecScatterEnd pairs are deliberately interleaved with the
   KSP solves and matrix multiplies so that scatter communication overlaps
   computation; the exact ordering of these calls is load-bearing and must not
   be "tidied up".
*/
static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; /* A = 00-block split, D = 11-block split */
  KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;

  PetscFunctionBegin;
  switch (jac->schurfactorization) {
  case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
    /* [A00 0; 0 -S], positive definite, suitable for MINRES */
    /* the A-solve and the S-solve are independent; their scatters are overlapped */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* schurscale is -1 unless the operator was detected SPD during setup */
    PetscCall(VecScale(ilinkD->y, jac->schurscale));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* [A00 0; A10 S], suitable for left preconditioning */
    /* solve A00 y0 = x0, then form the Schur rhs x1 - A10 y0 and solve with S */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x)); /* jac->C is the A10 block */
    PetscCall(VecScale(ilinkD->x, -1.));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* [A00 A01; 0 S], suitable for right preconditioning */
    /* solve S y1 = x1 first, then form x0 - A01 y1 and solve with A00 */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x)); /* jac->B is the A01 block */
    PetscCall(VecScale(ilinkA->x, -1.));
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */
    /* lower solve */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.0));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));

    /* Schur solve */
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

    /* upper solve: when a distinct upper KSP was configured (see PCSetUp), the
       two A00 solves are kept separate and combined via the z work vector */
    if (kspUpper == kspA) {
      PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y));
      PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    } else {
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
      PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
      PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z));
      PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z));
      PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
      PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
    }
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(0);
}
1205 
/*
   PCApply_FieldSplit - Applies the additive (block Jacobi) or (symmetric)
   multiplicative (block Gauss-Seidel) fieldsplit preconditioner, y = M^{-1} x.

   Notes:
   The VecScatterBegin()/VecScatterEnd() pairs are deliberately split so that
   communication can overlap local work; do not reorder or merge them.
   For PC_COMPOSITE_MULTIPLICATIVE with exactly two splits a specialized path
   is used that computes the residual only on the second split's variables via
   jac->Afield[1] instead of a full-matrix residual.
*/
static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          cnt, bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    if (jac->defaultsplit) {
      /* All splits are strided with identical layout: gather every field at once
         instead of using one scatter per split */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      /* solve each split independently; results land in the ilink->y work vectors */
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* general (non-strided) splits: scatter, solve, and accumulate per split */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) {
    PetscCall(VecSet(y, 0.0));
    /* solve on first block for first block variables */
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));

    /* compute the residual only onto second block variables using first block variables */
    PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x));
    ilink = ilink->next;
    PetscCall(VecScale(ilink->x, -1.0));
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));

    /* solve on second block variables */
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
    /* work vectors for the residual updates are created lazily on first use */
    if (!jac->w1) {
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    /* forward sweep over the splits */
    PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
    cnt = 1;
    while (ilink->next) {
      ilink = ilink->next;
      /* compute the residual only over the part of the vector needed */
      PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x));
      PetscCall(VecScale(ilink->x, -1.0));
      PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
      PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
      PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    }
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* backward sweep, skipping the last split which was just solved above */
      cnt -= 2;
      while (ilink->previous) {
        ilink = ilink->previous;
        /* compute the residual only over the part of the vector needed */
        PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x));
        PetscCall(VecScale(ilink->x, -1.0));
        PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      }
    }
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type);
  PetscFunctionReturn(0);
}
1303 
/*
   PCApply_FieldSplit_GKB - Applies the Golub-Kahan bidiagonalization (GKB)
   preconditioner for a two-split (saddle-point style) system; the inner
   comments reference [Ar13] for the algorithm. When jac->gkbnu > 0 the (0,0)
   block is regularized as H = A + nu*B*B' (augmented Lagrangian) and the
   right-hand side is modified accordingly.

   The iteration runs until the lower-bound error estimate (built from the
   last jac->gkbdelay values of z stored in jac->vecz) drops below
   jac->gkbtol, or jac->gkbmaxit iterations are reached.
*/
static PetscErrorCode PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; /* A: primal (0,0) split, D: dual split */
  KSP               ksp = ilinkA->ksp;
  Vec               u, v, Hu, d, work1, work2;
  PetscScalar       alpha, z, nrmz2, *vecz;
  PetscReal         lowbnd, nu, beta;
  PetscInt          j, iterGKB;

  PetscFunctionBegin;
  /* pull both splits of the input vector into the split work vectors */
  PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));

  /* local aliases for the work objects created during setup */
  u     = jac->u;
  v     = jac->v;
  Hu    = jac->Hu;
  d     = jac->d;
  work1 = jac->w1;
  work2 = jac->w2;
  vecz  = jac->vecz;

  /* Change RHS to comply with matrix regularization H = A + nu*B*B' */
  /* Add q = q + nu*B*b */
  if (jac->gkbnu) {
    nu = jac->gkbnu;
    PetscCall(VecScale(ilinkD->x, jac->gkbnu));
    PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */
  } else {
    /* Situation when no augmented Lagrangian is used. Then we set inner  */
    /* matrix N = I in [Ar13], and thus nu = 1.                           */
    nu = 1;
  }

  /* Transform rhs from [q,tilde{b}] to [0,b] */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y));
  PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1));
  PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x        */

  /* First step of algorithm */
  PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c)*/
  KSPCheckDot(ksp, beta);
  beta = PetscSqrtReal(nu) * beta;
  PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c      */
  PetscCall(MatMult(jac->B, v, work2));          /* u = H^{-1}*B*v      */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(KSPSolve(ksp, work2, u));
  PetscCall(KSPCheckSolve(ksp, pc, u));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u      */
  PetscCall(VecDot(Hu, u, &alpha));
  KSPCheckDot(ksp, alpha);
  PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
  alpha = PetscSqrtReal(PetscAbsScalar(alpha));
  PetscCall(VecScale(u, 1.0 / alpha));
  PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* v = nu/beta *c      */

  z       = beta / alpha;
  vecz[1] = z;

  /* Computation of first iterate x(1) and p(1) */
  PetscCall(VecAXPY(ilinkA->y, z, u));
  PetscCall(VecCopy(d, ilinkD->y));
  PetscCall(VecScale(ilinkD->y, -z));

  iterGKB = 1;
  lowbnd  = 2 * jac->gkbtol; /* start above the tolerance so the loop can run */
  if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));

  while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) {
    iterGKB += 1;
    PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */
    PetscCall(VecAXPBY(v, nu, -alpha, work1));
    PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = sqrt(nu)*v'*v      */
    beta = beta / PetscSqrtReal(nu);
    PetscCall(VecScale(v, 1.0 / beta));
    PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */
    PetscCall(MatMult(jac->H, u, Hu));
    PetscCall(VecAXPY(work2, -beta, Hu));
    PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(KSPSolve(ksp, work2, u));
    PetscCall(KSPCheckSolve(ksp, pc, u));
    PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u            */
    PetscCall(VecDot(Hu, u, &alpha));
    KSPCheckDot(ksp, alpha);
    PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
    alpha = PetscSqrtReal(PetscAbsScalar(alpha));
    PetscCall(VecScale(u, 1.0 / alpha));

    z       = -beta / alpha * z; /* z <- beta/alpha*z     */
    vecz[0] = z;

    /* Computation of new iterate x(i+1) and p(i+1) */
    PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */
    PetscCall(VecAXPY(ilinkA->y, z, u));                   /* r = r + z*u          */
    PetscCall(VecAXPY(ilinkD->y, -z, d));                  /* p = p - z*d          */
    PetscCall(MatMult(jac->H, ilinkA->y, Hu));             /* ||u||_H = u'*H*u     */
    PetscCall(VecDot(Hu, ilinkA->y, &nrmz2));

    /* Compute Lower Bound estimate */
    if (iterGKB > jac->gkbdelay) {
      lowbnd = 0.0;
      for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]);
      lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2));
    }

    /* shift the history window of z values by one (newest entry goes in vecz[0] above) */
    for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2];
    if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));
  }

  /* scatter both split solutions back into the global output vector */
  PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

  PetscFunctionReturn(0);
}
1427 
/* Transpose analogue of FieldSplitSplitSolveAdd(): scatters xx into the split's
   work vector, applies KSPSolveTranspose() with the split's KSP, and ADDs the
   result back into yy. The calls are chained with || so the first nonzero
   PetscErrorCode short-circuits and becomes the macro's value, which the caller
   wraps in PetscCall(). NOTE(review): the macro references the local variable
   `pc` of the calling function (in KSPCheckSolve), so it can only be used
   inside functions that have a PC named pc in scope. */
#define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \
  (VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \
   KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \
   VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE))
1432 
/*
   PCApplyTranspose_FieldSplit - Applies the transpose of the fieldsplit
   preconditioner, y = M^{-T} x.

   Notes:
   For the additive case this mirrors PCApply_FieldSplit() with
   KSPSolveTranspose() on each split. For the multiplicative cases the sweep
   direction is reversed relative to PCApply_FieldSplit() (last split first),
   and the residual is computed with MatMultTranspose() on the full matrix
   rather than the per-split Afield matrices.
*/
static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    if (jac->defaultsplit) {
      /* strided splits with a common layout: one gather/scatter for all fields */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else {
    /* multiplicative / symmetric multiplicative transpose application */
    if (!jac->w1) {
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* symmetric: forward sweep followed by backward sweep */
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->next) {
        ilink = ilink->next;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); /* w2 = x - A^T y (current residual) */
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    } else {
      while (ilink->next) { /* get to last entry in linked list */
        ilink = ilink->next;
      }
      /* transpose of forward Gauss-Seidel: sweep splits in reverse order */
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    }
  }
  PetscFunctionReturn(0);
}
1497 
1498 static PetscErrorCode PCReset_FieldSplit(PC pc)
1499 {
1500   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1501   PC_FieldSplitLink ilink = jac->head, next;
1502 
1503   PetscFunctionBegin;
1504   while (ilink) {
1505     PetscCall(KSPDestroy(&ilink->ksp));
1506     PetscCall(VecDestroy(&ilink->x));
1507     PetscCall(VecDestroy(&ilink->y));
1508     PetscCall(VecDestroy(&ilink->z));
1509     PetscCall(VecScatterDestroy(&ilink->sctx));
1510     PetscCall(ISDestroy(&ilink->is));
1511     PetscCall(ISDestroy(&ilink->is_col));
1512     PetscCall(PetscFree(ilink->splitname));
1513     PetscCall(PetscFree(ilink->fields));
1514     PetscCall(PetscFree(ilink->fields_col));
1515     next = ilink->next;
1516     PetscCall(PetscFree(ilink));
1517     ilink = next;
1518   }
1519   jac->head = NULL;
1520   PetscCall(PetscFree2(jac->x, jac->y));
1521   if (jac->mat && jac->mat != jac->pmat) {
1522     PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat));
1523   } else if (jac->mat) {
1524     jac->mat = NULL;
1525   }
1526   if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat));
1527   if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield));
1528   jac->nsplits = 0;
1529   PetscCall(VecDestroy(&jac->w1));
1530   PetscCall(VecDestroy(&jac->w2));
1531   PetscCall(MatDestroy(&jac->schur));
1532   PetscCall(MatDestroy(&jac->schurp));
1533   PetscCall(MatDestroy(&jac->schur_user));
1534   PetscCall(KSPDestroy(&jac->kspschur));
1535   PetscCall(KSPDestroy(&jac->kspupper));
1536   PetscCall(MatDestroy(&jac->B));
1537   PetscCall(MatDestroy(&jac->C));
1538   PetscCall(MatDestroy(&jac->H));
1539   PetscCall(VecDestroy(&jac->u));
1540   PetscCall(VecDestroy(&jac->v));
1541   PetscCall(VecDestroy(&jac->Hu));
1542   PetscCall(VecDestroy(&jac->d));
1543   PetscCall(PetscFree(jac->vecz));
1544   PetscCall(PetscViewerDestroy(&jac->gkbviewer));
1545   jac->isrestrict = PETSC_FALSE;
1546   PetscFunctionReturn(0);
1547 }
1548 
1549 static PetscErrorCode PCDestroy_FieldSplit(PC pc)
1550 {
1551   PetscFunctionBegin;
1552   PetscCall(PCReset_FieldSplit(pc));
1553   PetscCall(PetscFree(pc->data));
1554   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
1555   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL));
1556   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL));
1557   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL));
1558   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL));
1559   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL));
1560   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL));
1561   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1562 
1563   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
1564   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
1565   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
1566   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
1567   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1568   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
1569   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
1570   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
1571   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
1572   PetscFunctionReturn(0);
1573 }
1574 
/*
   PCSetFromOptions_FieldSplit - Processes the -pc_fieldsplit_* options:
   composition type, block size, Amat/Pmat selection, saddle-point detection,
   runtime-defined splits, and the Schur- or GKB-specific suboptions.
*/
static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PetscInt        bs;
  PetscBool       flg;
  PC_FieldSplit  *jac = (PC_FieldSplit *)pc->data;
  PCCompositeType ctype;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options");
  PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL));
  PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg));
  if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs));
  /* default the Amat/Pmat choices from the PC-wide useAmat flag, then let options override */
  jac->diag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL));
  jac->offdiag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL));
  PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL));
  PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */
  PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg));
  if (flg) PetscCall(PCFieldSplitSetType(pc, ctype));
  /* Only setup fields once */
  if ((jac->bs > 0) && (jac->nsplits == 0)) {
    /* only allow user to set fields from command line if bs is already known.
       otherwise user can set them in PCFieldSplitSetDefaults() */
    PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
    if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
  }
  if (jac->type == PC_COMPOSITE_SCHUR) {
    /* deprecated spelling accepted first, then the current option names */
    PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg));
    if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n"));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL));
    PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL));
  } else if (jac->type == PC_COMPOSITE_GKB) {
    PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitGKBTol", jac->gkbtol, &jac->gkbtol, NULL));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL));
    PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitGKBNu", jac->gkbnu, &jac->gkbnu, NULL));
    PetscCheck(jac->gkbnu >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "nu cannot be less than 0: value %g", (double)jac->gkbnu);
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", "Maximum allowed number of iterations", "PCFieldSplitGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL));
    PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL));
  }
  /*
    In the initial call to this routine the sub-solver data structures do not exist so we cannot call KSPSetFromOptions() on them yet.
    But after the initial setup of ALL the layers of sub-solvers is completed we do want to call KSPSetFromOptions() on the sub-solvers every time it
    is called on the outer solver in case changes were made in the options database

    But even after PCSetUp_FieldSplit() is called all the options inside the inner levels of sub-solvers may still not have been set thus we only call the KSPSetFromOptions()
    if we know that the entire stack of sub-solvers below this have been complete instantiated, we check this by seeing if any solver iterations are complete.
    Without this extra check test p2p1fetidp_olof_full and others fail with incorrect matrix types.

    There could be a negative side effect of calling the KSPSetFromOptions() below.

    If one captured the PetscObjectState of the options database one could skip these calls if the database has not changed from the previous call
  */
  if (jac->issetup) {
    PC_FieldSplitLink ilink = jac->head;
    if (jac->type == PC_COMPOSITE_SCHUR) {
      if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper));
      if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur));
    }
    while (ilink) {
      if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp));
      ilink = ilink->next;
    }
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(0);
}
1643 
1644 static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
1645 {
1646   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
1647   PC_FieldSplitLink ilink, next = jac->head;
1648   char              prefix[128];
1649   PetscInt          i;
1650 
1651   PetscFunctionBegin;
1652   if (jac->splitdefined) {
1653     PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
1654     PetscFunctionReturn(0);
1655   }
1656   for (i = 0; i < n; i++) {
1657     PetscCheck(fields[i] < jac->bs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", fields[i], jac->bs);
1658     PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]);
1659   }
1660   PetscCall(PetscNew(&ilink));
1661   if (splitname) {
1662     PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
1663   } else {
1664     PetscCall(PetscMalloc1(3, &ilink->splitname));
1665     PetscCall(PetscSNPrintf(ilink->splitname, 2, "%" PetscInt_FMT, jac->nsplits));
1666   }
1667   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split great than 4 gets logged in the 4th split */
1668   PetscCall(PetscMalloc1(n, &ilink->fields));
1669   PetscCall(PetscArraycpy(ilink->fields, fields, n));
1670   PetscCall(PetscMalloc1(n, &ilink->fields_col));
1671   PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n));
1672 
1673   ilink->nfields = n;
1674   ilink->next    = NULL;
1675   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
1676   PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
1677   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
1678   PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
1679 
1680   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
1681   PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
1682 
1683   if (!next) {
1684     jac->head       = ilink;
1685     ilink->previous = NULL;
1686   } else {
1687     while (next->next) next = next->next;
1688     next->next      = ilink;
1689     ilink->previous = next;
1690   }
1691   jac->nsplits++;
1692   PetscFunctionReturn(0);
1693 }
1694 
1695 static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1696 {
1697   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1698 
1699   PetscFunctionBegin;
1700   *subksp = NULL;
1701   if (n) *n = 0;
1702   if (jac->type == PC_COMPOSITE_SCHUR) {
1703     PetscInt nn;
1704 
1705     PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()");
1706     PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits);
1707     nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 1 : 0);
1708     PetscCall(PetscMalloc1(nn, subksp));
1709     (*subksp)[0] = jac->head->ksp;
1710     (*subksp)[1] = jac->kspschur;
1711     if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper;
1712     if (n) *n = nn;
1713   }
1714   PetscFunctionReturn(0);
1715 }
1716 
1717 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp)
1718 {
1719   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1720 
1721   PetscFunctionBegin;
1722   PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()");
1723   PetscCall(PetscMalloc1(jac->nsplits, subksp));
1724   PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp));
1725 
1726   (*subksp)[1] = jac->kspschur;
1727   if (n) *n = jac->nsplits;
1728   PetscFunctionReturn(0);
1729 }
1730 
1731 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1732 {
1733   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1734   PetscInt          cnt   = 0;
1735   PC_FieldSplitLink ilink = jac->head;
1736 
1737   PetscFunctionBegin;
1738   PetscCall(PetscMalloc1(jac->nsplits, subksp));
1739   while (ilink) {
1740     (*subksp)[cnt++] = ilink->ksp;
1741     ilink            = ilink->next;
1742   }
1743   PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits);
1744   if (n) *n = jac->nsplits;
1745   PetscFunctionReturn(0);
1746 }
1747 
1748 /*@C
1749     PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`.
1750 
1751     Input Parameters:
1752 +   pc  - the preconditioner context
-   isy - the index set that defines the indices to which the fieldsplit is to be restricted
1754 
1755     Level: advanced
1756 
1757     Developer Note:
1758     It seems the resulting `IS`s will not cover the entire space, so
1759     how can they define a convergent preconditioner? Needs explaining.
1760 
1761 .seealso: [](sec_block_matrices), `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
1762 @*/
PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(isy, IS_CLASSID, 2);
  /* Dispatch to the PCFIELDSPLIT implementation if registered; silently a no-op for other PC types */
  PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy));
  PetscFunctionReturn(0);
}
1771 
/*
  Restricts each split's row IS (and column IS) to the subset selected by isy, renumbering
  the surviving indices into the contiguous global numbering induced by isy. If a split's
  sub-PC is itself a PCFIELDSPLIT, the restriction is propagated recursively so nested
  splits remain consistent with the restricted outer numbering.
*/
static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;
  PetscInt          localsize, size, sizez, i;
  const PetscInt   *ind, *indz;
  PetscInt         *indc, *indcz;
  PetscBool         flg;

  PetscFunctionBegin;
  PetscCall(ISGetLocalSize(isy, &localsize));
  /* Exclusive prefix sum: size becomes the offset of this rank's first entry in the numbering of isy */
  PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy)));
  size -= localsize;
  while (ilink) {
    IS isrl, isr;
    PC subpc;
    /* isrl holds, in the local numbering of isy, the positions of this split's entries that survive the restriction */
    PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl));
    PetscCall(ISGetLocalSize(isrl, &localsize));
    PetscCall(PetscMalloc1(localsize, &indc));
    PetscCall(ISGetIndices(isrl, &ind));
    PetscCall(PetscArraycpy(indc, ind, localsize));
    PetscCall(ISRestoreIndices(isrl, &ind));
    PetscCall(ISDestroy(&isrl));
    /* Shift the local positions by the rank offset to obtain global indices in the restricted numbering */
    for (i = 0; i < localsize; i++) *(indc + i) += size;
    /* indc ownership transfers to isr (PETSC_OWN_POINTER) */
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr));
    /* Install isr as both the row and column IS of the split; each slot keeps its own reference */
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is));
    ilink->is = isr;
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is_col));
    ilink->is_col = isr;
    PetscCall(ISDestroy(&isr));
    PetscCall(KSPGetPC(ilink->ksp, &subpc));
    PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg));
    if (flg) {
      IS       iszl, isz;
      MPI_Comm comm;
      /* NOTE(review): ilink->is was already replaced above, so the recursion is built on the restricted numbering */
      PetscCall(ISGetLocalSize(ilink->is, &localsize));
      comm = PetscObjectComm((PetscObject)ilink->is);
      /* Embed isy into the (restricted) split to express the restriction in the sub-PC's numbering */
      PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl));
      PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm));
      sizez -= localsize;
      PetscCall(ISGetLocalSize(iszl, &localsize));
      PetscCall(PetscMalloc1(localsize, &indcz));
      PetscCall(ISGetIndices(iszl, &indz));
      PetscCall(PetscArraycpy(indcz, indz, localsize));
      PetscCall(ISRestoreIndices(iszl, &indz));
      PetscCall(ISDestroy(&iszl));
      for (i = 0; i < localsize; i++) *(indcz + i) += sizez;
      PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz));
      /* Recurse so the nested fieldsplit restricts its own splits accordingly */
      PetscCall(PCFieldSplitRestrictIS(subpc, isz));
      PetscCall(ISDestroy(&isz));
    }
    next  = ilink->next;
    ilink = next;
  }
  jac->isrestrict = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1831 
1832 static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is)
1833 {
1834   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
1835   PC_FieldSplitLink ilink, next = jac->head;
1836   char              prefix[128];
1837 
1838   PetscFunctionBegin;
1839   if (jac->splitdefined) {
1840     PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
1841     PetscFunctionReturn(0);
1842   }
1843   PetscCall(PetscNew(&ilink));
1844   if (splitname) {
1845     PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
1846   } else {
1847     PetscCall(PetscMalloc1(8, &ilink->splitname));
1848     PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits));
1849   }
1850   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split great than 4 gets logged in the 4th split */
1851   PetscCall(PetscObjectReference((PetscObject)is));
1852   PetscCall(ISDestroy(&ilink->is));
1853   ilink->is = is;
1854   PetscCall(PetscObjectReference((PetscObject)is));
1855   PetscCall(ISDestroy(&ilink->is_col));
1856   ilink->is_col = is;
1857   ilink->next   = NULL;
1858   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
1859   PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
1860   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
1861   PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
1862 
1863   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
1864   PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
1865 
1866   if (!next) {
1867     jac->head       = ilink;
1868     ilink->previous = NULL;
1869   } else {
1870     while (next->next) next = next->next;
1871     next->next      = ilink;
1872     ilink->previous = next;
1873   }
1874   jac->nsplits++;
1875   PetscFunctionReturn(0);
1876 }
1877 
1878 /*@C
1879     PCFieldSplitSetFields - Sets the fields that define one particular split in `PCFIELDSPLIT`
1880 
1881     Logically Collective
1882 
1883     Input Parameters:
1884 +   pc  - the preconditioner context
1885 .   splitname - name of this split, if `NULL` the number of the split is used
1886 .   n - the number of fields in this split
1887 .   fields - the fields in this split
1888 -   fields_col - generally the same as fields, if it does not match fields then the matrix block that is solved for this set of fields comes from an off-diagonal block
1889                  of the matrix and fields_col provides the column indices for that block
1890 
1891     Level: intermediate
1892 
1893     Notes:
1894     Use `PCFieldSplitSetIS()` to set a  general set of indices as a split.
1895 
1896      `PCFieldSplitSetFields()` is for defining fields as strided blocks. For example, if the block
1897      size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean
1898      0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x1x3x5x7.. x12x45x78x....
1899      where the numbered entries indicate what is in the split.
1900 
1901      This function is called once per split (it creates a new split each time).  Solve options
1902      for this split will be available under the prefix `-fieldsplit_SPLITNAME_`.
1903 
1904    `PCFieldSplitSetIS()` does not support having a fields_col different from fields
1905 
1906    Developer Note:
1907    This routine does not actually create the `IS` representing the split, that is delayed
1908    until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be
1909    available when this routine is called.
1910 
1911 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()`
1912 @*/
PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidCharPointer(splitname, 2);
  PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname);
  PetscValidIntPointer(fields, 4);
  /* Dispatch to the PCFIELDSPLIT implementation if registered; silently a no-op for other PC types */
  PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col));
  PetscFunctionReturn(0);
}
1923 
1924 /*@
1925     PCFieldSplitSetDiagUseAmat - set flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
1926     the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.
1927 
1928     Logically Collective
1929 
1930     Input Parameters:
1931 +   pc  - the preconditioner object
1932 -   flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
1933 
1934     Options Database Keys:
1935 .   -pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks
1936 
1937     Level: intermediate
1938 
1939 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT`
1940 @*/
PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* pc->data is only a PC_FieldSplit when pc really is a PCFIELDSPLIT; check before touching jac */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  jac->diag_use_amat = flg;
  PetscFunctionReturn(0);
}
1953 
1954 /*@
1955     PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
1956     the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.
1957 
1958     Logically Collective
1959 
1960     Input Parameters:
1961 .   pc  - the preconditioner object
1962 
1963     Output Parameters:
1964 .   flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
1965 
1966     Level: intermediate
1967 
1968 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT`
1969 @*/
PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidBoolPointer(flg, 2);
  /* pc->data is only a PC_FieldSplit when pc really is a PCFIELDSPLIT; check before touching jac */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  *flg = jac->diag_use_amat;
  PetscFunctionReturn(0);
}
1983 
1984 /*@
1985     PCFieldSplitSetOffDiagUseAmat - set flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
1986     the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.
1987 
1988     Logically Collective
1989 
1990     Input Parameters:
1991 +   pc  - the preconditioner object
1992 -   flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
1993 
1994     Options Database Keys:
1995 .     -pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks
1996 
1997     Level: intermediate
1998 
1999 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT`
2000 @*/
PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* pc->data is only a PC_FieldSplit when pc really is a PCFIELDSPLIT; check before touching jac */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  jac->offdiag_use_amat = flg;
  PetscFunctionReturn(0);
}
2013 
2014 /*@
2015     PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2016     the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.
2017 
2018     Logically Collective
2019 
2020     Input Parameters:
2021 .   pc  - the preconditioner object
2022 
2023     Output Parameters:
2024 .   flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2025 
2026     Level: intermediate
2027 
2028 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT`
2029 @*/
PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidBoolPointer(flg, 2);
  /* pc->data is only a PC_FieldSplit when pc really is a PCFIELDSPLIT; check before touching jac */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  *flg = jac->offdiag_use_amat;
  PetscFunctionReturn(0);
}
2043 
2044 /*@C
2045     PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT`
2046 
2047     Logically Collective
2048 
2049     Input Parameters:
2050 +   pc  - the preconditioner context
2051 .   splitname - name of this split, if `NULL` the number of the split is used
2052 -   is - the index set that defines the elements in this split
2053 
2054     Level: intermediate
2055 
2056     Notes:
2057     Use `PCFieldSplitSetFields()`, for splits defined by strided types.
2058 
2059     This function is called once per split (it creates a new split each time).  Solve options
2060     for this split will be available under the prefix -fieldsplit_SPLITNAME_.
2061 
2062 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`
2063 @*/
PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (splitname) PetscValidCharPointer(splitname, 2); /* NULL splitname means "use the split number as the name" */
  PetscValidHeaderSpecific(is, IS_CLASSID, 3);
  /* Dispatch to the PCFIELDSPLIT implementation if registered; silently a no-op for other PC types */
  PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is));
  PetscFunctionReturn(0);
}
2073 
2074 /*@C
2075     PCFieldSplitGetIS - Retrieves the elements for a split as an `IS`
2076 
2077     Logically Collective
2078 
2079     Input Parameters:
2080 +   pc  - the preconditioner context
2081 -   splitname - name of this split
2082 
2083     Output Parameter:
.   is - the index set that defines the elements in this split, or `NULL` if the split is not found
2085 
2086     Level: intermediate
2087 
2088 .seealso:  [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`
2089 @*/
2090 PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is)
2091 {
2092   PetscFunctionBegin;
2093   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2094   PetscValidCharPointer(splitname, 2);
2095   PetscValidPointer(is, 3);
2096   {
2097     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2098     PC_FieldSplitLink ilink = jac->head;
2099     PetscBool         found;
2100 
2101     *is = NULL;
2102     while (ilink) {
2103       PetscCall(PetscStrcmp(ilink->splitname, splitname, &found));
2104       if (found) {
2105         *is = ilink->is;
2106         break;
2107       }
2108       ilink = ilink->next;
2109     }
2110   }
2111   PetscFunctionReturn(0);
2112 }
2113 
2114 /*@C
2115     PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS`
2116 
2117     Logically Collective
2118 
2119     Input Parameters:
2120 +   pc  - the preconditioner context
2121 -   index - index of this split
2122 
2123     Output Parameter:
.   is - the index set that defines the elements in this split
2125 
2126     Level: intermediate
2127 
2128 .seealso:  [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`
2129 @*/
2130 PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is)
2131 {
2132   PetscFunctionBegin;
2133   PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index);
2134   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2135   PetscValidPointer(is, 3);
2136   {
2137     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2138     PC_FieldSplitLink ilink = jac->head;
2139     PetscInt          i     = 0;
2140     PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits);
2141 
2142     while (i < index) {
2143       ilink = ilink->next;
2144       ++i;
2145     }
2146     PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is));
2147   }
2148   PetscFunctionReturn(0);
2149 }
2150 
2151 /*@
2152     PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the
2153       fieldsplit preconditioner when calling `PCFieldSplitSetIS()`. If not set the matrix block size is used.
2154 
2155     Logically Collective
2156 
2157     Input Parameters:
2158 +   pc  - the preconditioner context
2159 -   bs - the block size
2160 
2161     Level: intermediate
2162 
2163 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
2164 @*/
PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, bs, 2); /* bs must be the same on every rank */
  /* Dispatch to the PCFIELDSPLIT implementation if registered; silently a no-op for other PC types */
  PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs));
  PetscFunctionReturn(0);
}
2173 
2174 /*@C
2175    PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits
2176 
2177    Collective
2178 
2179    Input Parameter:
2180 .  pc - the preconditioner context
2181 
2182    Output Parameters:
2183 +  n - the number of splits
2184 -  subksp - the array of `KSP` contexts
2185 
2186    Level: advanced
2187 
2188    Notes:
2189    After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2190    (not the `KSP`, just the array that contains them).
2191 
2192    You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`.
2193 
2194    If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the
2195    Schur complement and the `KSP` object used to iterate over the Schur complement.
2196    To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`.
2197 
2198    If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the
2199    inner linear system defined by the matrix H in each loop.
2200 
2201    Fortran Usage:
2202    You must pass in a `KSP` array that is large enough to contain all the `KSP`s.
2203    You can call `PCFieldSplitGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
2204    `KSP` array must be.
2205 
2206    Developer Note:
2207    There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2208 
2209    The Fortran interface should be modernized to return directly the array of values.
2210 
2211 .seealso:  [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()`
2212 @*/
PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscValidIntPointer(n, 2);
  /* PetscUseMethod() (unlike PetscTryMethod()) raises an error when the PC does not provide the method */
  PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(0);
}
2221 
2222 /*@C
2223    PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT`
2224 
2225    Collective
2226 
2227    Input Parameter:
2228 .  pc - the preconditioner context
2229 
2230    Output Parameters:
2231 +  n - the number of splits
2232 -  subksp - the array of `KSP` contexts
2233 
2234    Level: advanced
2235 
2236    Notes:
2237    After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2238    (not the `KSP` just the array that contains them).
2239 
2240    You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`.
2241 
2242    If the fieldsplit type is of type `PC_COMPOSITE_SCHUR`, it returns (in order)
2243 +  1  - the `KSP` used for the (1,1) block
2244 .  2  - the `KSP` used for the Schur complement (not the one used for the interior Schur solver)
2245 -  3  - the `KSP` used for the (1,1) block in the upper triangular factor (if different from that of the (1,1) block).
2246 
2247    It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`.
2248 
2249    Fortran Note:
2250    You must pass in a `KSP` array that is large enough to contain all the local `KSP`s.
2251    You can call `PCFieldSplitSchurGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
2252    `KSP` array must be.
2253 
2254    Developer Notes:
2255    There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2256 
2257    Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged?
2258 
2259    The Fortran interface should be modernized to return directly the array of values.
2260 
2261 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()`
2262 @*/
PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscValidIntPointer(n, 2);
  /* PetscUseMethod() (unlike PetscTryMethod()) raises an error when the PC does not provide the method */
  PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(0);
}
2271 
2272 /*@
    PCFieldSplitSetSchurPre -  Indicates from what operator the preconditioner is constructed for the Schur complement.
      The default is the A11 matrix.
2275 
2276     Collective
2277 
2278     Input Parameters:
2279 +   pc      - the preconditioner context
2280 .   ptype   - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default),
2281               `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`,
2282               `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL`
2283 -   pre - matrix to use for preconditioning, or `NULL`
2284 
2285     Options Database Keys:
2286 +    -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is a11. See notes for meaning of various arguments
2287 -    -fieldsplit_1_pc_type <pctype> - the preconditioner algorithm that is used to construct the preconditioner from the operator
2288 
2289      Level: intermediate
2290 
2291     Notes:
2292     If ptype is
2293 +     a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner
2294      matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix
2295 .     self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix:
          The only preconditioner that currently works with this symbolic representation matrix object is the `PCLSC`
2297           preconditioner
2298 .     user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument
2299           to this function).
2300 .     selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation Sp = A11 - A10 inv(diag(A00)) A01
2301           This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be
2302           lumped before extracting the diagonal using the additional option -fieldsplit_1_mat_schur_complement_ainv_type lump
2303 -     full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation
2304       computed internally by `PCFIELDSPLIT` (this is expensive)
2305       useful mostly as a test that the Schur complement approach can work for your problem
2306 
2307      When solving a saddle point problem, where the A11 block is identically zero, using a11 as the ptype only makes sense
2308     with the additional option -fieldsplit_1_pc_type none. Usually for saddle point problems one would use a ptype of self and
2309     -fieldsplit_1_pc_type lsc which uses the least squares commutator to compute a preconditioner for the Schur complement.
2310 
2311 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`,
2312           `MatSchurComplementSetAinvType()`, `PCLSC`,
2313           `PCFieldSplitSchurPreType`
2314 @*/
PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Dispatch to the PCFIELDSPLIT implementation if registered; silently a no-op for other PC types */
  PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre));
  PetscFunctionReturn(0);
}
2322 
/* Deprecated: retained for backward compatibility only; use PCFieldSplitSetSchurPre() */
PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  return PCFieldSplitSetSchurPre(pc, ptype, pre);
} /* Deprecated name */
2327 
2328 /*@
2329     PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be
2330     preconditioned.  See `PCFieldSplitSetSchurPre()` for details.
2331 
2332     Logically Collective
2333 
2334     Input Parameter:
2335 .   pc      - the preconditioner context
2336 
2337     Output Parameters:
+   ptype   - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`
-   pre - matrix to use for preconditioning (with `PC_FIELDSPLIT_SCHUR_PRE_USER`), or NULL
2340 
2341     Level: intermediate
2342 
2343 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC`,
2344           `PCFieldSplitSchurPreType`
2345 @*/
PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscUseMethod() (unlike PetscTryMethod()) raises an error when the PC does not provide the method */
  PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre));
  PetscFunctionReturn(0);
}
2353 
2354 /*@
2355     PCFieldSplitSchurGetS -  extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately
2356 
2357     Not collective
2358 
2359     Input Parameter:
2360 .   pc      - the preconditioner context
2361 
2362     Output Parameter:
2363 .   S       - the Schur complement matrix
2364 
2365     Level: advanced
2366 
2367     Note:
2368     This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`.
2369 
2370 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`,
2371           `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()`
2372 @*/
2373 PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S)
2374 {
2375   const char    *t;
2376   PetscBool      isfs;
2377   PC_FieldSplit *jac;
2378 
2379   PetscFunctionBegin;
2380   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2381   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2382   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2383   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2384   jac = (PC_FieldSplit *)pc->data;
2385   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2386   if (S) *S = jac->schur;
2387   PetscFunctionReturn(0);
2388 }
2389 
2390 /*@
2391     PCFieldSplitSchurRestoreS -  returns the `MATSCHURCOMPLEMENT` matrix used by this `PC`
2392 
2393     Not collective
2394 
2395     Input Parameters:
2396 +   pc      - the preconditioner context
2397 -   S       - the Schur complement matrix
2398 
2399     Level: advanced
2400 
2401 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MatSchurComplement`, `PCFieldSplitSchurGetS()`
2402 @*/
2403 PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S)
2404 {
2405   const char    *t;
2406   PetscBool      isfs;
2407   PC_FieldSplit *jac;
2408 
2409   PetscFunctionBegin;
2410   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2411   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2412   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2413   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2414   jac = (PC_FieldSplit *)pc->data;
2415   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2416   PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten");
2417   PetscFunctionReturn(0);
2418 }
2419 
2420 static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2421 {
2422   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2423 
2424   PetscFunctionBegin;
2425   jac->schurpre = ptype;
2426   if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) {
2427     PetscCall(MatDestroy(&jac->schur_user));
2428     jac->schur_user = pre;
2429     PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
2430   }
2431   PetscFunctionReturn(0);
2432 }
2433 
/* Returns the stored Schur preconditioning choice and the user-supplied matrix (may be NULL) */
static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  *ptype = jac->schurpre;
  *pre   = jac->schur_user; /* borrowed reference; caller must not destroy it */
  PetscFunctionReturn(0);
}
2443 
2444 /*@
2445     PCFieldSplitSetSchurFactType -  sets which blocks of the approximate block factorization to retain in the preconditioner
2446 
2447     Collective
2448 
2449     Input Parameters:
2450 +   pc  - the preconditioner context
2451 -   ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default
2452 
2453     Options Database Key:
2454 .     -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is full
2455 
2456     Level: intermediate
2457 
2458     Notes:
2459     The FULL factorization is
2460 
2461 .vb
2462    (A   B)  = (1       0) (A   0) (1  Ainv*B)  = L D U
2463    (C   E)    (C*Ainv  1) (0   S) (0     1)
.ve
2465     where S = E - C*Ainv*B. In practice, the full factorization is applied via block triangular solves with the grouping L*(D*U). UPPER uses D*U, LOWER uses L*D,
2466     and DIAG is the diagonal part with the sign of S flipped (because this makes the preconditioner positive definite for many formulations, thus allowing the use of KSPMINRES). Sign flipping of S can be turned off with PCFieldSplitSetSchurScale().
2467 
2468     If A and S are solved exactly
2469 .vb
2470       *) FULL factorization is a direct solver.
2471       *) The preconditioned operator with LOWER or UPPER has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations.
2472       *) With DIAG, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations.
2473 .ve
2474 
2475     If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner
2476     application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice.
2477 
2478     For symmetric problems in which A is positive definite and S is negative definite, DIAG can be used with `KSPMINRES`.
2479 
2480     A flexible method like `KSPFGMRES` or `KSPGCR` must be used if the fieldsplit preconditioner is nonlinear (e.g. a few iterations of a Krylov method is used to solve with A or S).
2481 
2482     References:
2483 +   * - Murphy, Golub, and Wathen, A note on preconditioning indefinite linear systems, SIAM J. Sci. Comput., 21 (2000).
2484 -   * - Ipsen, A note on preconditioning nonsymmetric matrices, SIAM J. Sci. Comput., 23 (2001).
2485 
2486 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`
2487 @*/
PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Dispatch to the implementation composed on pc (only composed when the fieldsplit type is
     PC_COMPOSITE_SCHUR); PetscTryMethod() is a no-op when the method is absent, so this call
     is safe on any PC */
  PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype));
  PetscFunctionReturn(0);
}
2495 
2496 static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype)
2497 {
2498   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2499 
2500   PetscFunctionBegin;
2501   jac->schurfactorization = ftype;
2502   PetscFunctionReturn(0);
2503 }
2504 
2505 /*@
2506     PCFieldSplitSetSchurScale -  Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`.
2507 
2508     Collective
2509 
2510     Input Parameters:
2511 +   pc    - the preconditioner context
2512 -   scale - scaling factor for the Schur complement
2513 
2514     Options Database Key:
2515 .   -pc_fieldsplit_schur_scale - default is -1.0
2516 
2517     Level: intermediate
2518 
2519 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetSchurFactType()`
2520 @*/
PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveScalar(pc, scale, 2);
  /* Forward to the PCFIELDSPLIT implementation when composed; harmless no-op otherwise */
  PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale));
  PetscFunctionReturn(0);
}
2529 
2530 static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale)
2531 {
2532   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2533 
2534   PetscFunctionBegin;
2535   jac->schurscale = scale;
2536   PetscFunctionReturn(0);
2537 }
2538 
2539 /*@C
2540    PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement
2541 
2542    Collective
2543 
2544    Input Parameter:
2545 .  pc - the preconditioner context
2546 
2547    Output Parameters:
2548 +  A00 - the (0,0) block
2549 .  A01 - the (0,1) block
2550 .  A10 - the (1,0) block
2551 -  A11 - the (1,1) block
2552 
2553    Level: advanced
2554 
2555 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()`
2556 @*/
PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach.");
  /* Each output is optional (pass NULL to skip); no new references are taken on the
     returned matrices */
  if (A00) *A00 = jac->pmat[0];
  if (A01) *A01 = jac->B;
  if (A10) *A10 = jac->C;
  if (A11) *A11 = jac->pmat[1];
  PetscFunctionReturn(0);
}
2570 
2571 /*@
2572     PCFieldSplitSetGKBTol -  Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner in `PCFIELDSPLIT`
2573 
2574     Collective
2575 
2576     Input Parameters:
2577 +   pc        - the preconditioner context
2578 -   tolerance - the solver tolerance
2579 
2580     Options Database Key:
2581 .   -pc_fieldsplit_gkb_tol - default is 1e-5
2582 
2583     Level: intermediate
2584 
2585     Note:
2586     The generalized GKB algorithm uses a lower bound estimate of the error in energy norm as stopping criterion.
2587     It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than
    this estimate, the stopping criterion is satisfactory in practical cases [Ar13].
2589 
2590     References:
2591     [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013.
2592 
2593 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()`
2594 @*/
PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, tolerance, 2);
  /* Forward to the PCFIELDSPLIT implementation when composed (only for PC_COMPOSITE_GKB);
     no-op otherwise */
  PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance));
  PetscFunctionReturn(0);
}
2603 
2604 static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance)
2605 {
2606   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2607 
2608   PetscFunctionBegin;
2609   jac->gkbtol = tolerance;
2610   PetscFunctionReturn(0);
2611 }
2612 
2613 /*@
2614     PCFieldSplitSetGKBMaxit -  Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner in `PCFIELDSPLIT`
2615 
2616     Collective
2617 
2618     Input Parameters:
2619 +   pc     - the preconditioner context
2620 -   maxit  - the maximum number of iterations
2621 
2622     Options Database Key:
2623 .   -pc_fieldsplit_gkb_maxit - default is 100
2624 
2625     Level: intermediate
2626 
2627 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()`
2628 @*/
PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, maxit, 2);
  /* Forward to the PCFIELDSPLIT implementation when composed (only for PC_COMPOSITE_GKB);
     no-op otherwise */
  PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit));
  PetscFunctionReturn(0);
}
2637 
2638 static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit)
2639 {
2640   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2641 
2642   PetscFunctionBegin;
2643   jac->gkbmaxit = maxit;
2644   PetscFunctionReturn(0);
2645 }
2646 
2647 /*@
2648     PCFieldSplitSetGKBDelay -  Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization in `PCFIELDSPLIT`
2649     preconditioner.
2650 
2651     Collective
2652 
2653     Input Parameters:
2654 +   pc     - the preconditioner context
2655 -   delay  - the delay window in the lower bound estimate
2656 
2657     Options Database Key:
2658 .   -pc_fieldsplit_gkb_delay - default is 5
2659 
2660     Level: intermediate
2661 
2662     Note:
2663     The algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. The lower bound of the error ||u-u^k||_H
2664     is expressed as a truncated sum. The error at iteration k can only be measured at iteration (k + delay), and thus the algorithm needs
2665     at least (delay + 1) iterations to stop. For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to
2666 
2667     References:
2668     [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013.
2669 
2670 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2671 @*/
PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, delay, 2);
  /* Forward to the PCFIELDSPLIT implementation when composed (only for PC_COMPOSITE_GKB);
     no-op otherwise */
  PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay));
  PetscFunctionReturn(0);
}
2680 
2681 static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay)
2682 {
2683   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2684 
2685   PetscFunctionBegin;
2686   jac->gkbdelay = delay;
2687   PetscFunctionReturn(0);
2688 }
2689 
2690 /*@
2691     PCFieldSplitSetGKBNu -  Sets the scalar value nu >= 0 in the transformation H = A00 + nu*A01*A01' of the (1,1) block in the Golub-Kahan bidiagonalization preconditioner
2692     in `PCFIELDSPLIT`
2693 
2694     Collective
2695 
2696     Input Parameters:
2697 +   pc     - the preconditioner context
2698 -   nu     - the shift parameter
2699 
2700     Options Database Keys:
2701 .   -pc_fieldsplit_gkb_nu - default is 1
2702 
2703     Level: intermediate
2704 
2705     Notes:
    This shift is in general done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by choosing nu sufficiently big. However,
2707     if nu is chosen too big, the matrix H might be badly conditioned and the solution of the linear system Hx = b in the inner loop gets difficult. It is therefore
2708     necessary to find a good balance in between the convergence of the inner and outer loop.
2709 
2710     For nu = 0, no shift is done. In this case A00 has to be positive definite. The matrix N in [Ar13] is then chosen as identity.
2711 
2712     References:
2713     [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013.
2714 
2715 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2716 @*/
PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, nu, 2);
  /* Forward to the PCFIELDSPLIT implementation when composed (only for PC_COMPOSITE_GKB);
     no-op otherwise */
  PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu));
  PetscFunctionReturn(0);
}
2725 
2726 static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu)
2727 {
2728   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2729 
2730   PetscFunctionBegin;
2731   jac->gkbnu = nu;
2732   PetscFunctionReturn(0);
2733 }
2734 
/*
  PCFieldSplitSetType_FieldSplit - Records the composite type and installs the apply/view
  routines and the composed query/option methods appropriate to it.
*/
static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->type = type;
  /* Clear every type-specific composed method first; the subset valid for the new type is
     re-composed below, so stale methods from a previous type cannot linger */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));

  if (type == PC_COMPOSITE_SCHUR) {
    /* Schur complement: dedicated apply/view plus the Schur-specific setters/getters */
    pc->ops->apply = PCApply_FieldSplit_Schur;
    pc->ops->view  = PCView_FieldSplit_Schur;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit));
  } else if (type == PC_COMPOSITE_GKB) {
    /* Golub-Kahan bidiagonalization: dedicated apply/view plus the GKB option setters */
    pc->ops->apply = PCApply_FieldSplit_GKB;
    pc->ops->view  = PCView_FieldSplit_GKB;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit));
  } else {
    /* All remaining composite types (additive, multiplicative, ...) share the generic apply/view */
    pc->ops->apply = PCApply_FieldSplit;
    pc->ops->view  = PCView_FieldSplit;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
  }
  PetscFunctionReturn(0);
}
2777 
/*
  PCFieldSplitSetBlockSize_FieldSplit - Records the block size used to define strided splits.
  The block size may be set only once (jac->bs <= 0 means "not yet set").
*/
static PetscErrorCode PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" PetscInt_FMT, bs);
  PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs);
  jac->bs = bs;
  PetscFunctionReturn(0);
}
2788 
2789 static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
2790 {
2791   PC_FieldSplit    *jac           = (PC_FieldSplit *)pc->data;
2792   PC_FieldSplitLink ilink_current = jac->head;
2793   IS                is_owned;
2794 
2795   PetscFunctionBegin;
2796   jac->coordinates_set = PETSC_TRUE; // Internal flag
2797   PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, PETSC_NULL));
2798 
2799   while (ilink_current) {
2800     // For each IS, embed it to get local coords indces
2801     IS              is_coords;
2802     PetscInt        ndofs_block;
2803     const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block
2804 
2805     // Setting drop to true for safety. It should make no difference.
2806     PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords));
2807     PetscCall(ISGetLocalSize(is_coords, &ndofs_block));
2808     PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration));
2809 
2810     // Allocate coordinates vector and set it directly
2811     PetscCall(PetscMalloc1(ndofs_block * dim, &(ilink_current->coords)));
2812     for (PetscInt dof = 0; dof < ndofs_block; ++dof) {
2813       for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d];
2814     }
2815     ilink_current->dim   = dim;
2816     ilink_current->ndofs = ndofs_block;
2817     PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration));
2818     PetscCall(ISDestroy(&is_coords));
2819     ilink_current = ilink_current->next;
2820   }
2821   PetscCall(ISDestroy(&is_owned));
2822   PetscFunctionReturn(0);
2823 }
2824 
2825 /*@
2826    PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
2827 
2828    Collective
2829 
2830    Input Parameters:
2831 +  pc - the preconditioner context
2832 -  type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
2833 
2834    Options Database Key:
2835 .  -pc_fieldsplit_type <type: one of multiplicative, additive, symmetric_multiplicative, special, schur> - Sets fieldsplit preconditioner type
2836 
2837    Level: Intermediate
2838 
2839 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
2840           `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
2841 @*/
PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Dispatches to PCFieldSplitSetType_FieldSplit() when pc is a PCFIELDSPLIT; silently
     ignored for other PC types */
  PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(0);
}
2849 
2850 /*@
2851   PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
2852 
2853   Not collective
2854 
2855   Input Parameter:
2856 . pc - the preconditioner context
2857 
2858   Output Parameter:
2859 . type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
2860 
2861   Level: Intermediate
2862 
2863 .seealso: [](sec_block_matrices), `PC`, `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
2864           `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
2865 @*/
PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidIntPointer(type, 2);
  /* Reads pc->data directly; no check that pc is actually a PCFIELDSPLIT is performed here */
  *type = jac->type;
  PetscFunctionReturn(0);
}
2876 
2877 /*@
2878    PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
2879 
2880    Logically Collective
2881 
2882    Input Parameters:
2883 +  pc   - the preconditioner context
2884 -  flg  - boolean indicating whether to use field splits defined by the `DM`
2885 
2886    Options Database Key:
2887 .  -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM`
2888 
2889    Level: Intermediate
2890 
2891 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `PCFieldSplitSetFields()`, `PCFieldsplitSetIS()`
2892 @*/
PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flg, 2);
  /* Only act when pc is actually a PCFIELDSPLIT; quietly do nothing for other PC types */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  if (isfs) jac->dm_splits = flg;
  PetscFunctionReturn(0);
}
2905 
2906 /*@
2907    PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
2908 
2909    Logically Collective
2910 
2911    Input Parameter:
2912 .  pc   - the preconditioner context
2913 
2914    Output Parameter:
2915 .  flg  - boolean indicating whether to use field splits defined by the `DM`
2916 
2917    Level: Intermediate
2918 
2919 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `PCFieldSplitSetFields()`, `PCFieldsplitSetIS()`
2920 @*/
PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidBoolPointer(flg, 2);
  /* Only meaningful for PCFIELDSPLIT; *flg is left untouched for any other PC type */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  if (isfs) {
    if (flg) *flg = jac->dm_splits;
  }
  PetscFunctionReturn(0);
}
2935 
2936 /*@
2937    PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
2938 
2939    Logically Collective
2940 
2941    Input Parameter:
2942 .  pc   - the preconditioner context
2943 
2944    Output Parameter:
2945 .  flg  - boolean indicating whether to detect fields or not
2946 
2947    Level: Intermediate
2948 
2949 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()`
2950 @*/
2951 PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg)
2952 {
2953   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2954 
2955   PetscFunctionBegin;
2956   *flg = jac->detect;
2957   PetscFunctionReturn(0);
2958 }
2959 
2960 /*@
2961    PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
2962 
2963    Logically Collective
2964 
   Input Parameters:
+  pc   - the preconditioner context
-  flg  - boolean indicating whether to detect fields or not
2970 
2971    Options Database Key:
2972 .  -pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point
2973 
2974    Level: Intermediate
2975 
2976  Note:
2977    Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`).
2978 
2979 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`
2980 @*/
PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->detect = flg;
  if (jac->detect) {
    /* Enabling detection also forces a Schur-complement split with the "self" Schur
       preconditioner, as documented in the manual page above */
    PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
    PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
  }
  PetscFunctionReturn(0);
}
2993 
2994 /*MC
2995    PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual
2996    collections of variables (that may overlap) called splits. See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details.
2997 
2998    Options Database Keys:
2999 +   -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split
3000 .   -pc_fieldsplit_default - automatically add any fields to additional splits that have not
3001                               been supplied explicitly by `-pc_fieldsplit_%d_fields`
3002 .   -pc_fieldsplit_block_size <bs> - size of block that defines fields (i.e. there are bs fields)
3003 .   -pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting
3004 .   -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is a11; see `PCFieldSplitSetSchurPre()`
3005 .   -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - set factorization type when using `-pc_fieldsplit_type schur`; see `PCFieldSplitSetSchurFactType()`
3006 -   -pc_fieldsplit_detect_saddle_point - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver
3007 
3008      Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` .
     The options prefix for the inner solver when using the Golub-Kahan bidiagonalization preconditioner is `-fieldsplit_0_`
3010      For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields.
3011 
3012      To set options on the solvers for each block append `-fieldsplit_` to all the `PC`
3013      options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1`
3014 
3015      To set the options on the solvers separate for each block call `PCFieldSplitGetSubKSP()`
3016       and set the options directly on the resulting `KSP` object
3017 
3018     Level: intermediate
3019 
3020    Notes:
3021     Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries and `PCFieldSplitSetIS()`
3022      to define a split by an arbitrary collection of entries.
3023 
3024       If no splits are set the default is used. The splits are defined by entries strided by bs,
3025       beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`,
3026       if this is not called the block size defaults to the blocksize of the second matrix passed
3027       to `KSPSetOperators()`/`PCSetOperators()`.
3028 
3029       For the Schur complement preconditioner if
3030 
3031       ```{math}
3032       J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right]
3033       ```
3034 
3035       the preconditioner using `full` factorization is logically
3036       ```{math}
3037       \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right]
3038       ```
3039      where the action of $\text{inv}(A_{00})$ is applied using the KSP solver with prefix `-fieldsplit_0_`.  $S$ is the Schur complement
3040      ```{math}
3041      S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01}
3042      ```
3043      which is usually dense and not stored explicitly.  The action of $\text{ksp}(S)$ is computed using the KSP solver with prefix `-fieldsplit_splitname_` (where `splitname` was given
3044      in providing the SECOND split or 1 if not given). For `PCFieldSplitGetSubKSP()` when field number is 0,
3045      it returns the KSP associated with `-fieldsplit_0_` while field number 1 gives `-fieldsplit_1_` KSP. By default
3046      $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$.
3047 
3048      The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above,
3049      `diag` gives
3050       ```{math}
3051       \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\  0 & -\text{ksp}(S) \end{array}\right]
3052       ```
3053      Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$  so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip
3054      can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of
3055       ```{math}
3056       \left[\begin{array}{cc} A_{00} & 0 \\  A_{10} & S \end{array}\right]
3057       ```
3058      where the inverses of A_{00} and S are applied using KSPs. The upper factorization is the inverse of
3059       ```{math}
3060       \left[\begin{array}{cc} A_{00} & A_{01} \\  0 & S \end{array}\right]
3061       ```
3062      where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s.
3063 
3064      If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS`
3065      is used automatically for a second block.
3066 
3067      The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1.
3068      Generally it should be used with the `MATAIJ` format.
3069 
3070      The forms of these preconditioners are closely related if not identical to forms derived as "Distributive Iterations", see,
3071      for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`Wesseling2009`.
3072      One can also use `PCFIELDSPLIT`
3073      inside a smoother resulting in "Distributive Smoothers".
3074 
3075      See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`.
3076 
3077      The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the
3078      residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables.
3079 
3080      The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape
3081      ```{math}
3082      \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right]
3083      ```
     with $A_{00}$ positive semi-definite. The implementation follows {cite}`Arioli2013`. Therein, we choose $N := 1/\nu * I$ and the $(1,1)$-block of the matrix is modified to $H = A_{00} + \nu A_{01} A_{01}'$.
3085      A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`.
3086 
3087    Developer Note:
3088    The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their
3089    user API.
3090 
3091      References:
3092      ```{bibliography}
3093      :filter: docname in docnames
3094      ```
3095 
3096 .seealso: [](sec_block_matrices), `PC`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`,
3097           `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`,
3098           `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`,
3099           `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()`
3100 M*/
3101 
PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc)
{
  PC_FieldSplit *jac;

  PetscFunctionBegin;
  /* Allocate the PCFIELDSPLIT context and fill in the default option values */
  PetscCall(PetscNew(&jac));

  jac->bs                 = -1; /* -1 = block size not yet set */
  jac->nsplits            = 0;
  jac->type               = PC_COMPOSITE_MULTIPLICATIVE;
  jac->schurpre           = PC_FIELDSPLIT_SCHUR_PRE_USER; /* Try user preconditioner first, fall back on diagonal */
  jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL;
  jac->schurscale         = -1.0;
  jac->dm_splits          = PETSC_TRUE;
  jac->detect             = PETSC_FALSE;
  /* Golub-Kahan bidiagonalization defaults (see the -pc_fieldsplit_gkb_* options) */
  jac->gkbtol             = 1e-5;
  jac->gkbdelay           = 5;
  jac->gkbnu              = 1;
  jac->gkbmaxit           = 100;
  jac->gkbmonitor         = PETSC_FALSE;
  jac->coordinates_set    = PETSC_FALSE;

  pc->data = (void *)jac;

  /* Install the function table; apply/view match the default (multiplicative) type and are
     swapped out by PCFieldSplitSetType_FieldSplit() when the type changes */
  pc->ops->apply           = PCApply_FieldSplit;
  pc->ops->applytranspose  = PCApplyTranspose_FieldSplit;
  pc->ops->setup           = PCSetUp_FieldSplit;
  pc->ops->reset           = PCReset_FieldSplit;
  pc->ops->destroy         = PCDestroy_FieldSplit;
  pc->ops->setfromoptions  = PCSetFromOptions_FieldSplit;
  pc->ops->view            = PCView_FieldSplit;
  pc->ops->applyrichardson = NULL;

  /* Compose the type-independent public methods on the object */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit));
  PetscFunctionReturn(0);
}
3145