xref: /petsc/src/ksp/pc/impls/fieldsplit/fieldsplit.c (revision ccb5f9611935399ce519ad67775f577af13758f7)
1 #include <petsc/private/pcimpl.h>  /*I "petscpc.h" I*/
2 #include <petsc/private/kspimpl.h> /*  This is needed to provide the appropriate PETSC_EXTERN for KSP_Solve_FS ....*/
3 #include <petscdm.h>
4 
/* String names for the Schur preconditioner-source and factorization enums. The trailing
   entries (enum type name, option-name prefix, NULL sentinel) follow the PETSc convention
   for enum string arrays consumed by the options/enum machinery. */
const char *const PCFieldSplitSchurPreTypes[]  = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL};
const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL};

/* Logging events for the individual sub-solves performed by the fieldsplit preconditioner */
PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4;
9 
typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
/* One node in the linked list of splits; each split carries its own sub-solver and index sets */
struct _PC_FieldSplitLink {
  KSP               ksp;                   /* Sub-solver associated with this split */
  Vec               x, y, z;               /* Work vectors sized for this split */
  char             *splitname;             /* Name of the split (used for options prefixes/viewing) */
  PetscInt          nfields;               /* Number of entries in fields[]/fields_col[] */
  PetscInt         *fields, *fields_col;   /* Row/column field indices when the split is defined by fields rather than an IS */
  VecScatter        sctx;                  /* Scatter between the global vector and this split's vectors */
  IS                is, is_col;            /* Row and column index sets selecting this split */
  PC_FieldSplitLink next, previous;        /* Doubly-linked list of splits */
  PetscLogEvent     event;                 /* Logging event for this split's solve */

  /* Used only when setting coordinates with PCSetCoordinates */
  PetscInt   dim;    /* Spatial dimension of the stored coordinates */
  PetscInt   ndofs;  /* Number of dofs that have coordinates attached */
  PetscReal *coords; /* Coordinate array (presumably ndofs*dim entries -- confirm against PCSetCoordinates) */
};
27 
/* Private context for the FIELDSPLIT preconditioner */
typedef struct {
  PCCompositeType type;         /* How the splits are combined (additive, multiplicative, Schur, GKB, ...) */
  PetscBool       defaultsplit; /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */
  PetscBool       splitdefined; /* Flag is set after the splits have been defined, to prevent more splits from being added */
  PetscInt        bs;           /* Block size for IS and Mat structures */
  PetscInt        nsplits;      /* Number of field divisions defined */
  Vec            *x, *y, w1, w2; /* Per-split work vectors (x, y) and two full-size work vectors (w1, w2) */
  Mat            *mat;    /* The diagonal block for each split */
  Mat            *pmat;   /* The preconditioning diagonal block for each split */
  Mat            *Afield; /* The rows of the matrix associated with each split */
  PetscBool       issetup; /* Set once the per-split data structures have been built in PCSetUp_FieldSplit() */

  /* Only used when Schur complement preconditioning is used */
  Mat                       B;          /* The (0,1) block */
  Mat                       C;          /* The (1,0) block */
  Mat                       schur;      /* The Schur complement S = A11 - A10 A00^{-1} A01, the KSP here, kspinner, is H_1 in [El08] */
  Mat                       schurp;     /* Assembled approximation to S built by MatSchurComplement to be used as a preconditioning matrix when solving with S */
  Mat                       schur_user; /* User-provided preconditioning matrix for the Schur complement */
  PCFieldSplitSchurPreType  schurpre;   /* Determines which preconditioning matrix is used for the Schur complement */
  PCFieldSplitSchurFactType schurfactorization; /* Which part of the block factorization (diag/lower/upper/full) is applied */
  KSP                       kspschur;   /* The solver for S */
  KSP                       kspupper;   /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */
  PetscScalar               schurscale; /* Scaling factor for the Schur complement solution with DIAG factorization */

  /* Only used when Golub-Kahan bidiagonalization preconditioning is used */
  Mat          H;           /* The modified matrix H = A00 + nu*A01*A01'              */
  PetscReal    gkbtol;      /* Stopping tolerance for lower bound estimate            */
  PetscInt     gkbdelay;    /* The delay window for the stopping criterion            */
  PetscReal    gkbnu;       /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */
  PetscInt     gkbmaxit;    /* Maximum number of iterations for outer loop            */
  PetscBool    gkbmonitor;  /* Monitor for gkb iterations and the lower bound error   */
  PetscViewer  gkbviewer;   /* Viewer context for gkbmonitor                          */
  Vec          u, v, d, Hu; /* Work vectors for the GKB algorithm                     */
  PetscScalar *vecz;        /* Contains intermediate values, eg for lower bound       */

  PC_FieldSplitLink head;             /* Head of the linked list of splits */
  PetscBool         isrestrict;       /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */
  PetscBool         suboptionsset;    /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */
  PetscBool         dm_splits;        /* Whether to use DMCreateFieldDecomposition() whenever possible */
  PetscBool         diag_use_amat;    /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         detect;           /* Whether to form 2-way split by finding zero diagonal entries */
  PetscBool         coordinates_set;  /* Whether PCSetCoordinates has been called */
} PC_FieldSplit;
72 
/*
    Note:
    There is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit instead of
    inside PC_FieldSplitLink; that is just historical. If you want to be able to add new fields after
    the PC has already been used, you could change this.
*/
79 
80 /* This helper is so that setting a user-provided preconditioning matrix is orthogonal to choosing to use it.  This way the
81 * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices. */
82 static Mat FieldSplitSchurPre(PC_FieldSplit *jac)
83 {
84   switch (jac->schurpre) {
85   case PC_FIELDSPLIT_SCHUR_PRE_SELF:
86     return jac->schur;
87   case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
88     return jac->schurp;
89   case PC_FIELDSPLIT_SCHUR_PRE_A11:
90     return jac->pmat[1];
91   case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */
92   case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */
93   default:
94     return jac->schur_user ? jac->schur_user : jac->pmat[1];
95   }
96 }
97 
98 #include <petscdraw.h>
99 static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer)
100 {
101   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
102   PetscBool         iascii, isdraw;
103   PetscInt          i, j;
104   PC_FieldSplitLink ilink = jac->head;
105 
106   PetscFunctionBegin;
107   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
108   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
109   if (iascii) {
110     if (jac->bs > 0) {
111       PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
112     } else {
113       PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
114     }
115     if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
116     if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
117     if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));
118     PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for each split is in the following KSP objects:\n"));
119     for (i = 0; i < jac->nsplits; i++) {
120       if (ilink->fields) {
121         PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
122         PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
123         for (j = 0; j < ilink->nfields; j++) {
124           if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
125           PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
126         }
127         PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
128         PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
129       } else {
130         PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
131       }
132       PetscCall(KSPView(ilink->ksp, viewer));
133       ilink = ilink->next;
134     }
135   }
136 
137   if (isdraw) {
138     PetscDraw draw;
139     PetscReal x, y, w, wd;
140 
141     PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
142     PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
143     w  = 2 * PetscMin(1.0 - x, x);
144     wd = w / (jac->nsplits + 1);
145     x  = x - wd * (jac->nsplits - 1) / 2.0;
146     for (i = 0; i < jac->nsplits; i++) {
147       PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
148       PetscCall(KSPView(ilink->ksp, viewer));
149       PetscCall(PetscDrawPopCurrentPoint(draw));
150       x += wd;
151       ilink = ilink->next;
152     }
153   }
154   PetscFunctionReturn(PETSC_SUCCESS);
155 }
156 
/* Viewer for PCFIELDSPLIT with a Schur complement factorization: reports the factorization
   type, the source of the Schur preconditioning matrix, the split layout, and the inner
   solvers for A00, the upper-factor A00 (if distinct), and S */
static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer)
{
  PC_FieldSplit             *jac = (PC_FieldSplit *)pc->data;
  PetscBool                  iascii, isdraw;
  PetscInt                   i, j;
  PC_FieldSplitLink          ilink = jac->head;
  MatSchurComplementAinvType atype;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    /* Describe which matrix preconditions the Schur complement (mirrors FieldSplitSchurPre()) */
    switch (jac->schurpre) {
    case PC_FIELDSPLIT_SCHUR_PRE_SELF:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from S itself\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
      /* SELFP assembles an approximation Sp to S using an approximate inverse of A00 */
      PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype));
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's "))));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_A11:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_FULL:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from the exact Schur complement\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_USER:
      /* USER falls back to A11 when no user matrix was supplied */
      if (jac->schur_user) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from user provided matrix\n"));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      }
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre);
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Split info:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    /* List how each split was defined: explicit field indices or an IS */
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      ilink = ilink->next;
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->head) {
      PetscCall(KSPView(jac->head->ksp, viewer));
    } else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    PetscCall(PetscViewerASCIIPopTab(viewer));
    /* The upper-factor A00 solver is only shown when it differs from the main A00 solver */
    if (jac->head && jac->kspupper != jac->head->ksp) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor \n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01 \n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->kspschur) {
      PetscCall(KSPView(jac->kspschur, viewer));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    }
    PetscCall(PetscViewerASCIIPopTab(viewer));
    PetscCall(PetscViewerASCIIPopTab(viewer));
  } else if (isdraw && jac->head) {
    PetscDraw draw;
    PetscReal x, y, w, wd, h;
    PetscInt  cnt = 2; /* boxes to draw: A00 solver + Schur solver; a distinct upper A00 solver adds one */
    char      str[32];

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (jac->kspupper != jac->head->ksp) cnt++;
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (cnt + 1);

    PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    /* USER without a user matrix falls back to A11, matching the ASCII branch */
    if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11]));
    } else {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre]));
    }
    PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    x = x - wd * (cnt - 1) / 2.0;

    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->head->ksp, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
    if (jac->kspupper != jac->head->ksp) {
      x += wd;
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(jac->kspupper, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
    }
    x += wd;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->kspschur, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
278 
/* Viewer for PCFIELDSPLIT with Golub-Kahan bidiagonalization (GKB): reports the GKB
   parameters and the solver for the augmented matrix H = A00 + nu*A01*A01' */
static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));

    PetscCall(PetscViewerASCIIPrintf(viewer, "  Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for H = A00 + nu*A01*A01' matrix:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));

    /* Only split 0 is described here: GKB solves with the (augmented) first split's KSP */
    if (ilink->fields) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields "));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      for (j = 0; j < ilink->nfields; j++) {
        if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
        PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
      }
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n"));
    }
    PetscCall(KSPView(ilink->ksp, viewer));

    PetscCall(PetscViewerASCIIPopTab(viewer));
  }

  /* The draw branch walks all splits from jac->head (the ASCII branch above does not advance ilink) */
  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
339 
340 /* Precondition: jac->bs is set to a meaningful value */
341 static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc)
342 {
343   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
344   PetscInt       i, nfields, *ifields, nfields_col, *ifields_col;
345   PetscBool      flg, flg_col;
346   char           optionname[128], splitname[8], optionname_col[128];
347 
348   PetscFunctionBegin;
349   PetscCall(PetscMalloc1(jac->bs, &ifields));
350   PetscCall(PetscMalloc1(jac->bs, &ifields_col));
351   for (i = 0, flg = PETSC_TRUE;; i++) {
352     PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
353     PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
354     PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i));
355     nfields     = jac->bs;
356     nfields_col = jac->bs;
357     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
358     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col));
359     if (!flg) break;
360     else if (flg && !flg_col) {
361       PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
362       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields));
363     } else {
364       PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
365       PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match");
366       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col));
367     }
368   }
369   if (i > 0) {
370     /* Makes command-line setting of splits take precedence over setting them in code.
371        Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would
372        create new splits, which would probably not be what the user wanted. */
373     jac->splitdefined = PETSC_TRUE;
374   }
375   PetscCall(PetscFree(ifields));
376   PetscCall(PetscFree(ifields_col));
377   PetscFunctionReturn(PETSC_SUCCESS);
378 }
379 
/*
   Fills in the splits when the user has not (fully) defined them, trying in order:
     - a DM-provided field decomposition, optionally regrouped via -pc_fieldsplit_<i>_fields,
     - zero-diagonal detection (jac->detect) or off-block-diagonal coupling detection,
     - splits given on the command line,
     - MATNEST block structure, or one split per block-size component.
   If exactly one split was provided, its complement becomes the second split.
   Errors unless at least two splits exist on exit.
*/
static PetscErrorCode PCFieldSplitSetDefaults(PC pc)
{
  PC_FieldSplit    *jac                = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink              = jac->head;
  PetscBool         fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE;
  PetscInt          i;

  PetscFunctionBegin;
  /*
   Kinda messy, but at least this now uses DMCreateFieldDecomposition().
   Should probably be rewritten.
   */
  if (!ilink) {
    PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL));
    if (pc->dm && jac->dm_splits && !jac->detect && !coupling) {
      PetscInt  numFields, f, i, j;
      char    **fieldNames;
      IS       *fields;
      DM       *dms;
      DM        subdm[128];
      PetscBool flg;

      PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms));
      /* Allow the user to prescribe the splits */
      for (i = 0, flg = PETSC_TRUE;; i++) {
        PetscInt ifields[128];
        IS       compField;
        char     optionname[128], splitname[8];
        PetscInt nfields = numFields;

        /* NOTE(review): ifields[] and subdm[] are fixed at 128 entries; the numFields <= 128
           check below runs only after PetscOptionsGetIntArray() may already have filled
           ifields, and the split index i used for subdm[i] is not bounds-checked --
           confirm inputs cannot exceed 128 */
        PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
        PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
        if (!flg) break;
        PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields);
        PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i]));
        if (nfields == 1) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField));
        } else {
          PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
          PetscCall(PCFieldSplitSetIS(pc, splitname, compField));
        }
        PetscCall(ISDestroy(&compField));
        /* The names/ISs of the fields folded into this split are no longer needed */
        for (j = 0; j < nfields; ++j) {
          f = ifields[j];
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      }
      if (i == 0) {
        /* No command-line regrouping: each DM field becomes its own split */
        for (f = 0; f < numFields; ++f) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f]));
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      } else {
        /* Replace the per-field DMs with the sub-DMs created for the prescribed splits */
        for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j));
        PetscCall(PetscFree(dms));
        PetscCall(PetscMalloc1(i, &dms));
        for (j = 0; j < i; ++j) dms[j] = subdm[j];
      }
      PetscCall(PetscFree(fieldNames));
      PetscCall(PetscFree(fields));
      if (dms) {
        PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n"));
        /* Attach each split's DM to its sub-KSP (inactive) and propagate the
           compute-operators callback from the outer DM */
        for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) {
          const char *prefix;
          PetscCall(PetscObjectGetOptionsPrefix((PetscObject)(ilink->ksp), &prefix));
          PetscCall(PetscObjectSetOptionsPrefix((PetscObject)(dms[i]), prefix));
          PetscCall(KSPSetDM(ilink->ksp, dms[i]));
          PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE));
          {
            PetscErrorCode (*func)(KSP, Mat, Mat, void *);
            void *ctx;

            PetscCall(DMKSPGetComputeOperators(pc->dm, &func, &ctx));
            PetscCall(DMKSPSetComputeOperators(dms[i], func, ctx));
          }
          PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0));
          PetscCall(DMDestroy(&dms[i]));
        }
        PetscCall(PetscFree(dms));
      }
    } else {
      if (jac->bs <= 0) {
        /* Fall back to the matrix block size (or 1 when there is no matrix yet) */
        if (pc->pmat) {
          PetscCall(MatGetBlockSize(pc->pmat, &jac->bs));
        } else jac->bs = 1;
      }

      if (jac->detect) {
        /* Two-way split: rows with a zero diagonal entry vs. everything else */
        IS       zerodiags, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->diag_use_amat) {
          PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags));
        } else {
          PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags));
        }
        PetscCall(ISComplement(zerodiags, nmin, nmax, &rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags));
        PetscCall(ISDestroy(&zerodiags));
        PetscCall(ISDestroy(&rest));
      } else if (coupling) {
        /* Two-way split: rows with off-block-diagonal entries vs. all local rows */
        IS       coupling, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->offdiag_use_amat) {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling));
        } else {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling));
        }
        PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest));
        PetscCall(ISSetIdentity(rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", coupling));
        PetscCall(ISDestroy(&coupling));
        PetscCall(ISDestroy(&rest));
      } else {
        PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL));
        if (!fieldsplit_default) {
          /* Allow user to set fields from command line,  if bs was known at the time of PCSetFromOptions_FieldSplit()
           then it is set there. This is not ideal because we should only have options set in XXSetFromOptions(). */
          PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
          if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
        }
        if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) {
          Mat       M = pc->pmat;
          PetscBool isnest;

          PetscCall(PetscInfo(pc, "Using default splitting of fields\n"));
          /* Prefer Pmat's MATNEST structure; otherwise check Amat */
          PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest));
          if (!isnest) {
            M = pc->mat;
            PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest));
          }
          if (isnest) {
            /* One split per nested block row of the MATNEST matrix */
            IS      *fields;
            PetscInt nf;

            PetscCall(MatNestGetSize(M, &nf, NULL));
            PetscCall(PetscMalloc1(nf, &fields));
            PetscCall(MatNestGetISs(M, fields, NULL));
            for (i = 0; i < nf; i++) PetscCall(PCFieldSplitSetIS(pc, NULL, fields[i]));
            PetscCall(PetscFree(fields));
          } else {
            /* One split per component of the block size */
            for (i = 0; i < jac->bs; i++) {
              char splitname[8];
              PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
              PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i));
            }
            jac->defaultsplit = PETSC_TRUE;
          }
        }
      }
    }
  } else if (jac->nsplits == 1) {
    /* Exactly one user-provided split: define the second split as its complement */
    IS       is2;
    PetscInt nmin, nmax;

    PetscCheck(ilink->is, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()");
    PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
    PetscCall(ISComplement(ilink->is, nmin, nmax, &is2));
    PetscCall(PCFieldSplitSetIS(pc, "1", is2));
    PetscCall(ISDestroy(&is2));
  }

  PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits);
  PetscFunctionReturn(PETSC_SUCCESS);
}
552 
553 static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu)
554 {
555   Mat       BT, T;
556   PetscReal nrmT, nrmB;
557 
558   PetscFunctionBegin;
559   PetscCall(MatHermitianTranspose(C, MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */
560   PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN));
561   PetscCall(MatNorm(T, NORM_1, &nrmT));
562   PetscCall(MatNorm(B, NORM_1, &nrmB));
563   PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/hermitian, GKB is not applicable.");
564 
565   /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */
566   /* setting N := 1/nu*I in [Ar13].                                                 */
567   PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT));
568   PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_DEFAULT, H)); /* H = A01*A01'          */
569   PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN));        /* H = A00 + nu*A01*A01' */
570 
571   PetscCall(MatDestroy(&BT));
572   PetscCall(MatDestroy(&T));
573   PetscFunctionReturn(PETSC_SUCCESS);
574 }
575 
576 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *value[], PetscBool *flg);
577 
578 static PetscErrorCode PCSetUp_FieldSplit(PC pc)
579 {
580   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
581   PC_FieldSplitLink ilink;
582   PetscInt          i, nsplit;
583   PetscBool         sorted, sorted_col;
584 
585   PetscFunctionBegin;
586   pc->failedreason = PC_NOERROR;
587   PetscCall(PCFieldSplitSetDefaults(pc));
588   nsplit = jac->nsplits;
589   ilink  = jac->head;
590 
591   /* get the matrices for each split */
592   if (!jac->issetup) {
593     PetscInt rstart, rend, nslots, bs;
594 
595     jac->issetup = PETSC_TRUE;
596 
597     /* This is done here instead of in PCFieldSplitSetFields() because may not have matrix at that point */
598     if (jac->defaultsplit || !ilink->is) {
599       if (jac->bs <= 0) jac->bs = nsplit;
600     }
601 
602     /*  MatCreateSubMatrix() for [S]BAIJ matrices can only work if the indices include entire blocks of the matrix */
603     PetscCall(MatGetBlockSize(pc->pmat, &bs));
604     if (bs > 1 && (jac->bs <= bs || jac->bs % bs)) {
605       PetscBool blk;
606 
607       PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &blk, MATBAIJ, MATSBAIJ, MATSEQBAIJ, MATSEQSBAIJ, MATMPIBAIJ, MATMPISBAIJ, NULL));
608       PetscCheck(!blk, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Cannot use MATBAIJ with PCFIELDSPLIT and currently set matrix and PC blocksizes");
609     }
610 
611     bs = jac->bs;
612     PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend));
613     nslots = (rend - rstart) / bs;
614     for (i = 0; i < nsplit; i++) {
615       if (jac->defaultsplit) {
616         PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is));
617         PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
618       } else if (!ilink->is) {
619         if (ilink->nfields > 1) {
620           PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col;
621           PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii));
622           PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj));
623           for (j = 0; j < nslots; j++) {
624             for (k = 0; k < nfields; k++) {
625               ii[nfields * j + k] = rstart + bs * j + fields[k];
626               jj[nfields * j + k] = rstart + bs * j + fields_col[k];
627             }
628           }
629           PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is));
630           PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col));
631           PetscCall(ISSetBlockSize(ilink->is, nfields));
632           PetscCall(ISSetBlockSize(ilink->is_col, nfields));
633         } else {
634           PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is));
635           PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col));
636         }
637       }
638       PetscCall(ISSorted(ilink->is, &sorted));
639       if (ilink->is_col) PetscCall(ISSorted(ilink->is_col, &sorted_col));
640       PetscCheck(sorted && sorted_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Fields must be sorted when creating split");
641       ilink = ilink->next;
642     }
643   }
644 
645   ilink = jac->head;
646   if (!jac->pmat) {
647     Vec xtmp;
648 
649     PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL));
650     PetscCall(PetscMalloc1(nsplit, &jac->pmat));
651     PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y));
652     for (i = 0; i < nsplit; i++) {
653       MatNullSpace sp;
654 
655       /* Check for preconditioning matrix attached to IS */
656       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i]));
657       if (jac->pmat[i]) {
658         PetscCall(PetscObjectReference((PetscObject)jac->pmat[i]));
659         if (jac->type == PC_COMPOSITE_SCHUR) {
660           jac->schur_user = jac->pmat[i];
661 
662           PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
663         }
664       } else {
665         const char *prefix;
666         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i]));
667         PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix));
668         PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix));
669         PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view"));
670       }
671       /* create work vectors for each split */
672       PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i]));
673       ilink->x = jac->x[i];
674       ilink->y = jac->y[i];
675       ilink->z = NULL;
676       /* compute scatter contexts needed by multiplicative versions and non-default splits */
677       PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx));
678       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp));
679       if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp));
680       ilink = ilink->next;
681     }
682     PetscCall(VecDestroy(&xtmp));
683   } else {
684     MatReuse scall;
685     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
686       for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i]));
687       scall = MAT_INITIAL_MATRIX;
688     } else scall = MAT_REUSE_MATRIX;
689 
690     for (i = 0; i < nsplit; i++) {
691       Mat pmat;
692 
693       /* Check for preconditioning matrix attached to IS */
694       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat));
695       if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i]));
696       ilink = ilink->next;
697     }
698   }
699   if (jac->diag_use_amat) {
700     ilink = jac->head;
701     if (!jac->mat) {
702       PetscCall(PetscMalloc1(nsplit, &jac->mat));
703       for (i = 0; i < nsplit; i++) {
704         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i]));
705         ilink = ilink->next;
706       }
707     } else {
708       MatReuse scall;
709       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
710         for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i]));
711         scall = MAT_INITIAL_MATRIX;
712       } else scall = MAT_REUSE_MATRIX;
713 
714       for (i = 0; i < nsplit; i++) {
715         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i]));
716         ilink = ilink->next;
717       }
718     }
719   } else {
720     jac->mat = jac->pmat;
721   }
722 
723   /* Check for null space attached to IS */
724   ilink = jac->head;
725   for (i = 0; i < nsplit; i++) {
726     MatNullSpace sp;
727 
728     PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp));
729     if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp));
730     ilink = ilink->next;
731   }
732 
733   if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) {
734     /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */
735     /* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? */
736     ilink = jac->head;
737     if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) {
738       /* special case need where Afield[0] is not needed and only certain columns of Afield[1] are needed since update is only on those rows of the solution */
739       if (!jac->Afield) {
740         PetscCall(PetscCalloc1(nsplit, &jac->Afield));
741         if (jac->offdiag_use_amat) {
742           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
743         } else {
744           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
745         }
746       } else {
747         MatReuse scall;
748 
749         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
750           PetscCall(MatDestroy(&jac->Afield[1]));
751           scall = MAT_INITIAL_MATRIX;
752         } else scall = MAT_REUSE_MATRIX;
753 
754         if (jac->offdiag_use_amat) {
755           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
756         } else {
757           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
758         }
759       }
760     } else {
761       if (!jac->Afield) {
762         PetscCall(PetscMalloc1(nsplit, &jac->Afield));
763         for (i = 0; i < nsplit; i++) {
764           if (jac->offdiag_use_amat) {
765             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
766           } else {
767             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
768           }
769           ilink = ilink->next;
770         }
771       } else {
772         MatReuse scall;
773         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
774           for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i]));
775           scall = MAT_INITIAL_MATRIX;
776         } else scall = MAT_REUSE_MATRIX;
777 
778         for (i = 0; i < nsplit; i++) {
779           if (jac->offdiag_use_amat) {
780             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i]));
781           } else {
782             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, &jac->Afield[i]));
783           }
784           ilink = ilink->next;
785         }
786       }
787     }
788   }
789 
790   if (jac->type == PC_COMPOSITE_SCHUR) {
791     IS          ccis;
792     PetscBool   isset, isspd;
793     PetscInt    rstart, rend;
794     char        lscname[256];
795     PetscObject LSC_L;
796 
797     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields");
798 
799     /* If pc->mat is SPD, don't scale by -1 the Schur complement */
800     if (jac->schurscale == (PetscScalar)-1.0) {
801       PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd));
802       jac->schurscale = (isset && isspd) ? 1.0 : -1.0;
803     }
804 
805     /* When extracting off-diagonal submatrices, we take complements from this range */
806     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
807 
808     if (jac->schur) {
809       KSP      kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper;
810       MatReuse scall;
811 
812       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
813         scall = MAT_INITIAL_MATRIX;
814         PetscCall(MatDestroy(&jac->B));
815         PetscCall(MatDestroy(&jac->C));
816       } else scall = MAT_REUSE_MATRIX;
817 
818       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
819       ilink = jac->head;
820       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
821       if (jac->offdiag_use_amat) {
822         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->B));
823       } else {
824         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->B));
825       }
826       PetscCall(ISDestroy(&ccis));
827       ilink = ilink->next;
828       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
829       if (jac->offdiag_use_amat) {
830         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->C));
831       } else {
832         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->C));
833       }
834       PetscCall(ISDestroy(&ccis));
835       PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
836       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) {
837         PetscCall(MatDestroy(&jac->schurp));
838         PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
839       }
840       if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0]));
841       if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0]));
842       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
843     } else {
844       const char  *Dprefix;
845       char         schurprefix[256], schurmatprefix[256];
846       char         schurtestoption[256];
847       MatNullSpace sp;
848       PetscBool    flg;
849       KSP          kspt;
850 
851       /* extract the A01 and A10 matrices */
852       ilink = jac->head;
853       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
854       if (jac->offdiag_use_amat) {
855         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
856       } else {
857         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
858       }
859       PetscCall(ISDestroy(&ccis));
860       ilink = ilink->next;
861       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
862       if (jac->offdiag_use_amat) {
863         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
864       } else {
865         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
866       }
867       PetscCall(ISDestroy(&ccis));
868 
869       /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */
870       PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur));
871       PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT));
872       PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
873       PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
874       PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix));
875       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt));
876       PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix));
877 
878       /* Note: this is not true in general */
879       PetscCall(MatGetNullSpace(jac->mat[1], &sp));
880       if (sp) PetscCall(MatSetNullSpace(jac->schur, sp));
881 
882       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname));
883       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, &flg));
884       if (flg) {
885         DM  dmInner;
886         KSP kspInner;
887         PC  pcInner;
888 
889         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
890         PetscCall(KSPReset(kspInner));
891         PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0]));
892         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
893         /* Indent this deeper to emphasize the "inner" nature of this solver. */
894         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2));
895         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2));
896         PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix));
897 
898         /* Set DM for new solver */
899         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
900         PetscCall(KSPSetDM(kspInner, dmInner));
901         PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE));
902 
903         /* Defaults to PCKSP as preconditioner */
904         PetscCall(KSPGetPC(kspInner, &pcInner));
905         PetscCall(PCSetType(pcInner, PCKSP));
906         PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp));
907       } else {
908         /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or
909           * PCFieldSplitSetIS_FieldSplit. We don't want KSPPREONLY because it makes the Schur complement inexact,
910           * preventing Schur complement reduction to be an accurate solve. Usually when an iterative solver is used for
911           * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make
912           * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used
913           * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. */
914         PetscCall(KSPSetType(jac->head->ksp, KSPGMRES));
915         PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp));
916       }
917       PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0]));
918       PetscCall(KSPSetFromOptions(jac->head->ksp));
919       PetscCall(MatSetFromOptions(jac->schur));
920 
921       PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg));
922       if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */
923         KSP kspInner;
924         PC  pcInner;
925 
926         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
927         PetscCall(KSPGetPC(kspInner, &pcInner));
928         PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg));
929         if (flg) {
930           KSP ksp;
931 
932           PetscCall(PCKSPGetKSP(pcInner, &ksp));
933           if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE));
934         }
935       }
936       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname));
937       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, &flg));
938       if (flg) {
939         DM dmInner;
940 
941         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
942         PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper));
943         PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure));
944         PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix));
945         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1));
946         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1));
947         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
948         PetscCall(KSPSetDM(jac->kspupper, dmInner));
949         PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE));
950         PetscCall(KSPSetFromOptions(jac->kspupper));
951         PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0]));
952         PetscCall(VecDuplicate(jac->head->x, &jac->head->z));
953       } else {
954         jac->kspupper = jac->head->ksp;
955         PetscCall(PetscObjectReference((PetscObject)jac->head->ksp));
956       }
957 
958       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
959       PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur));
960       PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure));
961       PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1));
962       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) {
963         PC pcschur;
964         PetscCall(KSPGetPC(jac->kspschur, &pcschur));
965         PetscCall(PCSetType(pcschur, PCNONE));
966         /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */
967       } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) {
968         PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
969       }
970       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
971       PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix));
972       PetscCall(KSPSetOptionsPrefix(jac->kspschur, Dprefix));
973       /* propagate DM */
974       {
975         DM sdm;
976         PetscCall(KSPGetDM(jac->head->next->ksp, &sdm));
977         if (sdm) {
978           PetscCall(KSPSetDM(jac->kspschur, sdm));
979           PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE));
980         }
981       }
982       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
983       /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */
984       PetscCall(KSPSetFromOptions(jac->kspschur));
985     }
986     PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY));
987     PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY));
988 
989     /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */
990     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname));
991     PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
992     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
993     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", (PetscObject)LSC_L));
994     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname));
995     PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
996     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
997     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", (PetscObject)LSC_L));
998   } else if (jac->type == PC_COMPOSITE_GKB) {
999     IS       ccis;
1000     PetscInt rstart, rend;
1001 
1002     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields");
1003 
1004     ilink = jac->head;
1005 
1006     /* When extracting off-diagonal submatrices, we take complements from this range */
1007     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
1008 
1009     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1010     if (jac->offdiag_use_amat) {
1011       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1012     } else {
1013       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1014     }
1015     PetscCall(ISDestroy(&ccis));
1016     /* Create work vectors for GKB algorithm */
1017     PetscCall(VecDuplicate(ilink->x, &jac->u));
1018     PetscCall(VecDuplicate(ilink->x, &jac->Hu));
1019     PetscCall(VecDuplicate(ilink->x, &jac->w2));
1020     ilink = ilink->next;
1021     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1022     if (jac->offdiag_use_amat) {
1023       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1024     } else {
1025       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1026     }
1027     PetscCall(ISDestroy(&ccis));
1028     /* Create work vectors for GKB algorithm */
1029     PetscCall(VecDuplicate(ilink->x, &jac->v));
1030     PetscCall(VecDuplicate(ilink->x, &jac->d));
1031     PetscCall(VecDuplicate(ilink->x, &jac->w1));
1032     PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu));
1033     PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz));
1034 
1035     ilink = jac->head;
1036     PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H));
1037     if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1038     /* Create gkb_monitor context */
1039     if (jac->gkbmonitor) {
1040       PetscInt tablevel;
1041       PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer));
1042       PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII));
1043       PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel));
1044       PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel));
1045       PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1));
1046     }
1047   } else {
1048     /* set up the individual splits' PCs */
1049     i     = 0;
1050     ilink = jac->head;
1051     while (ilink) {
1052       PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i]));
1053       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
1054       if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1055       i++;
1056       ilink = ilink->next;
1057     }
1058   }
1059 
1060   /* Set coordinates to the sub PC objects whenever these are set */
1061   if (jac->coordinates_set) {
1062     PC pc_coords;
1063     if (jac->type == PC_COMPOSITE_SCHUR) {
1064       // Head is first block.
1065       PetscCall(KSPGetPC(jac->head->ksp, &pc_coords));
1066       PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords));
1067       // Second one is Schur block, but its KSP object is in kspschur.
1068       PetscCall(KSPGetPC(jac->kspschur, &pc_coords));
1069       PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords));
1070     } else if (jac->type == PC_COMPOSITE_GKB) {
1071       PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB Fieldpslit preconditioner"));
1072     } else {
1073       ilink = jac->head;
1074       while (ilink) {
1075         PetscCall(KSPGetPC(ilink->ksp, &pc_coords));
1076         PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords));
1077         ilink = ilink->next;
1078       }
1079     }
1080   }
1081 
1082   jac->suboptionsset = PETSC_TRUE;
1083   PetscFunctionReturn(PETSC_SUCCESS);
1084 }
1085 
/*
   FieldSplitSplitSolveAdd - Scatters the relevant entries of global vector xx into the
   split's work vector, solves with the split's KSP, and scatter-ADDs the result back
   into global vector yy.

   Implemented as a single ||-chain of PetscErrorCode-returning calls so that the first
   failing call short-circuits the rest, mimicking PetscCall() inside an expression.
   NOTE: relies on a variable named `pc` being in scope at every call site (it is passed
   to KSPCheckSolve). Comments cannot go inside the macro body because of the
   line-continuation backslashes.
*/
#define FieldSplitSplitSolveAdd(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \
                    KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE)))
1090 
/*
   PCApply_FieldSplit_Schur - Applies the Schur-complement fieldsplit preconditioner,
   y = M^{-1} x, where M is the block factorization selected by jac->schurfactorization
   (DIAG, LOWER, UPPER, or FULL). ilinkA is the A00 split; ilinkD is the second split,
   whose diagonal block is replaced by the (approximate) Schur complement solved with
   jac->kspschur.

   NOTE(review): the forward/reverse VecScatterBegin/End pairs are deliberately
   interleaved with the solves, presumably to overlap communication with computation --
   preserve this ordering when modifying.
*/
static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
  /* kspUpper may equal kspA (default) or be a separate solver with its own prefix, see PCSetUp */
  KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;

  PetscFunctionBegin;
  switch (jac->schurfactorization) {
  case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
    /* [A00 0; 0 -S], positive definite, suitable for MINRES */
    /* gather both split pieces of x, solve A00 and S blocks independently, scatter results back */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* schurscale is -1 unless pc->pmat is known SPD (set in PCSetUp_FieldSplit) */
    PetscCall(VecScale(ilinkD->y, jac->schurscale));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* [A00 0; A10 S], suitable for left preconditioning */
    /* yA = A00^{-1} xA, then Schur RHS = xD - A10 yA (C holds A10) */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.));
    /* ADD_VALUES folds the xD piece of x into -A10 yA already stored in ilinkD->x */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* [A00 A01; 0 S], suitable for right preconditioning */
    /* yD = S^{-1} xD first, then A00 RHS = xA - A01 yD (B holds A01) */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
    PetscCall(VecScale(ilinkA->x, -1.));
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */
    /* lower factor: yA = A00^{-1} xA (kspLower == kspA here), Schur RHS = xD - A10 yA */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.0));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));

    /* diagonal factor: yD = S^{-1} (xD - A10 A00^{-1} xA) */
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

    /* upper factor: yA = A00^{-1}(xA - A01 yD); the two branches differ in whether the
       same KSP handles both the upper-factor and diagonal A00 solves */
    if (kspUpper == kspA) {
      /* fold A01 yD into the RHS, then one A00 solve gives the final yA */
      PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y));
      PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    } else {
      /* separate upper solver: yA = A00^{-1} xA - kspUpper^{-1}(A01 yD), using work vector z */
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
      PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
      PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z));
      PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z));
      PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
      PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
    }
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    /* no default: all PCFieldSplitSchurFactType values are handled above */
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1204 
/*
  PCApply_FieldSplit - Applies y = M^{-1} x for the additive (block Jacobi),
  multiplicative (block Gauss-Seidel), and symmetric multiplicative composite
  types, using the KSP attached to each split for the diagonal block solves.

  Schur and GKB composite types are handled by separate apply routines.
*/
static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          cnt, bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    if (jac->defaultsplit) {
      /* All splits are strided with identical layout: one gather fills the work
         vector of every split (jac->x appears to hold the same Vecs referenced
         by each ilink->x — TODO confirm), then each split is solved independently */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* General (IS-defined) splits: scatter, solve, and accumulate per split */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) {
    /* Specialized two-split Gauss-Seidel: the residual update for the second
       split needs only jac->Afield[1] applied to the first split's solution */
    PetscCall(VecSet(y, 0.0));
    /* solve on first block for first block variables */
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));

    /* compute the residual only onto second block variables using first block variables */
    PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x));
    ilink = ilink->next;
    PetscCall(VecScale(ilink->x, -1.0));
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));

    /* solve on second block variables */
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
    /* General Gauss-Seidel sweep: solve split 0, then for each later split form
       its restricted residual with jac->Afield[cnt] and solve; the symmetric
       variant walks back down the list repeating the process in reverse */
    if (!jac->w1) {
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
    cnt = 1;
    while (ilink->next) {
      ilink = ilink->next;
      /* compute the residual only over the part of the vector needed */
      PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x));
      PetscCall(VecScale(ilink->x, -1.0));
      PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
      PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
      PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    }
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* cnt now indexes one past the last split; step back two so the reverse
         sweep starts at the second-to-last split's residual operator */
      cnt -= 2;
      while (ilink->previous) {
        ilink = ilink->previous;
        /* compute the residual only over the part of the vector needed */
        PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x));
        PetscCall(VecScale(ilink->x, -1.0));
        PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      }
    }
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1302 
/*
  PCApply_FieldSplit_GKB - Applies the preconditioner via a Golub-Kahan
  bidiagonalization (GKB) iteration on the two-split system; [Ar13] in the
  comments refers to the Arioli GKB paper this implementation follows.

  Requires exactly two splits: ilinkA is the (0,0) "primal" split (solved with
  its KSP, treated as H = A + nu*B*B' when gkbnu > 0) and ilinkD is the (1,1)
  split. Iterates until the lower-bound error estimate drops below jac->gkbtol
  or jac->gkbmaxit iterations are reached.
*/
static PetscErrorCode PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
  KSP               ksp = ilinkA->ksp;
  Vec               u, v, Hu, d, work1, work2;
  PetscScalar       alpha, z, nrmz2, *vecz;
  PetscReal         lowbnd, nu, beta;
  PetscInt          j, iterGKB;

  PetscFunctionBegin;
  /* Pull the two pieces of the right-hand side into the split work vectors */
  PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));

  /* Work vectors and the delay buffer of recent z values were allocated at setup */
  u     = jac->u;
  v     = jac->v;
  Hu    = jac->Hu;
  d     = jac->d;
  work1 = jac->w1;
  work2 = jac->w2;
  vecz  = jac->vecz;

  /* Change RHS to comply with matrix regularization H = A + nu*B*B' */
  /* Add q = q + nu*B*b */
  if (jac->gkbnu) {
    nu = jac->gkbnu;
    PetscCall(VecScale(ilinkD->x, jac->gkbnu));
    PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */
  } else {
    /* Situation when no augmented Lagrangian is used. Then we set inner  */
    /* matrix N = I in [Ar13], and thus nu = 1.                           */
    nu = 1;
  }

  /* Transform rhs from [q,tilde{b}] to [0,b] */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y));
  PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1));
  PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x        */

  /* First step of algorithm */
  PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c)*/
  KSPCheckDot(ksp, beta);
  beta = PetscSqrtReal(nu) * beta;
  PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c      */
  PetscCall(MatMult(jac->B, v, work2));          /* u = H^{-1}*B*v      */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(KSPSolve(ksp, work2, u));
  PetscCall(KSPCheckSolve(ksp, pc, u));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u      */
  PetscCall(VecDot(Hu, u, &alpha));
  KSPCheckDot(ksp, alpha);
  /* alpha = u'*H*u must be positive for an SPD H; a non-positive value means
     the (regularized) (0,0) block is not positive definite and GKB cannot proceed */
  PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
  alpha = PetscSqrtReal(PetscAbsScalar(alpha));
  PetscCall(VecScale(u, 1.0 / alpha));
  PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* d = v/alpha         */

  z       = beta / alpha;
  vecz[1] = z;

  /* Computation of first iterate x(1) and p(1) */
  PetscCall(VecAXPY(ilinkA->y, z, u));
  PetscCall(VecCopy(d, ilinkD->y));
  PetscCall(VecScale(ilinkD->y, -z));

  iterGKB = 1;
  /* Start above the tolerance so the loop is entered at least once */
  lowbnd  = 2 * jac->gkbtol;
  if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));

  while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) {
    iterGKB += 1;
    PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */
    PetscCall(VecAXPBY(v, nu, -alpha, work1));
    PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = ||v||/sqrt(nu)     */
    beta = beta / PetscSqrtReal(nu);
    PetscCall(VecScale(v, 1.0 / beta));
    PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */
    PetscCall(MatMult(jac->H, u, Hu));
    PetscCall(VecAXPY(work2, -beta, Hu));
    PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(KSPSolve(ksp, work2, u));
    PetscCall(KSPCheckSolve(ksp, pc, u));
    PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u            */
    PetscCall(VecDot(Hu, u, &alpha));
    KSPCheckDot(ksp, alpha);
    PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
    alpha = PetscSqrtReal(PetscAbsScalar(alpha));
    PetscCall(VecScale(u, 1.0 / alpha));

    z       = -beta / alpha * z; /* z <- beta/alpha*z     */
    vecz[0] = z;

    /* Computation of new iterate x(i+1) and p(i+1) */
    PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */
    PetscCall(VecAXPY(ilinkA->y, z, u));                   /* r = r + z*u          */
    PetscCall(VecAXPY(ilinkD->y, -z, d));                  /* p = p - z*d          */
    PetscCall(MatMult(jac->H, ilinkA->y, Hu));             /* ||u||_H = u'*H*u     */
    PetscCall(VecDot(Hu, ilinkA->y, &nrmz2));

    /* Compute Lower Bound estimate from the last gkbdelay values of z */
    if (iterGKB > jac->gkbdelay) {
      lowbnd = 0.0;
      for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]);
      lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2));
    }

    /* Shift the delay buffer so vecz[0] can receive the next z */
    for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2];
    if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));
  }

  /* Scatter both split solutions back into the global result */
  PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

  PetscFunctionReturn(PETSC_SUCCESS);
}
1426 
/* Scatter xx into the split, apply the split KSP's transpose solve, and accumulate
   (ADD_VALUES) the result back into yy. Note the work vectors are used in the
   opposite roles from the forward solve: input goes into ilink->y, output into
   ilink->x. Written as a single ||-chained expression so it can be wrapped in
   PetscCall(); relies on a variable `pc` being in scope at the call site. */
#define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \
                    KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE)))
1431 
/*
  PCApplyTranspose_FieldSplit - Applies y = M^{-T} x. The additive case mirrors
  PCApply_FieldSplit with transpose solves; the multiplicative cases sweep the
  splits in the reverse order of the forward apply (last split first), since the
  transpose of a product reverses the factor order.
*/
static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    if (jac->defaultsplit) {
      /* Strided splits with a common block size: gather once, transpose-solve each split */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else {
    /* Multiplicative variants: each step forms the full residual x - A^T y
       in jac->w2 and transpose-solves it on one split */
    if (!jac->w1) {
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* Symmetric sweep: forward over the splits, then backward */
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->next) {
        ilink = ilink->next;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    } else {
      /* Plain multiplicative transpose: start at the LAST split and sweep backward */
      while (ilink->next) { /* get to last entry in linked list */
        ilink = ilink->next;
      }
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1496 
1497 static PetscErrorCode PCReset_FieldSplit(PC pc)
1498 {
1499   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1500   PC_FieldSplitLink ilink = jac->head, next;
1501 
1502   PetscFunctionBegin;
1503   while (ilink) {
1504     PetscCall(KSPDestroy(&ilink->ksp));
1505     PetscCall(VecDestroy(&ilink->x));
1506     PetscCall(VecDestroy(&ilink->y));
1507     PetscCall(VecDestroy(&ilink->z));
1508     PetscCall(VecScatterDestroy(&ilink->sctx));
1509     PetscCall(ISDestroy(&ilink->is));
1510     PetscCall(ISDestroy(&ilink->is_col));
1511     PetscCall(PetscFree(ilink->splitname));
1512     PetscCall(PetscFree(ilink->fields));
1513     PetscCall(PetscFree(ilink->fields_col));
1514     next = ilink->next;
1515     PetscCall(PetscFree(ilink));
1516     ilink = next;
1517   }
1518   jac->head = NULL;
1519   PetscCall(PetscFree2(jac->x, jac->y));
1520   if (jac->mat && jac->mat != jac->pmat) {
1521     PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat));
1522   } else if (jac->mat) {
1523     jac->mat = NULL;
1524   }
1525   if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat));
1526   if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield));
1527   jac->nsplits = 0;
1528   PetscCall(VecDestroy(&jac->w1));
1529   PetscCall(VecDestroy(&jac->w2));
1530   PetscCall(MatDestroy(&jac->schur));
1531   PetscCall(MatDestroy(&jac->schurp));
1532   PetscCall(MatDestroy(&jac->schur_user));
1533   PetscCall(KSPDestroy(&jac->kspschur));
1534   PetscCall(KSPDestroy(&jac->kspupper));
1535   PetscCall(MatDestroy(&jac->B));
1536   PetscCall(MatDestroy(&jac->C));
1537   PetscCall(MatDestroy(&jac->H));
1538   PetscCall(VecDestroy(&jac->u));
1539   PetscCall(VecDestroy(&jac->v));
1540   PetscCall(VecDestroy(&jac->Hu));
1541   PetscCall(VecDestroy(&jac->d));
1542   PetscCall(PetscFree(jac->vecz));
1543   PetscCall(PetscViewerDestroy(&jac->gkbviewer));
1544   jac->isrestrict = PETSC_FALSE;
1545   PetscFunctionReturn(PETSC_SUCCESS);
1546 }
1547 
1548 static PetscErrorCode PCDestroy_FieldSplit(PC pc)
1549 {
1550   PetscFunctionBegin;
1551   PetscCall(PCReset_FieldSplit(pc));
1552   PetscCall(PetscFree(pc->data));
1553   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
1554   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL));
1555   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL));
1556   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL));
1557   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL));
1558   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL));
1559   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL));
1560   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1561 
1562   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
1563   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
1564   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
1565   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
1566   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1567   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
1568   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
1569   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
1570   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
1571   PetscFunctionReturn(PETSC_SUCCESS);
1572 }
1573 
/*
  PCSetFromOptions_FieldSplit - Processes the -pc_fieldsplit_* options database
  entries: composite type, block size, split definitions, and the Schur- and
  GKB-specific parameters. Also re-runs KSPSetFromOptions() on already-set-up
  sub-solvers so option changes between solves take effect.
*/
static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PetscInt        bs;
  PetscBool       flg;
  PC_FieldSplit  *jac = (PC_FieldSplit *)pc->data;
  PCCompositeType ctype;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options");
  PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL));
  PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg));
  if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs));
  /* Default the use-Amat flags from the PC-wide setting before reading the overrides */
  jac->diag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL));
  jac->offdiag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL));
  PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL));
  PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */
  PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg));
  if (flg) PetscCall(PCFieldSplitSetType(pc, ctype));
  /* Only setup fields once */
  if ((jac->bs > 0) && (jac->nsplits == 0)) {
    /* only allow user to set fields from command line if bs is already known.
       otherwise user can set them in PCFieldSplitSetDefaults() */
    PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
    if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
  }
  if (jac->type == PC_COMPOSITE_SCHUR) {
    /* The deprecated spelling is read first so the current option can override it */
    PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg));
    if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n"));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL));
    PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL));
  } else if (jac->type == PC_COMPOSITE_GKB) {
    PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitGKBTol", jac->gkbtol, &jac->gkbtol, NULL));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL));
    PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitGKBNu", jac->gkbnu, &jac->gkbnu, NULL));
    PetscCheck(jac->gkbnu >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "nu cannot be less than 0: value %g", (double)jac->gkbnu);
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", "Maximum allowed number of iterations", "PCFieldSplitGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL));
    PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL));
  }
  /*
    In the initial call to this routine the sub-solver data structures do not exist so we cannot call KSPSetFromOptions() on them yet.
    But after the initial setup of ALL the layers of sub-solvers is completed we do want to call KSPSetFromOptions() on the sub-solvers every time it
    is called on the outer solver in case changes were made in the options database

    But even after PCSetUp_FieldSplit() is called all the options inside the inner levels of sub-solvers may still not have been set thus we only call the KSPSetFromOptions()
    if we know that the entire stack of sub-solvers below this have been complete instantiated, we check this by seeing if any solver iterations are complete.
    Without this extra check test p2p1fetidp_olof_full and others fail with incorrect matrix types.

    There could be a negative side effect of calling the KSPSetFromOptions() below.

    If one captured the PetscObjectState of the options database one could skip these calls if the database has not changed from the previous call
  */
  if (jac->issetup) {
    PC_FieldSplitLink ilink = jac->head;
    if (jac->type == PC_COMPOSITE_SCHUR) {
      if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper));
      if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur));
    }
    while (ilink) {
      if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp));
      ilink = ilink->next;
    }
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
1642 
1643 static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
1644 {
1645   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
1646   PC_FieldSplitLink ilink, next = jac->head;
1647   char              prefix[128];
1648   PetscInt          i;
1649 
1650   PetscFunctionBegin;
1651   if (jac->splitdefined) {
1652     PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
1653     PetscFunctionReturn(PETSC_SUCCESS);
1654   }
1655   for (i = 0; i < n; i++) {
1656     PetscCheck(fields[i] < jac->bs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", fields[i], jac->bs);
1657     PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]);
1658   }
1659   PetscCall(PetscNew(&ilink));
1660   if (splitname) {
1661     PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
1662   } else {
1663     PetscCall(PetscMalloc1(3, &ilink->splitname));
1664     PetscCall(PetscSNPrintf(ilink->splitname, 2, "%" PetscInt_FMT, jac->nsplits));
1665   }
1666   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split great than 4 gets logged in the 4th split */
1667   PetscCall(PetscMalloc1(n, &ilink->fields));
1668   PetscCall(PetscArraycpy(ilink->fields, fields, n));
1669   PetscCall(PetscMalloc1(n, &ilink->fields_col));
1670   PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n));
1671 
1672   ilink->nfields = n;
1673   ilink->next    = NULL;
1674   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
1675   PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
1676   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
1677   PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
1678 
1679   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
1680   PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
1681 
1682   if (!next) {
1683     jac->head       = ilink;
1684     ilink->previous = NULL;
1685   } else {
1686     while (next->next) next = next->next;
1687     next->next      = ilink;
1688     ilink->previous = next;
1689   }
1690   jac->nsplits++;
1691   PetscFunctionReturn(PETSC_SUCCESS);
1692 }
1693 
1694 static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1695 {
1696   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1697 
1698   PetscFunctionBegin;
1699   *subksp = NULL;
1700   if (n) *n = 0;
1701   if (jac->type == PC_COMPOSITE_SCHUR) {
1702     PetscInt nn;
1703 
1704     PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()");
1705     PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits);
1706     nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 1 : 0);
1707     PetscCall(PetscMalloc1(nn, subksp));
1708     (*subksp)[0] = jac->head->ksp;
1709     (*subksp)[1] = jac->kspschur;
1710     if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper;
1711     if (n) *n = nn;
1712   }
1713   PetscFunctionReturn(PETSC_SUCCESS);
1714 }
1715 
1716 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp)
1717 {
1718   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1719 
1720   PetscFunctionBegin;
1721   PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()");
1722   PetscCall(PetscMalloc1(jac->nsplits, subksp));
1723   PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp));
1724 
1725   (*subksp)[1] = jac->kspschur;
1726   if (n) *n = jac->nsplits;
1727   PetscFunctionReturn(PETSC_SUCCESS);
1728 }
1729 
1730 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1731 {
1732   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1733   PetscInt          cnt   = 0;
1734   PC_FieldSplitLink ilink = jac->head;
1735 
1736   PetscFunctionBegin;
1737   PetscCall(PetscMalloc1(jac->nsplits, subksp));
1738   while (ilink) {
1739     (*subksp)[cnt++] = ilink->ksp;
1740     ilink            = ilink->next;
1741   }
1742   PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits);
1743   if (n) *n = jac->nsplits;
1744   PetscFunctionReturn(PETSC_SUCCESS);
1745 }
1746 
1747 /*@C
1748     PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`.
1749 
1750     Input Parameters:
1751 +   pc  - the preconditioner context
1752 -   is - the index set that defines the indices to which the fieldsplit is to be restricted
1753 
1754     Level: advanced
1755 
1756     Developer Note:
1757     It seems the resulting `IS`s will not cover the entire space, so
1758     how can they define a convergent preconditioner? Needs explaining.
1759 
1760 .seealso: [](sec_block_matrices), `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
1761 @*/
PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(isy, IS_CLASSID, 2);
  /* Dispatches to PCFieldSplitRestrictIS_FieldSplit(); PetscTryMethod is a no-op if pc is not a PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1770 
/* Restricts every split's row/column IS to the subset selected by isy, renumbering the
   surviving indices into the (parallel) numbering induced by isy, and recurses into any
   nested PCFIELDSPLIT found on a split's KSP. */
static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;
  PetscInt          localsize, size, sizez, i;
  const PetscInt   *ind, *indz;
  PetscInt         *indc, *indcz;
  PetscBool         flg;

  PetscFunctionBegin;
  /* Exclusive prefix sum: after the scan-and-subtract, 'size' is the global offset of this
     rank's local block of isy, used to shift local embed indices to global numbering */
  PetscCall(ISGetLocalSize(isy, &localsize));
  PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy)));
  size -= localsize;
  while (ilink) {
    IS isrl, isr;
    PC subpc;
    /* isrl = positions (local to isy) of the entries of ilink->is that survive the restriction */
    PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl));
    PetscCall(ISGetLocalSize(isrl, &localsize));
    PetscCall(PetscMalloc1(localsize, &indc));
    PetscCall(ISGetIndices(isrl, &ind));
    PetscCall(PetscArraycpy(indc, ind, localsize));
    PetscCall(ISRestoreIndices(isrl, &ind));
    PetscCall(ISDestroy(&isrl));
    /* Shift local positions by this rank's offset to obtain global indices */
    for (i = 0; i < localsize; i++) *(indc + i) += size;
    /* isr takes ownership of indc (PETSC_OWN_POINTER) */
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr));
    /* Install isr as both the row and column IS of the split; the two extra references
       balance the final ISDestroy(&isr) so each of ilink->is and ilink->is_col holds one */
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is));
    ilink->is = isr;
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is_col));
    ilink->is_col = isr;
    PetscCall(ISDestroy(&isr));
    PetscCall(KSPGetPC(ilink->ksp, &subpc));
    PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg));
    if (flg) {
      /* Nested fieldsplit: build the reverse embedding (isy into the new split IS) and
         restrict the inner splits with it, using the same shift-to-global scheme */
      IS       iszl, isz;
      MPI_Comm comm;
      PetscCall(ISGetLocalSize(ilink->is, &localsize));
      comm = PetscObjectComm((PetscObject)ilink->is);
      PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl));
      PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm));
      sizez -= localsize;
      PetscCall(ISGetLocalSize(iszl, &localsize));
      PetscCall(PetscMalloc1(localsize, &indcz));
      PetscCall(ISGetIndices(iszl, &indz));
      PetscCall(PetscArraycpy(indcz, indz, localsize));
      PetscCall(ISRestoreIndices(iszl, &indz));
      PetscCall(ISDestroy(&iszl));
      for (i = 0; i < localsize; i++) *(indcz + i) += sizez;
      PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz));
      PetscCall(PCFieldSplitRestrictIS(subpc, isz));
      PetscCall(ISDestroy(&isz));
    }
    next  = ilink->next;
    ilink = next;
  }
  jac->isrestrict = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1830 
/* Appends a new split, defined by the index set 'is', to the tail of the split list.
   The same IS serves as both the row and column index set of the split. */
static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink, next = jac->head;
  char              prefix[128];

  PetscFunctionBegin;
  /* Once the splits have been finalized, further additions are silently ignored */
  if (jac->splitdefined) {
    PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(PetscNew(&ilink));
  if (splitname) {
    PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
  } else {
    /* Unnamed splits are labeled with their ordinal number */
    PetscCall(PetscMalloc1(8, &ilink->splitname));
    PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits));
  }
  /* Splits 0..4 get their own log event; any split with index greater than 4 is logged under the last event.
     NOTE(review): the pointer-free arithmetic assumes KSP_Solve_FS_0..KSP_Solve_FS_4 have consecutive event ids -- confirm at registration site */
  ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4;
  /* Take one reference each for the row and column slots (reference before destroy is safe even if is aliases the old IS) */
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is));
  ilink->is = is;
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is_col));
  ilink->is_col = is;
  ilink->next   = NULL;
  /* Each split owns a KSP; PREONLY by default so a split solve is a single preconditioner application */
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
  PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
  PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));

  /* Options prefix: <pc prefix>fieldsplit_<splitname>_ */
  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
  PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));

  /* Append to the tail of the doubly linked split list */
  if (!next) {
    jac->head       = ilink;
    ilink->previous = NULL;
  } else {
    while (next->next) next = next->next;
    next->next      = ilink;
    ilink->previous = next;
  }
  jac->nsplits++;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1876 
1877 /*@C
1878     PCFieldSplitSetFields - Sets the fields that define one particular split in `PCFIELDSPLIT`
1879 
1880     Logically Collective
1881 
1882     Input Parameters:
1883 +   pc  - the preconditioner context
1884 .   splitname - name of this split, if `NULL` the number of the split is used
1885 .   n - the number of fields in this split
1886 .   fields - the fields in this split
1887 -   fields_col - generally the same as fields, if it does not match fields then the matrix block that is solved for this set of fields comes from an off-diagonal block
1888                  of the matrix and fields_col provides the column indices for that block
1889 
1890     Level: intermediate
1891 
1892     Notes:
1893     Use `PCFieldSplitSetIS()` to set a  general set of indices as a split.
1894 
1895      `PCFieldSplitSetFields()` is for defining fields as strided blocks. For example, if the block
1896      size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean
1897      0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x1x3x5x7.. x12x45x78x....
1898      where the numbered entries indicate what is in the split.
1899 
1900      This function is called once per split (it creates a new split each time).  Solve options
1901      for this split will be available under the prefix `-fieldsplit_SPLITNAME_`.
1902 
1903    `PCFieldSplitSetIS()` does not support having a fields_col different from fields
1904 
1905    Developer Note:
1906    This routine does not actually create the `IS` representing the split, that is delayed
1907    until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be
1908    available when this routine is called.
1909 
1910 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()`
1911 @*/
1912 PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
1913 {
1914   PetscFunctionBegin;
1915   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1916   PetscValidCharPointer(splitname, 2);
1917   PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname);
1918   PetscValidIntPointer(fields, 4);
1919   PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col));
1920   PetscFunctionReturn(PETSC_SUCCESS);
1921 }
1922 
1923 /*@
1924     PCFieldSplitSetDiagUseAmat - set flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
1925     the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.
1926 
1927     Logically Collective
1928 
1929     Input Parameters:
1930 +   pc  - the preconditioner object
1931 -   flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
1932 
1933     Options Database Keys:
1934 .   -pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks
1935 
1936     Level: intermediate
1937 
1938 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT`
1939 @*/
1940 PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg)
1941 {
1942   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1943   PetscBool      isfs;
1944 
1945   PetscFunctionBegin;
1946   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1947   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
1948   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
1949   jac->diag_use_amat = flg;
1950   PetscFunctionReturn(PETSC_SUCCESS);
1951 }
1952 
1953 /*@
1954     PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
1955     the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.
1956 
1957     Logically Collective
1958 
1959     Input Parameters:
1960 .   pc  - the preconditioner object
1961 
1962     Output Parameters:
1963 .   flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
1964 
1965     Level: intermediate
1966 
1967 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT`
1968 @*/
1969 PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg)
1970 {
1971   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1972   PetscBool      isfs;
1973 
1974   PetscFunctionBegin;
1975   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1976   PetscValidBoolPointer(flg, 2);
1977   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
1978   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
1979   *flg = jac->diag_use_amat;
1980   PetscFunctionReturn(PETSC_SUCCESS);
1981 }
1982 
1983 /*@
1984     PCFieldSplitSetOffDiagUseAmat - set flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
1985     the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.
1986 
1987     Logically Collective
1988 
1989     Input Parameters:
1990 +   pc  - the preconditioner object
1991 -   flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
1992 
1993     Options Database Keys:
1994 .     -pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks
1995 
1996     Level: intermediate
1997 
1998 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT`
1999 @*/
2000 PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg)
2001 {
2002   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2003   PetscBool      isfs;
2004 
2005   PetscFunctionBegin;
2006   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2007   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2008   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2009   jac->offdiag_use_amat = flg;
2010   PetscFunctionReturn(PETSC_SUCCESS);
2011 }
2012 
2013 /*@
2014     PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2015     the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.
2016 
2017     Logically Collective
2018 
2019     Input Parameters:
2020 .   pc  - the preconditioner object
2021 
2022     Output Parameters:
2023 .   flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2024 
2025     Level: intermediate
2026 
2027 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT`
2028 @*/
2029 PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg)
2030 {
2031   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2032   PetscBool      isfs;
2033 
2034   PetscFunctionBegin;
2035   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2036   PetscValidBoolPointer(flg, 2);
2037   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2038   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2039   *flg = jac->offdiag_use_amat;
2040   PetscFunctionReturn(PETSC_SUCCESS);
2041 }
2042 
2043 /*@C
2044     PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT`
2045 
2046     Logically Collective
2047 
2048     Input Parameters:
2049 +   pc  - the preconditioner context
2050 .   splitname - name of this split, if `NULL` the number of the split is used
2051 -   is - the index set that defines the elements in this split
2052 
2053     Level: intermediate
2054 
2055     Notes:
2056     Use `PCFieldSplitSetFields()`, for splits defined by strided types.
2057 
2058     This function is called once per split (it creates a new split each time).  Solve options
2059     for this split will be available under the prefix -fieldsplit_SPLITNAME_.
2060 
2061 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`
2062 @*/
PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* splitname may be NULL, in which case the split number is used as the name */
  if (splitname) PetscValidCharPointer(splitname, 2);
  PetscValidHeaderSpecific(is, IS_CLASSID, 3);
  /* Dispatches to PCFieldSplitSetIS_FieldSplit(); PetscTryMethod is a no-op if pc is not a PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2072 
2073 /*@C
2074     PCFieldSplitGetIS - Retrieves the elements for a split as an `IS`
2075 
2076     Logically Collective
2077 
2078     Input Parameters:
2079 +   pc  - the preconditioner context
2080 -   splitname - name of this split
2081 
2082     Output Parameter:
.   is - the index set that defines the elements in this split, or `NULL` if the split is not found
2084 
2085     Level: intermediate
2086 
2087 .seealso:  [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`
2088 @*/
2089 PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is)
2090 {
2091   PetscFunctionBegin;
2092   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2093   PetscValidCharPointer(splitname, 2);
2094   PetscValidPointer(is, 3);
2095   {
2096     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2097     PC_FieldSplitLink ilink = jac->head;
2098     PetscBool         found;
2099 
2100     *is = NULL;
2101     while (ilink) {
2102       PetscCall(PetscStrcmp(ilink->splitname, splitname, &found));
2103       if (found) {
2104         *is = ilink->is;
2105         break;
2106       }
2107       ilink = ilink->next;
2108     }
2109   }
2110   PetscFunctionReturn(PETSC_SUCCESS);
2111 }
2112 
2113 /*@C
2114     PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS`
2115 
2116     Logically Collective
2117 
2118     Input Parameters:
2119 +   pc  - the preconditioner context
2120 -   index - index of this split
2121 
2122     Output Parameter:
.   is - the index set that defines the elements in this split
2124 
2125     Level: intermediate
2126 
2127 .seealso:  [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`
2128 @*/
2129 PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is)
2130 {
2131   PetscFunctionBegin;
2132   PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index);
2133   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2134   PetscValidPointer(is, 3);
2135   {
2136     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2137     PC_FieldSplitLink ilink = jac->head;
2138     PetscInt          i     = 0;
2139     PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits);
2140 
2141     while (i < index) {
2142       ilink = ilink->next;
2143       ++i;
2144     }
2145     PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is));
2146   }
2147   PetscFunctionReturn(PETSC_SUCCESS);
2148 }
2149 
2150 /*@
2151     PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the
2152       fieldsplit preconditioner when calling `PCFieldSplitSetIS()`. If not set the matrix block size is used.
2153 
2154     Logically Collective
2155 
2156     Input Parameters:
2157 +   pc  - the preconditioner context
2158 -   bs - the block size
2159 
2160     Level: intermediate
2161 
2162 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
2163 @*/
PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, bs, 2);
  /* Dispatches to the implementation; PetscTryMethod is a no-op if pc is not a PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2172 
2173 /*@C
2174    PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits
2175 
2176    Collective
2177 
2178    Input Parameter:
2179 .  pc - the preconditioner context
2180 
2181    Output Parameters:
2182 +  n - the number of splits
2183 -  subksp - the array of `KSP` contexts
2184 
2185    Level: advanced
2186 
2187    Notes:
2188    After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2189    (not the `KSP`, just the array that contains them).
2190 
2191    You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`.
2192 
2193    If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the
2194    Schur complement and the `KSP` object used to iterate over the Schur complement.
2195    To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`.
2196 
2197    If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the
2198    inner linear system defined by the matrix H in each loop.
2199 
2200    Fortran Usage:
2201    You must pass in a `KSP` array that is large enough to contain all the `KSP`s.
2202    You can call `PCFieldSplitGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
2203    `KSP` array must be.
2204 
2205    Developer Note:
2206    There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2207 
2208    The Fortran interface should be modernized to return directly the array of values.
2209 
2210 .seealso:  [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()`
2211 @*/
PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscValidIntPointer(n, 2);
  /* PetscUseMethod errors if the method is not found, i.e. if pc is not a PCFIELDSPLIT */
  PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2220 
2221 /*@C
2222    PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT`
2223 
2224    Collective
2225 
2226    Input Parameter:
2227 .  pc - the preconditioner context
2228 
2229    Output Parameters:
2230 +  n - the number of splits
2231 -  subksp - the array of `KSP` contexts
2232 
2233    Level: advanced
2234 
2235    Notes:
2236    After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2237    (not the `KSP` just the array that contains them).
2238 
2239    You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`.
2240 
2241    If the fieldsplit type is of type `PC_COMPOSITE_SCHUR`, it returns (in order)
2242 +  1  - the `KSP` used for the (1,1) block
2243 .  2  - the `KSP` used for the Schur complement (not the one used for the interior Schur solver)
2244 -  3  - the `KSP` used for the (1,1) block in the upper triangular factor (if different from that of the (1,1) block).
2245 
2246    It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`.
2247 
2248    Fortran Note:
2249    You must pass in a `KSP` array that is large enough to contain all the local `KSP`s.
2250    You can call `PCFieldSplitSchurGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
2251    `KSP` array must be.
2252 
2253    Developer Notes:
2254    There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2255 
2256    Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged?
2257 
2258    The Fortran interface should be modernized to return directly the array of values.
2259 
2260 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()`
2261 @*/
PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscValidIntPointer(n, 2);
  /* PetscUseMethod errors if the method is not found, i.e. if pc is not a PCFIELDSPLIT */
  PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2270 
2271 /*@
    PCFieldSplitSetSchurPre -  Indicates from what operator the preconditioner is constructed for the Schur complement.
2273       The default is the A11 matrix.
2274 
2275     Collective
2276 
2277     Input Parameters:
2278 +   pc      - the preconditioner context
2279 .   ptype   - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default),
2280               `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`,
2281               `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL`
2282 -   pre - matrix to use for preconditioning, or `NULL`
2283 
2284     Options Database Keys:
2285 +    -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is a11. See notes for meaning of various arguments
2286 -    -fieldsplit_1_pc_type <pctype> - the preconditioner algorithm that is used to construct the preconditioner from the operator
2287 
2288      Level: intermediate
2289 
2290     Notes:
2291     If ptype is
2292 +     a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner
2293      matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix
2294 .     self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix:
2295           The only preconditioner that currently works with this symbolic representation matrix object is the `PCLSC`
2296           preconditioner
2297 .     user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument
2298           to this function).
2299 .     selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation Sp = A11 - A10 inv(diag(A00)) A01
2300           This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be
2301           lumped before extracting the diagonal using the additional option -fieldsplit_1_mat_schur_complement_ainv_type lump
2302 -     full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation
2303       computed internally by `PCFIELDSPLIT` (this is expensive)
2304       useful mostly as a test that the Schur complement approach can work for your problem
2305 
2306      When solving a saddle point problem, where the A11 block is identically zero, using a11 as the ptype only makes sense
2307     with the additional option -fieldsplit_1_pc_type none. Usually for saddle point problems one would use a ptype of self and
2308     -fieldsplit_1_pc_type lsc which uses the least squares commutator to compute a preconditioner for the Schur complement.
2309 
2310 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`,
2311           `MatSchurComplementSetAinvType()`, `PCLSC`,
2312           `PCFieldSplitSchurPreType`
2313 @*/
PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Dispatches to PCFieldSplitSetSchurPre_FieldSplit(); a no-op if pc is not a PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2321 
/* Deprecated name kept for backward compatibility; use PCFieldSplitSetSchurPre() instead */
PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  return PCFieldSplitSetSchurPre(pc, ptype, pre);
}
2326 
2327 /*@
2328     PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be
2329     preconditioned.  See `PCFieldSplitSetSchurPre()` for details.
2330 
2331     Logically Collective
2332 
2333     Input Parameter:
2334 .   pc      - the preconditioner context
2335 
2336     Output Parameters:
+   ptype   - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`
-   pre - matrix to use for preconditioning (with `PC_FIELDSPLIT_SCHUR_PRE_USER`), or NULL
2339 
2340     Level: intermediate
2341 
2342 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC`,
2343           `PCFieldSplitSchurPreType`
2344 @*/
PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscUseMethod errors if the method is not found, i.e. if pc is not a PCFIELDSPLIT */
  PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2352 
2353 /*@
2354     PCFieldSplitSchurGetS -  extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately
2355 
2356     Not collective
2357 
2358     Input Parameter:
2359 .   pc      - the preconditioner context
2360 
2361     Output Parameter:
2362 .   S       - the Schur complement matrix
2363 
2364     Level: advanced
2365 
2366     Note:
2367     This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`.
2368 
2369 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`,
2370           `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()`
2371 @*/
2372 PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S)
2373 {
2374   const char    *t;
2375   PetscBool      isfs;
2376   PC_FieldSplit *jac;
2377 
2378   PetscFunctionBegin;
2379   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2380   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2381   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2382   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2383   jac = (PC_FieldSplit *)pc->data;
2384   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2385   if (S) *S = jac->schur;
2386   PetscFunctionReturn(PETSC_SUCCESS);
2387 }
2388 
2389 /*@
2390     PCFieldSplitSchurRestoreS -  returns the `MATSCHURCOMPLEMENT` matrix used by this `PC`
2391 
2392     Not collective
2393 
2394     Input Parameters:
2395 +   pc      - the preconditioner context
2396 -   S       - the Schur complement matrix
2397 
2398     Level: advanced
2399 
2400 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MatSchurComplement`, `PCFieldSplitSchurGetS()`
2401 @*/
2402 PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S)
2403 {
2404   const char    *t;
2405   PetscBool      isfs;
2406   PC_FieldSplit *jac;
2407 
2408   PetscFunctionBegin;
2409   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2410   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2411   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2412   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2413   jac = (PC_FieldSplit *)pc->data;
2414   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2415   PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten");
2416   PetscFunctionReturn(PETSC_SUCCESS);
2417 }
2418 
2419 static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2420 {
2421   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2422 
2423   PetscFunctionBegin;
2424   jac->schurpre = ptype;
2425   if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) {
2426     PetscCall(MatDestroy(&jac->schur_user));
2427     jac->schur_user = pre;
2428     PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
2429   }
2430   PetscFunctionReturn(PETSC_SUCCESS);
2431 }
2432 
2433 static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
2434 {
2435   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2436 
2437   PetscFunctionBegin;
2438   *ptype = jac->schurpre;
2439   *pre   = jac->schur_user;
2440   PetscFunctionReturn(PETSC_SUCCESS);
2441 }
2442 
2443 /*@
2444     PCFieldSplitSetSchurFactType -  sets which blocks of the approximate block factorization to retain in the preconditioner
2445 
2446     Collective
2447 
2448     Input Parameters:
2449 +   pc  - the preconditioner context
2450 -   ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default
2451 
2452     Options Database Key:
2453 .     -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is full
2454 
2455     Level: intermediate
2456 
2457     Notes:
2458     The FULL factorization is
2459 
2460 .vb
2461    (A   B)  = (1       0) (A   0) (1  Ainv*B)  = L D U
2462    (C   E)    (C*Ainv  1) (0   S) (0     1)
.ve
2464     where S = E - C*Ainv*B. In practice, the full factorization is applied via block triangular solves with the grouping L*(D*U). UPPER uses D*U, LOWER uses L*D,
2465     and DIAG is the diagonal part with the sign of S flipped (because this makes the preconditioner positive definite for many formulations, thus allowing the use of KSPMINRES). Sign flipping of S can be turned off with PCFieldSplitSetSchurScale().
2466 
2467     If A and S are solved exactly
2468 .vb
2469       *) FULL factorization is a direct solver.
2470       *) The preconditioned operator with LOWER or UPPER has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations.
2471       *) With DIAG, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations.
2472 .ve
2473 
2474     If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner
2475     application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice.
2476 
2477     For symmetric problems in which A is positive definite and S is negative definite, DIAG can be used with `KSPMINRES`.
2478 
2479     A flexible method like `KSPFGMRES` or `KSPGCR` must be used if the fieldsplit preconditioner is nonlinear (e.g. a few iterations of a Krylov method is used to solve with A or S).
2480 
2481     References:
2482 +   * - Murphy, Golub, and Wathen, A note on preconditioning indefinite linear systems, SIAM J. Sci. Comput., 21 (2000).
2483 -   * - Ipsen, A note on preconditioning nonsymmetric matrices, SIAM J. Sci. Comput., 23 (2001).
2484 
2485 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`
2486 @*/
2487 PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype)
2488 {
2489   PetscFunctionBegin;
2490   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2491   PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype));
2492   PetscFunctionReturn(PETSC_SUCCESS);
2493 }
2494 
static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* Record which blocks of the approximate block factorization the apply routine retains */
  jac->schurfactorization = ftype;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2503 
2504 /*@
2505     PCFieldSplitSetSchurScale -  Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`.
2506 
2507     Collective
2508 
2509     Input Parameters:
2510 +   pc    - the preconditioner context
2511 -   scale - scaling factor for the Schur complement
2512 
2513     Options Database Key:
2514 .   -pc_fieldsplit_schur_scale - default is -1.0
2515 
2516     Level: intermediate
2517 
2518 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetSchurFactType()`
2519 @*/
PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveScalar(pc, scale, 2);
  /* Silently a no-op when the PC is not a PCFIELDSPLIT configured for a Schur split */
  PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2528 
static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* Scaling applied to the Schur complement solve in the DIAG factorization (default -1.0) */
  jac->schurscale = scale;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2537 
2538 /*@C
2539    PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement
2540 
2541    Collective
2542 
2543    Input Parameter:
2544 .  pc - the preconditioner context
2545 
2546    Output Parameters:
2547 +  A00 - the (0,0) block
2548 .  A01 - the (0,1) block
2549 .  A10 - the (1,0) block
2550 -  A11 - the (1,1) block
2551 
2552    Level: advanced
2553 
2554 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()`
2555 @*/
PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11)
{
  /* NOTE(review): pc->data is cast without verifying the PC type is PCFIELDSPLIT;
     the jac->type check below assumes the cast is valid — confirm callers guarantee this */
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach.");
  /* Every output is optional; the returned matrices are borrowed references
     (no reference count is taken, callers must not destroy them) */
  if (A00) *A00 = jac->pmat[0];
  if (A01) *A01 = jac->B;
  if (A10) *A10 = jac->C;
  if (A11) *A11 = jac->pmat[1];
  PetscFunctionReturn(PETSC_SUCCESS);
}
2569 
2570 /*@
2571     PCFieldSplitSetGKBTol -  Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner in `PCFIELDSPLIT`
2572 
2573     Collective
2574 
2575     Input Parameters:
2576 +   pc        - the preconditioner context
2577 -   tolerance - the solver tolerance
2578 
2579     Options Database Key:
2580 .   -pc_fieldsplit_gkb_tol - default is 1e-5
2581 
2582     Level: intermediate
2583 
2584     Note:
2585     The generalized GKB algorithm uses a lower bound estimate of the error in energy norm as stopping criterion.
2586     It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than
    this estimate, the stopping criterion is satisfactory in practical cases [Ar13].
2588 
2589     References:
2590     [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013.
2591 
2592 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()`
2593 @*/
PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, tolerance, 2);
  /* Silently a no-op when the PC is not a PCFIELDSPLIT configured for GKB */
  PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2602 
static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* Tolerance on the lower-bound error estimate that stops the GKB iteration (default 1e-5) */
  jac->gkbtol = tolerance;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2611 
2612 /*@
2613     PCFieldSplitSetGKBMaxit -  Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner in `PCFIELDSPLIT`
2614 
2615     Collective
2616 
2617     Input Parameters:
2618 +   pc     - the preconditioner context
2619 -   maxit  - the maximum number of iterations
2620 
2621     Options Database Key:
2622 .   -pc_fieldsplit_gkb_maxit - default is 100
2623 
2624     Level: intermediate
2625 
2626 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()`
2627 @*/
PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, maxit, 2);
  /* Silently a no-op when the PC is not a PCFIELDSPLIT configured for GKB */
  PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2636 
static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* Iteration cap for the GKB algorithm (default 100) */
  jac->gkbmaxit = maxit;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2645 
2646 /*@
2647     PCFieldSplitSetGKBDelay -  Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization in `PCFIELDSPLIT`
2648     preconditioner.
2649 
2650     Collective
2651 
2652     Input Parameters:
2653 +   pc     - the preconditioner context
2654 -   delay  - the delay window in the lower bound estimate
2655 
2656     Options Database Key:
2657 .   -pc_fieldsplit_gkb_delay - default is 5
2658 
2659     Level: intermediate
2660 
2661     Note:
2662     The algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. The lower bound of the error ||u-u^k||_H
2663     is expressed as a truncated sum. The error at iteration k can only be measured at iteration (k + delay), and thus the algorithm needs
2664     at least (delay + 1) iterations to stop. For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to
2665 
2666     References:
2667     [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013.
2668 
2669 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2670 @*/
PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, delay, 2);
  /* Silently a no-op when the PC is not a PCFIELDSPLIT configured for GKB */
  PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2679 
static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* Window length of the truncated-sum lower-bound error estimate; the error at
     iteration k is only measurable at iteration k + delay (default 5) */
  jac->gkbdelay = delay;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2688 
2689 /*@
2690     PCFieldSplitSetGKBNu -  Sets the scalar value nu >= 0 in the transformation H = A00 + nu*A01*A01' of the (1,1) block in the Golub-Kahan bidiagonalization preconditioner
2691     in `PCFIELDSPLIT`
2692 
2693     Collective
2694 
2695     Input Parameters:
2696 +   pc     - the preconditioner context
2697 -   nu     - the shift parameter
2698 
2699     Options Database Keys:
2700 .   -pc_fieldsplit_gkb_nu - default is 1
2701 
2702     Level: intermediate
2703 
2704     Notes:
2705     This shift is in general done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by choosing nu sufficiently big. However,
2706     if nu is chosen too big, the matrix H might be badly conditioned and the solution of the linear system Hx = b in the inner loop becomes difficult. It is therefore
2707     necessary to find a good balance in between the convergence of the inner and outer loop.
2708 
2709     For nu = 0, no shift is done. In this case A00 has to be positive definite. The matrix N in [Ar13] is then chosen as identity.
2710 
2711     References:
2712     [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013.
2713 
2714 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2715 @*/
PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, nu, 2);
  /* Silently a no-op when the PC is not a PCFIELDSPLIT configured for GKB */
  PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2724 
static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* Shift parameter nu >= 0 in the (1,1)-block transformation H = A00 + nu*A01*A01' (default 1) */
  jac->gkbnu = nu;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2733 
static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->type = type;
  /* First clear ALL type-specific composed methods so that switching away from
     SCHUR or GKB removes the setters that only make sense for those types;
     the appropriate subset is re-composed below. */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));

  if (type == PC_COMPOSITE_SCHUR) {
    /* Schur complement: dedicated apply/view plus the Schur-only setters */
    pc->ops->apply = PCApply_FieldSplit_Schur;
    pc->ops->view  = PCView_FieldSplit_Schur;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit));
  } else if (type == PC_COMPOSITE_GKB) {
    /* Golub-Kahan bidiagonalization: dedicated apply/view plus the GKB-only setters */
    pc->ops->apply = PCApply_FieldSplit_GKB;
    pc->ops->view  = PCView_FieldSplit_GKB;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit));
  } else {
    /* Additive / (symmetric) multiplicative relaxation-style splits */
    pc->ops->apply = PCApply_FieldSplit;
    pc->ops->view  = PCView_FieldSplit;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2776 
static PetscErrorCode PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* jac->bs starts at -1 (unset); once set it may not change to a different value */
  PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" PetscInt_FMT, bs);
  PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs);
  jac->bs = bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2787 
2788 static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
2789 {
2790   PC_FieldSplit    *jac           = (PC_FieldSplit *)pc->data;
2791   PC_FieldSplitLink ilink_current = jac->head;
2792   IS                is_owned;
2793 
2794   PetscFunctionBegin;
2795   jac->coordinates_set = PETSC_TRUE; // Internal flag
2796   PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, NULL));
2797 
2798   while (ilink_current) {
2799     // For each IS, embed it to get local coords indces
2800     IS              is_coords;
2801     PetscInt        ndofs_block;
2802     const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block
2803 
2804     // Setting drop to true for safety. It should make no difference.
2805     PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords));
2806     PetscCall(ISGetLocalSize(is_coords, &ndofs_block));
2807     PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration));
2808 
2809     // Allocate coordinates vector and set it directly
2810     PetscCall(PetscMalloc1(ndofs_block * dim, &(ilink_current->coords)));
2811     for (PetscInt dof = 0; dof < ndofs_block; ++dof) {
2812       for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d];
2813     }
2814     ilink_current->dim   = dim;
2815     ilink_current->ndofs = ndofs_block;
2816     PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration));
2817     PetscCall(ISDestroy(&is_coords));
2818     ilink_current = ilink_current->next;
2819   }
2820   PetscCall(ISDestroy(&is_owned));
2821   PetscFunctionReturn(PETSC_SUCCESS);
2822 }
2823 
2824 /*@
2825    PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
2826 
2827    Collective
2828 
2829    Input Parameters:
2830 +  pc - the preconditioner context
2831 -  type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
2832 
2833    Options Database Key:
2834 .  -pc_fieldsplit_type <type: one of multiplicative, additive, symmetric_multiplicative, special, schur> - Sets fieldsplit preconditioner type
2835 
2836    Level: Intermediate
2837 
2838 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
2839           `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
2840 @*/
2841 PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type)
2842 {
2843   PetscFunctionBegin;
2844   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2845   PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type));
2846   PetscFunctionReturn(PETSC_SUCCESS);
2847 }
2848 
2849 /*@
2850   PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
2851 
2852   Not collective
2853 
2854   Input Parameter:
2855 . pc - the preconditioner context
2856 
2857   Output Parameter:
2858 . type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
2859 
2860   Level: Intermediate
2861 
2862 .seealso: [](sec_block_matrices), `PC`, `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
2863           `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
2864 @*/
PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type)
{
  /* NOTE(review): pc->data is cast without verifying the PC type is PCFIELDSPLIT —
     confirm callers guarantee this (sibling getters use PetscObjectTypeCompare()) */
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidIntPointer(type, 2);
  *type = jac->type;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2875 
2876 /*@
2877    PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
2878 
2879    Logically Collective
2880 
2881    Input Parameters:
2882 +  pc   - the preconditioner context
2883 -  flg  - boolean indicating whether to use field splits defined by the `DM`
2884 
2885    Options Database Key:
2886 .  -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM`
2887 
2888    Level: Intermediate
2889 
2890 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `PCFieldSplitSetFields()`, `PCFieldsplitSetIS()`
2891 @*/
PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flg, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  /* Silently ignore the request when the PC is not a PCFIELDSPLIT */
  if (isfs) jac->dm_splits = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2904 
2905 /*@
2906    PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
2907 
2908    Logically Collective
2909 
2910    Input Parameter:
2911 .  pc   - the preconditioner context
2912 
2913    Output Parameter:
2914 .  flg  - boolean indicating whether to use field splits defined by the `DM`
2915 
2916    Level: Intermediate
2917 
2918 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `PCFieldSplitSetFields()`, `PCFieldsplitSetIS()`
2919 @*/
2920 PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg)
2921 {
2922   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2923   PetscBool      isfs;
2924 
2925   PetscFunctionBegin;
2926   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2927   PetscValidBoolPointer(flg, 2);
2928   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2929   if (isfs) {
2930     if (flg) *flg = jac->dm_splits;
2931   }
2932   PetscFunctionReturn(PETSC_SUCCESS);
2933 }
2934 
2935 /*@
2936    PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
2937 
2938    Logically Collective
2939 
2940    Input Parameter:
2941 .  pc   - the preconditioner context
2942 
2943    Output Parameter:
2944 .  flg  - boolean indicating whether to detect fields or not
2945 
2946    Level: Intermediate
2947 
2948 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()`
2949 @*/
2950 PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg)
2951 {
2952   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2953 
2954   PetscFunctionBegin;
2955   *flg = jac->detect;
2956   PetscFunctionReturn(PETSC_SUCCESS);
2957 }
2958 
2959 /*@
2960    PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
2961 
2962    Logically Collective
2963 
   Input Parameters:
+  pc   - the preconditioner context
-  flg  - boolean indicating whether to detect fields or not
2969 
2970    Options Database Key:
2971 .  -pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point
2972 
2973    Level: Intermediate
2974 
2975  Note:
2976    Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`).
2977 
2978 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`
2979 @*/
PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->detect = flg;
  if (jac->detect) {
    /* Saddle-point detection implies a Schur complement split with the Schur
       system preconditioned by itself (the detected (1,1) block has zero diagonal) */
    PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
    PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2992 
2993 /*MC
2994    PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual
2995    collections of variables (that may overlap) called splits. See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details.
2996 
2997    Options Database Keys:
2998 +   -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split
2999 .   -pc_fieldsplit_default - automatically add any fields to additional splits that have not
3000                               been supplied explicitly by `-pc_fieldsplit_%d_fields`
3001 .   -pc_fieldsplit_block_size <bs> - size of block that defines fields (i.e. there are bs fields)
3002 .   -pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting
3003 .   -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is a11; see `PCFieldSplitSetSchurPre()`
3004 .   -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - set factorization type when using `-pc_fieldsplit_type schur`; see `PCFieldSplitSetSchurFactType()`
3005 -   -pc_fieldsplit_detect_saddle_point - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver
3006 
3007      Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` .
     The options prefix for the inner solver when using the Golub-Kahan bidiagonalization preconditioner is `-fieldsplit_0_`
3009      For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields.
3010 
3011      To set options on the solvers for each block append `-fieldsplit_` to all the `PC`
3012      options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1`
3013 
3014      To set the options on the solvers separate for each block call `PCFieldSplitGetSubKSP()`
3015       and set the options directly on the resulting `KSP` object
3016 
3017     Level: intermediate
3018 
3019    Notes:
3020     Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries and `PCFieldSplitSetIS()`
3021      to define a split by an arbitrary collection of entries.
3022 
3023       If no splits are set the default is used. The splits are defined by entries strided by bs,
3024       beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`,
3025       if this is not called the block size defaults to the blocksize of the second matrix passed
3026       to `KSPSetOperators()`/`PCSetOperators()`.
3027 
3028       For the Schur complement preconditioner if
3029 
3030       ```{math}
3031       J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right]
3032       ```
3033 
3034       the preconditioner using `full` factorization is logically
3035       ```{math}
3036       \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right]
3037       ```
3038      where the action of $\text{inv}(A_{00})$ is applied using the KSP solver with prefix `-fieldsplit_0_`.  $S$ is the Schur complement
3039      ```{math}
3040      S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01}
3041      ```
3042      which is usually dense and not stored explicitly.  The action of $\text{ksp}(S)$ is computed using the KSP solver with prefix `-fieldsplit_splitname_` (where `splitname` was given
3043      in providing the SECOND split or 1 if not given). For `PCFieldSplitGetSubKSP()` when field number is 0,
3044      it returns the KSP associated with `-fieldsplit_0_` while field number 1 gives `-fieldsplit_1_` KSP. By default
3045      $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$.
3046 
3047      The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above,
3048      `diag` gives
3049       ```{math}
3050       \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\  0 & -\text{ksp}(S) \end{array}\right]
3051       ```
3052      Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$  so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip
3053      can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of
3054       ```{math}
3055       \left[\begin{array}{cc} A_{00} & 0 \\  A_{10} & S \end{array}\right]
3056       ```
3057      where the inverses of A_{00} and S are applied using KSPs. The upper factorization is the inverse of
3058       ```{math}
3059       \left[\begin{array}{cc} A_{00} & A_{01} \\  0 & S \end{array}\right]
3060       ```
3061      where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s.
3062 
3063      If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS`
3064      is used automatically for a second block.
3065 
3066      The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1.
3067      Generally it should be used with the `MATAIJ` format.
3068 
3069      The forms of these preconditioners are closely related if not identical to forms derived as "Distributive Iterations", see,
3070      for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`Wesseling2009`.
3071      One can also use `PCFIELDSPLIT`
3072      inside a smoother resulting in "Distributive Smoothers".
3073 
3074      See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`.
3075 
3076      The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the
3077      residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables.
3078 
3079      The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape
3080      ```{math}
3081      \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right]
3082      ```
     with $A_{00}$ positive semi-definite. The implementation follows {cite}`Arioli2013`. Therein, we choose $N := 1/\nu * I$ and the $(1,1)$-block of the matrix is modified to $H = A_{00} + \nu*A_{01}*A_{01}'$.
3084      A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`.
3085 
3086    Developer Note:
3087    The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their
3088    user API.
3089 
3090      References:
3091      ```{bibliography}
3092      :filter: docname in docnames
3093      ```
3094 
3095 .seealso: [](sec_block_matrices), `PC`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`,
3096           `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`,
3097           `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`,
3098           `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()`
3099 M*/
3100 
PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc)
{
  PC_FieldSplit *jac;

  PetscFunctionBegin;
  /* PetscNew() zero-initializes; only non-zero defaults are set explicitly below */
  PetscCall(PetscNew(&jac));

  jac->bs                 = -1; /* unset; filled from the operator's block size or PCFieldSplitSetBlockSize() */
  jac->nsplits            = 0;
  jac->type               = PC_COMPOSITE_MULTIPLICATIVE;
  jac->schurpre           = PC_FIELDSPLIT_SCHUR_PRE_USER; /* Try user preconditioner first, fall back on diagonal */
  jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL;
  jac->schurscale         = -1.0;
  jac->dm_splits          = PETSC_TRUE;
  jac->detect             = PETSC_FALSE;
  /* Defaults for the generalized Golub-Kahan bidiagonalization variant */
  jac->gkbtol             = 1e-5;
  jac->gkbdelay           = 5;
  jac->gkbnu              = 1;
  jac->gkbmaxit           = 100;
  jac->gkbmonitor         = PETSC_FALSE;
  jac->coordinates_set    = PETSC_FALSE;

  pc->data = (void *)jac;

  /* Default apply/view correspond to the multiplicative type; PCFieldSplitSetType()
     swaps these when SCHUR or GKB is selected */
  pc->ops->apply           = PCApply_FieldSplit;
  pc->ops->applytranspose  = PCApplyTranspose_FieldSplit;
  pc->ops->setup           = PCSetUp_FieldSplit;
  pc->ops->reset           = PCReset_FieldSplit;
  pc->ops->destroy         = PCDestroy_FieldSplit;
  pc->ops->setfromoptions  = PCSetFromOptions_FieldSplit;
  pc->ops->view            = PCView_FieldSplit;
  pc->ops->applyrichardson = NULL;

  /* Compose the type-independent methods; type-specific ones are managed by PCFieldSplitSetType() */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3144