xref: /petsc/src/ksp/pc/impls/fieldsplit/fieldsplit.c (revision e92ee4c88f44303128c1ffcb4125ebdffe4723ad) !
1 #include <petsc/private/pcimpl.h>  /*I "petscpc.h" I*/
2 #include <petsc/private/kspimpl.h> /*  This is needed to provide the appropriate PETSC_EXTERN for KSP_Solve_FS ....*/
3 #include <petscdm.h>
4 #include <petscdevice.h>
5 #if PetscDefined(HAVE_CUDA)
6   #include <petscdevice_cuda.h>
7 #endif
8 #if PetscDefined(HAVE_HIP)
9   #include <petscdevice_hip.h>
10 #endif
11 
/* String names for the PCFieldSplitSchurPreType and PCFieldSplitSchurFactType enum values;
   the trailing entries (enum-type name, option-value prefix, NULL) follow the PETSc
   convention for enum string tables used in options processing and viewing */
const char *const PCFieldSplitSchurPreTypes[]  = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL};
const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL};

/* Logging events for the inner solves performed while applying the fieldsplit preconditioner */
PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4;
16 
/* One split in the linked list hanging off PC_FieldSplit; each split carries its own
   sub-KSP, the fields or index sets that define it, a scatter, and work vectors. */
typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
struct _PC_FieldSplitLink {
  KSP               ksp;        /* solver for this split's block */
  Vec               x, y, z;    /* per-split work vectors -- presumably scatter input/output/temporary; TODO confirm roles at use sites */
  char             *splitname;  /* name of the split, used in viewing and options prefixes */
  PetscInt          nfields;    /* number of entries in fields[]/fields_col[] */
  PetscInt         *fields, *fields_col; /* row and column field indices when the split is defined by fields rather than an IS */
  VecScatter        sctx;       /* scatter between the full vector and this split's subvector -- assumed; confirm where sctx is created */
  IS                is, is_col; /* row and column index sets defining the split */
  PC_FieldSplitLink next, previous; /* doubly-linked list pointers */
  PetscLogEvent     event;      /* logging event for this split's solve */

  /* Used only when setting coordinates with PCSetCoordinates */
  PetscInt   dim;
  PetscInt   ndofs;
  PetscReal *coords;
};
34 
/* Private data for PCFIELDSPLIT: the list of splits plus state specific to the
   Schur-complement and Golub-Kahan-bidiagonalization compositions. */
typedef struct {
  PCCompositeType type;         /* How the split solves are composed (e.g. additive/multiplicative; see PCCompositeTypes in the viewers) */
  PetscBool       defaultsplit; /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */
  PetscBool       splitdefined; /* Flag is set after the splits have been defined, to prevent more splits from being added */
  PetscInt        bs;           /* Block size for IS and Mat structures */
  PetscInt        nsplits;      /* Number of field divisions defined */
  Vec            *x, *y, w1, w2; /* Per-split work vectors (x,y) and whole-vector work space (w1,w2) -- roles inferred from names; confirm at use sites */
  Mat            *mat;    /* The diagonal block for each split */
  Mat            *pmat;   /* The preconditioning diagonal block for each split */
  Mat            *Afield; /* The rows of the matrix associated with each split */
  PetscBool       issetup; /* Set by PCSetUp_FieldSplit() once the index sets/submatrices have been built */

  /* Only used when Schur complement preconditioning is used */
  Mat                       B;          /* The (0,1) block */
  Mat                       C;          /* The (1,0) block */
  Mat                       schur;      /* The Schur complement S = A11 - A10 A00^{-1} A01, the KSP here, kspinner, is H_1 in [El08] */
  Mat                       schurp;     /* Assembled approximation to S built by MatSchurComplement to be used as a matrix for constructing the preconditioner when solving with S */
  Mat                       schur_user; /* User-provided matrix for constructing the preconditioner for the Schur complement */
  PCFieldSplitSchurPreType  schurpre;   /* Determines which matrix is used for the Schur complement */
  PCFieldSplitSchurFactType schurfactorization;
  KSP                       kspschur;   /* The solver for S */
  KSP                       kspupper;   /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */
  PetscScalar               schurscale; /* Scaling factor for the Schur complement solution with DIAG factorization */

  /* Only used when Golub-Kahan bidiagonalization preconditioning is used */
  Mat          H;           /* The modified matrix H = A00 + nu*A01*A01'              */
  PetscReal    gkbtol;      /* Stopping tolerance for lower bound estimate            */
  PetscInt     gkbdelay;    /* The delay window for the stopping criterion            */
  PetscReal    gkbnu;       /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */
  PetscInt     gkbmaxit;    /* Maximum number of iterations for outer loop            */
  PetscBool    gkbmonitor;  /* Monitor for gkb iterations and the lower bound error   */
  PetscViewer  gkbviewer;   /* Viewer context for gkbmonitor                          */
  Vec          u, v, d, Hu; /* Work vectors for the GKB algorithm                     */
  PetscScalar *vecz;        /* Contains intermediate values, eg for lower bound       */

  PC_FieldSplitLink head;               /* Head of the linked list of splits */
  PetscBool         isrestrict;       /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */
  PetscBool         suboptionsset;    /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */
  PetscBool         dm_splits;        /* Whether to use DMCreateFieldDecomposition() whenever possible */
  PetscBool         diag_use_amat;    /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         detect;           /* Whether to form 2-way split by finding zero diagonal entries */
  PetscBool         coordinates_set;  /* Whether PCSetCoordinates has been called */
} PC_FieldSplit;
79 
/*
    Note:
    There is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit rather than
    inside PC_FieldSplitLink; the layout is purely historical. If you want to be able to add new fields
    after the PC has already been used, you could change this.
*/
86 
87 /* This helper is so that setting a user-provided matrix is orthogonal to choosing to use it.  This way the
88 * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices. */
89 static Mat FieldSplitSchurPre(PC_FieldSplit *jac)
90 {
91   switch (jac->schurpre) {
92   case PC_FIELDSPLIT_SCHUR_PRE_SELF:
93     return jac->schur;
94   case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
95     return jac->schurp;
96   case PC_FIELDSPLIT_SCHUR_PRE_A11:
97     return jac->pmat[1];
98   case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */
99   case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */
100   default:
101     return jac->schur_user ? jac->schur_user : jac->pmat[1];
102   }
103 }
104 
105 #include <petscdraw.h>
/* ASCII and draw viewer for PCFIELDSPLIT with non-Schur/non-GKB compositions:
   prints the composition type, optional block size, each split's field list
   (or "Defined by IS"), and views every split's sub-KSP. */
static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         isascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for each split is in the following KSP objects:\n"));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        /* Split defined by a field list: print it comma-separated, with tabbing
           suppressed so the whole list stays on one line */
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      PetscCall(KSPView(ilink->ksp, viewer));
      ilink = ilink->next;
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    /* NOTE(review): ilink is not reset here; this relies on a viewer never being
       both ASCII and draw, so the list was not consumed by the branch above */
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* lay out the sub-KSP drawings horizontally, centered on the current point */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
163 
/* ASCII and draw viewer for the Schur-complement composition: reports the
   factorization type, which matrix the Schur preconditioner is built from,
   the split definitions, and the A00 / upper-A00 / Schur sub-KSPs. */
static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer)
{
  PC_FieldSplit             *jac = (PC_FieldSplit *)pc->data;
  PetscBool                  isascii, isdraw;
  PetscInt                   i, j;
  PC_FieldSplitLink          ilink = jac->head;
  MatSchurComplementAinvType atype;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    /* Report which matrix the Schur-complement preconditioner is built from;
       mirrors the selection logic in FieldSplitSchurPre() */
    switch (jac->schurpre) {
    case PC_FIELDSPLIT_SCHUR_PRE_SELF:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from S itself\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
      /* jac->schur may not exist yet if the PC has not been set up */
      if (jac->schur) {
        PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype));
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's "))));
      }
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_A11:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_FULL:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from the exact Schur complement\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_USER:
      /* USER without a user-provided matrix falls back to A11 (see FieldSplitSchurPre()) */
      if (jac->schur_user) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from user provided matrix\n"));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      }
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre);
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Split info:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        /* Split defined by a field list: print it comma-separated on one line */
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      ilink = ilink->next;
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->head) PetscCall(KSPView(jac->head->ksp, viewer));
    else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    PetscCall(PetscViewerASCIIPopTab(viewer));
    /* A distinct upper solver exists only when kspupper differs from the A00 solver */
    if (jac->head && jac->kspupper != jac->head->ksp) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->kspschur) {
      PetscCall(KSPView(jac->kspschur, viewer));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    }
    PetscCall(PetscViewerASCIIPopTab(viewer));
    PetscCall(PetscViewerASCIIPopTab(viewer));
  } else if (isdraw && jac->head) {
    PetscDraw draw;
    PetscReal x, y, w, wd, h;
    PetscInt  cnt = 2; /* A00 solver + Schur solver; +1 below if there is a distinct upper solver */
    char      str[32];

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (jac->kspupper != jac->head->ksp) cnt++;
    /* horizontal layout of cnt sub-drawings centered on the current point */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (cnt + 1);

    PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    /* USER with no user matrix is displayed as A11, matching the ASCII branch */
    if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11]));
    } else {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre]));
    }
    PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    x = x - wd * (cnt - 1) / 2.0;

    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->head->ksp, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
    if (jac->kspupper != jac->head->ksp) {
      x += wd;
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(jac->kspupper, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
    }
    x += wd;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->kspschur, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
286 
/* ASCII and draw viewer for the Golub-Kahan bidiagonalization composition:
   reports the GKB parameters and the solver for H = A00 + nu*A01*A01'.
   Note the ASCII branch shows only split 0 (the H solver); the draw branch
   iterates over all splits. */
static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         isascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));

    /* GKB-specific parameters (see the gkb* fields of PC_FieldSplit) */
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for H = A00 + nu*A01*A01' matrix:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));

    if (ilink->fields) {
      /* Split defined by a field list: print it comma-separated on one line */
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields "));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      for (j = 0; j < ilink->nfields; j++) {
        if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
        PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
      }
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n"));
    }
    PetscCall(KSPView(ilink->ksp, viewer));

    PetscCall(PetscViewerASCIIPopTab(viewer));
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* horizontal layout of the sub-KSP drawings centered on the current point */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
347 
348 /* Precondition: jac->bs is set to a meaningful value or MATNEST */
349 static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc)
350 {
351   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
352   PetscInt       bs, i, nfields, *ifields, nfields_col, *ifields_col;
353   PetscBool      flg, flg_col, mnest;
354   char           optionname[128], splitname[8], optionname_col[128];
355 
356   PetscFunctionBegin;
357   PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &mnest));
358   if (mnest) {
359     PetscCall(MatNestGetSize(pc->pmat, &bs, NULL));
360   } else {
361     bs = jac->bs;
362   }
363   PetscCall(PetscMalloc2(bs, &ifields, bs, &ifields_col));
364   for (i = 0, flg = PETSC_TRUE;; i++) {
365     PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
366     PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
367     PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i));
368     nfields     = bs;
369     nfields_col = bs;
370     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
371     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col));
372     if (!flg) break;
373     else if (flg && !flg_col) {
374       PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
375       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields));
376     } else {
377       PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
378       PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match");
379       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col));
380     }
381   }
382   if (i > 0) {
383     /* Makes command-line setting of splits take precedence over setting them in code.
384        Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would
385        create new splits, which would probably not be what the user wanted. */
386     jac->splitdefined = PETSC_TRUE;
387   }
388   PetscCall(PetscFree2(ifields, ifields_col));
389   PetscFunctionReturn(PETSC_SUCCESS);
390 }
391 
/* Ensure at least two splits are defined before setup, choosing them automatically
   when the user provided none: via DMCreateFieldDecomposition() when a DM is
   available, via zero-diagonal detection (-pc_fieldsplit_detect_coupling / detect),
   or by a default one-field-per-block split.  A single user-provided IS split is
   completed with its complement.  Errors if fewer than two splits result. */
static PetscErrorCode PCFieldSplitSetDefaults(PC pc)
{
  PC_FieldSplit    *jac                = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink              = jac->head;
  PetscBool         fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE;
  PetscInt          i;

  PetscFunctionBegin;
  /*
   Kinda messy, but at least this now uses DMCreateFieldDecomposition().
   Should probably be rewritten.
   */
  if (!ilink) {
    /* No splits defined yet: build them automatically */
    PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL));
    if (pc->dm && jac->dm_splits && !jac->detect && !coupling) {
      /* Path 1: derive the splits from the DM's field decomposition */
      PetscInt  numFields, f, i, j;
      char    **fieldNames;
      IS       *fields;
      DM       *dms;
      DM        subdm[128];
      PetscBool flg;

      PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms));
      /* Allow the user to prescribe the splits */
      for (i = 0, flg = PETSC_TRUE;; i++) {
        PetscInt ifields[128];
        IS       compField;
        char     optionname[128], splitname[8];
        PetscInt nfields = numFields;

        PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
        PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
        if (!flg) break;
        PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields);
        PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i]));
        if (nfields == 1) {
          /* single-field split: reuse the DM-provided field name */
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField));
        } else {
          /* multi-field split: name it by its index */
          PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
          PetscCall(PCFieldSplitSetIS(pc, splitname, compField));
        }
        PetscCall(ISDestroy(&compField));
        /* release the decomposition entries consumed by this split */
        for (j = 0; j < nfields; ++j) {
          f = ifields[j];
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      }
      if (i == 0) {
        /* no user-prescribed splits: use every DM field as its own split */
        for (f = 0; f < numFields; ++f) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f]));
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      } else {
        /* replace the decomposition DMs with the sub-DMs created above */
        for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j));
        PetscCall(PetscFree(dms));
        PetscCall(PetscMalloc1(i, &dms));
        for (j = 0; j < i; ++j) dms[j] = subdm[j];
      }
      PetscCall(PetscFree(fieldNames));
      PetscCall(PetscFree(fields));
      if (dms) {
        PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n"));
        /* attach each DM to its split's KSP (inactive, so the KSP keeps its own operators) */
        for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) {
          const char *prefix;
          PetscCall(PetscObjectGetOptionsPrefix((PetscObject)ilink->ksp, &prefix));
          PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dms[i], prefix));
          PetscCall(KSPSetDM(ilink->ksp, dms[i]));
          PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE));
          PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0));
          PetscCall(DMDestroy(&dms[i]));
        }
        PetscCall(PetscFree(dms));
      }
    } else {
      /* Path 2/3: no usable DM decomposition; work from the matrix block size */
      if (jac->bs <= 0) {
        if (pc->pmat) PetscCall(MatGetBlockSize(pc->pmat, &jac->bs));
        else jac->bs = 1;
      }

      if (jac->detect) {
        /* 2-way split: zero-diagonal rows vs. the rest (e.g. saddle-point systems) */
        IS       zerodiags, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->diag_use_amat) {
          PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags));
        } else {
          PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags));
        }
        PetscCall(ISComplement(zerodiags, nmin, nmax, &rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags));
        PetscCall(ISDestroy(&zerodiags));
        PetscCall(ISDestroy(&rest));
      } else if (coupling) {
        /* 2-way split: all rows vs. rows with off-block-diagonal entries */
        IS       coupling, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->offdiag_use_amat) {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling));
        } else {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling));
        }
        PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest));
        PetscCall(ISSetIdentity(rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", coupling));
        PetscCall(ISDestroy(&coupling));
        PetscCall(ISDestroy(&rest));
      } else {
        PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL));
        if (!fieldsplit_default) {
          /* Allow user to set fields from command line,  if bs was known at the time of PCSetFromOptions_FieldSplit()
           then it is set there. This is not ideal because we should only have options set in XXSetFromOptions(). */
          PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
          if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
        }
        if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) {
          /* Default split: one split per field/block, count taken from MATNEST size or jac->bs */
          Mat       M = pc->pmat;
          PetscBool isnest;
          PetscInt  nf;

          PetscCall(PetscInfo(pc, "Using default splitting of fields\n"));
          PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest));
          if (!isnest) {
            M = pc->mat;
            PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest));
          }
          if (!isnest) nf = jac->bs;
          else PetscCall(MatNestGetSize(M, &nf, NULL));
          for (i = 0; i < nf; i++) {
            char splitname[8];

            PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
            PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i));
          }
          jac->defaultsplit = PETSC_TRUE;
        }
      }
    }
  } else if (jac->nsplits == 1) {
    /* Exactly one split given: complete it with the complement of its IS */
    IS       is2;
    PetscInt nmin, nmax;

    PetscCheck(ilink->is, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()");
    PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
    PetscCall(ISComplement(ilink->is, nmin, nmax, &is2));
    PetscCall(PCFieldSplitSetIS(pc, "1", is2));
    PetscCall(ISDestroy(&is2));
  }

  PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits);
  PetscFunctionReturn(PETSC_SUCCESS);
}
549 
550 static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu)
551 {
552   Mat       BT, T;
553   PetscReal nrmT, nrmB;
554 
555   PetscFunctionBegin;
556   PetscCall(MatHermitianTranspose(C, MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */
557   PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN));
558   PetscCall(MatNorm(T, NORM_1, &nrmT));
559   PetscCall(MatNorm(B, NORM_1, &nrmB));
560   PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/hermitian, GKB is not applicable.");
561 
562   /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */
563   /* setting N := 1/nu*I in [Ar13].                                                 */
564   PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT));
565   PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_CURRENT, H)); /* H = A01*A01'          */
566   PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN));        /* H = A00 + nu*A01*A01' */
567 
568   PetscCall(MatDestroy(&BT));
569   PetscCall(MatDestroy(&T));
570   PetscFunctionReturn(PETSC_SUCCESS);
571 }
572 
573 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *option[], const char *value[], PetscBool *flg);
574 
575 static PetscErrorCode PCSetUp_FieldSplit(PC pc)
576 {
577   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
578   PC_FieldSplitLink ilink;
579   PetscInt          i, nsplit;
580   PetscBool         sorted, sorted_col, matnest = PETSC_FALSE;
581 
582   PetscFunctionBegin;
583   pc->failedreason = PC_NOERROR;
584   PetscCall(PCFieldSplitSetDefaults(pc));
585   nsplit = jac->nsplits;
586   ilink  = jac->head;
587   if (pc->pmat) PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
588 
589   /* get the matrices for each split */
590   if (!jac->issetup) {
591     PetscInt rstart, rend, nslots, bs;
592 
593     jac->issetup = PETSC_TRUE;
594 
595     /* This is done here instead of in PCFieldSplitSetFields() because may not have matrix at that point */
596     if (jac->defaultsplit || !ilink->is) {
597       if (jac->bs <= 0) jac->bs = nsplit;
598     }
599 
600     /*  MatCreateSubMatrix() for [S]BAIJ matrices can only work if the indices include entire blocks of the matrix */
601     PetscCall(MatGetBlockSize(pc->pmat, &bs));
602     if (bs > 1 && (jac->bs <= bs || jac->bs % bs)) {
603       PetscBool blk;
604 
605       PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &blk, MATBAIJ, MATSBAIJ, MATSEQBAIJ, MATSEQSBAIJ, MATMPIBAIJ, MATMPISBAIJ, NULL));
606       PetscCheck(!blk, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Cannot use MATBAIJ with PCFIELDSPLIT and currently set matrix and PC blocksizes");
607     }
608 
609     if (!matnest) { /* use the matrix blocksize and stride IS to determine the index sets that define the submatrices */
610       bs = jac->bs;
611       PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend));
612       nslots = (rend - rstart) / bs;
613       for (i = 0; i < nsplit; i++) {
614         if (jac->defaultsplit) {
615           PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is));
616           PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
617         } else if (!ilink->is) {
618           if (ilink->nfields > 1) {
619             PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col;
620 
621             PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii));
622             PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj));
623             for (j = 0; j < nslots; j++) {
624               for (k = 0; k < nfields; k++) {
625                 ii[nfields * j + k] = rstart + bs * j + fields[k];
626                 jj[nfields * j + k] = rstart + bs * j + fields_col[k];
627               }
628             }
629             PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is));
630             PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col));
631             PetscCall(ISSetBlockSize(ilink->is, nfields));
632             PetscCall(ISSetBlockSize(ilink->is_col, nfields));
633           } else {
634             PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is));
635             PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col));
636           }
637         }
638         PetscCall(ISSorted(ilink->is, &sorted));
639         if (ilink->is_col) PetscCall(ISSorted(ilink->is_col, &sorted_col));
640         PetscCheck(sorted && sorted_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Fields must be sorted when creating split");
641         ilink = ilink->next;
642       }
643     } else { /* use the IS that define the MATNEST to determine the index sets that define the submatrices */
644       IS      *rowis, *colis, *ises = NULL;
645       PetscInt mis, nis;
646 
647       PetscCall(MatNestGetSize(pc->pmat, &mis, &nis));
648       PetscCall(PetscMalloc2(mis, &rowis, nis, &colis));
649       PetscCall(MatNestGetISs(pc->pmat, rowis, colis));
650       if (!jac->defaultsplit) PetscCall(PetscMalloc1(mis, &ises));
651 
652       for (i = 0; i < nsplit; i++) {
653         if (jac->defaultsplit) {
654           PetscCall(ISDuplicate(rowis[i], &ilink->is));
655           PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
656         } else if (!ilink->is) {
657           if (ilink->nfields > 1) {
658             for (PetscInt j = 0; j < ilink->nfields; j++) ises[j] = rowis[ilink->fields[j]];
659             PetscCall(ISConcatenate(PetscObjectComm((PetscObject)pc), ilink->nfields, ises, &ilink->is));
660           } else {
661             PetscCall(ISDuplicate(rowis[ilink->fields[0]], &ilink->is));
662           }
663           PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
664         }
665         ilink = ilink->next;
666       }
667       PetscCall(PetscFree2(rowis, colis));
668       PetscCall(PetscFree(ises));
669     }
670   }
671 
672   ilink = jac->head;
673   if (!jac->pmat) {
674     Vec xtmp;
675 
676     PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL));
677     PetscCall(PetscMalloc1(nsplit, &jac->pmat));
678     PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y));
679     for (i = 0; i < nsplit; i++) {
680       MatNullSpace sp;
681 
682       /* Check for matrix attached to IS */
683       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i]));
684       if (jac->pmat[i]) {
685         PetscCall(PetscObjectReference((PetscObject)jac->pmat[i]));
686         if (jac->type == PC_COMPOSITE_SCHUR) {
687           jac->schur_user = jac->pmat[i];
688 
689           PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
690         }
691       } else {
692         const char *prefix;
693         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i]));
694         PetscCall(MatGetOptionsPrefix(jac->pmat[i], &prefix));
695         if (!prefix) {
696           PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix));
697           PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix));
698         }
699         PetscCall(MatSetFromOptions(jac->pmat[i]));
700         PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view"));
701       }
702       /* create work vectors for each split */
703       PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i]));
704       ilink->x = jac->x[i];
705       ilink->y = jac->y[i];
706       ilink->z = NULL;
707       /* compute scatter contexts needed by multiplicative versions and non-default splits */
708       PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx));
709       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp));
710       if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp));
711       ilink = ilink->next;
712     }
713     PetscCall(VecDestroy(&xtmp));
714   } else {
715     MatReuse      scall;
716     MatNullSpace *nullsp = NULL;
717 
718     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
719       PetscCall(MatGetNullSpaces(nsplit, jac->pmat, &nullsp));
720       for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i]));
721       scall = MAT_INITIAL_MATRIX;
722     } else scall = MAT_REUSE_MATRIX;
723 
724     for (i = 0; i < nsplit; i++) {
725       Mat pmat;
726 
727       /* Check for matrix attached to IS */
728       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat));
729       if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i]));
730       ilink = ilink->next;
731     }
732     if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->pmat, &nullsp));
733   }
734   if (jac->diag_use_amat) {
735     ilink = jac->head;
736     if (!jac->mat) {
737       PetscCall(PetscMalloc1(nsplit, &jac->mat));
738       for (i = 0; i < nsplit; i++) {
739         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i]));
740         ilink = ilink->next;
741       }
742     } else {
743       MatReuse      scall;
744       MatNullSpace *nullsp = NULL;
745 
746       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
747         PetscCall(MatGetNullSpaces(nsplit, jac->mat, &nullsp));
748         for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i]));
749         scall = MAT_INITIAL_MATRIX;
750       } else scall = MAT_REUSE_MATRIX;
751 
752       for (i = 0; i < nsplit; i++) {
753         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i]));
754         ilink = ilink->next;
755       }
756       if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->mat, &nullsp));
757     }
758   } else {
759     jac->mat = jac->pmat;
760   }
761 
762   /* Check for null space attached to IS */
763   ilink = jac->head;
764   for (i = 0; i < nsplit; i++) {
765     MatNullSpace sp;
766 
767     PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp));
768     if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp));
769     ilink = ilink->next;
770   }
771 
772   if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) {
773     /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */
774     /* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? */
775     ilink = jac->head;
776     if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) {
777       /* special case need where Afield[0] is not needed and only certain columns of Afield[1] are needed since update is only on those rows of the solution */
778       if (!jac->Afield) {
779         PetscCall(PetscCalloc1(nsplit, &jac->Afield));
780         if (jac->offdiag_use_amat) {
781           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
782         } else {
783           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
784         }
785       } else {
786         MatReuse scall;
787 
788         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
789           PetscCall(MatDestroy(&jac->Afield[1]));
790           scall = MAT_INITIAL_MATRIX;
791         } else scall = MAT_REUSE_MATRIX;
792 
793         if (jac->offdiag_use_amat) {
794           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
795         } else {
796           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
797         }
798       }
799     } else {
800       if (!jac->Afield) {
801         PetscCall(PetscMalloc1(nsplit, &jac->Afield));
802         for (i = 0; i < nsplit; i++) {
803           if (jac->offdiag_use_amat) {
804             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
805           } else {
806             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
807           }
808           ilink = ilink->next;
809         }
810       } else {
811         MatReuse scall;
812         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
813           for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i]));
814           scall = MAT_INITIAL_MATRIX;
815         } else scall = MAT_REUSE_MATRIX;
816 
817         for (i = 0; i < nsplit; i++) {
818           if (jac->offdiag_use_amat) {
819             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i]));
820           } else {
821             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, &jac->Afield[i]));
822           }
823           ilink = ilink->next;
824         }
825       }
826     }
827   }
828 
829   if (jac->type == PC_COMPOSITE_SCHUR) {
830     IS          ccis;
831     PetscBool   isset, isspd = PETSC_FALSE, issym = PETSC_FALSE, flg;
832     PetscInt    rstart, rend;
833     char        lscname[256];
834     PetscObject LSC_L;
835 
836     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields");
837 
838     /* If pc->mat is SPD, don't scale by -1 the Schur complement */
839     PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd));
840     if (jac->schurscale == (PetscScalar)-1.0) jac->schurscale = (isset && isspd) ? 1.0 : -1.0;
841     PetscCall(MatIsSymmetricKnown(pc->pmat, &isset, &issym));
842 
843     /* When extracting off-diagonal submatrices, we take complements from this range */
844     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
845     PetscCall(PetscObjectTypeCompareAny(jac->offdiag_use_amat ? (PetscObject)pc->mat : (PetscObject)pc->pmat, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
846 
847     if (jac->schur) {
848       KSP      kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper;
849       MatReuse scall;
850 
851       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
852         scall = MAT_INITIAL_MATRIX;
853         PetscCall(MatDestroy(&jac->B));
854         PetscCall(MatDestroy(&jac->C));
855       } else scall = MAT_REUSE_MATRIX;
856 
857       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
858       ilink = jac->head;
859       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
860       if (jac->offdiag_use_amat) {
861         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->B));
862       } else {
863         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->B));
864       }
865       PetscCall(ISDestroy(&ccis));
866       if (!flg) {
867         ilink = ilink->next;
868         PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
869         if (jac->offdiag_use_amat) {
870           PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->C));
871         } else {
872           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->C));
873         }
874         PetscCall(ISDestroy(&ccis));
875       } else {
876         PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &isset, &flg));
877         if (isset && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
878         else PetscCall(MatCreateTranspose(jac->B, &jac->C));
879       }
880       PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
881       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) {
882         PetscCall(MatDestroy(&jac->schurp));
883         PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
884       } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL && jac->kspupper != jac->head->ksp) {
885         PetscCall(MatDestroy(&jac->schur_user));
886         PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
887       }
888       if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0]));
889       if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0]));
890       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
891     } else {
892       const char  *Dprefix;
893       char         schurprefix[256], schurmatprefix[256];
894       char         schurtestoption[256];
895       MatNullSpace sp;
896       KSP          kspt;
897 
898       /* extract the A01 and A10 matrices */
899       ilink = jac->head;
900       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
901       if (jac->offdiag_use_amat) {
902         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
903       } else {
904         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
905       }
906       PetscCall(ISDestroy(&ccis));
907       ilink = ilink->next;
908       if (!flg) {
909         PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
910         if (jac->offdiag_use_amat) {
911           PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
912         } else {
913           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
914         }
915         PetscCall(ISDestroy(&ccis));
916       } else {
917         PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &isset, &flg));
918         if (isset && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
919         else PetscCall(MatCreateTranspose(jac->B, &jac->C));
920       }
921       /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */
922       PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur));
923       PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT));
924       PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
925       PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
926       PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix));
927       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt));
928       PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix));
929 
930       /* Note: this is not true in general */
931       PetscCall(MatGetNullSpace(jac->mat[1], &sp));
932       if (sp) PetscCall(MatSetNullSpace(jac->schur, sp));
933 
934       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname));
935       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
936       if (flg) {
937         DM  dmInner;
938         KSP kspInner;
939         PC  pcInner;
940 
941         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
942         PetscCall(KSPReset(kspInner));
943         PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0]));
944         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
945         /* Indent this deeper to emphasize the "inner" nature of this solver. */
946         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2));
947         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2));
948         PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix));
949 
950         /* Set DM for new solver */
951         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
952         PetscCall(KSPSetDM(kspInner, dmInner));
953         PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE));
954 
955         /* Defaults to PCKSP as preconditioner */
956         PetscCall(KSPGetPC(kspInner, &pcInner));
957         PetscCall(PCSetType(pcInner, PCKSP));
958         PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp));
959       } else {
960         /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or
961           * PCFieldSplitSetIS_FieldSplit. We don't want KSPPREONLY because it makes the Schur complement inexact,
962           * preventing Schur complement reduction to be an accurate solve. Usually when an iterative solver is used for
963           * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make
964           * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used
965           * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. */
966         PetscCall(KSPSetType(jac->head->ksp, KSPGMRES));
967         PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp));
968       }
969       PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0]));
970       PetscCall(KSPSetFromOptions(jac->head->ksp));
971       PetscCall(MatSetFromOptions(jac->schur));
972 
973       PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg));
974       if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */
975         KSP kspInner;
976         PC  pcInner;
977 
978         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
979         PetscCall(KSPGetPC(kspInner, &pcInner));
980         PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg));
981         if (flg) {
982           KSP ksp;
983 
984           PetscCall(PCKSPGetKSP(pcInner, &ksp));
985           if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE));
986         }
987       }
988       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname));
989       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
990       if (flg) {
991         DM dmInner;
992 
993         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
994         PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper));
995         PetscCall(KSPSetNestLevel(jac->kspupper, pc->kspnestlevel));
996         PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure));
997         PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix));
998         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1));
999         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1));
1000         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
1001         PetscCall(KSPSetDM(jac->kspupper, dmInner));
1002         PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE));
1003         PetscCall(KSPSetFromOptions(jac->kspupper));
1004         PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0]));
1005         PetscCall(VecDuplicate(jac->head->x, &jac->head->z));
1006       } else {
1007         jac->kspupper = jac->head->ksp;
1008         PetscCall(PetscObjectReference((PetscObject)jac->head->ksp));
1009       }
1010 
1011       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
1012       PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur));
1013       PetscCall(KSPSetNestLevel(jac->kspschur, pc->kspnestlevel));
1014       PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure));
1015       PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1));
1016       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) {
1017         PC pcschur;
1018         PetscCall(KSPGetPC(jac->kspschur, &pcschur));
1019         PetscCall(PCSetType(pcschur, PCNONE));
1020         /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */
1021       } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) {
1022         if (jac->schurfactorization != PC_FIELDSPLIT_SCHUR_FACT_FULL || jac->kspupper != jac->head->ksp) PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
1023       }
1024       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
1025       PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix));
1026       PetscCall(KSPSetOptionsPrefix(jac->kspschur, Dprefix));
1027       /* propagate DM */
1028       {
1029         DM sdm;
1030         PetscCall(KSPGetDM(jac->head->next->ksp, &sdm));
1031         if (sdm) {
1032           PetscCall(KSPSetDM(jac->kspschur, sdm));
1033           PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE));
1034         }
1035       }
1036       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
1037       /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */
1038       PetscCall(KSPSetFromOptions(jac->kspschur));
1039     }
1040     PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY));
1041     PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY));
1042     if (issym) PetscCall(MatSetOption(jac->schur, MAT_SYMMETRIC, PETSC_TRUE));
1043     if (isspd) PetscCall(MatSetOption(jac->schur, MAT_SPD, PETSC_TRUE));
1044 
1045     /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */
1046     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname));
1047     PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, &LSC_L));
1048     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, &LSC_L));
1049     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", LSC_L));
1050     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname));
1051     PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, &LSC_L));
1052     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, &LSC_L));
1053     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", LSC_L));
1054   } else if (jac->type == PC_COMPOSITE_GKB) {
1055     IS       ccis;
1056     PetscInt rstart, rend;
1057 
1058     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields");
1059 
1060     ilink = jac->head;
1061 
1062     /* When extracting off-diagonal submatrices, we take complements from this range */
1063     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
1064 
1065     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1066     if (jac->offdiag_use_amat) {
1067       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1068     } else {
1069       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1070     }
1071     PetscCall(ISDestroy(&ccis));
1072     /* Create work vectors for GKB algorithm */
1073     PetscCall(VecDuplicate(ilink->x, &jac->u));
1074     PetscCall(VecDuplicate(ilink->x, &jac->Hu));
1075     PetscCall(VecDuplicate(ilink->x, &jac->w2));
1076     ilink = ilink->next;
1077     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1078     if (jac->offdiag_use_amat) {
1079       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1080     } else {
1081       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1082     }
1083     PetscCall(ISDestroy(&ccis));
1084     /* Create work vectors for GKB algorithm */
1085     PetscCall(VecDuplicate(ilink->x, &jac->v));
1086     PetscCall(VecDuplicate(ilink->x, &jac->d));
1087     PetscCall(VecDuplicate(ilink->x, &jac->w1));
1088     PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu));
1089     PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz));
1090 
1091     ilink = jac->head;
1092     PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H));
1093     if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1094     /* Create gkb_monitor context */
1095     if (jac->gkbmonitor) {
1096       PetscInt tablevel;
1097       PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer));
1098       PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII));
1099       PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel));
1100       PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel));
1101       PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1));
1102     }
1103   } else {
1104     /* set up the individual splits' PCs */
1105     i     = 0;
1106     ilink = jac->head;
1107     while (ilink) {
1108       PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i]));
1109       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
1110       if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1111       i++;
1112       ilink = ilink->next;
1113     }
1114   }
1115 
1116   /* Set coordinates to the sub PC objects whenever these are set */
1117   if (jac->coordinates_set) {
1118     PC pc_coords;
1119     if (jac->type == PC_COMPOSITE_SCHUR) {
1120       // Head is first block.
1121       PetscCall(KSPGetPC(jac->head->ksp, &pc_coords));
1122       PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords));
1123       // Second one is Schur block, but its KSP object is in kspschur.
1124       PetscCall(KSPGetPC(jac->kspschur, &pc_coords));
1125       PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords));
1126     } else if (jac->type == PC_COMPOSITE_GKB) {
1127       PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB Fieldpslit preconditioner\n"));
1128     } else {
1129       ilink = jac->head;
1130       while (ilink) {
1131         PetscCall(KSPGetPC(ilink->ksp, &pc_coords));
1132         PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords));
1133         ilink = ilink->next;
1134       }
1135     }
1136   }
1137 
1138   jac->suboptionsset = PETSC_TRUE;
1139   PetscFunctionReturn(PETSC_SUCCESS);
1140 }
1141 
/*
  PCSetUpOnBlocks_FieldSplit_Schur - Runs KSPSetUp()/KSPSetUpOnBlocks() on the sub-solvers of a
  Schur-type fieldsplit: the upper-factor solver (when distinct, for the FULL factorization),
  the A00 solver, and the Schur-complement solver.

  For PC_FIELDSPLIT_SCHUR_PRE_FULL with kspUpper == kspA, instead of setting up kspschur it
  pre-allocates an m x (N+1) dense matrix (N columns of B plus one extra column) and composes it
  on jac->schur under the key "AinvB" — presumably consumed later when forming the explicit
  Schur complement; TODO confirm against MatSchurComplementComputeExplicitOperator usage.
*/
static PetscErrorCode PCSetUpOnBlocks_FieldSplit_Schur(PC pc)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head; /* first split: the A00 block */
  KSP               kspA = ilinkA->ksp, kspUpper = jac->kspupper;

  PetscFunctionBegin;
  if (jac->schurfactorization == PC_FIELDSPLIT_SCHUR_FACT_FULL && kspUpper != kspA) {
    PetscCall(KSPSetUp(kspUpper));
    PetscCall(KSPSetUpOnBlocks(kspUpper));
  }
  PetscCall(KSPSetUp(kspA));
  PetscCall(KSPSetUpOnBlocks(kspA));
  if (jac->schurpre != PC_FIELDSPLIT_SCHUR_PRE_FULL) {
    PetscCall(KSPSetUp(jac->kspschur));
    PetscCall(KSPSetUpOnBlocks(jac->kspschur));
  } else if (kspUpper == kspA) {
    Mat          A;
    PetscInt     m, M, N;
    VecType      vtype;
    PetscMemType mtype;
    PetscScalar *array;

    PetscCall(MatGetSize(jac->B, &M, &N));
    PetscCall(MatGetLocalSize(jac->B, &m, NULL));
    PetscCall(MatGetVecType(jac->B, &vtype));
    /* get/restore pair only probes where the split's work vector lives (host vs device);
       the pointer obtained here is immediately overwritten by the allocations below */
    PetscCall(VecGetArrayAndMemType(ilinkA->x, &array, &mtype));
    PetscCall(VecRestoreArrayAndMemType(ilinkA->x, &array));
    /* allocate the backing storage in the same memory space as the work vector */
    if (PetscMemTypeHost(mtype) || (!PetscDefined(HAVE_CUDA) && !PetscDefined(HAVE_HIP))) PetscCall(PetscMalloc1(m * (N + 1), &array));
#if PetscDefined(HAVE_CUDA)
    else if (PetscMemTypeCUDA(mtype)) PetscCallCUDA(cudaMalloc((void **)&array, sizeof(PetscScalar) * m * (N + 1)));
#endif
#if PetscDefined(HAVE_HIP)
    else if (PetscMemTypeHIP(mtype)) PetscCallHIP(hipMalloc((void **)&array, sizeof(PetscScalar) * m * (N + 1)));
#endif
    /* NOTE(review): `array` is handed to the dense Mat; presumably freed when "AinvB" is destroyed — verify ownership so this is not a leak */
    PetscCall(MatCreateDenseFromVecType(PetscObjectComm((PetscObject)jac->schur), vtype, m, PETSC_DECIDE, M, N + 1, -1, array, &A)); // number of columns of the Schur complement plus one
    PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", (PetscObject)A));
    PetscCall(MatDestroy(&A)); /* composition holds its own reference */
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1183 
1184 static PetscErrorCode PCSetUpOnBlocks_FieldSplit(PC pc)
1185 {
1186   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1187   PC_FieldSplitLink ilink = jac->head;
1188 
1189   PetscFunctionBegin;
1190   while (ilink) {
1191     PetscCall(KSPSetUp(ilink->ksp));
1192     PetscCall(KSPSetUpOnBlocks(ilink->ksp));
1193     ilink = ilink->next;
1194   }
1195   PetscFunctionReturn(PETSC_SUCCESS);
1196 }
1197 
1198 static PetscErrorCode PCSetUpOnBlocks_FieldSplit_GKB(PC pc)
1199 {
1200   PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
1201   PC_FieldSplitLink ilinkA = jac->head;
1202   KSP               ksp    = ilinkA->ksp;
1203 
1204   PetscFunctionBegin;
1205   PetscCall(KSPSetUp(ksp));
1206   PetscCall(KSPSetUpOnBlocks(ksp));
1207   PetscFunctionReturn(PETSC_SUCCESS);
1208 }
1209 
/*
  PCApply_FieldSplit_Schur - Applies y = M^{-1} x where M is an (approximate) block
  factorization of the 2x2 block system, selected by jac->schurfactorization.

  ilinkA is the first split (the A00 block, solved with kspA), ilinkD the second
  split, whose solve uses the Schur complement via jac->kspschur.

  Note: VecScatterBegin()/VecScatterEnd() pairs are deliberately interleaved with
  other work so that scatter communication overlaps computation; do not reorder.
*/
static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
  KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;
  Mat               AinvB = NULL;
  PetscInt          N, P;

  PetscFunctionBegin;
  switch (jac->schurfactorization) {
  case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
    /* [A00 0; 0 -S], positive definite, suitable for MINRES */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* NOTE(review): the tab level of kspA (not kspschur) is bumped around the Schur
       solve — presumably so nested monitor output indents consistently; confirm */
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* the -S sign (and any user scaling) is folded in via schurscale */
    PetscCall(VecScale(ilinkD->y, jac->schurscale));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* [A00 0; A10 S], suitable for left preconditioning */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    /* second-block residual: x1 - A10 * A00^{-1} x0, built as the scatter ADDs x1 */
    PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* [A00 A01; 0 S], suitable for right preconditioning */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* first-block right-hand side: x0 - A01 * y1, built as the scatter ADDs x0 */
    PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
    PetscCall(VecScale(ilinkA->x, -1.));
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */
    PetscCall(MatGetSize(jac->B, NULL, &P));
    N = P;
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
    if (kspUpper == kspA) {
      /* optional cached dense A00^{-1} A01 attached to the Schur complement */
      PetscCall(PetscObjectQuery((PetscObject)jac->schur, "AinvB", (PetscObject *)&AinvB));
      if (AinvB) {
        PetscCall(MatGetSize(AinvB, NULL, &N));
        if (N > P) { // first time PCApply_FieldSplit_Schur() is called
          /* AinvB carries one extra column (N == P+1); that last column is used to pass
             the current right-hand side through MatSchurComplementComputeExplicitOperator()
             and to retrieve the corresponding A00^{-1} solve afterwards */
          PetscMemType mtype;
          Vec          c = NULL;
          PetscScalar *array;
          PetscInt     m, M;

          PetscCall(MatGetSize(jac->B, &M, NULL));
          PetscCall(MatGetLocalSize(jac->B, &m, NULL));
          PetscCall(MatDenseGetArrayAndMemType(AinvB, &array, &mtype));
          /* wrap the last column (offset m*P) in a Vec matching the storage memory type */
          if (PetscMemTypeHost(mtype) || (!PetscDefined(HAVE_CUDA) && !PetscDefined(HAVE_HIP))) PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c));
#if PetscDefined(HAVE_CUDA)
          else if (PetscMemTypeCUDA(mtype)) PetscCall(VecCreateMPICUDAWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c));
#endif
#if PetscDefined(HAVE_HIP)
          else if (PetscMemTypeHIP(mtype)) PetscCall(VecCreateMPIHIPWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c));
#endif
          PetscCall(MatDenseRestoreArrayAndMemType(AinvB, &array));
          PetscCall(VecCopy(ilinkA->x, c));
          PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
          PetscCall(KSPSetOperators(jac->kspschur, jac->schur, jac->schur_user));
          PetscCall(VecCopy(c, ilinkA->y)); // retrieve the solution as the last column of the composed Mat
          PetscCall(VecDestroy(&c));
        }
      }
    }
    /* if the first-call path above already produced ilinkA->y, skip the redundant solve */
    if (N == P) PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.0));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

    if (kspUpper == kspA) {
      if (!AinvB) {
        /* apply the upper factor with a second A00 solve */
        PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y));
        PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
        PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
        PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
        PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
        PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      } else PetscCall(MatMultAdd(AinvB, ilinkD->y, ilinkA->y, ilinkA->y)); /* reuse cached A00^{-1} A01 */
    } else {
      /* a distinct upper solver was requested; use it for the upper-factor solve */
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
      PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
      PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z));
      PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z));
      PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
      PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
    }
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1366 
/*
  PCApplyTranspose_FieldSplit_Schur - Applies y = M^{-T} x for the Schur-complement
  block factorization. Because transposition swaps the lower and upper triangular
  factors, the UPPER case here mirrors the LOWER case of PCApply_FieldSplit_Schur()
  (and vice versa), with every inner solve replaced by KSPSolveTranspose() and every
  off-diagonal MatMult() by MatMultTranspose().

  Scatter Begin/End pairs are deliberately interleaved with other work to overlap
  communication with computation; do not reorder.
*/
static PetscErrorCode PCApplyTranspose_FieldSplit_Schur(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
  KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;

  PetscFunctionBegin;
  switch (jac->schurfactorization) {
  case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
    /* [A00 0; 0 -S], positive definite, suitable for MINRES */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* NOTE(review): tab level of kspA (not kspschur) is adjusted around the Schur
       solve — presumably for consistent monitor indentation; confirm */
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScale(ilinkD->y, jac->schurscale));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* transpose of the upper factorization: apply A00^{-T} first, then S^{-T} */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    /* second-block residual uses A01^T (jac->B transposed) */
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* transpose of the lower factorization: apply S^{-T} first, then A00^{-T} */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* first-block right-hand side uses A10^T (jac->C transposed) */
    PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
    PetscCall(VecScale(ilinkA->x, -1.));
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* transpose of the full factorization: upper^T, then S^T, then lower^T */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspUpper, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.0));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

    if (kspLower == kspA) {
      /* lower solver coincides with the A00 solver: fold both into one transpose solve */
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->y));
      PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    } else {
      /* distinct lower solver: solve separately and combine with VecAXPY */
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
      PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(KSPSolveTranspose(kspLower, ilinkA->x, ilinkA->z));
      PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->z));
      PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
    }
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1485 
/* Restrict xx onto the split, solve with the split's KSP, and ADD the result back
   into the matching entries of yy. Written as a chain of PetscErrorCode values
   joined with || so the whole sequence short-circuits on the first failure and can
   be wrapped in a single PetscCall(); relies on a variable `pc` being in scope at
   the call site (for KSPCheckSolve). */
#define FieldSplitSplitSolveAdd(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \
                    KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE)))
1490 
/*
  PCApply_FieldSplit - Applies the additive, multiplicative, or symmetric
  multiplicative composition of the split solvers: y = M^{-1} x.

  Additive with a default split (and non-MATNEST pmat) uses the fast strided
  gather/scatter path; otherwise each split is handled through its VecScatter.
  The multiplicative variants update the residual between split solves using the
  cached off-diagonal blocks jac->Afield[].
*/
static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          cnt, bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    PetscBool matnest;

    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
    if (jac->defaultsplit && !matnest) {
      /* all splits have identical interlaced layout: gather all strides at once */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* general case: each split has its own scatter; accumulate contributions */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) {
    /* two-split Gauss-Seidel-like sweep, specialized to avoid work vectors */
    PetscCall(VecSet(y, 0.0));
    /* solve on first block for first block variables */
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));

    /* compute the residual only onto second block variables using first block variables */
    PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x));
    ilink = ilink->next;
    PetscCall(VecScale(ilink->x, -1.0));
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));

    /* solve on second block variables */
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
    /* general multiplicative sweep; jac->w1/w2 are lazily-created work vectors */
    if (!jac->w1) {
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
    cnt = 1;
    while (ilink->next) {
      ilink = ilink->next;
      /* compute the residual only over the part of the vector needed */
      PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x));
      PetscCall(VecScale(ilink->x, -1.0));
      PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
      PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
      PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    }
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* reverse sweep; the last split was just solved, so start from the one before it */
      cnt -= 2;
      while (ilink->previous) {
        ilink = ilink->previous;
        /* compute the residual only over the part of the vector needed */
        PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x));
        PetscCall(VecScale(ilink->x, -1.0));
        PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      }
    }
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1591 
/*
  PCApply_FieldSplit_GKB - Applies the preconditioner via a Golub-Kahan
  bidiagonalization (GKB) iteration on the 2x2 block system (the [Ar13] reference
  in the comments below is the paper the variable names follow).

  The iteration runs until either the lower-bound error estimate `lowbnd` drops
  below jac->gkbtol or jac->gkbmaxit iterations are reached. jac->gkbnu != 0
  selects the augmented-Lagrangian regularization H = A + nu*B*B'; otherwise
  nu = 1 (identity inner matrix). jac->vecz is a delay buffer of the last
  jac->gkbdelay z-values used for the lower-bound estimate.
*/
static PetscErrorCode PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
  KSP               ksp = ilinkA->ksp;
  Vec               u, v, Hu, d, work1, work2;
  PetscScalar       alpha, z, nrmz2, *vecz;
  PetscReal         lowbnd, nu, beta;
  PetscInt          j, iterGKB;

  PetscFunctionBegin;
  PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));

  /* local aliases for the work vectors allocated at setup time */
  u     = jac->u;
  v     = jac->v;
  Hu    = jac->Hu;
  d     = jac->d;
  work1 = jac->w1;
  work2 = jac->w2;
  vecz  = jac->vecz;

  /* Change RHS to comply with matrix regularization H = A + nu*B*B' */
  /* Add q = q + nu*B*b */
  if (jac->gkbnu) {
    nu = jac->gkbnu;
    PetscCall(VecScale(ilinkD->x, jac->gkbnu));
    PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */
  } else {
    /* Situation when no augmented Lagrangian is used. Then we set inner  */
    /* matrix N = I in [Ar13], and thus nu = 1.                           */
    nu = 1;
  }

  /* Transform rhs from [q,tilde{b}] to [0,b] */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y));
  PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1));
  PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x        */

  /* First step of algorithm */
  PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c)*/
  KSPCheckDot(ksp, beta);
  beta = PetscSqrtReal(nu) * beta;
  PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c      */
  PetscCall(MatMult(jac->B, v, work2));          /* u = H^{-1}*B*v      */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(KSPSolve(ksp, work2, u));
  PetscCall(KSPCheckSolve(ksp, pc, u));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u      */
  PetscCall(VecDot(Hu, u, &alpha));
  KSPCheckDot(ksp, alpha);
  PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
  alpha = PetscSqrtReal(PetscAbsScalar(alpha));
  PetscCall(VecScale(u, 1.0 / alpha));
  PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* v = nu/beta *c      */

  z       = beta / alpha;
  vecz[1] = z;

  /* Computation of first iterate x(1) and p(1) */
  PetscCall(VecAXPY(ilinkA->y, z, u));
  PetscCall(VecCopy(d, ilinkD->y));
  PetscCall(VecScale(ilinkD->y, -z));

  iterGKB = 1;
  lowbnd  = 2 * jac->gkbtol; /* start above the tolerance so the loop is entered */
  if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));

  while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) {
    iterGKB += 1;
    PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */
    PetscCall(VecAXPBY(v, nu, -alpha, work1));
    PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = sqrt(nu)*v'*v      */
    beta = beta / PetscSqrtReal(nu);
    PetscCall(VecScale(v, 1.0 / beta));
    PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */
    PetscCall(MatMult(jac->H, u, Hu));
    PetscCall(VecAXPY(work2, -beta, Hu));
    PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(KSPSolve(ksp, work2, u));
    PetscCall(KSPCheckSolve(ksp, pc, u));
    PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u            */
    PetscCall(VecDot(Hu, u, &alpha));
    KSPCheckDot(ksp, alpha);
    PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
    alpha = PetscSqrtReal(PetscAbsScalar(alpha));
    PetscCall(VecScale(u, 1.0 / alpha));

    z       = -beta / alpha * z; /* z <- beta/alpha*z     */
    vecz[0] = z;

    /* Computation of new iterate x(i+1) and p(i+1) */
    PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */
    PetscCall(VecAXPY(ilinkA->y, z, u));                   /* r = r + z*u          */
    PetscCall(VecAXPY(ilinkD->y, -z, d));                  /* p = p - z*d          */
    PetscCall(MatMult(jac->H, ilinkA->y, Hu));             /* ||u||_H = u'*H*u     */
    PetscCall(VecDot(Hu, ilinkA->y, &nrmz2));

    /* Compute Lower Bound estimate */
    if (iterGKB > jac->gkbdelay) {
      lowbnd = 0.0;
      for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]);
      lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2));
    }

    /* shift the delay buffer: vecz[0] (newest) moves toward vecz[gkbdelay-1] */
    for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2];
    if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));
  }

  PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1714 
/* Transpose counterpart of FieldSplitSplitSolveAdd(): restrict xx onto the split,
   apply KSPSolveTranspose(), and ADD the result back into yy. Note the roles of
   ilink->x and ilink->y are swapped relative to the forward macro. Same ||-chained
   PetscErrorCode idiom; relies on `pc` being in scope at the call site. */
#define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \
                    KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE)))
1719 
/* Apply the transpose of the fieldsplit preconditioner: y = P^T x.
   Mirrors the forward apply but uses KSPSolveTranspose()/MatMultTranspose() and, for
   the (symmetric) multiplicative compositions, visits the splits in reverse order. */
static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    PetscBool matnest;

    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
    if (jac->defaultsplit && !matnest) {
      /* Default strided splits: gather all fields at once, solve each split's
         system transposed, then scatter all results back in one pass */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* General index sets: accumulate each split's contribution into y */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else {
    /* Multiplicative variants need work vectors for the running residual */
    if (!jac->w1) {
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* Forward sweep head->tail, then backward sweep tail->head; before each
         solve the residual x - A^T y is formed in w2 */
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->next) {
        ilink = ilink->next;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    } else {
      /* Plain multiplicative transposed: sweep the splits tail->head only */
      while (ilink->next) { /* get to last entry in linked list */
        ilink = ilink->next;
      }
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1787 
/* Release everything created during setup so the PC can be set up again from scratch */
static PetscErrorCode PCReset_FieldSplit(PC pc)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;

  PetscFunctionBegin;
  /* Destroy each split's solver, work vectors, scatter, index sets and names,
     then free the link itself */
  while (ilink) {
    PetscCall(KSPDestroy(&ilink->ksp));
    PetscCall(VecDestroy(&ilink->x));
    PetscCall(VecDestroy(&ilink->y));
    PetscCall(VecDestroy(&ilink->z));
    PetscCall(VecScatterDestroy(&ilink->sctx));
    PetscCall(ISDestroy(&ilink->is));
    PetscCall(ISDestroy(&ilink->is_col));
    PetscCall(PetscFree(ilink->splitname));
    PetscCall(PetscFree(ilink->fields));
    PetscCall(PetscFree(ilink->fields_col));
    next = ilink->next;
    PetscCall(PetscFree(ilink));
    ilink = next;
  }
  jac->head = NULL;
  PetscCall(PetscFree2(jac->x, jac->y));
  /* jac->mat may alias jac->pmat: destroy it only when it is a distinct array,
     otherwise just drop the alias and let the jac->pmat destroy below free it */
  if (jac->mat && jac->mat != jac->pmat) {
    PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat));
  } else if (jac->mat) {
    jac->mat = NULL;
  }
  if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat));
  if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield));
  jac->nsplits = 0;
  PetscCall(VecDestroy(&jac->w1));
  PetscCall(VecDestroy(&jac->w2));
  /* Drop the composed "AinvB" object before releasing the Schur complement */
  if (jac->schur) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", NULL));
  PetscCall(MatDestroy(&jac->schur));
  PetscCall(MatDestroy(&jac->schurp));
  PetscCall(MatDestroy(&jac->schur_user));
  PetscCall(KSPDestroy(&jac->kspschur));
  PetscCall(KSPDestroy(&jac->kspupper));
  /* GKB-specific matrices, vectors, history array and monitor viewer */
  PetscCall(MatDestroy(&jac->B));
  PetscCall(MatDestroy(&jac->C));
  PetscCall(MatDestroy(&jac->H));
  PetscCall(VecDestroy(&jac->u));
  PetscCall(VecDestroy(&jac->v));
  PetscCall(VecDestroy(&jac->Hu));
  PetscCall(VecDestroy(&jac->d));
  PetscCall(PetscFree(jac->vecz));
  PetscCall(PetscViewerDestroy(&jac->gkbviewer));
  jac->isrestrict = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1839 
1840 static PetscErrorCode PCDestroy_FieldSplit(PC pc)
1841 {
1842   PetscFunctionBegin;
1843   PetscCall(PCReset_FieldSplit(pc));
1844   PetscCall(PetscFree(pc->data));
1845   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
1846   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL));
1847   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL));
1848   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL));
1849   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL));
1850   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL));
1851   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL));
1852   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1853   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
1854   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
1855   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
1856   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
1857   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
1858   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
1859   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
1860   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
1861   PetscFunctionReturn(PETSC_SUCCESS);
1862 }
1863 
/* Process options database entries for the PCFIELDSPLIT and, once the solver
   hierarchy exists, propagate option changes down to the sub-solvers */
static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems PetscOptionsObject)
{
  PetscInt        bs;
  PetscBool       flg;
  PC_FieldSplit  *jac = (PC_FieldSplit *)pc->data;
  PCCompositeType ctype;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options");
  PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL));
  PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg));
  if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs));
  /* The -pc_use_amat setting (pc->useAmat) provides the default for both flags */
  jac->diag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL));
  jac->offdiag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL));
  PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL));
  PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */
  PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg));
  if (flg) PetscCall(PCFieldSplitSetType(pc, ctype));
  /* Only setup fields once */
  if (jac->bs > 0 && jac->nsplits == 0) {
    /* only allow user to set fields from command line.
       otherwise user can set them in PCFieldSplitSetDefaults() */
    PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
    if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
  }
  if (jac->type == PC_COMPOSITE_SCHUR) {
    /* Options specific to the Schur complement composition; the first spelling
       of the factorization option is deprecated but still honored */
    PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg));
    if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n"));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL));
    PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL));
  } else if (jac->type == PC_COMPOSITE_GKB) {
    /* Options specific to the GKB (Golub-Kahan bidiagonalization) composition */
    PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitSetGKBTol", jac->gkbtol, &jac->gkbtol, NULL));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitSetGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL));
    PetscCall(PetscOptionsBoundedReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitSetGKBNu", jac->gkbnu, &jac->gkbnu, NULL, 0.0));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", "Maximum allowed number of iterations", "PCFieldSplitSetGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL));
    PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL));
  }
  /*
    In the initial call to this routine the sub-solver data structures do not exist so we cannot call KSPSetFromOptions() on them yet.
    But after the initial setup of ALL the layers of sub-solvers is completed we do want to call KSPSetFromOptions() on the sub-solvers every time it
    is called on the outer solver in case changes were made in the options database

    But even after PCSetUp_FieldSplit() is called all the options inside the inner levels of sub-solvers may still not have been set thus we only call the KSPSetFromOptions()
    if we know that the entire stack of sub-solvers below this have been complete instantiated, we check this by seeing if any solver iterations are complete.
    Without this extra check test p2p1fetidp_olof_full and others fail with incorrect matrix types.

    There could be a negative side effect of calling the KSPSetFromOptions() below.

    If one captured the PetscObjectState of the options database one could skip these calls if the database has not changed from the previous call
  */
  if (jac->issetup) {
    PC_FieldSplitLink ilink = jac->head;
    if (jac->type == PC_COMPOSITE_SCHUR) {
      if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper));
      if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur));
    }
    while (ilink) {
      if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp));
      ilink = ilink->next;
    }
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
1931 
1932 static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
1933 {
1934   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
1935   PC_FieldSplitLink ilink, next = jac->head;
1936   char              prefix[128];
1937   PetscInt          i;
1938   PetscLogEvent     nse;
1939 
1940   PetscFunctionBegin;
1941   if (jac->splitdefined) {
1942     PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
1943     PetscFunctionReturn(PETSC_SUCCESS);
1944   }
1945   for (i = 0; i < n; i++) PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]);
1946   PetscCall(PetscNew(&ilink));
1947   if (splitname) {
1948     PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
1949   } else {
1950     PetscCall(PetscMalloc1(3, &ilink->splitname));
1951     PetscCall(PetscSNPrintf(ilink->splitname, 2, "%" PetscInt_FMT, jac->nsplits));
1952   }
1953   PetscCall(PetscMPIIntCast(jac->nsplits, &nse));
1954   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + nse : KSP_Solve_FS_0 + 4; /* Splits greater than 4 logged in 4th split */
1955   PetscCall(PetscMalloc1(n, &ilink->fields));
1956   PetscCall(PetscArraycpy(ilink->fields, fields, n));
1957   PetscCall(PetscMalloc1(n, &ilink->fields_col));
1958   PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n));
1959 
1960   ilink->nfields = n;
1961   ilink->next    = NULL;
1962   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
1963   PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
1964   PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
1965   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
1966   PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
1967 
1968   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
1969   PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
1970 
1971   if (!next) {
1972     jac->head       = ilink;
1973     ilink->previous = NULL;
1974   } else {
1975     while (next->next) next = next->next;
1976     next->next      = ilink;
1977     ilink->previous = next;
1978   }
1979   jac->nsplits++;
1980   PetscFunctionReturn(PETSC_SUCCESS);
1981 }
1982 
1983 static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1984 {
1985   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1986 
1987   PetscFunctionBegin;
1988   *subksp = NULL;
1989   if (n) *n = 0;
1990   if (jac->type == PC_COMPOSITE_SCHUR) {
1991     PetscInt nn;
1992 
1993     PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()");
1994     PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits);
1995     nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 1 : 0);
1996     PetscCall(PetscMalloc1(nn, subksp));
1997     (*subksp)[0] = jac->head->ksp;
1998     (*subksp)[1] = jac->kspschur;
1999     if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper;
2000     if (n) *n = nn;
2001   }
2002   PetscFunctionReturn(PETSC_SUCCESS);
2003 }
2004 
2005 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp)
2006 {
2007   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2008 
2009   PetscFunctionBegin;
2010   PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()");
2011   PetscCall(PetscMalloc1(jac->nsplits, subksp));
2012   PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp));
2013 
2014   (*subksp)[1] = jac->kspschur;
2015   if (n) *n = jac->nsplits;
2016   PetscFunctionReturn(PETSC_SUCCESS);
2017 }
2018 
2019 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
2020 {
2021   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2022   PetscInt          cnt   = 0;
2023   PC_FieldSplitLink ilink = jac->head;
2024 
2025   PetscFunctionBegin;
2026   PetscCall(PetscMalloc1(jac->nsplits, subksp));
2027   while (ilink) {
2028     (*subksp)[cnt++] = ilink->ksp;
2029     ilink            = ilink->next;
2030   }
2031   PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits);
2032   if (n) *n = jac->nsplits;
2033   PetscFunctionReturn(PETSC_SUCCESS);
2034 }
2035 
2036 /*@
2037   PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`.
2038 
2039   Input Parameters:
2040 + pc  - the preconditioner context
2041 - isy - the index set that defines the indices to which the fieldsplit is to be restricted
2042 
2043   Level: advanced
2044 
2045   Developer Notes:
2046   It seems the resulting `IS`s will not cover the entire space, so
2047   how can they define a convergent preconditioner? Needs explaining.
2048 
2049 .seealso: [](sec_block_matrices), `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
2050 @*/
PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(isy, IS_CLASSID, 2);
  /* PetscTryMethod: silently a no-op when pc is not a PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2059 
2060 static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy)
2061 {
2062   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2063   PC_FieldSplitLink ilink = jac->head, next;
2064   PetscInt          localsize, size, sizez, i;
2065   const PetscInt   *ind, *indz;
2066   PetscInt         *indc, *indcz;
2067   PetscBool         flg;
2068 
2069   PetscFunctionBegin;
2070   PetscCall(ISGetLocalSize(isy, &localsize));
2071   PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy)));
2072   size -= localsize;
2073   while (ilink) {
2074     IS isrl, isr;
2075     PC subpc;
2076     PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl));
2077     PetscCall(ISGetLocalSize(isrl, &localsize));
2078     PetscCall(PetscMalloc1(localsize, &indc));
2079     PetscCall(ISGetIndices(isrl, &ind));
2080     PetscCall(PetscArraycpy(indc, ind, localsize));
2081     PetscCall(ISRestoreIndices(isrl, &ind));
2082     PetscCall(ISDestroy(&isrl));
2083     for (i = 0; i < localsize; i++) *(indc + i) += size;
2084     PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr));
2085     PetscCall(PetscObjectReference((PetscObject)isr));
2086     PetscCall(ISDestroy(&ilink->is));
2087     ilink->is = isr;
2088     PetscCall(PetscObjectReference((PetscObject)isr));
2089     PetscCall(ISDestroy(&ilink->is_col));
2090     ilink->is_col = isr;
2091     PetscCall(ISDestroy(&isr));
2092     PetscCall(KSPGetPC(ilink->ksp, &subpc));
2093     PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg));
2094     if (flg) {
2095       IS       iszl, isz;
2096       MPI_Comm comm;
2097       PetscCall(ISGetLocalSize(ilink->is, &localsize));
2098       comm = PetscObjectComm((PetscObject)ilink->is);
2099       PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl));
2100       PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm));
2101       sizez -= localsize;
2102       PetscCall(ISGetLocalSize(iszl, &localsize));
2103       PetscCall(PetscMalloc1(localsize, &indcz));
2104       PetscCall(ISGetIndices(iszl, &indz));
2105       PetscCall(PetscArraycpy(indcz, indz, localsize));
2106       PetscCall(ISRestoreIndices(iszl, &indz));
2107       PetscCall(ISDestroy(&iszl));
2108       for (i = 0; i < localsize; i++) *(indcz + i) += sizez;
2109       PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz));
2110       PetscCall(PCFieldSplitRestrictIS(subpc, isz));
2111       PetscCall(ISDestroy(&isz));
2112     }
2113     next  = ilink->next;
2114     ilink = next;
2115   }
2116   jac->isrestrict = PETSC_TRUE;
2117   PetscFunctionReturn(PETSC_SUCCESS);
2118 }
2119 
/* Implementation of PCFieldSplitSetIS(): append a new split, defined by an explicit
   index set, to the tail of the doubly linked list of splits */
static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink, next = jac->head;
  char              prefix[128];
  PetscLogEvent     nse;

  PetscFunctionBegin;
  if (jac->splitdefined) {
    PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(PetscNew(&ilink));
  if (splitname) {
    PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
  } else {
    /* No name given: name the split after its number */
    PetscCall(PetscMalloc1(8, &ilink->splitname));
    PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits));
  }
  PetscCall(PetscMPIIntCast(jac->nsplits, &nse));
  ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + nse : KSP_Solve_FS_0 + 4; /* Splits greater than 4 logged in 4th split */
  /* The same IS serves as row and column set; take one reference for each slot
     (the ISDestroy() calls clear any IS left from a previous definition) */
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is));
  ilink->is = is;
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is_col));
  ilink->is_col = is;
  ilink->next   = NULL;
  /* Each split owns a KSP, defaulting to KSPPREONLY, with prefix <pc prefix>fieldsplit_<splitname>_ */
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
  PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
  PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
  PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));

  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
  PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));

  /* Append at the tail of the doubly linked list of splits */
  if (!next) {
    jac->head       = ilink;
    ilink->previous = NULL;
  } else {
    while (next->next) next = next->next;
    next->next      = ilink;
    ilink->previous = next;
  }
  jac->nsplits++;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2168 
2169 /*@
2170   PCFieldSplitSetFields - Sets the fields that define one particular split in `PCFIELDSPLIT`
2171 
2172   Logically Collective
2173 
2174   Input Parameters:
2175 + pc         - the preconditioner context
2176 . splitname  - name of this split, if `NULL` the number of the split is used
2177 . n          - the number of fields in this split
2178 . fields     - the fields in this split
2179 - fields_col - generally the same as `fields`, if it does not match `fields` then the submatrix that is solved for this set of fields comes from an off-diagonal block
2180                of the matrix and `fields_col` provides the column indices for that block
2181 
2182   Options Database Key:
2183 . -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split
2184 
2185   Level: intermediate
2186 
2187   Notes:
2188   Use `PCFieldSplitSetIS()` to set a  general set of indices as a split.
2189 
2190   If the matrix used to construct the preconditioner is `MATNEST` then field i refers to the `is_row[i]` `IS` passed to `MatCreateNest()`.
2191 
2192   If the matrix used to construct the preconditioner is not `MATNEST` then
2193   `PCFieldSplitSetFields()` is for defining fields as strided blocks (based on the block size provided to the matrix with `MatSetBlockSize()` or
2194   to the `PC` with `PCFieldSplitSetBlockSize()`). For example, if the block
2195   size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean
2196   0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x23x56x8.. x12x45x78x....
2197   where the numbered entries indicate what is in the split.
2198 
2199   This function is called once per split (it creates a new split each time).  Solve options
2200   for this split will be available under the prefix `-fieldsplit_SPLITNAME_`.
2201 
2202   `PCFieldSplitSetIS()` does not support having a `fields_col` different from `fields`
2203 
2204   Developer Notes:
2205   This routine does not actually create the `IS` representing the split, that is delayed
2206   until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be
2207   available when this routine is called.
2208 
2209 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()`,
2210           `MatSetBlockSize()`, `MatCreateNest()`
2211 @*/
2212 PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt fields[], const PetscInt fields_col[])
2213 {
2214   PetscFunctionBegin;
2215   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2216   PetscAssertPointer(splitname, 2);
2217   PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname);
2218   PetscAssertPointer(fields, 4);
2219   PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col));
2220   PetscFunctionReturn(PETSC_SUCCESS);
2221 }
2222 
2223 /*@
2224   PCFieldSplitSetDiagUseAmat - set flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2225   the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2226 
2227   Logically Collective
2228 
2229   Input Parameters:
2230 + pc  - the preconditioner object
2231 - flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2232 
2233   Options Database Key:
2234 . -pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks
2235 
2236   Level: intermediate
2237 
2238 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT`
2239 @*/
PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* jac is only dereferenced after this confirms pc->data really is a PC_FieldSplit */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  jac->diag_use_amat = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2252 
2253 /*@
2254   PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2255   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2256 
2257   Logically Collective
2258 
2259   Input Parameter:
2260 . pc - the preconditioner object
2261 
2262   Output Parameter:
2263 . flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2264 
2265   Level: intermediate
2266 
2267 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT`
2268 @*/
PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(flg, 2);
  /* jac is only dereferenced after this confirms pc->data really is a PC_FieldSplit */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  *flg = jac->diag_use_amat;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2282 
2283 /*@
2284   PCFieldSplitSetOffDiagUseAmat - set flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2285   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2286 
2287   Logically Collective
2288 
2289   Input Parameters:
2290 + pc  - the preconditioner object
2291 - flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2292 
2293   Options Database Key:
2294 . -pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks
2295 
2296   Level: intermediate
2297 
2298 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT`
2299 @*/
PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* jac is only dereferenced after this confirms pc->data really is a PC_FieldSplit */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  jac->offdiag_use_amat = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2312 
2313 /*@
2314   PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2315   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2316 
2317   Logically Collective
2318 
2319   Input Parameter:
2320 . pc - the preconditioner object
2321 
2322   Output Parameter:
2323 . flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2324 
2325   Level: intermediate
2326 
2327 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT`
2328 @*/
PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(flg, 2);
  /* jac is only dereferenced after this confirms pc->data really is a PC_FieldSplit */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  *flg = jac->offdiag_use_amat;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2342 
2343 /*@
2344   PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT`
2345 
2346   Logically Collective
2347 
2348   Input Parameters:
2349 + pc        - the preconditioner context
2350 . splitname - name of this split, if `NULL` the number of the split is used
2351 - is        - the index set that defines the elements in this split
2352 
2353   Level: intermediate
2354 
2355   Notes:
2356   Use `PCFieldSplitSetFields()`, for splits defined by strided `IS` based on the matrix block size or the `is_rows[]` passed into `MATNEST`
2357 
2358   This function is called once per split (it creates a new split each time).  Solve options
2359   for this split will be available under the prefix -fieldsplit_SPLITNAME_.
2360 
2361 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetFields()`
2362 @*/
PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* splitname is optional: NULL means the split number is used as the name */
  if (splitname) PetscAssertPointer(splitname, 2);
  PetscValidHeaderSpecific(is, IS_CLASSID, 3);
  /* dispatch to the type-specific implementation; silently ignored if none is composed */
  PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2372 
2373 /*@
2374   PCFieldSplitGetIS - Retrieves the elements for a split as an `IS`
2375 
2376   Logically Collective
2377 
2378   Input Parameters:
2379 + pc        - the preconditioner context
2380 - splitname - name of this split
2381 
2382   Output Parameter:
2383 . is - the index set that defines the elements in this split, or `NULL` if the split is not found
2384 
2385   Level: intermediate
2386 
2387 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`, `PCFieldSplitGetISByIndex()`
2388 @*/
2389 PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is)
2390 {
2391   PetscFunctionBegin;
2392   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2393   PetscAssertPointer(splitname, 2);
2394   PetscAssertPointer(is, 3);
2395   {
2396     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2397     PC_FieldSplitLink ilink = jac->head;
2398     PetscBool         found;
2399 
2400     *is = NULL;
2401     while (ilink) {
2402       PetscCall(PetscStrcmp(ilink->splitname, splitname, &found));
2403       if (found) {
2404         *is = ilink->is;
2405         break;
2406       }
2407       ilink = ilink->next;
2408     }
2409   }
2410   PetscFunctionReturn(PETSC_SUCCESS);
2411 }
2412 
2413 /*@
2414   PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS`
2415 
2416   Logically Collective
2417 
2418   Input Parameters:
2419 + pc    - the preconditioner context
2420 - index - index of this split
2421 
2422   Output Parameter:
2423 . is - the index set that defines the elements in this split
2424 
2425   Level: intermediate
2426 
.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`
2429 @*/
2430 PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is)
2431 {
2432   PetscFunctionBegin;
2433   PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index);
2434   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2435   PetscAssertPointer(is, 3);
2436   {
2437     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2438     PC_FieldSplitLink ilink = jac->head;
2439     PetscInt          i     = 0;
2440     PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits);
2441 
2442     while (i < index) {
2443       ilink = ilink->next;
2444       ++i;
2445     }
2446     PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is));
2447   }
2448   PetscFunctionReturn(PETSC_SUCCESS);
2449 }
2450 
2451 /*@
2452   PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the
2453   fieldsplit preconditioner when calling `PCFieldSplitSetFields()`. If not set the matrix block size is used.
2454 
2455   Logically Collective
2456 
2457   Input Parameters:
2458 + pc - the preconditioner context
2459 - bs - the block size
2460 
2461   Level: intermediate
2462 
2463   Note:
2464   If the matrix is a `MATNEST` then the `is_rows[]` passed to `MatCreateNest()` determines the fields.
2465 
2466 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
2467 @*/
PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* bs must be the same on every rank of the PC's communicator */
  PetscValidLogicalCollectiveInt(pc, bs, 2);
  /* range/state checks are done by the type-specific implementation; silently ignored if none is composed */
  PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2476 
2477 /*@C
2478   PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits
2479 
2480   Collective
2481 
2482   Input Parameter:
2483 . pc - the preconditioner context
2484 
2485   Output Parameters:
2486 + n      - the number of splits
2487 - subksp - the array of `KSP` contexts
2488 
2489   Level: advanced
2490 
2491   Notes:
2492   After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2493   (not the `KSP`, just the array that contains them).
2494 
2495   You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`.
2496 
2497   If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the
2498   Schur complement and the `KSP` object used to iterate over the Schur complement.
2499   To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`.
2500 
2501   If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the
2502   inner linear system defined by the matrix H in each loop.
2503 
2504   Fortran Note:
2505   Call `PCFieldSplitRestoreSubKSP()` when the array of `KSP` is no longer needed
2506 
2507   Developer Notes:
2508   There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2509 
2510 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()`
2511 @*/
PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* n may be NULL if the caller does not want the count */
  if (n) PetscAssertPointer(n, 2);
  /* unlike PetscTryMethod(), PetscUseMethod() errors if no implementation is composed */
  PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2520 
2521 /*@C
2522   PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT`
2523 
2524   Collective
2525 
2526   Input Parameter:
2527 . pc - the preconditioner context
2528 
2529   Output Parameters:
2530 + n      - the number of splits
2531 - subksp - the array of `KSP` contexts
2532 
2533   Level: advanced
2534 
2535   Notes:
2536   After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2537   (not the `KSP` just the array that contains them).
2538 
2539   You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`.
2540 
2541   If the fieldsplit type is of type `PC_COMPOSITE_SCHUR`, it returns (in order)
2542 +  1  - the `KSP` used for the (1,1) block
2543 .  2  - the `KSP` used for the Schur complement (not the one used for the interior Schur solver)
2544 -  3  - the `KSP` used for the (1,1) block in the upper triangular factor (if different from that of the (1,1) block).
2545 
2546   It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`.
2547 
2548   Fortran Note:
2549   Call `PCFieldSplitSchurRestoreSubKSP()` when the array of `KSP` is no longer needed
2550 
2551   Developer Notes:
2552   There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2553 
2554   Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged?
2555 
2556 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()`
2557 @*/
PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* n may be NULL if the caller does not want the count */
  if (n) PetscAssertPointer(n, 2);
  /* unlike PetscTryMethod(), PetscUseMethod() errors if no implementation is composed */
  PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2566 
2567 /*@
2568   PCFieldSplitSetSchurPre -  Indicates from what operator the preconditioner is constructed for the Schur complement.
2569   The default is the A11 matrix.
2570 
2571   Collective
2572 
2573   Input Parameters:
2574 + pc    - the preconditioner context
2575 . ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default),
2576               `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`,
2577               `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL`
2578 - pre   - matrix to use for preconditioning, or `NULL`
2579 
2580   Options Database Keys:
2581 + -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`. See notes for meaning of various arguments
2582 - -fieldsplit_1_pc_type <pctype>                               - the preconditioner algorithm that is used to construct the preconditioner from the operator
2583 
2584   Level: intermediate
2585 
2586   Notes:
2587   If ptype is
2588 +     a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner
2589   matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix
2590 .     self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix:
2591   The only preconditioners that currently work with this symbolic representation matrix object are `PCLSC` and `PCHPDDM`
2592 .     user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument
2593   to this function).
2594 .     selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation $ Sp = A11 - A10 inv(diag(A00)) A01 $
2595   This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be
2596   lumped before extracting the diagonal using the additional option `-fieldsplit_1_mat_schur_complement_ainv_type lump`
2597 -     full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation
2598   computed internally by `PCFIELDSPLIT` (this is expensive)
2599   useful mostly as a test that the Schur complement approach can work for your problem
2600 
2601   When solving a saddle point problem, where the A11 block is identically zero, using `a11` as the ptype only makes sense
2602   with the additional option `-fieldsplit_1_pc_type none`. Usually for saddle point problems one would use a `ptype` of `self` and
2603   `-fieldsplit_1_pc_type lsc` which uses the least squares commutator to compute a preconditioner for the Schur complement.
2604 
2605   Developer Note:
2606   The name of this function and the option `-pc_fieldsplit_schur_precondition` are inconsistent; precondition should be used everywhere.
2607 
2608 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`,
2609           `MatSchurComplementSetAinvType()`, `PCLSC`, `PCFieldSplitSetSchurFactType()`
2610 @*/
PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* pre may be NULL; it is only consumed with PC_FIELDSPLIT_SCHUR_PRE_USER.
     Silently ignored unless the Schur implementation has composed this method */
  PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2618 
/* PCFieldSplitSchurPrecondition - deprecated name retained for backward compatibility; use PCFieldSplitSetSchurPre() */
PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  return PCFieldSplitSetSchurPre(pc, ptype, pre);
} /* Deprecated name */
2623 
2624 /*@
2625   PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be
2626   preconditioned.  See `PCFieldSplitSetSchurPre()` for details.
2627 
2628   Logically Collective
2629 
2630   Input Parameter:
2631 . pc - the preconditioner context
2632 
2633   Output Parameters:
2634 + ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`
2635 - pre   - matrix to use for preconditioning (with `PC_FIELDSPLIT_SCHUR_PRE_USER`), or `NULL`
2636 
2637   Level: intermediate
2638 
2639 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC`
2640 @*/
PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscUseMethod() errors unless the Schur implementation has composed this method;
     either output argument may be NULL */
  PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2648 
2649 /*@
2650   PCFieldSplitSchurGetS -  extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately
2651 
2652   Not Collective
2653 
2654   Input Parameter:
2655 . pc - the preconditioner context
2656 
2657   Output Parameter:
2658 . S - the Schur complement matrix
2659 
2660   Level: advanced
2661 
2662   Note:
2663   This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`.
2664 
2665 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`,
2666           `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()`
2667 @*/
2668 PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S)
2669 {
2670   const char    *t;
2671   PetscBool      isfs;
2672   PC_FieldSplit *jac;
2673 
2674   PetscFunctionBegin;
2675   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2676   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2677   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2678   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2679   jac = (PC_FieldSplit *)pc->data;
2680   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2681   if (S) *S = jac->schur;
2682   PetscFunctionReturn(PETSC_SUCCESS);
2683 }
2684 
2685 /*@
2686   PCFieldSplitSchurRestoreS -  returns the `MATSCHURCOMPLEMENT` matrix used by this `PC`
2687 
2688   Not Collective
2689 
2690   Input Parameters:
2691 + pc - the preconditioner context
2692 - S  - the Schur complement matrix
2693 
2694   Level: advanced
2695 
2696 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MatSchurComplement`, `PCFieldSplitSchurGetS()`
2697 @*/
2698 PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S)
2699 {
2700   const char    *t;
2701   PetscBool      isfs;
2702   PC_FieldSplit *jac;
2703 
2704   PetscFunctionBegin;
2705   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2706   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2707   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2708   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2709   jac = (PC_FieldSplit *)pc->data;
2710   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2711   PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten");
2712   PetscFunctionReturn(PETSC_SUCCESS);
2713 }
2714 
2715 static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2716 {
2717   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2718 
2719   PetscFunctionBegin;
2720   jac->schurpre = ptype;
2721   if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) {
2722     PetscCall(MatDestroy(&jac->schur_user));
2723     jac->schur_user = pre;
2724     PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
2725   }
2726   PetscFunctionReturn(PETSC_SUCCESS);
2727 }
2728 
2729 static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
2730 {
2731   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2732 
2733   PetscFunctionBegin;
2734   if (ptype) *ptype = jac->schurpre;
2735   if (pre) *pre = jac->schur_user;
2736   PetscFunctionReturn(PETSC_SUCCESS);
2737 }
2738 
2739 /*@
2740   PCFieldSplitSetSchurFactType -  sets which blocks of the approximate block factorization to retain in the preconditioner {cite}`murphy2000note` and {cite}`ipsen2001note`
2741 
2742   Collective
2743 
2744   Input Parameters:
2745 + pc    - the preconditioner context
2746 - ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default
2747 
2748   Options Database Key:
2749 . -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is `full`
2750 
2751   Level: intermediate
2752 
2753   Notes:
2754   The `full` factorization is
2755 
2756   ```{math}
2757   \left(\begin{array}{cc} A & B \\
2758   C & E \\
2759   \end{array}\right) =
2760   \left(\begin{array}{cc} I & 0 \\
2761   C A^{-1} & I \\
2762   \end{array}\right)
2763   \left(\begin{array}{cc} A & 0 \\
2764   0 & S \\
2765   \end{array}\right)
2766   \left(\begin{array}{cc} I & A^{-1}B \\
2767   0 & I \\
2768   \end{array}\right) = L D U,
2769   ```
2770 
2771   where $ S = E - C A^{-1} B $. In practice, the full factorization is applied via block triangular solves with the grouping $L(DU)$. `upper` uses $DU$, `lower` uses $LD$,
2772   and `diag` is the diagonal part with the sign of $S$ flipped (because this makes the preconditioner positive definite for many formulations,
2773   thus allowing the use of `KSPMINRES)`. Sign flipping of $S$ can be turned off with `PCFieldSplitSetSchurScale()`.
2774 
2775   If $A$ and $S$ are solved exactly
2776 +  1 - `full` factorization is a direct solver.
2777 .  2 - The preconditioned operator with `lower` or `upper` has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations.
2778 -  3 - With `diag`, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations.
2779 
2780   If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner
2781   application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice.
2782 
2783   For symmetric problems in which $A$ is positive definite and $S$ is negative definite, `diag` can be used with `KSPMINRES`.
2784 
2785   A flexible method like `KSPFGMRES` or `KSPGCR`, [](sec_flexibleksp), must be used if the fieldsplit preconditioner is nonlinear (e.g., a few iterations of a Krylov method is used to solve with $A$ or $S$).
2786 
2787 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`,
2788           [](sec_flexibleksp), `PCFieldSplitSetSchurPre()`
2789 @*/
PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* silently ignored unless the Schur implementation has composed this method */
  PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2797 
2798 static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype)
2799 {
2800   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2801 
2802   PetscFunctionBegin;
2803   jac->schurfactorization = ftype;
2804   PetscFunctionReturn(PETSC_SUCCESS);
2805 }
2806 
2807 /*@
2808   PCFieldSplitSetSchurScale -  Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`.
2809 
2810   Collective
2811 
2812   Input Parameters:
2813 + pc    - the preconditioner context
2814 - scale - scaling factor for the Schur complement
2815 
2816   Options Database Key:
2817 . -pc_fieldsplit_schur_scale <scale> - default is -1.0
2818 
2819   Level: intermediate
2820 
2821 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurFactType()`
2822 @*/
PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* scale must be the same on every rank of the PC's communicator */
  PetscValidLogicalCollectiveScalar(pc, scale, 2);
  /* silently ignored unless the Schur implementation has composed this method */
  PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2831 
2832 static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale)
2833 {
2834   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2835 
2836   PetscFunctionBegin;
2837   jac->schurscale = scale;
2838   PetscFunctionReturn(PETSC_SUCCESS);
2839 }
2840 
2841 /*@C
2842   PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement
2843 
2844   Collective
2845 
2846   Input Parameter:
2847 . pc - the preconditioner context
2848 
2849   Output Parameters:
2850 + A00 - the (0,0) block
2851 . A01 - the (0,1) block
2852 . A10 - the (1,0) block
2853 - A11 - the (1,1) block
2854 
2855   Level: advanced
2856 
2857   Note:
2858   Use `NULL` for any unneeded output arguments
2859 
2860 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()`
2861 @*/
2862 PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11)
2863 {
2864   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2865 
2866   PetscFunctionBegin;
2867   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2868   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach.");
2869   if (A00) *A00 = jac->pmat[0];
2870   if (A01) *A01 = jac->B;
2871   if (A10) *A10 = jac->C;
2872   if (A11) *A11 = jac->pmat[1];
2873   PetscFunctionReturn(PETSC_SUCCESS);
2874 }
2875 
2876 /*@
2877   PCFieldSplitSetGKBTol -  Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2878 
2879   Collective
2880 
2881   Input Parameters:
2882 + pc        - the preconditioner context
2883 - tolerance - the solver tolerance
2884 
2885   Options Database Key:
2886 . -pc_fieldsplit_gkb_tol <tolerance> - default is 1e-5
2887 
2888   Level: intermediate
2889 
2890   Note:
2891   The generalized GKB algorithm {cite}`arioli2013` uses a lower bound estimate of the error in energy norm as stopping criterion.
2892   It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than
2893   this estimate, the stopping criterion is satisfactory in practical cases.
2894 
2895 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()`
2896 @*/
PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* tolerance must be the same on every rank of the PC's communicator */
  PetscValidLogicalCollectiveReal(pc, tolerance, 2);
  /* silently ignored unless the GKB implementation has composed this method */
  PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2905 
2906 static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance)
2907 {
2908   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2909 
2910   PetscFunctionBegin;
2911   jac->gkbtol = tolerance;
2912   PetscFunctionReturn(PETSC_SUCCESS);
2913 }
2914 
2915 /*@
2916   PCFieldSplitSetGKBMaxit -  Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2917 
2918   Collective
2919 
2920   Input Parameters:
2921 + pc    - the preconditioner context
2922 - maxit - the maximum number of iterations
2923 
2924   Options Database Key:
2925 . -pc_fieldsplit_gkb_maxit <maxit> - default is 100
2926 
2927   Level: intermediate
2928 
2929 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()`
2930 @*/
PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* maxit must be the same on every rank of the PC's communicator */
  PetscValidLogicalCollectiveInt(pc, maxit, 2);
  /* silently ignored unless the GKB implementation has composed this method */
  PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2939 
2940 static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit)
2941 {
2942   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2943 
2944   PetscFunctionBegin;
2945   jac->gkbmaxit = maxit;
2946   PetscFunctionReturn(PETSC_SUCCESS);
2947 }
2948 
2949 /*@
2950   PCFieldSplitSetGKBDelay -  Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization {cite}`arioli2013` in `PCFIELDSPLIT`
2951   preconditioner.
2952 
2953   Collective
2954 
2955   Input Parameters:
2956 + pc    - the preconditioner context
2957 - delay - the delay window in the lower bound estimate
2958 
2959   Options Database Key:
2960 . -pc_fieldsplit_gkb_delay <delay> - default is 5
2961 
2962   Level: intermediate
2963 
2964   Notes:
2965   The algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. The lower bound of the error $ ||u-u^k||_H $
2966   is expressed as a truncated sum. The error at iteration k can only be measured at iteration (k + `delay`), and thus the algorithm needs
2967   at least (`delay` + 1) iterations to stop.
2968 
2969   For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to {cite}`arioli2013`
2970 
2971 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2972 @*/
PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* delay must be the same on every rank of the PC's communicator */
  PetscValidLogicalCollectiveInt(pc, delay, 2);
  /* silently ignored unless the GKB implementation has composed this method */
  PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2981 
2982 static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay)
2983 {
2984   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2985 
2986   PetscFunctionBegin;
2987   jac->gkbdelay = delay;
2988   PetscFunctionReturn(PETSC_SUCCESS);
2989 }
2990 
2991 /*@
2992   PCFieldSplitSetGKBNu -  Sets the scalar value nu >= 0 in the transformation H = A00 + nu*A01*A01' of the (1,1) block in the
2993   Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2994 
2995   Collective
2996 
2997   Input Parameters:
2998 + pc - the preconditioner context
2999 - nu - the shift parameter
3000 
3001   Options Database Key:
3002 . -pc_fieldsplit_gkb_nu <nu> - default is 1
3003 
3004   Level: intermediate
3005 
3006   Notes:
3007   This shift is in general done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by choosing `nu` sufficiently large. However,
3008   if `nu` is chosen too large, the matrix H might be badly conditioned and the solution of the linear system $Hx = b$ in the inner loop becomes difficult. It is therefore
3009   necessary to find a good balance in between the convergence of the inner and outer loop.
3010 
3011   For `nu` = 0, no shift is done. In this case A00 has to be positive definite. The matrix N in {cite}`arioli2013` is then chosen as identity.
3012 
3013 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
3014 @*/
PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* nu must be the same on every rank of the PC's communicator */
  PetscValidLogicalCollectiveReal(pc, nu, 2);
  /* silently ignored unless the GKB implementation has composed this method */
  PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3023 
3024 static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu)
3025 {
3026   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3027 
3028   PetscFunctionBegin;
3029   jac->gkbnu = nu;
3030   PetscFunctionReturn(PETSC_SUCCESS);
3031 }
3032 
3033 static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type)
3034 {
3035   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3036 
3037   PetscFunctionBegin;
3038   jac->type = type;
3039   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
3040   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
3041   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
3042   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
3043   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
3044   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
3045   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
3046   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
3047   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
3048 
3049   if (type == PC_COMPOSITE_SCHUR) {
3050     pc->ops->apply          = PCApply_FieldSplit_Schur;
3051     pc->ops->applytranspose = PCApplyTranspose_FieldSplit_Schur;
3052     pc->ops->view           = PCView_FieldSplit_Schur;
3053     pc->ops->setuponblocks  = PCSetUpOnBlocks_FieldSplit_Schur;
3054 
3055     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur));
3056     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit));
3057     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit));
3058     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit));
3059     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit));
3060   } else if (type == PC_COMPOSITE_GKB) {
3061     pc->ops->apply          = PCApply_FieldSplit_GKB;
3062     pc->ops->applytranspose = NULL;
3063     pc->ops->view           = PCView_FieldSplit_GKB;
3064     pc->ops->setuponblocks  = PCSetUpOnBlocks_FieldSplit_GKB;
3065 
3066     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
3067     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit));
3068     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit));
3069     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit));
3070     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit));
3071   } else {
3072     pc->ops->apply          = PCApply_FieldSplit;
3073     pc->ops->applytranspose = PCApplyTranspose_FieldSplit;
3074     pc->ops->view           = PCView_FieldSplit;
3075     pc->ops->setuponblocks  = PCSetUpOnBlocks_FieldSplit;
3076 
3077     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
3078   }
3079   PetscFunctionReturn(PETSC_SUCCESS);
3080 }
3081 
/* Implementation of PCFieldSplitSetBlockSize(): records the block size used to define strided fields */
static PetscErrorCode PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* A block size of zero or less is meaningless */
  PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" PetscInt_FMT, bs);
  /* jac->bs <= 0 means "not yet set" (it is initialized to -1 in PCCreate_FieldSplit()); once set it cannot be changed */
  PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs);
  jac->bs = bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3092 
3093 static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
3094 {
3095   PC_FieldSplit    *jac           = (PC_FieldSplit *)pc->data;
3096   PC_FieldSplitLink ilink_current = jac->head;
3097   IS                is_owned;
3098 
3099   PetscFunctionBegin;
3100   jac->coordinates_set = PETSC_TRUE; // Internal flag
3101   PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, NULL));
3102 
3103   while (ilink_current) {
3104     // For each IS, embed it to get local coords indces
3105     IS              is_coords;
3106     PetscInt        ndofs_block;
3107     const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block
3108 
3109     // Setting drop to true for safety. It should make no difference.
3110     PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords));
3111     PetscCall(ISGetLocalSize(is_coords, &ndofs_block));
3112     PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration));
3113 
3114     // Allocate coordinates vector and set it directly
3115     PetscCall(PetscMalloc1(ndofs_block * dim, &ilink_current->coords));
3116     for (PetscInt dof = 0; dof < ndofs_block; ++dof) {
3117       for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d];
3118     }
3119     ilink_current->dim   = dim;
3120     ilink_current->ndofs = ndofs_block;
3121     PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration));
3122     PetscCall(ISDestroy(&is_coords));
3123     ilink_current = ilink_current->next;
3124   }
3125   PetscCall(ISDestroy(&is_owned));
3126   PetscFunctionReturn(PETSC_SUCCESS);
3127 }
3128 
3129 /*@
3130   PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
3131 
3132   Collective
3133 
3134   Input Parameters:
3135 + pc   - the preconditioner context
3136 - type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`,
3137          `PC_COMPOSITE_GKB`
3138 
3139   Options Database Key:
. -pc_fieldsplit_type <one of multiplicative, additive, symmetric_multiplicative, special, schur, gkb> - Sets fieldsplit preconditioner type
3141 
3142   Level: intermediate
3143 
3144 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
3145           `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`, `PCFieldSplitSetSchurFactType()`
3146 @*/
PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Dispatch to the composed implementation (PCFieldSplitSetType_FieldSplit when pc is a
     PCFIELDSPLIT); PetscTryMethod makes this a silent no-op for other PC types */
  PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3154 
3155 /*@
3156   PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
3157 
3158   Not collective
3159 
3160   Input Parameter:
3161 . pc - the preconditioner context
3162 
3163   Output Parameter:
. type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`, `PC_COMPOSITE_GKB`
3165 
3166   Level: intermediate
3167 
3168 .seealso: [](sec_block_matrices), `PC`, `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
3169           `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
3170 @*/
PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type)
{
  /* NOTE(review): pc->data is cast unconditionally with no PetscObjectTypeCompare() guard
     (unlike PCFieldSplitSetDMSplits() below), so calling this on a non-PCFIELDSPLIT pc reads
     another type's data structure — confirm callers only use it on PCFIELDSPLIT */
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(type, 2);
  *type = jac->type;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3181 
3182 /*@
3183   PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3184 
3185   Logically Collective
3186 
3187   Input Parameters:
3188 + pc  - the preconditioner context
3189 - flg - boolean indicating whether to use field splits defined by the `DM`
3190 
3191   Options Database Key:
3192 . -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM`
3193 
3194   Level: intermediate
3195 
3196   Developer Note:
3197   The name should be `PCFieldSplitSetUseDMSplits()`, similar change to options database
3198 
3199 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
3200 @*/
PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flg, 2);
  /* Only touch jac when pc really is a PCFIELDSPLIT; otherwise the call is silently ignored */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  if (isfs) jac->dm_splits = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3213 
3214 /*@
3215   PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3216 
3217   Logically Collective
3218 
3219   Input Parameter:
3220 . pc - the preconditioner context
3221 
3222   Output Parameter:
3223 . flg - boolean indicating whether to use field splits defined by the `DM`
3224 
3225   Level: intermediate
3226 
3227   Developer Note:
3228   The name should be `PCFieldSplitGetUseDMSplits()`
3229 
3230 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
3231 @*/
PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(flg, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  if (isfs) {
    /* NOTE(review): the inner NULL guard is redundant with PetscAssertPointer() above unless that
       check is compiled out of this build; also *flg is left untouched when pc is not a
       PCFIELDSPLIT — callers should initialize it */
    if (flg) *flg = jac->dm_splits;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
3246 
3247 /*@
3248   PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3249 
3250   Logically Collective
3251 
3252   Input Parameter:
3253 . pc - the preconditioner context
3254 
3255   Output Parameter:
3256 . flg - boolean indicating whether to detect fields or not
3257 
3258   Level: intermediate
3259 
3260 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()`
3261 @*/
3262 PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg)
3263 {
3264   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3265 
3266   PetscFunctionBegin;
3267   *flg = jac->detect;
3268   PetscFunctionReturn(PETSC_SUCCESS);
3269 }
3270 
3271 /*@
3272   PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3273 
3274   Logically Collective
3275 
  Input Parameters:
+ pc  - the preconditioner context
- flg - boolean indicating whether to detect fields or not
3281 
3282   Options Database Key:
3283 . -pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point
3284 
3285   Level: intermediate
3286 
3287   Note:
3288   Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`).
3289 
3290 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`
3291 @*/
3292 PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg)
3293 {
3294   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3295 
3296   PetscFunctionBegin;
3297   jac->detect = flg;
3298   if (jac->detect) {
3299     PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
3300     PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
3301   }
3302   PetscFunctionReturn(PETSC_SUCCESS);
3303 }
3304 
3305 /*MC
3306   PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual
3307   collections of variables (that may overlap) called fields or splits. Each field often represents a different continuum variable
3308   represented on a grid, such as velocity, pressure, or temperature.
3309   In the literature these are sometimes called block preconditioners; but should not be confused with `PCBJACOBI`.
3310   See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details.
3311 
3312   Options Database Keys:
3313 +   -pc_fieldsplit_%d_fields <a,b,..>                                                - indicates the fields to be used in the `%d`'th split
3314 .   -pc_fieldsplit_default                                                           - automatically add any fields to additional splits that have not
3315                                                                                        been supplied explicitly by `-pc_fieldsplit_%d_fields`
3316 .   -pc_fieldsplit_block_size <bs>                                                   - size of block that defines fields (i.e. there are bs fields)
3317                                                                                        when the matrix is not of `MatType` `MATNEST`
3318 .   -pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting
3319 .   -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full>                     - default is `a11`; see `PCFieldSplitSetSchurPre()`
3320 .   -pc_fieldsplit_schur_fact_type <diag,lower,upper,full>                           - set factorization type when using `-pc_fieldsplit_type schur`;
3321                                                                                        see `PCFieldSplitSetSchurFactType()`
3322 .   -pc_fieldsplit_dm_splits <true,false> (default is true)                          - Whether to use `DMCreateFieldDecomposition()` for splits
3323 -   -pc_fieldsplit_detect_saddle_point                                               - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver
3324 
3325   Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` .
  The options prefix for the inner solver when using the Golub-Kahan bidiagonalization preconditioner is `-fieldsplit_0_`
3327   For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields.
3328 
3329   To set options on the solvers for all blocks, prepend `-fieldsplit_` to all the `PC`
3330   options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1`.
3331 
3332   To set the options on the solvers separate for each block call `PCFieldSplitGetSubKSP()`
3333   and set the options directly on the resulting `KSP` object
3334 
3335   Level: intermediate
3336 
3337   Notes:
3338   Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries or with a `MATNEST` and `PCFieldSplitSetIS()`
3339   to define a split by an arbitrary collection of entries.
3340 
3341   If no splits are set, the default is used. If a `DM` is associated with the `PC` and it supports
3342   `DMCreateFieldDecomposition()`, then that is used for the default. Otherwise if the matrix is not `MATNEST`, the splits are defined by entries strided by bs,
3343   beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`,
3344   if this is not called the block size defaults to the blocksize of the second matrix passed
3345   to `KSPSetOperators()`/`PCSetOperators()`.
3346 
3347   For the Schur complement preconditioner if
3348   ```{math}
3349     J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right]
3350   ```
3351 
3352   the preconditioner using `full` factorization is logically
3353   ```{math}
3354     \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) A_{01} \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{ksp}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right]
  ```
3356   where the action of $\text{ksp}(A_{00})$ is applied using the `KSP` solver with prefix `-fieldsplit_0_`.  $S$ is the Schur complement
3357   ```{math}
3358      S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01}
3359   ```
3360   which is usually dense and not stored explicitly.  The action of $\text{ksp}(S)$ is computed using the `KSP` solver with prefix `-fieldsplit_splitname_` (where `splitname`
3361   was given in providing the SECOND split or 1 if not given). Accordingly, if using `PCFieldSplitGetSubKSP()`, the array of sub-`KSP` contexts will hold two `KSP`s: at its
3362   0th index, the `KSP` associated with `-fieldsplit_0_`, and at its 1st index, the `KSP` corresponding to `-fieldsplit_1_`.
3363   By default, $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$.
3364 
3365   The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above,
3366   `diag` gives
3367   ```{math}
3368     \left[\begin{array}{cc} \text{ksp}(A_{00}) & 0 \\  0 & -\text{ksp}(S) \end{array}\right]
3369   ```
3370   Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$  so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip
3371   can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of
3372   ```{math}
3373     \left[\begin{array}{cc} A_{00} & 0 \\  A_{10} & S \end{array}\right]
3374   ```
3375   where the inverses of $A_{00}$ and $S$ are applied using `KSP`s. The upper factorization is the inverse of
3376   ```{math}
3377     \left[\begin{array}{cc} A_{00} & A_{01} \\  0 & S \end{array}\right]
3378   ```
3379   where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s.
3380 
3381   If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS`
3382   is used automatically for a second submatrix.
3383 
3384   The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1.
3385   Generally it should be used with the `MATAIJ` or `MATNEST` `MatType`
3386 
3387   The forms of these preconditioners are closely related, if not identical, to forms derived as "Distributive Iterations", see,
3388   for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`wesseling2009`.
3389   One can also use `PCFIELDSPLIT` inside a smoother resulting in "Distributive Smoothers".
3390 
3391   See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`.
3392 
3393   The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the
3394   residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables.
3395 
3396   The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape
3397   ```{math}
3398     \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right]
3399   ```
  with $A_{00}$ positive semi-definite. The implementation follows {cite}`arioli2013`. Therein, we choose $N := 1/\nu * I$ and the $(1,1)$-block of the matrix is modified to $H = A_{00} + \nu A_{01} A_{01}'$.
3401   A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`.
3402 
3403   Some `PCFIELDSPLIT` variants are called physics-based preconditioners, since the preconditioner takes into account the underlying physics of the
3404   problem. But this nomenclature is not well-defined.
3405 
3406   Developer Note:
3407   The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their
3408   user API.
3409 
3410 .seealso: [](sec_block_matrices), `PC`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`,
3411           `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`,
3412           `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`,
3413           `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()`
3414 M*/
3415 
/* Constructor registered for PCFIELDSPLIT: allocates the PC_FieldSplit context, installs default
   parameter values and ops, and composes the type-specific public API functions on the object */
PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc)
{
  PC_FieldSplit *jac;

  PetscFunctionBegin;
  PetscCall(PetscNew(&jac));

  jac->bs                 = -1; /* unset; defaults later to the block size of the operator (see PCFieldSplitSetBlockSize_FieldSplit()) */
  jac->type               = PC_COMPOSITE_MULTIPLICATIVE;
  jac->schurpre           = PC_FIELDSPLIT_SCHUR_PRE_USER; /* Try user preconditioner first, fall back on diagonal */
  jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL;
  jac->schurscale         = -1.0; /* default sign flip of ksp(S); see the PCFIELDSPLIT man page and PCFieldSplitSetSchurScale() */
  jac->dm_splits          = PETSC_TRUE;
  /* Defaults for the Golub-Kahan bidiagonalization (GKB) variant */
  jac->gkbtol             = 1e-5;
  jac->gkbdelay           = 5;
  jac->gkbnu              = 1;
  jac->gkbmaxit           = 100;

  pc->data = (void *)jac;

  pc->ops->setup           = PCSetUp_FieldSplit;
  pc->ops->reset           = PCReset_FieldSplit;
  pc->ops->destroy         = PCDestroy_FieldSplit;
  pc->ops->setfromoptions  = PCSetFromOptions_FieldSplit;
  pc->ops->applyrichardson = NULL;

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit));

  /* Initialize function pointers */
  PetscCall(PCFieldSplitSetType(pc, jac->type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3454