xref: /petsc/src/ksp/pc/impls/fieldsplit/fieldsplit.c (revision 2daea058f1fa0b4b5a893c784be52f4813ced5f1)
1 #include <petsc/private/pcimpl.h>  /*I "petscpc.h" I*/
2 #include <petsc/private/kspimpl.h> /*  This is needed to provide the appropriate PETSC_EXTERN for KSP_Solve_FS ....*/
3 #include <petsc/private/matimpl.h> /* MatScatterDense_Private() for PCMatApply() */
4 #include <petscdm.h>
5 #include <petscdevice.h>
6 #if PetscDefined(HAVE_CUDA)
7   #include <petscdevice_cuda.h>
8 #endif
9 #if PetscDefined(HAVE_HIP)
10   #include <petscdevice_hip.h>
11 #endif
12 
/* Human-readable names for the Schur preconditioner / factorization enums, used by the
   options database and viewers. The trailing entries (enum type name, option prefix, NULL)
   follow the usual PETSc enum-string-array convention -- TODO confirm against PetscEnum docs. */
const char *const PCFieldSplitSchurPreTypes[]  = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL};
const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL};

/* Logging events for the inner solves performed by the various fieldsplit schemes */
PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4;
17 
typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
/* One node in the doubly linked list of splits; each split carries its own sub-KSP,
   index sets, and work objects */
struct _PC_FieldSplitLink {
  KSP               ksp;               /* Solver for this split's block */
  Vec               x, y, z;           /* Per-split work vectors */
  Mat               X, Y, Z;           /* Per-split work matrices; presumably the dense analogues of x, y, z for PCMatApply() (cf. matimpl.h include above) -- TODO confirm */
  char             *splitname;         /* Name used for this split's options prefix/viewing */
  PetscInt          nfields;           /* Number of entries in fields[]/fields_col[] */
  PetscInt         *fields, *fields_col; /* Field indices (rows and columns) defining this split when not given by an IS */
  VecScatter        sctx;              /* Scatter between the full vector and this split's subvector -- assumed; apply code not shown here */
  IS                is, is_col;        /* Row and column index sets defining the split */
  PC_FieldSplitLink next, previous;
  PetscLogEvent     event;             /* Logging event for this split's solve */

  /* Used only when setting coordinates with PCSetCoordinates */
  PetscInt   dim;
  PetscInt   ndofs;
  PetscReal *coords;
};
36 
/* Private context for the PCFIELDSPLIT preconditioner */
typedef struct {
  PCCompositeType type;
  PetscBool       defaultsplit; /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */
  PetscBool       splitdefined; /* Flag is set after the splits have been defined, to prevent more splits from being added */
  PetscInt        bs;           /* Block size for IS and Mat structures */
  PetscInt        nsplits;      /* Number of field divisions defined */
  Vec            *x, *y, w1, w2; /* Per-split work vectors (x, y) and whole-system work vectors (w1, w2) -- TODO confirm w1/w2 usage against apply code */
  Mat            *mat;    /* The diagonal block for each split */
  Mat            *pmat;   /* The preconditioning diagonal block for each split */
  Mat            *Afield; /* The rows of the matrix associated with each split */
  PetscBool       issetup; /* Set by PCSetUp_FieldSplit() once the per-split data structures exist */

  /* Only used when Schur complement preconditioning is used */
  Mat                       B;          /* The (0,1) block */
  Mat                       C;          /* The (1,0) block */
  Mat                       schur;      /* The Schur complement S = A11 - A10 A00^{-1} A01, the KSP here, kspinner, is H_1 in [El08] */
  Mat                       schurp;     /* Assembled approximation to S built by MatSchurComplement to be used as a matrix for constructing the preconditioner when solving with S */
  Mat                       schur_user; /* User-provided matrix for constructing the preconditioner for the Schur complement */
  PCFieldSplitSchurPreType  schurpre;   /* Determines which matrix is used for the Schur complement */
  PCFieldSplitSchurFactType schurfactorization;
  KSP                       kspschur;   /* The solver for S */
  KSP                       kspupper;   /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */
  PetscScalar               schurscale; /* Scaling factor for the Schur complement solution with DIAG factorization */

  /* Only used when Golub-Kahan bidiagonalization preconditioning is used */
  Mat          H;           /* The modified matrix H = A00 + nu*A01*A01'              */
  PetscReal    gkbtol;      /* Stopping tolerance for lower bound estimate            */
  PetscInt     gkbdelay;    /* The delay window for the stopping criterion            */
  PetscReal    gkbnu;       /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */
  PetscInt     gkbmaxit;    /* Maximum number of iterations for outer loop            */
  PetscBool    gkbmonitor;  /* Monitor for gkb iterations and the lower bound error   */
  PetscViewer  gkbviewer;   /* Viewer context for gkbmonitor                          */
  Vec          u, v, d, Hu; /* Work vectors for the GKB algorithm                     */
  PetscScalar *vecz;        /* Contains intermediate values, eg for lower bound       */

  PC_FieldSplitLink head;   /* First split in the linked list of splits */
  PetscBool         isrestrict;       /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */
  PetscBool         suboptionsset;    /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */
  PetscBool         dm_splits;        /* Whether to use DMCreateFieldDecomposition() whenever possible */
  PetscBool         diag_use_amat;    /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         detect;           /* Whether to form 2-way split by finding zero diagonal entries */
  PetscBool         coordinates_set;  /* Whether PCSetCoordinates has been called */
} PC_FieldSplit;
81 
82 /*
83     Note:
84     there is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit instead of
85    inside PC_FieldSplitLink, just historical. If you want to be able to add new fields after already using the
86    PC you could change this.
87 */
88 
89 /* This helper is so that setting a user-provided matrix is orthogonal to choosing to use it.  This way the
90 * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices. */
91 static Mat FieldSplitSchurPre(PC_FieldSplit *jac)
92 {
93   switch (jac->schurpre) {
94   case PC_FIELDSPLIT_SCHUR_PRE_SELF:
95     return jac->schur;
96   case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
97     return jac->schurp;
98   case PC_FIELDSPLIT_SCHUR_PRE_A11:
99     return jac->pmat[1];
100   case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */
101   case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */
102   default:
103     return jac->schur_user ? jac->schur_user : jac->pmat[1];
104   }
105 }
106 
107 #include <petscdraw.h>
/* Viewer for the non-Schur, non-GKB (additive/multiplicative) fieldsplit PC: prints the
   composition type, each split's field list (or notes it is IS-defined), and each split's KSP */
static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         isascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isascii) {
    /* Only mention the blocksize when one has actually been determined */
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for each split is in the following KSP objects:\n"));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        /* Tabbing is disabled so the comma-separated field list stays on one line */
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      PetscCall(KSPView(ilink->ksp, viewer));
      ilink = ilink->next;
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    /* NOTE(review): ilink is not reset to jac->head here; this is safe only because a viewer
       has a single type, so the ASCII loop above cannot have run -- confirm if viewer types
       ever overlap */
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* Lay the per-split KSP drawings out side by side, centered on the current point */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
165 
/* Viewer for the Schur-complement variant of fieldsplit: reports the factorization type,
   which matrix is used to build the Schur preconditioner, the split definitions, and the
   A00/upper-A00/Schur sub-KSPs */
static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer)
{
  PC_FieldSplit             *jac = (PC_FieldSplit *)pc->data;
  PetscBool                  isascii, isdraw;
  PetscInt                   i, j;
  PC_FieldSplitLink          ilink = jac->head;
  MatSchurComplementAinvType atype;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    /* Describe the matrix from which the Schur-complement preconditioner is built
       (mirrors the choices made in FieldSplitSchurPre()) */
    switch (jac->schurpre) {
    case PC_FIELDSPLIT_SCHUR_PRE_SELF:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from S itself\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
      /* jac->schur may not exist yet before PCSetUp; only query it when present */
      if (jac->schur) {
        PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype));
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's "))));
      }
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_A11:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_FULL:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from the exact Schur complement\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_USER:
      /* USER falls back to A11 when no user matrix was actually supplied */
      if (jac->schur_user) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from user provided matrix\n"));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      }
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre);
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Split info:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      ilink = ilink->next;
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    /* Sub-KSPs may not exist before PCSetUp; report that rather than dereferencing NULL */
    if (jac->head) PetscCall(KSPView(jac->head->ksp, viewer));
    else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    PetscCall(PetscViewerASCIIPopTab(viewer));
    /* The upper factor's A00 solver is only shown when it differs from the main A00 solver */
    if (jac->head && jac->kspupper != jac->head->ksp) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->kspschur) {
      PetscCall(KSPView(jac->kspschur, viewer));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    }
    PetscCall(PetscViewerASCIIPopTab(viewer));
    PetscCall(PetscViewerASCIIPopTab(viewer));
  } else if (isdraw && jac->head) {
    PetscDraw draw;
    PetscReal x, y, w, wd, h;
    PetscInt  cnt = 2; /* number of KSP drawings: A00 and Schur, plus upper-A00 when distinct */
    char      str[32];

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (jac->kspupper != jac->head->ksp) cnt++;
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (cnt + 1);

    PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    /* Same USER-without-matrix fallback as in the ASCII branch: report A11 */
    if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11]));
    } else {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre]));
    }
    PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    x = x - wd * (cnt - 1) / 2.0;

    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->head->ksp, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
    if (jac->kspupper != jac->head->ksp) {
      x += wd;
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(jac->kspupper, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
    }
    x += wd;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->kspschur, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
288 
/* Viewer for the Golub-Kahan bidiagonalization variant of fieldsplit: prints the GKB
   parameters and the solver for the augmented matrix H = A00 + nu*A01*A01' */
static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         isascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));

    PetscCall(PetscViewerASCIIPrintf(viewer, "  Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for H = A00 + nu*A01*A01' matrix:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));

    /* NOTE(review): only split 0 is shown in the ASCII branch (GKB solves with H on the
       first split); ilink is dereferenced without a NULL check, so the head split is
       assumed to exist by the time this viewer runs -- confirm */
    if (ilink->fields) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields "));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      for (j = 0; j < ilink->nfields; j++) {
        if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
        PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
      }
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n"));
    }
    PetscCall(KSPView(ilink->ksp, viewer));

    PetscCall(PetscViewerASCIIPopTab(viewer));
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* Lay the per-split KSP drawings out side by side, centered on the current point */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
349 
350 /* Precondition: jac->bs is set to a meaningful value or MATNEST */
351 static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc)
352 {
353   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
354   PetscInt       bs, i, nfields, *ifields, nfields_col, *ifields_col;
355   PetscBool      flg, flg_col, mnest;
356   char           optionname[128], splitname[8], optionname_col[128];
357 
358   PetscFunctionBegin;
359   PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &mnest));
360   if (mnest) {
361     PetscCall(MatNestGetSize(pc->pmat, &bs, NULL));
362   } else {
363     bs = jac->bs;
364   }
365   PetscCall(PetscMalloc2(bs, &ifields, bs, &ifields_col));
366   for (i = 0, flg = PETSC_TRUE;; i++) {
367     PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
368     PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
369     PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i));
370     nfields     = bs;
371     nfields_col = bs;
372     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
373     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col));
374     if (!flg) break;
375     else if (flg && !flg_col) {
376       PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
377       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields));
378     } else {
379       PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
380       PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match");
381       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col));
382     }
383   }
384   if (i > 0) {
385     /* Makes command-line setting of splits take precedence over setting them in code.
386        Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would
387        create new splits, which would probably not be what the user wanted. */
388     jac->splitdefined = PETSC_TRUE;
389   }
390   PetscCall(PetscFree2(ifields, ifields_col));
391   PetscFunctionReturn(PETSC_SUCCESS);
392 }
393 
/* Ensures at least two splits are defined before setup, trying in order: a DM field
   decomposition, zero-diagonal detection, off-block-diagonal coupling detection,
   command-line field lists, and finally a default one-field-per-block split.
   If exactly one split was supplied, its complement is added as the second split. */
static PetscErrorCode PCFieldSplitSetDefaults(PC pc)
{
  PC_FieldSplit    *jac                = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink              = jac->head;
  PetscBool         fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE;
  PetscInt          i;

  PetscFunctionBegin;
  /*
   Kinda messy, but at least this now uses DMCreateFieldDecomposition().
   Should probably be rewritten.
   */
  if (!ilink) {
    /* No splits defined yet; try to construct them */
    PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL));
    if (pc->dm && jac->dm_splits && !jac->detect && !coupling) {
      /* Use the DM's notion of fields */
      PetscInt  numFields, f, i, j;
      char    **fieldNames;
      IS       *fields;
      DM       *dms;
      DM        subdm[128]; /* fixed-size scratch; guarded by the PetscCheck(numFields <= 128) below */
      PetscBool flg;

      PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms));
      /* Allow the user to prescribe the splits */
      for (i = 0, flg = PETSC_TRUE;; i++) {
        PetscInt ifields[128];
        IS       compField;
        char     optionname[128], splitname[8];
        PetscInt nfields = numFields;

        PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
        PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
        if (!flg) break;
        PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields);
        PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i]));
        if (nfields == 1) {
          /* A single-field split keeps the DM's field name */
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField));
        } else {
          /* Multi-field splits are named by their index */
          PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
          PetscCall(PCFieldSplitSetIS(pc, splitname, compField));
        }
        PetscCall(ISDestroy(&compField));
        /* Release the decomposition pieces consumed by this split */
        for (j = 0; j < nfields; ++j) {
          f = ifields[j];
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      }
      if (i == 0) {
        /* No command-line splits: one split per DM field */
        for (f = 0; f < numFields; ++f) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f]));
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      } else {
        /* Replace the per-field DMs by the sub-DMs created above */
        for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j));
        PetscCall(PetscFree(dms));
        PetscCall(PetscMalloc1(i, &dms));
        for (j = 0; j < i; ++j) dms[j] = subdm[j];
      }
      PetscCall(PetscFree(fieldNames));
      PetscCall(PetscFree(fields));
      if (dms) {
        PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n"));
        /* Attach each (sub-)DM to the corresponding split's KSP, transferring ownership */
        for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) {
          const char *prefix;
          PetscCall(PetscObjectGetOptionsPrefix((PetscObject)ilink->ksp, &prefix));
          PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dms[i], prefix));
          PetscCall(KSPSetDM(ilink->ksp, dms[i]));
          PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE)); /* DM is informational; the KSP keeps its own operators */
          PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0));
          PetscCall(DMDestroy(&dms[i]));
        }
        PetscCall(PetscFree(dms));
      }
    } else {
      if (jac->bs <= 0) {
        /* Fall back to the matrix block size (or 1 without a matrix) */
        if (pc->pmat) PetscCall(MatGetBlockSize(pc->pmat, &jac->bs));
        else jac->bs = 1;
      }

      if (jac->detect) {
        /* 2-way split: rows with zero diagonal entries vs the rest */
        IS       zerodiags, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->diag_use_amat) {
          PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags));
        } else {
          PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags));
        }
        PetscCall(ISComplement(zerodiags, nmin, nmax, &rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags));
        PetscCall(ISDestroy(&zerodiags));
        PetscCall(ISDestroy(&rest));
      } else if (coupling) {
        /* 2-way split: all rows vs rows with off-block-diagonal entries.
           NOTE(review): the local IS 'coupling' shadows the PetscBool of the same name above */
        IS       coupling, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->offdiag_use_amat) {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling));
        } else {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling));
        }
        PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest));
        PetscCall(ISSetIdentity(rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", coupling));
        PetscCall(ISDestroy(&coupling));
        PetscCall(ISDestroy(&rest));
      } else {
        PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL));
        if (!fieldsplit_default) {
          /* Allow user to set fields from command line,  if bs was known at the time of PCSetFromOptions_FieldSplit()
           then it is set there. This is not ideal because we should only have options set in XXSetFromOptions(). */
          PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
          if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
        }
        if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) {
          /* Default: one split per field (block-size stride or nest block) */
          Mat       M = pc->pmat;
          PetscBool isnest;
          PetscInt  nf;

          PetscCall(PetscInfo(pc, "Using default splitting of fields\n"));
          PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest));
          if (!isnest) {
            M = pc->mat;
            PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest));
          }
          if (!isnest) nf = jac->bs;
          else PetscCall(MatNestGetSize(M, &nf, NULL));
          for (i = 0; i < nf; i++) {
            char splitname[8];

            PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
            PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i));
          }
          jac->defaultsplit = PETSC_TRUE;
        }
      }
    }
  } else if (jac->nsplits == 1) {
    /* Exactly one split supplied: add its complement as the second split */
    IS       is2;
    PetscInt nmin, nmax;

    PetscCheck(ilink->is, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()");
    PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
    PetscCall(ISComplement(ilink->is, nmin, nmax, &is2));
    PetscCall(PCFieldSplitSetIS(pc, "1", is2));
    PetscCall(ISDestroy(&is2));
  }

  PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits);
  PetscFunctionReturn(PETSC_SUCCESS);
}
551 
552 static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu)
553 {
554   Mat       BT, T;
555   PetscReal nrmT, nrmB;
556 
557   PetscFunctionBegin;
558   PetscCall(MatHermitianTranspose(C, MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */
559   PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN));
560   PetscCall(MatNorm(T, NORM_1, &nrmT));
561   PetscCall(MatNorm(B, NORM_1, &nrmB));
562   PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/Hermitian, GKB is not applicable.");
563 
564   /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */
565   /* setting N := 1/nu*I in [Ar13].                                                 */
566   PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT));
567   PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_CURRENT, H)); /* H = A01*A01'          */
568   PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN));        /* H = A00 + nu*A01*A01' */
569 
570   PetscCall(MatDestroy(&BT));
571   PetscCall(MatDestroy(&T));
572   PetscFunctionReturn(PETSC_SUCCESS);
573 }
574 
575 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *option[], const char *value[], PetscBool *flg);
576 
/*
  PCSetUp_FieldSplit - Builds everything PCFIELDSPLIT needs before it can be applied:
  the index sets that define each split, the diagonal (and, when requested,
  off-diagonal) submatrices, the per-split work vectors and scatters, and the
  inner KSP solvers, including the Schur-complement or GKB machinery for those
  composite types.

  Notes:
  The first call creates all objects; later calls reuse or rebuild the
  submatrices depending on pc->flag (DIFFERENT_NONZERO_PATTERN forces a
  destroy/recreate cycle while preserving attached null spaces).
*/
static PetscErrorCode PCSetUp_FieldSplit(PC pc)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink;
  PetscInt          i, nsplit;
  PetscBool         matnest = PETSC_FALSE;

  PetscFunctionBegin;
  pc->failedreason = PC_NOERROR;
  PetscCall(PCFieldSplitSetDefaults(pc));
  nsplit = jac->nsplits;
  ilink  = jac->head;
  if (pc->pmat) PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));

  /* get the matrices for each split */
  if (!jac->issetup) {
    PetscInt rstart, rend, nslots, bs;

    /* first setup only: construct the index sets (ilink->is / ilink->is_col) for each split */
    jac->issetup = PETSC_TRUE;

    /* This is done here instead of in PCFieldSplitSetFields() because may not have matrix at that point */
    if (jac->defaultsplit || !ilink->is) {
      if (jac->bs <= 0) jac->bs = nsplit;
    }

    /*  MatCreateSubMatrix() for [S]BAIJ matrices can only work if the indices include entire blocks of the matrix */
    PetscCall(MatGetBlockSize(pc->pmat, &bs));
    if (bs > 1 && (jac->bs <= bs || jac->bs % bs)) {
      PetscBool blk;

      PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &blk, MATBAIJ, MATSBAIJ, MATSEQBAIJ, MATSEQSBAIJ, MATMPIBAIJ, MATMPISBAIJ, NULL));
      PetscCheck(!blk, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Cannot use MATBAIJ with PCFIELDSPLIT and currently set matrix and PC blocksizes");
    }

    if (!matnest) { /* use the matrix blocksize and stride IS to determine the index sets that define the submatrices */
      bs = jac->bs;
      PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend));
      nslots = (rend - rstart) / bs;
      for (i = 0; i < nsplit; i++) {
        if (jac->defaultsplit) {
          /* default split: field i is the strided set rstart+i, rstart+i+nsplit, ... */
          PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is));
          PetscCall(PetscObjectReference((PetscObject)ilink->is));
          ilink->is_col = ilink->is;
        } else if (!ilink->is) {
          /* row and column index sets may differ when fields != fields_col */
          PetscBool same_fields = PETSC_TRUE;

          for (PetscInt k = 0; k < ilink->nfields; k++) {
            if (ilink->fields[k] != ilink->fields_col[k]) same_fields = PETSC_FALSE;
          }

          if (ilink->nfields > 1) {
            PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col;

            PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii));
            if (!same_fields) PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj));
            for (j = 0; j < nslots; j++) {
              for (k = 0; k < nfields; k++) {
                ii[nfields * j + k] = rstart + bs * j + fields[k];
                if (!same_fields) jj[nfields * j + k] = rstart + bs * j + fields_col[k];
              }
            }
            /* PETSC_OWN_POINTER: the IS takes over ii/jj, no free needed here */
            PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is));
            if (!same_fields) PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col));
            else {
              PetscCall(PetscObjectReference((PetscObject)ilink->is));
              ilink->is_col = ilink->is;
            }
            PetscCall(ISSetBlockSize(ilink->is, nfields));
            PetscCall(ISSetBlockSize(ilink->is_col, nfields));
          } else {
            PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is));
            if (!same_fields) PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col));
            else {
              PetscCall(PetscObjectReference((PetscObject)ilink->is));
              ilink->is_col = ilink->is;
            }
          }
        }
        ilink = ilink->next;
      }
    } else { /* use the IS that define the MATNEST to determine the index sets that define the submatrices */
      IS      *rowis, *colis, *ises = NULL;
      PetscInt mis, nis;

      PetscCall(MatNestGetSize(pc->pmat, &mis, &nis));
      PetscCall(PetscMalloc2(mis, &rowis, nis, &colis));
      PetscCall(MatNestGetISs(pc->pmat, rowis, colis));
      if (!jac->defaultsplit) PetscCall(PetscMalloc1(mis, &ises));

      for (i = 0; i < nsplit; i++) {
        if (jac->defaultsplit) {
          PetscCall(ISDuplicate(rowis[i], &ilink->is));
          PetscCall(PetscObjectReference((PetscObject)ilink->is));
          ilink->is_col = ilink->is;
        } else if (!ilink->is) {
          if (ilink->nfields > 1) {
            /* a split made of several nest blocks: concatenate their row index sets */
            for (PetscInt j = 0; j < ilink->nfields; j++) ises[j] = rowis[ilink->fields[j]];
            PetscCall(ISConcatenate(PetscObjectComm((PetscObject)pc), ilink->nfields, ises, &ilink->is));
          } else {
            PetscCall(ISDuplicate(rowis[ilink->fields[0]], &ilink->is));
          }
          PetscCall(PetscObjectReference((PetscObject)ilink->is));
          ilink->is_col = ilink->is;
        }
        ilink = ilink->next;
      }
      PetscCall(PetscFree2(rowis, colis));
      PetscCall(PetscFree(ises));
    }
  }

  /* Build (first setup) or refresh (later setups) the diagonal preconditioner blocks jac->pmat[i] */
  ilink = jac->head;
  if (!jac->pmat) {
    Vec xtmp;

    PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL));
    PetscCall(PetscMalloc1(nsplit, &jac->pmat));
    PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y));
    for (i = 0; i < nsplit; i++) {
      MatNullSpace sp;

      /* Check for matrix attached to IS */
      PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i]));
      if (jac->pmat[i]) {
        /* a user-attached block takes precedence over extraction from pc->pmat */
        PetscCall(PetscObjectReference((PetscObject)jac->pmat[i]));
        if (jac->type == PC_COMPOSITE_SCHUR) {
          jac->schur_user = jac->pmat[i];

          PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
        }
      } else {
        const char *prefix;
        PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i]));
        PetscCall(MatGetOptionsPrefix(jac->pmat[i], &prefix));
        if (!prefix) {
          PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix));
          PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix));
        }
        PetscCall(MatSetFromOptions(jac->pmat[i]));
        PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view"));
      }
      /* create work vectors for each split */
      PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i]));
      ilink->x = jac->x[i];
      ilink->y = jac->y[i];
      ilink->z = NULL;
      /* compute scatter contexts needed by multiplicative versions and non-default splits */
      PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx));
      PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp));
      if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp));
      ilink = ilink->next;
    }
    PetscCall(VecDestroy(&xtmp));
  } else {
    MatReuse      scall;
    MatNullSpace *nullsp = NULL;

    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* stash attached null spaces so they survive the destroy/recreate cycle below */
      PetscCall(MatGetNullSpaces(nsplit, jac->pmat, &nullsp));
      for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i]));
      scall = MAT_INITIAL_MATRIX;
    } else scall = MAT_REUSE_MATRIX;

    for (i = 0; i < nsplit; i++) {
      Mat pmat;

      /* Check for matrix attached to IS */
      PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat));
      if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i]));
      ilink = ilink->next;
    }
    if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->pmat, &nullsp));
  }
  /* When requested, extract diagonal blocks of the operator pc->mat separately; otherwise alias jac->mat to jac->pmat */
  if (jac->diag_use_amat) {
    ilink = jac->head;
    if (!jac->mat) {
      PetscCall(PetscMalloc1(nsplit, &jac->mat));
      for (i = 0; i < nsplit; i++) {
        PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i]));
        ilink = ilink->next;
      }
    } else {
      MatReuse      scall;
      MatNullSpace *nullsp = NULL;

      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        PetscCall(MatGetNullSpaces(nsplit, jac->mat, &nullsp));
        for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i]));
        scall = MAT_INITIAL_MATRIX;
      } else scall = MAT_REUSE_MATRIX;

      for (i = 0; i < nsplit; i++) {
        PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i]));
        ilink = ilink->next;
      }
      if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->mat, &nullsp));
    }
  } else {
    jac->mat = jac->pmat;
  }

  /* Check for null space attached to IS */
  ilink = jac->head;
  for (i = 0; i < nsplit; i++) {
    MatNullSpace sp;

    PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp));
    if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp));
    ilink = ilink->next;
  }

  if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) {
    /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */
    /* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? */
    ilink = jac->head;
    if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) {
      /* special case need where Afield[0] is not needed and only certain columns of Afield[1] are needed since update is only on those rows of the solution */
      if (!jac->Afield) {
        PetscCall(PetscCalloc1(nsplit, &jac->Afield));
        if (jac->offdiag_use_amat) {
          PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
        } else {
          PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
        }
      } else {
        MatReuse scall;

        if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
          PetscCall(MatDestroy(&jac->Afield[1]));
          scall = MAT_INITIAL_MATRIX;
        } else scall = MAT_REUSE_MATRIX;

        if (jac->offdiag_use_amat) {
          PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
        } else {
          PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
        }
      }
    } else {
      if (!jac->Afield) {
        PetscCall(PetscMalloc1(nsplit, &jac->Afield));
        for (i = 0; i < nsplit; i++) {
          if (jac->offdiag_use_amat) {
            PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
          } else {
            PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
          }
          ilink = ilink->next;
        }
      } else {
        MatReuse scall;
        if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
          for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i]));
          scall = MAT_INITIAL_MATRIX;
        } else scall = MAT_REUSE_MATRIX;

        for (i = 0; i < nsplit; i++) {
          if (jac->offdiag_use_amat) {
            PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i]));
          } else {
            PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, &jac->Afield[i]));
          }
          ilink = ilink->next;
        }
      }
    }
  }

  /* Schur complement: needs the off-diagonal blocks B = A01, C = A10 and a MATSCHURCOMPLEMENT built on the (0,0) block */
  if (jac->type == PC_COMPOSITE_SCHUR) {
    PetscBool   isset, isspd = PETSC_FALSE, issym = PETSC_FALSE, flg;
    char        lscname[256];
    PetscObject LSC_L;

    PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields");

    /* If pc->mat is SPD, don't scale by -1 the Schur complement */
    PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd));
    if (jac->schurscale == (PetscScalar)-1.0) jac->schurscale = (isset && isspd) ? 1.0 : -1.0;
    PetscCall(MatIsSymmetricKnown(pc->pmat, &isset, &issym));

    /* flg: symmetric (SBAIJ) storage, in which case C is formed as a (Hermitian) transpose of B below */
    PetscCall(PetscObjectTypeCompareAny(jac->offdiag_use_amat ? (PetscObject)pc->mat : (PetscObject)pc->pmat, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));

    if (jac->schur) {
      /* reuse path: the Schur complement object already exists from a previous setup */
      KSP      kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper;
      MatReuse scall;

      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        scall = MAT_INITIAL_MATRIX;
        PetscCall(MatDestroy(&jac->B));
        PetscCall(MatDestroy(&jac->C));
      } else scall = MAT_REUSE_MATRIX;

      PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
      ilink = jac->head;
      PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->is, ilink->next->is, scall, &jac->B));
      if (!flg) PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->next->is, ilink->is, scall, &jac->C));
      else {
        PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &isset, &flg));
        if (isset && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
        else PetscCall(MatCreateTranspose(jac->B, &jac->C));
      }
      ilink = ilink->next;
      PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
      if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) {
        PetscCall(MatDestroy(&jac->schurp));
        PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
      } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL && jac->kspupper != jac->head->ksp) {
        PetscCall(MatDestroy(&jac->schur_user));
        PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
      }
      if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0]));
      if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0]));
      PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
    } else {
      /* first setup: create the Schur complement machinery from scratch */
      const char  *Dprefix;
      char         schurprefix[256], schurmatprefix[256];
      char         schurtestoption[256];
      MatNullSpace sp;
      KSP          kspt;

      /* extract the A01 and A10 matrices */
      ilink = jac->head;
      PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->is, ilink->next->is, MAT_INITIAL_MATRIX, &jac->B));
      if (!flg) PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->C));
      else {
        PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &isset, &flg));
        if (isset && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
        else PetscCall(MatCreateTranspose(jac->B, &jac->C));
      }
      ilink = ilink->next;
      /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */
      PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur));
      PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT));
      PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
      PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
      PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix));
      PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt));
      PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix));

      /* Note: this is not true in general */
      PetscCall(MatGetNullSpace(jac->mat[1], &sp));
      if (sp) PetscCall(MatSetNullSpace(jac->schur, sp));

      /* a dedicated inner solver is created only if the user gave any -...inner_ option */
      PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname));
      PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
      if (flg) {
        DM  dmInner;
        KSP kspInner;
        PC  pcInner;

        PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
        PetscCall(KSPReset(kspInner));
        PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0]));
        PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
        /* Indent this deeper to emphasize the "inner" nature of this solver. */
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2));
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2));
        PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix));

        /* Set DM for new solver */
        PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
        PetscCall(KSPSetDM(kspInner, dmInner));
        PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE));

        /* Defaults to PCKSP as preconditioner */
        PetscCall(KSPGetPC(kspInner, &pcInner));
        PetscCall(PCSetType(pcInner, PCKSP));
        PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp));
      } else {
        /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or
          * PCFieldSplitSetIS_FieldSplit. We don't want KSPPREONLY because it makes the Schur complement inexact,
          * preventing Schur complement reduction to be an accurate solve. Usually when an iterative solver is used for
          * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make
          * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used
          * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. */
        PetscCall(KSPSetType(jac->head->ksp, KSPGMRES));
        PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp));
      }
      PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0]));
      PetscCall(KSPSetFromOptions(jac->head->ksp));
      PetscCall(MatSetFromOptions(jac->schur));

      PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg));
      if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */
        KSP kspInner;
        PC  pcInner;

        PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
        PetscCall(KSPGetPC(kspInner, &pcInner));
        PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg));
        if (flg) {
          KSP ksp;

          PetscCall(PCKSPGetKSP(pcInner, &ksp));
          if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE));
        }
      }
      /* similarly, a separate "upper" solver exists only if the user gave any -...upper_ option */
      PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname));
      PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
      if (flg) {
        DM dmInner;

        PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
        PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper));
        PetscCall(KSPSetNestLevel(jac->kspupper, pc->kspnestlevel));
        PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure));
        PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix));
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1));
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1));
        PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
        PetscCall(KSPSetDM(jac->kspupper, dmInner));
        PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE));
        PetscCall(KSPSetFromOptions(jac->kspupper));
        PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0]));
        /* extra work vector allocated only when the upper solver is distinct from the head solver */
        PetscCall(VecDuplicate(jac->head->x, &jac->head->z));
      } else {
        jac->kspupper = jac->head->ksp;
        PetscCall(PetscObjectReference((PetscObject)jac->head->ksp));
      }

      if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
      PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur));
      PetscCall(KSPSetNestLevel(jac->kspschur, pc->kspnestlevel));
      PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1));
      if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) {
        PC pcschur;
        PetscCall(KSPGetPC(jac->kspschur, &pcschur));
        PetscCall(PCSetType(pcschur, PCNONE));
        /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */
      } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) {
        if (jac->schurfactorization != PC_FIELDSPLIT_SCHUR_FACT_FULL || jac->kspupper != jac->head->ksp) PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
      }
      PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
      /* the Schur solver inherits the options prefix of the second split's KSP */
      PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix));
      PetscCall(KSPSetOptionsPrefix(jac->kspschur, Dprefix));
      /* propagate DM */
      {
        DM sdm;
        PetscCall(KSPGetDM(jac->head->next->ksp, &sdm));
        if (sdm) {
          PetscCall(KSPSetDM(jac->kspschur, sdm));
          PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE));
        }
      }
      /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
      /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */
      PetscCall(KSPSetFromOptions(jac->kspschur));
    }
    PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY));
    if (issym) PetscCall(MatSetOption(jac->schur, MAT_SYMMETRIC, PETSC_TRUE));
    if (isspd) PetscCall(MatSetOption(jac->schur, MAT_SPD, PETSC_TRUE));

    /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */
    PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname));
    PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, &LSC_L));
    if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, &LSC_L));
    if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", LSC_L));
    PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname));
    PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, &LSC_L));
    if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, &LSC_L));
    if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", LSC_L));
  } else if (jac->type == PC_COMPOSITE_GKB) {
    /* GKB: assemble H = A00 + nu*A01*A01' explicitly and make the first split's KSP solve with H */
    PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields");
    ilink = jac->head;
    PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->is, ilink->next->is, MAT_INITIAL_MATRIX, &jac->B));
    /* Create work vectors for GKB algorithm */
    PetscCall(VecDuplicate(ilink->x, &jac->u));
    PetscCall(VecDuplicate(ilink->x, &jac->Hu));
    PetscCall(VecDuplicate(ilink->x, &jac->w2));
    PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->C));
    ilink = ilink->next;
    /* Create work vectors for GKB algorithm */
    PetscCall(VecDuplicate(ilink->x, &jac->v));
    PetscCall(VecDuplicate(ilink->x, &jac->d));
    PetscCall(VecDuplicate(ilink->x, &jac->w1));
    PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu));
    PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz));

    ilink = jac->head;
    PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H));
    if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
    /* Create gkb_monitor context */
    if (jac->gkbmonitor) {
      PetscInt tablevel;
      PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer));
      PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII));
      PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel));
      PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1));
    }
  } else {
    /* set up the individual splits' PCs */
    i     = 0;
    ilink = jac->head;
    while (ilink) {
      PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i]));
      /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
      if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
      i++;
      ilink = ilink->next;
    }
  }

  /* Set coordinates to the sub PC objects whenever these are set */
  if (jac->coordinates_set) {
    PC pc_coords;
    if (jac->type == PC_COMPOSITE_SCHUR) {
      // Head is first block.
      PetscCall(KSPGetPC(jac->head->ksp, &pc_coords));
      PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords));
      // Second one is Schur block, but its KSP object is in kspschur.
      PetscCall(KSPGetPC(jac->kspschur, &pc_coords));
      PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords));
    } else if (jac->type == PC_COMPOSITE_GKB) {
      PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB Fieldpslit preconditioner\n"));
    } else {
      ilink = jac->head;
      while (ilink) {
        PetscCall(KSPGetPC(ilink->ksp, &pc_coords));
        PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords));
        ilink = ilink->next;
      }
    }
  }

  jac->suboptionsset = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1107 
/*
  PCSetUpOnBlocks_FieldSplit_Schur - Triggers setup of the inner solvers used by the
  Schur-complement variant: the (optional distinct) upper solver, the A00 solver, and
  the Schur solver. For the SCHUR_PRE_FULL + FACT_FULL case with a shared upper/A00
  solver, it also pre-allocates a dense workspace matrix ("AinvB") composed on
  jac->schur, sized m x (N+1) where N is the column count of jac->B.
*/
static PetscErrorCode PCSetUpOnBlocks_FieldSplit_Schur(PC pc)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head;
  KSP               kspA = ilinkA->ksp, kspUpper = jac->kspupper;

  PetscFunctionBegin;
  if (jac->schurfactorization == PC_FIELDSPLIT_SCHUR_FACT_FULL && kspUpper != kspA) {
    PetscCall(KSPSetUp(kspUpper));
    PetscCall(KSPSetUpOnBlocks(kspUpper));
  }
  PetscCall(KSPSetUp(kspA));
  PetscCall(KSPSetUpOnBlocks(kspA));
  if (jac->schurpre != PC_FIELDSPLIT_SCHUR_PRE_FULL) {
    PetscCall(KSPSetUp(jac->kspschur));
    PetscCall(KSPSetUpOnBlocks(jac->kspschur));
  } else if (kspUpper == kspA && jac->schurfactorization == PC_FIELDSPLIT_SCHUR_FACT_FULL) {
    Mat          A;
    PetscInt     m, M, N;
    VecType      vtype;
    PetscMemType mtype;
    PetscScalar *array;

    PetscCall(MatGetSize(jac->B, &M, &N));
    PetscCall(MatGetLocalSize(jac->B, &m, NULL));
    PetscCall(MatGetVecType(jac->B, &vtype));
    /* get/restore pair only probes where the vector data lives (host/CUDA/HIP); the array is not used */
    PetscCall(VecGetArrayAndMemType(ilinkA->x, &array, &mtype));
    PetscCall(VecRestoreArrayAndMemType(ilinkA->x, &array));
    PetscCall(PetscObjectQuery((PetscObject)jac->schur, "AinvB", (PetscObject *)&A));
    if (!A) {
      /* allocate the backing buffer in the same memory space as the split's work vector */
      if (PetscMemTypeHost(mtype) || (!PetscDefined(HAVE_CUDA) && !PetscDefined(HAVE_HIP))) PetscCall(PetscMalloc1(m * (N + 1), &array));
#if PetscDefined(HAVE_CUDA)
      else if (PetscMemTypeCUDA(mtype)) PetscCallCUDA(cudaMalloc((void **)&array, sizeof(PetscScalar) * m * (N + 1)));
#endif
#if PetscDefined(HAVE_HIP)
      else if (PetscMemTypeHIP(mtype)) PetscCallHIP(hipMalloc((void **)&array, sizeof(PetscScalar) * m * (N + 1)));
#endif
      PetscCall(MatCreateDenseFromVecType(PetscObjectComm((PetscObject)jac->schur), vtype, m, PETSC_DECIDE, M, N + 1, -1, array, &A)); // number of columns of the Schur complement plus one
      /* NOTE(review): `array` is user-provided storage for A; presumably it is freed when the composed
         "AinvB" matrix is destroyed elsewhere — not visible here, confirm against the destroy path */
      PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", (PetscObject)A));
      PetscCall(MatDestroy(&A));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1152 
1153 static PetscErrorCode PCSetUpOnBlocks_FieldSplit(PC pc)
1154 {
1155   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1156   PC_FieldSplitLink ilink = jac->head;
1157 
1158   PetscFunctionBegin;
1159   while (ilink) {
1160     PetscCall(KSPSetUp(ilink->ksp));
1161     PetscCall(KSPSetUpOnBlocks(ilink->ksp));
1162     ilink = ilink->next;
1163   }
1164   PetscFunctionReturn(PETSC_SUCCESS);
1165 }
1166 
1167 static PetscErrorCode PCSetUpOnBlocks_FieldSplit_GKB(PC pc)
1168 {
1169   PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
1170   PC_FieldSplitLink ilinkA = jac->head;
1171   KSP               ksp    = ilinkA->ksp;
1172 
1173   PetscFunctionBegin;
1174   PetscCall(KSPSetUp(ksp));
1175   PetscCall(KSPSetUpOnBlocks(ksp));
1176   PetscFunctionReturn(PETSC_SUCCESS);
1177 }
1178 
1179 static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y)
1180 {
1181   PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
1182   PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
1183   KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;
1184   Mat               AinvB = NULL;
1185   PetscInt          N, P;
1186 
1187   PetscFunctionBegin;
1188   switch (jac->schurfactorization) {
1189   case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
1190     /* [A00 0; 0 -S], positive definite, suitable for MINRES */
1191     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1192     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1193     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1194     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1195     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1196     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1197     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1198     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1199     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1200     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1201     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1202     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1203     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1204     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1205     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1206     PetscCall(VecScale(ilinkD->y, jac->schurscale));
1207     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1208     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1209     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1210     break;
1211   case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
1212     /* [A00 0; A10 S], suitable for left preconditioning */
1213     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1214     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1215     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1216     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1217     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1218     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1219     PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1220     PetscCall(VecScale(ilinkD->x, -1.));
1221     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1222     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1223     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1224     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1225     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1226     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1227     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1228     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1229     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1230     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1231     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1232     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1233     break;
1234   case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
1235     /* [A00 A01; 0 S], suitable for right preconditioning */
1236     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1237     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1238     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1239     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1240     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1241     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1242     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1243     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1244     PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1245     PetscCall(VecScale(ilinkA->x, -1.));
1246     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1247     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1248     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1249     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1250     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1251     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1252     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1253     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1254     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1255     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1256     break;
1257   case PC_FIELDSPLIT_SCHUR_FACT_FULL:
1258     /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */
1259     PetscCall(MatGetSize(jac->B, NULL, &P));
1260     N = P;
1261     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1262     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1263     PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1264     if (kspUpper == kspA) {
1265       PetscCall(PetscObjectQuery((PetscObject)jac->schur, "AinvB", (PetscObject *)&AinvB));
1266       if (AinvB) {
1267         PetscCall(MatGetSize(AinvB, NULL, &N));
1268         if (N > P) { // first time PCApply_FieldSplit_Schur() is called
1269           PetscMemType mtype;
1270           Vec          c = NULL;
1271           PetscScalar *array;
1272           PetscInt     m, M;
1273 
1274           PetscCall(MatGetSize(jac->B, &M, NULL));
1275           PetscCall(MatGetLocalSize(jac->B, &m, NULL));
1276           PetscCall(MatDenseGetArrayAndMemType(AinvB, &array, &mtype));
1277           if (PetscMemTypeHost(mtype) || (!PetscDefined(HAVE_CUDA) && !PetscDefined(HAVE_HIP))) PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c));
1278 #if PetscDefined(HAVE_CUDA)
1279           else if (PetscMemTypeCUDA(mtype)) PetscCall(VecCreateMPICUDAWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c));
1280 #endif
1281 #if PetscDefined(HAVE_HIP)
1282           else if (PetscMemTypeHIP(mtype)) PetscCall(VecCreateMPIHIPWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c));
1283 #endif
1284           PetscCall(MatDenseRestoreArrayAndMemType(AinvB, &array));
1285           PetscCall(VecCopy(ilinkA->x, c));
1286           PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
1287           PetscCall(KSPSetOperators(jac->kspschur, jac->schur, jac->schur_user));
1288           PetscCall(VecCopy(c, ilinkA->y)); // retrieve the solution as the last column of the composed Mat
1289           PetscCall(VecDestroy(&c));
1290         }
1291       }
1292     }
1293     if (N == P) PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y));
1294     PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y));
1295     PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1296     PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1297     PetscCall(VecScale(ilinkD->x, -1.0));
1298     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1299     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1300 
1301     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1302     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1303     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1304     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1305     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1306     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1307     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1308 
1309     if (kspUpper == kspA) {
1310       if (!AinvB) {
1311         PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y));
1312         PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
1313         PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1314         PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1315         PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1316         PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1317       } else PetscCall(MatMultAdd(AinvB, ilinkD->y, ilinkA->y, ilinkA->y));
1318     } else {
1319       PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1320       PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1321       PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1322       PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1323       PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1324       PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z));
1325       PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z));
1326       PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1327       PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
1328     }
1329     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1330     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1331     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1332   }
1333   PetscFunctionReturn(PETSC_SUCCESS);
1334 }
1335 
1336 /*
1337   PCFieldSplitCreateWorkMats_Private - Allocate per-field dense work matrices for multi-RHS
1338 
1339   Input Parameters:
1340 + pc - the PC context
1341 - X  - matrix to copy column-layout from
1342 
1343   Notes:
1344   If matrices already exist with correct column count, they are reused.
1345   If column count changed, old matrices are destroyed and new ones created.
1346 */
1347 static PetscErrorCode PCFieldSplitCreateWorkMats_Private(PC pc, Mat X)
1348 {
1349   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1350   PC_FieldSplitLink ilink = jac->head;
1351   PetscInt          mx, Mx, my, My, N;
1352 
1353   PetscFunctionBegin;
1354   while (ilink) {
1355     /* check if reallocation needed (previous allocation with wrong column count) */
1356     if (ilink->X) {
1357       PetscCall(MatGetSize(ilink->X, NULL, &N));
1358       if (N != X->cmap->N) {
1359         PetscCall(MatDestroy(&ilink->X));
1360         PetscCall(MatDestroy(&ilink->Y));
1361         PetscCall(MatDestroy(&ilink->Z));
1362       }
1363     }
1364     /* create if needed */
1365     if (!ilink->X) {
1366       VecType xtype, ytype;
1367 
1368       PetscCall(VecGetType(ilink->x, &xtype));
1369       PetscCall(VecGetType(ilink->y, &ytype));
1370       PetscCall(VecGetLocalSize(ilink->x, &mx));
1371       PetscCall(VecGetSize(ilink->x, &Mx));
1372       PetscCall(VecGetLocalSize(ilink->y, &my));
1373       PetscCall(VecGetSize(ilink->y, &My));
1374       /* use default lda */
1375       PetscCall(MatCreateDenseFromVecType(PetscObjectComm((PetscObject)pc), xtype, mx, X->cmap->n, Mx, X->cmap->N, -1, NULL, &ilink->X));
1376       PetscCall(MatCreateDenseFromVecType(PetscObjectComm((PetscObject)pc), ytype, my, X->cmap->n, My, X->cmap->N, -1, NULL, &ilink->Y));
1377     }
1378     ilink = ilink->next;
1379   }
1380   PetscFunctionReturn(PETSC_SUCCESS);
1381 }
1382 
/*
  PCMatApply_FieldSplit_Schur - Multiple-right-hand-side analogue of PCApply_FieldSplit_Schur()

  Each column of X is an input vector and the corresponding column of Y receives the
  preconditioned result. The single-RHS code path's VecScatter calls are replaced by
  MatDenseScatter_Private() and KSPSolve() by KSPMatSolve().

  NOTE(review): unlike the Vec path there are no KSPCheckSolve() calls here, since that
  routine checks a single solution Vec — confirm this is intentional.
*/
static PetscErrorCode PCMatApply_FieldSplit_Schur(PC pc, Mat X, Mat Y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; /* first split (A00 block) and second split (A11 block) */
  KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;
  Mat               AinvB = NULL;
  PetscInt          N, P;

  PetscFunctionBegin;
  /* create working matrices with the correct number of columns */
  PetscCall(PCFieldSplitCreateWorkMats_Private(pc, X));
  switch (jac->schurfactorization) {
  case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
    /* [A00 0; 0 -S], positive definite, suitable for MINRES */
    PetscCall(MatDenseScatter_Private(ilinkA->sctx, X, ilinkA->X, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(MatDenseScatter_Private(ilinkD->sctx, X, ilinkD->X, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->X, ilinkA->Y, NULL));
    PetscCall(KSPMatSolve(kspA, ilinkA->X, ilinkA->Y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->X, ilinkA->Y, NULL));
    PetscCall(MatDenseScatter_Private(ilinkA->sctx, ilinkA->Y, Y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->X, ilinkD->Y, NULL));
    /* tab-level bump so the Schur solve's monitor output is indented under the outer solve */
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPMatSolve(jac->kspschur, ilinkD->X, ilinkD->Y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->X, ilinkD->Y, NULL));
    /* user-selected scaling of the Schur block solution (cf. jac->schurscale in the Vec path) */
    PetscCall(MatScale(ilinkD->Y, jac->schurscale));
    PetscCall(MatDenseScatter_Private(ilinkD->sctx, ilinkD->Y, Y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* [A00 0; A10 S], suitable for left preconditioning */
    PetscCall(MatDenseScatter_Private(ilinkA->sctx, X, ilinkA->X, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->X, ilinkA->Y, NULL));
    PetscCall(KSPMatSolve(kspA, ilinkA->X, ilinkA->Y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->X, ilinkA->Y, NULL));
    /* second-split residual: ilinkD->X = X_D - A10 * A00^{-1} X_A */
    PetscCall(MatMatMult(jac->C, ilinkA->Y, MAT_REUSE_MATRIX, PETSC_DETERMINE, &ilinkD->X));
    PetscCall(MatScale(ilinkD->X, -1.0));
    PetscCall(MatDenseScatter_Private(ilinkD->sctx, X, ilinkD->X, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(MatDenseScatter_Private(ilinkA->sctx, ilinkA->Y, Y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->X, ilinkD->Y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPMatSolve(jac->kspschur, ilinkD->X, ilinkD->Y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->X, ilinkD->Y, NULL));
    PetscCall(MatDenseScatter_Private(ilinkD->sctx, ilinkD->Y, Y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* [A00 A01; 0 S], suitable for right preconditioning */
    PetscCall(MatDenseScatter_Private(ilinkD->sctx, X, ilinkD->X, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->X, ilinkD->Y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPMatSolve(jac->kspschur, ilinkD->X, ilinkD->Y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->X, ilinkD->Y, NULL));
    /* first-split right-hand side: ilinkA->X = X_A - A01 * S^{-1} X_D */
    PetscCall(MatMatMult(jac->B, ilinkD->Y, MAT_REUSE_MATRIX, PETSC_DETERMINE, &ilinkA->X));
    PetscCall(MatScale(ilinkA->X, -1.0));
    PetscCall(MatDenseScatter_Private(ilinkA->sctx, X, ilinkA->X, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(MatDenseScatter_Private(ilinkD->sctx, ilinkD->Y, Y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->X, ilinkA->Y, NULL));
    PetscCall(KSPMatSolve(kspA, ilinkA->X, ilinkA->Y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->X, ilinkA->Y, NULL));
    PetscCall(MatDenseScatter_Private(ilinkA->sctx, ilinkA->Y, Y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */
    PetscCall(MatGetSize(jac->B, NULL, &P)); /* P = global number of columns of A01 */
    N = P;                                   /* N != P below signals the composed AinvB fast path */
    PetscCall(MatDenseScatter_Private(ilinkA->sctx, X, ilinkA->X, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->X, ilinkA->Y, NULL));
    if (kspUpper == kspA) {
      /* AinvB (composed with the Schur complement elsewhere) stores A00^{-1} A01 with extra
         trailing columns used to piggy-back the right-hand-side solves — NOTE(review): composed
         by the setup code outside this chunk, confirm layout against that code */
      PetscCall(PetscObjectQuery((PetscObject)jac->schur, "AinvB", (PetscObject *)&AinvB));
      if (AinvB) {
        PetscCall(MatGetSize(AinvB, NULL, &N));
        if (N > P) { // first time PCApply_FieldSplit_Schur() is called
          PetscMemType mtype;
          Mat          C = NULL;
          PetscScalar *array;
          PetscInt     m, M, q, Q, p;

          PetscCall(MatGetSize(jac->B, &M, NULL));
          PetscCall(MatGetLocalSize(jac->B, &m, NULL));
          PetscCall(MatGetSize(X, NULL, &Q));      /* Q = number of right-hand sides */
          PetscCall(MatGetLocalSize(X, NULL, &q));
          PetscCall(MatDenseGetArrayAndMemType(AinvB, &array, &mtype));
          if (N != P + Q) {
            /* AinvB does not have room for all Q right-hand sides: reallocate its backing
               array (host, CUDA, or HIP, matching its memtype) to P + Q columns and swap in
               a replacement Mat header that owns the new array */
            Mat replace;

            PetscCall(MatGetLocalSize(jac->B, NULL, &p));
            if (PetscMemTypeHost(mtype) || (!PetscDefined(HAVE_CUDA) && !PetscDefined(HAVE_HIP))) {
              PetscCall(PetscFree(array));
              PetscCall(PetscMalloc1(m * (P + Q), &array));
              PetscCall(MatCreateDense(PetscObjectComm((PetscObject)jac->schur), m, PETSC_DECIDE, M, P + Q, array, &replace));
            }
#if PetscDefined(HAVE_CUDA)
            else if (PetscMemTypeCUDA(mtype)) {
              PetscCallCUDA(cudaFree(array));
              PetscCallCUDA(cudaMalloc((void **)&array, sizeof(PetscScalar) * m * (P + Q)));
              PetscCall(MatCreateDenseCUDA(PetscObjectComm((PetscObject)jac->schur), m, PETSC_DECIDE, M, P + Q, array, &replace));
            }
#endif
#if PetscDefined(HAVE_HIP)
            else if (PetscMemTypeHIP(mtype)) {
              PetscCallHIP(hipFree(array));
              PetscCallHIP(hipMalloc((void **)&array, sizeof(PetscScalar) * m * (P + Q)));
              PetscCall(MatCreateDenseHIP(PetscObjectComm((PetscObject)jac->schur), m, PETSC_DECIDE, M, P + Q, array, &replace));
            }
#endif
            PetscCall(MatHeaderReplace(AinvB, &replace));
          }
          /* C is a column view of the last Q columns of AinvB (offset m * P into the array) */
          if (PetscMemTypeHost(mtype) || (!PetscDefined(HAVE_CUDA) && !PetscDefined(HAVE_HIP))) PetscCall(MatCreateDense(PetscObjectComm((PetscObject)jac->schur), m, q, M, Q, array + m * P, &C));
#if PetscDefined(HAVE_CUDA)
          else if (PetscMemTypeCUDA(mtype)) PetscCall(MatCreateDenseCUDA(PetscObjectComm((PetscObject)jac->schur), m, q, M, Q, array + m * P, &C));
#endif
#if PetscDefined(HAVE_HIP)
          else if (PetscMemTypeHIP(mtype)) PetscCall(MatCreateDenseHIP(PetscObjectComm((PetscObject)jac->schur), m, q, M, Q, array + m * P, &C));
#endif
          PetscCall(MatDenseRestoreArrayAndMemType(AinvB, &array));
          /* stash the right-hand sides in the trailing columns so the explicit-operator
             computation below solves for them together with A00^{-1} A01 */
          PetscCall(MatCopy(ilinkA->X, C, SAME_NONZERO_PATTERN));
          PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
          PetscCall(KSPSetOperators(jac->kspschur, jac->schur, jac->schur_user));
          PetscCall(MatCopy(C, ilinkA->Y, SAME_NONZERO_PATTERN)); // retrieve solutions as last columns of the composed Mat
          PetscCall(MatDestroy(&C));
        }
      }
    }
    /* only solve explicitly when the AinvB shortcut did not already fill ilinkA->Y */
    if (N == P) PetscCall(KSPMatSolve(kspLower, ilinkA->X, ilinkA->Y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->X, ilinkA->Y, NULL));
    PetscCall(MatMatMult(jac->C, ilinkA->Y, MAT_REUSE_MATRIX, PETSC_DETERMINE, &ilinkD->X));
    PetscCall(MatScale(ilinkD->X, -1.0));
    PetscCall(MatDenseScatter_Private(ilinkD->sctx, X, ilinkD->X, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->X, ilinkD->Y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPMatSolve(jac->kspschur, ilinkD->X, ilinkD->Y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->X, ilinkD->Y, NULL));
    PetscCall(MatDenseScatter_Private(ilinkD->sctx, ilinkD->Y, Y, INSERT_VALUES, SCATTER_REVERSE));

    if (kspUpper == kspA) {
      if (!AinvB) {
        /* apply the upper factor with an explicit A00 solve */
        PetscCall(MatMatMult(jac->B, ilinkD->Y, MAT_REUSE_MATRIX, PETSC_DETERMINE, &ilinkA->Y));
        PetscCall(MatAXPY(ilinkA->X, -1.0, ilinkA->Y, SAME_NONZERO_PATTERN));
        PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->X, ilinkA->Y, NULL));
        PetscCall(KSPMatSolve(kspA, ilinkA->X, ilinkA->Y));
        PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->X, ilinkA->Y, NULL));
      } else {
        /* reuse the precomputed A00^{-1} A01 instead of a second A00 solve; note the Vec
           path uses MatMultAdd() with a minus sign folded into AinvB — NOTE(review):
           confirm the sign convention of the composed AinvB matches the +1.0 here */
        PetscCall(MatMatMult(AinvB, ilinkD->Y, MAT_REUSE_MATRIX, PETSC_DETERMINE, &ilinkA->X));
        PetscCall(MatAXPY(ilinkA->Y, 1.0, ilinkA->X, SAME_NONZERO_PATTERN));
      }
    } else {
      /* distinct upper KSP: solve both A00 systems and combine */
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->X, ilinkA->Y, NULL));
      PetscCall(KSPMatSolve(kspA, ilinkA->X, ilinkA->Y));
      PetscCall(MatMatMult(jac->B, ilinkD->Y, MAT_REUSE_MATRIX, PETSC_DETERMINE, &ilinkA->X));
      /* Z is allocated lazily; PCFieldSplitCreateWorkMats_Private() only creates X and Y */
      if (!ilinkA->Z) PetscCall(MatDuplicate(ilinkA->X, MAT_DO_NOT_COPY_VALUES, &ilinkA->Z));
      PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->X, ilinkA->Z, NULL));
      PetscCall(KSPMatSolve(kspUpper, ilinkA->X, ilinkA->Z));
      PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->X, ilinkA->Z, NULL));
      PetscCall(MatAXPY(ilinkA->Y, -1.0, ilinkA->Z, SAME_NONZERO_PATTERN));
    }
    PetscCall(MatDenseScatter_Private(ilinkA->sctx, ilinkA->Y, Y, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1545 
/*
  PCApplyTranspose_FieldSplit_Schur - Apply the transpose of the Schur-factorization preconditioner

  Transposition reverses the order of the triangular factors, so the UPPER and LOWER cases
  here mirror each other's forward counterparts: e.g. under UPPER the A00 solve comes first,
  and MatMultTranspose(jac->B, ...) appears where the forward path uses MatMult(jac->C, ...).

  The VecScatterBegin/VecScatterEnd pairs are deliberately split around independent work so
  that scatter communication overlaps computation — do not reorder them.
*/
static PetscErrorCode PCApplyTranspose_FieldSplit_Schur(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; /* first split (A00 block) and second split (A11 block) */
  KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;

  PetscFunctionBegin;
  switch (jac->schurfactorization) {
  case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
    /* [A00 0; 0 -S], positive definite, suitable for MINRES */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* tab-level bump so the Schur solve's monitor output is indented under the outer solve */
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScale(ilinkD->y, jac->schurscale));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* transpose of [A00 A01; 0 S] is lower triangular: solve with A00^T first, then S^T */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    /* residual for the second split uses A01^T (jac->B transposed) */
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* transpose of [A00 0; A10 S] is upper triangular: solve with S^T first, then A00^T */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* residual for the first split uses A10^T (jac->C transposed) */
    PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
    PetscCall(VecScale(ilinkA->x, -1.));
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* transpose of the full factorization: upper factor's transpose first */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspUpper, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.0));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

    if (kspLower == kspA) {
      /* lower and A00 solves share a KSP: fold the correction into one transpose solve */
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->y));
      PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    } else {
      /* distinct lower KSP: two transpose solves, then combine into ilinkA->y */
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
      PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(KSPSolveTranspose(kspLower, ilinkA->x, ilinkA->z));
      PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->z));
      PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
    }
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1664 
/* Solve one split's subsystem and accumulate into the global result:
   gather xx restricted to the split into ilink->x, solve with the split's KSP
   (wrapped in its log event and KSPCheckSolve), then scatter-ADD ilink->y back
   into yy. The calls are chained with || so the first nonzero PetscErrorCode
   short-circuits the rest; `pc` must be in scope at the expansion site. */
#define FieldSplitSplitSolveAdd(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \
                    KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE)))
1669 
/*
  PCApply_FieldSplit - Applies the fieldsplit preconditioner y = M^{-1} x for the
  additive (block Jacobi) and (symmetric) multiplicative (block Gauss-Seidel)
  compositions. The Schur and GKB compositions use their own apply routines.

  Input Parameters:
+ pc - the fieldsplit preconditioner context
- x  - input vector

  Output Parameter:
. y  - output (preconditioned) vector
*/
static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head; /* walk the linked list of splits */
  PetscInt          cnt, bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    PetscBool matnest;

    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
    if (jac->defaultsplit && !matnest) {
      /* Default (strided) splits: gather all fields in one stride operation
         instead of one VecScatter per split. Requires matching block sizes. */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      /* solve each split independently; jac->x/jac->y appear to alias the
         per-split ilink->x/ilink->y work vectors -- confirm at setup code */
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* General splits: scatter/solve/add back for each split via the macro */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) {
    /* Special-cased 2-split multiplicative: the residual update needs only the
       off-diagonal block Afield[1], avoiding full-size work vectors. */
    PetscCall(VecSet(y, 0.0));
    /* solve on first block for first block variables */
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));

    /* compute the residual only onto second block variables using first block variables */
    PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x));
    ilink = ilink->next;
    PetscCall(VecScale(ilink->x, -1.0));
    /* add the original right-hand side restricted to the second block */
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));

    /* solve on second block variables */
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
    /* General multiplicative: forward sweep over all splits, updating the
       residual with Afield[cnt] before each split solve. */
    if (!jac->w1) {
      /* lazily create full-size work vectors on first application */
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
    cnt = 1;
    while (ilink->next) {
      ilink = ilink->next;
      /* compute the residual only over the part of the vector needed */
      PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x));
      PetscCall(VecScale(ilink->x, -1.0));
      PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
      PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
      PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    }
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* backward sweep: revisit splits in reverse order, skipping the last
         (already solved), hence cnt -= 2 */
      cnt -= 2;
      while (ilink->previous) {
        ilink = ilink->previous;
        /* compute the residual only over the part of the vector needed */
        PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x));
        PetscCall(VecScale(ilink->x, -1.0));
        PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      }
    }
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1770 
/*
  PCMatApply_FieldSplit - Applies the fieldsplit preconditioner to multiple
  right-hand sides stored as columns of the dense matrix X, producing Y.
  Mirrors PCApply_FieldSplit() with Vec operations replaced by their Mat
  counterparts (KSPMatSolve(), MatMatMult(), MatDenseScatter_Private()).

  Input Parameters:
+ pc - the fieldsplit preconditioner context
- X  - dense matrix of right-hand sides (one per column)

  Output Parameter:
. Y  - dense matrix of preconditioned results
*/
static PetscErrorCode PCMatApply_FieldSplit(PC pc, Mat X, Mat Y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head; /* walk the linked list of splits */
  PetscInt          cnt;

  PetscFunctionBegin;
  /* create working matrices with the correct number of columns */
  PetscCall(PCFieldSplitCreateWorkMats_Private(pc, X));
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    /* each split solved independently, results summed into Y */
    PetscCall(MatZeroEntries(Y));
    while (ilink) {
      PetscCall(MatDenseScatter_Private(ilink->sctx, X, ilink->X, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
      PetscCall(KSPMatSolve(ilink->ksp, ilink->X, ilink->Y));
      PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
      PetscCall(MatDenseScatter_Private(ilink->sctx, ilink->Y, Y, ADD_VALUES, SCATTER_REVERSE));
      ilink = ilink->next;
    }
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) {
    /* special-cased 2-split multiplicative, see PCApply_FieldSplit() */
    PetscCall(MatZeroEntries(Y));
    PetscCall(MatDenseScatter_Private(ilink->sctx, X, ilink->X, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
    PetscCall(KSPMatSolve(ilink->ksp, ilink->X, ilink->Y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
    PetscCall(MatDenseScatter_Private(ilink->sctx, ilink->Y, Y, ADD_VALUES, SCATTER_REVERSE));

    /* compute the residual only onto second block variables using first block variables */
    PetscCall(MatMatMult(jac->Afield[1], ilink->Y, MAT_REUSE_MATRIX, PETSC_DETERMINE, &ilink->next->X));
    ilink = ilink->next;
    PetscCall(MatScale(ilink->X, -1.0));
    PetscCall(MatDenseScatter_Private(ilink->sctx, X, ilink->X, ADD_VALUES, SCATTER_FORWARD));

    /* solve on second block variables */
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
    PetscCall(KSPMatSolve(ilink->ksp, ilink->X, ilink->Y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
    PetscCall(MatDenseScatter_Private(ilink->sctx, ilink->Y, Y, ADD_VALUES, SCATTER_REVERSE));
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
    /* general multiplicative with any number of splits */
    PetscCall(MatZeroEntries(Y));
    /* first split */
    PetscCall(MatDenseScatter_Private(ilink->sctx, X, ilink->X, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
    PetscCall(KSPMatSolve(ilink->ksp, ilink->X, ilink->Y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
    PetscCall(MatDenseScatter_Private(ilink->sctx, ilink->Y, Y, ADD_VALUES, SCATTER_REVERSE));
    cnt = 1;
    /* forward sweep */
    while (ilink->next) {
      ilink = ilink->next;
      /* compute the residual only over the part of the vector needed */
      PetscCall(MatMatMult(jac->Afield[cnt++], Y, MAT_REUSE_MATRIX, PETSC_DETERMINE, &ilink->X));
      PetscCall(MatScale(ilink->X, -1.0));
      PetscCall(MatDenseScatter_Private(ilink->sctx, X, ilink->X, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
      PetscCall(KSPMatSolve(ilink->ksp, ilink->X, ilink->Y));
      PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
      PetscCall(MatDenseScatter_Private(ilink->sctx, ilink->Y, Y, ADD_VALUES, SCATTER_REVERSE));
    }
    /* backward sweep for symmetric multiplicative */
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      cnt -= 2; /* skip the last split, it was just solved in the forward sweep */
      while (ilink->previous) {
        ilink = ilink->previous;
        /* compute the residual only over the part of the vector needed */
        PetscCall(MatMatMult(jac->Afield[cnt--], Y, MAT_REUSE_MATRIX, PETSC_DETERMINE, &ilink->X));
        PetscCall(MatScale(ilink->X, -1.0));
        PetscCall(MatDenseScatter_Private(ilink->sctx, X, ilink->X, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
        PetscCall(KSPMatSolve(ilink->ksp, ilink->X, ilink->Y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
        PetscCall(MatDenseScatter_Private(ilink->sctx, ilink->Y, Y, ADD_VALUES, SCATTER_REVERSE));
      }
    }
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCMatApply() not implemented for this fieldsplit type");
  PetscFunctionReturn(PETSC_SUCCESS);
}
1849 
/*
  PCApply_FieldSplit_GKB - Applies the Golub-Kahan bidiagonalization (GKB)
  fieldsplit preconditioner for a 2-split system; the algorithm and the
  augmented-Lagrangian regularization H = A + nu*B*B' follow the reference
  cited in the inline comments as [Ar13].

  Iterates until the lower-bound error estimate drops below jac->gkbtol or
  jac->gkbmaxit iterations are reached.

  Input Parameters:
+ pc - the fieldsplit preconditioner context
- x  - input vector

  Output Parameter:
. y  - output vector
*/
static PetscErrorCode PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; /* first split (A block) and second split */
  KSP               ksp = ilinkA->ksp;                         /* inner solver for H */
  Vec               u, v, Hu, d, work1, work2;
  PetscScalar       alpha, z, nrmz2, *vecz;
  PetscReal         lowbnd, nu, beta;
  PetscInt          j, iterGKB;

  PetscFunctionBegin;
  /* restrict the input onto the two splits */
  PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));

  /* work objects were allocated at setup; vecz holds the delayed z history
     used for the lower-bound stopping criterion */
  u     = jac->u;
  v     = jac->v;
  Hu    = jac->Hu;
  d     = jac->d;
  work1 = jac->w1;
  work2 = jac->w2;
  vecz  = jac->vecz;

  /* Change RHS to comply with matrix regularization H = A + nu*B*B' */
  /* Add q = q + nu*B*b */
  if (jac->gkbnu) {
    nu = jac->gkbnu;
    PetscCall(VecScale(ilinkD->x, jac->gkbnu));
    PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */
  } else {
    /* Situation when no augmented Lagrangian is used. Then we set inner  */
    /* matrix N = I in [Ar13], and thus nu = 1.                           */
    nu = 1;
  }

  /* Transform rhs from [q,tilde{b}] to [0,b] */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y));
  PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1));
  PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x        */

  /* First step of algorithm */
  PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c)*/
  KSPCheckDot(ksp, beta);
  beta = PetscSqrtReal(nu) * beta;
  PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c      */
  PetscCall(MatMult(jac->B, v, work2));          /* u = H^{-1}*B*v      */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(KSPSolve(ksp, work2, u));
  PetscCall(KSPCheckSolve(ksp, pc, u));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u      */
  PetscCall(VecDot(Hu, u, &alpha));
  KSPCheckDot(ksp, alpha);
  PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
  alpha = PetscSqrtReal(PetscAbsScalar(alpha));
  PetscCall(VecScale(u, 1.0 / alpha));
  PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* v = nu/beta *c      */

  z       = beta / alpha;
  vecz[1] = z;

  /* Computation of first iterate x(1) and p(1) */
  PetscCall(VecAXPY(ilinkA->y, z, u));
  PetscCall(VecCopy(d, ilinkD->y));
  PetscCall(VecScale(ilinkD->y, -z));

  iterGKB = 1;
  lowbnd  = 2 * jac->gkbtol; /* force at least one pass through the loop */
  if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));

  while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) {
    iterGKB += 1;
    PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */
    PetscCall(VecAXPBY(v, nu, -alpha, work1));
    PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = sqrt(nu)*v'*v      */
    beta = beta / PetscSqrtReal(nu);
    PetscCall(VecScale(v, 1.0 / beta));
    PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */
    PetscCall(MatMult(jac->H, u, Hu));
    PetscCall(VecAXPY(work2, -beta, Hu));
    PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(KSPSolve(ksp, work2, u));
    PetscCall(KSPCheckSolve(ksp, pc, u));
    PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u            */
    PetscCall(VecDot(Hu, u, &alpha));
    KSPCheckDot(ksp, alpha);
    PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
    alpha = PetscSqrtReal(PetscAbsScalar(alpha));
    PetscCall(VecScale(u, 1.0 / alpha));

    z       = -beta / alpha * z; /* z <- beta/alpha*z     */
    vecz[0] = z;

    /* Computation of new iterate x(i+1) and p(i+1) */
    PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */
    PetscCall(VecAXPY(ilinkA->y, z, u));                   /* r = r + z*u          */
    PetscCall(VecAXPY(ilinkD->y, -z, d));                  /* p = p - z*d          */
    PetscCall(MatMult(jac->H, ilinkA->y, Hu));             /* ||u||_H = u'*H*u     */
    PetscCall(VecDot(Hu, ilinkA->y, &nrmz2));

    /* Compute Lower Bound estimate (only once gkbdelay z-values are available) */
    if (iterGKB > jac->gkbdelay) {
      lowbnd = 0.0;
      for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]);
      lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2));
    }

    /* shift the z history window by one; vecz[0] is overwritten next iteration */
    for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2];
    if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));
  }

  /* prolongate the two split solutions back into the full vector y */
  PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1972 
/* Restrict xx onto the split, run the transposed split solve, and add the
   result back into yy. Short-circuits on the first failing call and yields
   its PetscErrorCode; relies on a variable `pc` being in scope at the point
   of expansion (used by KSPCheckSolve). Transpose counterpart of
   FieldSplitSplitSolveAdd(). */
#define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \
                    KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE)))
1977 
/*
  PCApplyTranspose_FieldSplit - Applies the transpose of the fieldsplit
  preconditioner, y = M^{-T} x. The additive case mirrors PCApply_FieldSplit()
  with transposed split solves; the multiplicative cases sweep the splits in
  the reverse of the order used by PCApply_FieldSplit().

  Input Parameters:
+ pc - the fieldsplit preconditioner context
- x  - input vector

  Output Parameter:
. y  - output vector
*/
static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head; /* walk the linked list of splits */
  PetscInt          bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    PetscBool matnest;

    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
    if (jac->defaultsplit && !matnest) {
      /* default (strided) splits: gather all fields in one stride operation */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else {
    /* multiplicative / symmetric multiplicative transpose; uses full-size
       residuals r = x - A^T y rather than the split-restricted updates of the
       forward apply */
    if (!jac->w1) {
      /* lazily create full-size work vectors on first application */
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* symmetric sweep: forward then backward, same as the non-transposed
         symmetric case but with transposed split solves */
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->next) {
        ilink = ilink->next;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); /* w2 = x - A^T y */
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); /* w2 = x - A^T y */
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    } else {
      /* plain multiplicative transpose: sweep the splits in reverse order */
      while (ilink->next) { /* get to last entry in linked list */
        ilink = ilink->next;
      }
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); /* w2 = x - A^T y */
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2045 
/*
  PCReset_FieldSplit - Frees everything PCSetUp_FieldSplit() and the apply
  routines created (split links, work vectors/matrices, Schur and GKB
  objects) so the PC can be set up again, possibly with different splits.
  Also called by PCDestroy_FieldSplit().
*/
static PetscErrorCode PCReset_FieldSplit(PC pc)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;

  PetscFunctionBegin;
  /* destroy every split link in the list, including its sub-KSP and work objects */
  while (ilink) {
    PetscCall(KSPDestroy(&ilink->ksp));
    PetscCall(VecDestroy(&ilink->x));
    PetscCall(VecDestroy(&ilink->y));
    PetscCall(VecDestroy(&ilink->z));
    PetscCall(MatDestroy(&ilink->X));
    PetscCall(MatDestroy(&ilink->Y));
    PetscCall(MatDestroy(&ilink->Z));
    PetscCall(VecScatterDestroy(&ilink->sctx));
    PetscCall(ISDestroy(&ilink->is));
    PetscCall(ISDestroy(&ilink->is_col));
    PetscCall(PetscFree(ilink->splitname));
    PetscCall(PetscFree(ilink->fields));
    PetscCall(PetscFree(ilink->fields_col));
    next = ilink->next; /* save before freeing the node itself */
    PetscCall(PetscFree(ilink));
    ilink = next;
  }
  jac->head = NULL;
  PetscCall(PetscFree2(jac->x, jac->y));
  /* jac->mat may alias jac->pmat; only destroy it when it is a distinct array */
  if (jac->mat && jac->mat != jac->pmat) {
    PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat));
  } else if (jac->mat) {
    jac->mat = NULL; /* aliased: the pmat destroy below owns the reference */
  }
  if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat));
  if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield));
  jac->nsplits = 0;
  PetscCall(VecDestroy(&jac->w1));
  PetscCall(VecDestroy(&jac->w2));
  /* drop the composed "AinvB" reference before destroying the Schur complement */
  if (jac->schur) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", NULL));
  PetscCall(MatDestroy(&jac->schur));
  PetscCall(MatDestroy(&jac->schurp));
  PetscCall(MatDestroy(&jac->schur_user));
  PetscCall(KSPDestroy(&jac->kspschur));
  PetscCall(KSPDestroy(&jac->kspupper));
  /* GKB work objects */
  PetscCall(MatDestroy(&jac->B));
  PetscCall(MatDestroy(&jac->C));
  PetscCall(MatDestroy(&jac->H));
  PetscCall(VecDestroy(&jac->u));
  PetscCall(VecDestroy(&jac->v));
  PetscCall(VecDestroy(&jac->Hu));
  PetscCall(VecDestroy(&jac->d));
  PetscCall(PetscFree(jac->vecz));
  PetscCall(PetscViewerDestroy(&jac->gkbviewer));
  jac->isrestrict = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2100 
2101 static PetscErrorCode PCDestroy_FieldSplit(PC pc)
2102 {
2103   PetscFunctionBegin;
2104   PetscCall(PCReset_FieldSplit(pc));
2105   PetscCall(PetscFree(pc->data));
2106   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
2107   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL));
2108   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL));
2109   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL));
2110   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL));
2111   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL));
2112   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL));
2113   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
2114   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
2115   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
2116   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
2117   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
2118   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
2119   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
2120   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
2121   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
2122   PetscFunctionReturn(PETSC_SUCCESS);
2123 }
2124 
/*
  PCSetFromOptions_FieldSplit - Processes the options database for the
  fieldsplit preconditioner: block size, composition type, split definitions,
  Schur-specific and GKB-specific options, and (after setup) the options of
  the sub-solvers.
*/
static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems PetscOptionsObject)
{
  PetscInt        bs;
  PetscBool       flg;
  PC_FieldSplit  *jac = (PC_FieldSplit *)pc->data;
  PCCompositeType ctype;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options");
  PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL));
  PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg));
  if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs));
  /* default both Amat flags to the PC-wide useAmat setting before reading the overrides */
  jac->diag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL));
  jac->offdiag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL));
  PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL));
  PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */
  PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg));
  if (flg) PetscCall(PCFieldSplitSetType(pc, ctype));
  /* Only setup fields once */
  if (jac->bs > 0 && jac->nsplits == 0) {
    /* only allow user to set fields from command line.
       otherwise user can set them in PCFieldSplitSetDefaults() */
    PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
    if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
  }
  if (jac->type == PC_COMPOSITE_SCHUR) {
    /* accept the deprecated spelling first, then the current options */
    PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg));
    if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n"));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL));
    PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL));
  } else if (jac->type == PC_COMPOSITE_GKB) {
    PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitSetGKBTol", jac->gkbtol, &jac->gkbtol, NULL));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitSetGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL));
    PetscCall(PetscOptionsBoundedReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitSetGKBNu", jac->gkbnu, &jac->gkbnu, NULL, 0.0));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", "Maximum allowed number of iterations", "PCFieldSplitSetGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL));
    PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL));
  }
  /*
    In the initial call to this routine the sub-solver data structures do not exist so we cannot call KSPSetFromOptions() on them yet.
    But after the initial setup of ALL the layers of sub-solvers is completed we do want to call KSPSetFromOptions() on the sub-solvers every time it
    is called on the outer solver in case changes were made in the options database

    But even after PCSetUp_FieldSplit() is called all the options inside the inner levels of sub-solvers may still not have been set thus we only call the KSPSetFromOptions()
    if we know that the entire stack of sub-solvers below this have been complete instantiated, we check this by seeing if any solver iterations are complete.
    Without this extra check test p2p1fetidp_olof_full and others fail with incorrect matrix types.

    There could be a negative side effect of calling the KSPSetFromOptions() below.

    If one captured the PetscObjectState of the options database one could skip these calls if the database has not changed from the previous call
  */
  if (jac->issetup) {
    PC_FieldSplitLink ilink = jac->head;
    if (jac->type == PC_COMPOSITE_SCHUR) {
      if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper));
      if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur));
    }
    while (ilink) {
      if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp));
      ilink = ilink->next;
    }
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
2192 
2193 static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
2194 {
2195   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
2196   PC_FieldSplitLink ilink, next = jac->head;
2197   char              prefix[128];
2198   PetscInt          i;
2199   PetscLogEvent     nse;
2200 
2201   PetscFunctionBegin;
2202   if (jac->splitdefined) {
2203     PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
2204     PetscFunctionReturn(PETSC_SUCCESS);
2205   }
2206   for (i = 0; i < n; i++) PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]);
2207   PetscCall(PetscNew(&ilink));
2208   if (splitname) {
2209     PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
2210   } else {
2211     PetscCall(PetscMalloc1(3, &ilink->splitname));
2212     PetscCall(PetscSNPrintf(ilink->splitname, 2, "%" PetscInt_FMT, jac->nsplits));
2213   }
2214   PetscCall(PetscMPIIntCast(jac->nsplits, &nse));
2215   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + nse : KSP_Solve_FS_0 + 4; /* Splits greater than 4 logged in 4th split */
2216   PetscCall(PetscMalloc1(n, &ilink->fields));
2217   PetscCall(PetscArraycpy(ilink->fields, fields, n));
2218   PetscCall(PetscMalloc1(n, &ilink->fields_col));
2219   PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n));
2220 
2221   ilink->nfields = n;
2222   ilink->next    = NULL;
2223   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
2224   PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
2225   PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
2226   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
2227   PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
2228 
2229   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
2230   PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
2231 
2232   if (!next) {
2233     jac->head       = ilink;
2234     ilink->previous = NULL;
2235   } else {
2236     while (next->next) next = next->next;
2237     next->next      = ilink;
2238     ilink->previous = next;
2239   }
2240   jac->nsplits++;
2241   PetscFunctionReturn(PETSC_SUCCESS);
2242 }
2243 
2244 static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
2245 {
2246   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2247 
2248   PetscFunctionBegin;
2249   *subksp = NULL;
2250   if (n) *n = 0;
2251   if (jac->type == PC_COMPOSITE_SCHUR) {
2252     PetscInt nn;
2253 
2254     PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()");
2255     PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits);
2256     nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 1 : 0);
2257     PetscCall(PetscMalloc1(nn, subksp));
2258     (*subksp)[0] = jac->head->ksp;
2259     (*subksp)[1] = jac->kspschur;
2260     if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper;
2261     if (n) *n = nn;
2262   }
2263   PetscFunctionReturn(PETSC_SUCCESS);
2264 }
2265 
/* Return the KSPs of a Schur-type fieldsplit: entry 0 is filled by
   MatSchurComplementGetKSP() (the (0,0)-block solver stored inside the Schur
   complement matrix), entry 1 is the outer Schur complement solver.
   The caller frees the array with PetscFree(). */
static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscCall(PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()"));
  PetscCall(PetscMalloc1(jac->nsplits, subksp));
  PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp));

  (*subksp)[1] = jac->kspschur;
  if (n) *n = jac->nsplits;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2279 
2280 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
2281 {
2282   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2283   PetscInt          cnt   = 0;
2284   PC_FieldSplitLink ilink = jac->head;
2285 
2286   PetscFunctionBegin;
2287   PetscCall(PetscMalloc1(jac->nsplits, subksp));
2288   while (ilink) {
2289     (*subksp)[cnt++] = ilink->ksp;
2290     ilink            = ilink->next;
2291   }
2292   PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits);
2293   if (n) *n = jac->nsplits;
2294   PetscFunctionReturn(PETSC_SUCCESS);
2295 }
2296 
2297 /*@
2298   PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`.
2299 
2300   Input Parameters:
2301 + pc  - the preconditioner context
2302 - isy - the index set that defines the indices to which the fieldsplit is to be restricted
2303 
2304   Level: advanced
2305 
2306   Developer Notes:
2307   It seems the resulting `IS`s will not cover the entire space, so
2308   how can they define a convergent preconditioner? Needs explaining.
2309 
2310 .seealso: [](sec_block_matrices), `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
2311 @*/
PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(isy, IS_CLASSID, 2);
  /* PetscTryMethod() silently does nothing when pc is not a PCFIELDSPLIT (method not composed) */
  PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2320 
/* Restrict every split's row and column IS to the subset of indices selected by
   isy, renumbering them into the contiguous numbering induced by isy; applied
   recursively to nested PCFIELDSPLIT preconditioners found inside each split's KSP. */
static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;
  PetscInt          localsize, size, sizez, i;
  const PetscInt   *ind, *indz;
  PetscInt         *indc, *indcz;
  PetscBool         flg;

  PetscFunctionBegin;
  PetscCall(ISGetLocalSize(isy, &localsize));
  /* After the scan-minus-local, size is this rank's global starting offset (exclusive prefix sum) within isy */
  PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy)));
  size -= localsize;
  while (ilink) {
    IS isrl, isr;
    PC subpc;
    /* isrl = positions (in isy's local numbering) of this split's entries that survive the restriction */
    PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl));
    PetscCall(ISGetLocalSize(isrl, &localsize));
    PetscCall(PetscMalloc1(localsize, &indc));
    PetscCall(ISGetIndices(isrl, &ind));
    PetscCall(PetscArraycpy(indc, ind, localsize));
    PetscCall(ISRestoreIndices(isrl, &ind));
    PetscCall(ISDestroy(&isrl));
    /* Shift local positions by the rank offset to obtain global indices in the restricted numbering */
    for (i = 0; i < localsize; i++) *(indc + i) += size;
    /* isr takes ownership of indc (PETSC_OWN_POINTER); it ends up referenced once
       by ilink->is and once by ilink->is_col, the creation reference is dropped below */
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr));
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is));
    ilink->is = isr;
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is_col));
    ilink->is_col = isr;
    PetscCall(ISDestroy(&isr));
    PetscCall(KSPGetPC(ilink->ksp, &subpc));
    PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg));
    if (flg) {
      /* The split's solver is itself a fieldsplit: build the embedding of isy into
         this split (in the split's own numbering) and recurse */
      IS       iszl, isz;
      MPI_Comm comm;
      PetscCall(ISGetLocalSize(ilink->is, &localsize));
      comm = PetscObjectComm((PetscObject)ilink->is);
      PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl));
      PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm));
      sizez -= localsize;
      PetscCall(ISGetLocalSize(iszl, &localsize));
      PetscCall(PetscMalloc1(localsize, &indcz));
      PetscCall(ISGetIndices(iszl, &indz));
      PetscCall(PetscArraycpy(indcz, indz, localsize));
      PetscCall(ISRestoreIndices(iszl, &indz));
      PetscCall(ISDestroy(&iszl));
      for (i = 0; i < localsize; i++) *(indcz + i) += sizez;
      PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz));
      PetscCall(PCFieldSplitRestrictIS(subpc, isz));
      PetscCall(ISDestroy(&isz));
    }
    next  = ilink->next;
    ilink = next;
  }
  jac->isrestrict = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2380 
/* Append a new split, defined by an explicit IS, to the tail of the split list
   and create its (KSPPREONLY) solver with options prefix -<pcprefix>fieldsplit_<name>_. */
static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink, next = jac->head;
  char              prefix[128];
  PetscLogEvent     nse;

  PetscFunctionBegin;
  /* Once the splits have been frozen, silently ignore further additions */
  if (jac->splitdefined) {
    PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(PetscNew(&ilink));
  if (splitname) {
    PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
  } else {
    /* Unnamed split: its name is the split index printed as a string */
    PetscCall(PetscMalloc1(8, &ilink->splitname));
    PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits));
  }
  PetscCall(PetscMPIIntCast(jac->nsplits, &nse));
  ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + nse : KSP_Solve_FS_0 + 4; /* Splits greater than 4 logged in 4th split */
  /* The split holds its own reference to is for both the row and the column index set */
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is));
  ilink->is = is;
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is_col));
  ilink->is_col = is;
  ilink->next   = NULL;
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
  PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
  PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
  PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));

  /* Options for this split's solver live under -<pcprefix>fieldsplit_<splitname>_ */
  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
  PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));

  /* Append at the tail of the doubly linked split list */
  if (!next) {
    jac->head       = ilink;
    ilink->previous = NULL;
  } else {
    while (next->next) next = next->next;
    next->next      = ilink;
    ilink->previous = next;
  }
  jac->nsplits++;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2429 
2430 /*@
2431   PCFieldSplitSetFields - Sets the fields that define one particular split in `PCFIELDSPLIT`
2432 
2433   Logically Collective
2434 
2435   Input Parameters:
2436 + pc         - the preconditioner context
2437 . splitname  - name of this split, if `NULL` the number of the split is used
2438 . n          - the number of fields in this split
2439 . fields     - the fields in this split
2440 - fields_col - generally the same as `fields`, if it does not match `fields` then the submatrix that is solved for this set of fields comes from an off-diagonal block
2441                of the matrix and `fields_col` provides the column indices for that block
2442 
2443   Options Database Key:
2444 . -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split
2445 
2446   Level: intermediate
2447 
2448   Notes:
  Use `PCFieldSplitSetIS()` to set a general set of indices as a split.
2450 
2451   If the matrix used to construct the preconditioner is `MATNEST` then field i refers to the `is_row[i]` `IS` passed to `MatCreateNest()`.
2452 
2453   If the matrix used to construct the preconditioner is not `MATNEST` then
2454   `PCFieldSplitSetFields()` is for defining fields as strided blocks (based on the block size provided to the matrix with `MatSetBlockSize()` or
2455   to the `PC` with `PCFieldSplitSetBlockSize()`). For example, if the block
2456   size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean
2457   0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x23x56x8.. x12x45x78x....
2458   where the numbered entries indicate what is in the split.
2459 
2460   This function is called once per split (it creates a new split each time).  Solve options
2461   for this split will be available under the prefix `-fieldsplit_SPLITNAME_`.
2462 
2463   `PCFieldSplitSetIS()` does not support having a `fields_col` different from `fields`
2464 
2465   Developer Notes:
2466   This routine does not actually create the `IS` representing the split, that is delayed
2467   until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be
2468   available when this routine is called.
2469 
2470 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()`,
2471           `MatSetBlockSize()`, `MatCreateNest()`
2472 @*/
2473 PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt fields[], const PetscInt fields_col[])
2474 {
2475   PetscFunctionBegin;
2476   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2477   PetscAssertPointer(splitname, 2);
2478   PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname);
2479   PetscAssertPointer(fields, 4);
2480   PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col));
2481   PetscFunctionReturn(PETSC_SUCCESS);
2482 }
2483 
2484 /*@
2485   PCFieldSplitSetDiagUseAmat - set flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2486   the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2487 
2488   Logically Collective
2489 
2490   Input Parameters:
2491 + pc  - the preconditioner object
2492 - flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2493 
2494   Options Database Key:
2495 . -pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks
2496 
2497   Level: intermediate
2498 
2499 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT`
2500 @*/
2501 PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg)
2502 {
2503   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2504   PetscBool      isfs;
2505 
2506   PetscFunctionBegin;
2507   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2508   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2509   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2510   jac->diag_use_amat = flg;
2511   PetscFunctionReturn(PETSC_SUCCESS);
2512 }
2513 
2514 /*@
2515   PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2516   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2517 
2518   Logically Collective
2519 
2520   Input Parameter:
2521 . pc - the preconditioner object
2522 
2523   Output Parameter:
2524 . flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2525 
2526   Level: intermediate
2527 
2528 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT`
2529 @*/
2530 PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg)
2531 {
2532   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2533   PetscBool      isfs;
2534 
2535   PetscFunctionBegin;
2536   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2537   PetscAssertPointer(flg, 2);
2538   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2539   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2540   *flg = jac->diag_use_amat;
2541   PetscFunctionReturn(PETSC_SUCCESS);
2542 }
2543 
2544 /*@
2545   PCFieldSplitSetOffDiagUseAmat - set flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2546   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2547 
2548   Logically Collective
2549 
2550   Input Parameters:
2551 + pc  - the preconditioner object
2552 - flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2553 
2554   Options Database Key:
2555 . -pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks
2556 
2557   Level: intermediate
2558 
2559 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT`
2560 @*/
2561 PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg)
2562 {
2563   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2564   PetscBool      isfs;
2565 
2566   PetscFunctionBegin;
2567   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2568   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2569   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2570   jac->offdiag_use_amat = flg;
2571   PetscFunctionReturn(PETSC_SUCCESS);
2572 }
2573 
2574 /*@
2575   PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2576   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2577 
2578   Logically Collective
2579 
2580   Input Parameter:
2581 . pc - the preconditioner object
2582 
2583   Output Parameter:
2584 . flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2585 
2586   Level: intermediate
2587 
2588 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT`
2589 @*/
2590 PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg)
2591 {
2592   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2593   PetscBool      isfs;
2594 
2595   PetscFunctionBegin;
2596   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2597   PetscAssertPointer(flg, 2);
2598   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2599   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2600   *flg = jac->offdiag_use_amat;
2601   PetscFunctionReturn(PETSC_SUCCESS);
2602 }
2603 
2604 /*@
2605   PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT`
2606 
2607   Logically Collective
2608 
2609   Input Parameters:
2610 + pc        - the preconditioner context
2611 . splitname - name of this split, if `NULL` the number of the split is used
2612 - is        - the index set that defines the elements in this split
2613 
2614   Level: intermediate
2615 
2616   Notes:
2617   Use `PCFieldSplitSetFields()`, for splits defined by strided `IS` based on the matrix block size or the `is_rows[]` passed into `MATNEST`
2618 
2619   This function is called once per split (it creates a new split each time).  Solve options
2620   for this split will be available under the prefix -fieldsplit_SPLITNAME_.
2621 
2622 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetFields()`
2623 @*/
PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* splitname is optional; when NULL the split is named after its index */
  if (splitname) PetscAssertPointer(splitname, 2);
  PetscValidHeaderSpecific(is, IS_CLASSID, 3);
  /* PetscTryMethod() silently does nothing when pc is not a PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2633 
2634 /*@
2635   PCFieldSplitGetIS - Retrieves the elements for a split as an `IS`
2636 
2637   Logically Collective
2638 
2639   Input Parameters:
2640 + pc        - the preconditioner context
2641 - splitname - name of this split
2642 
2643   Output Parameter:
2644 . is - the index set that defines the elements in this split, or `NULL` if the split is not found
2645 
2646   Level: intermediate
2647 
2648 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`, `PCFieldSplitGetISByIndex()`
2649 @*/
2650 PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is)
2651 {
2652   PetscFunctionBegin;
2653   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2654   PetscAssertPointer(splitname, 2);
2655   PetscAssertPointer(is, 3);
2656   {
2657     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2658     PC_FieldSplitLink ilink = jac->head;
2659     PetscBool         found;
2660 
2661     *is = NULL;
2662     while (ilink) {
2663       PetscCall(PetscStrcmp(ilink->splitname, splitname, &found));
2664       if (found) {
2665         *is = ilink->is;
2666         break;
2667       }
2668       ilink = ilink->next;
2669     }
2670   }
2671   PetscFunctionReturn(PETSC_SUCCESS);
2672 }
2673 
2674 /*@
2675   PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS`
2676 
2677   Logically Collective
2678 
2679   Input Parameters:
2680 + pc    - the preconditioner context
2681 - index - index of this split
2682 
2683   Output Parameter:
2684 . is - the index set that defines the elements in this split
2685 
2686   Level: intermediate
2687 
.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`
2690 @*/
2691 PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is)
2692 {
2693   PetscFunctionBegin;
2694   PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index);
2695   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2696   PetscAssertPointer(is, 3);
2697   {
2698     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2699     PC_FieldSplitLink ilink = jac->head;
2700     PetscInt          i     = 0;
2701     PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits);
2702 
2703     while (i < index) {
2704       ilink = ilink->next;
2705       ++i;
2706     }
2707     PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is));
2708   }
2709   PetscFunctionReturn(PETSC_SUCCESS);
2710 }
2711 
2712 /*@
2713   PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the
2714   fieldsplit preconditioner when calling `PCFieldSplitSetFields()`. If not set the matrix block size is used.
2715 
2716   Logically Collective
2717 
2718   Input Parameters:
2719 + pc - the preconditioner context
2720 - bs - the block size
2721 
2722   Level: intermediate
2723 
2724   Note:
2725   If the matrix is a `MATNEST` then the `is_rows[]` passed to `MatCreateNest()` determines the fields.
2726 
2727 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
2728 @*/
PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, bs, 2);
  /* PetscTryMethod() silently does nothing when pc is not a PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2737 
2738 /*@C
2739   PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits
2740 
2741   Collective
2742 
2743   Input Parameter:
2744 . pc - the preconditioner context
2745 
2746   Output Parameters:
2747 + n      - the number of splits
2748 - subksp - the array of `KSP` contexts
2749 
2750   Level: advanced
2751 
2752   Notes:
2753   After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2754   (not the `KSP`, just the array that contains them).
2755 
2756   You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`.
2757 
2758   If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the
2759   Schur complement and the `KSP` object used to iterate over the Schur complement.
2760   To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`.
2761 
2762   If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the
2763   inner linear system defined by the matrix H in each loop.
2764 
2765   Fortran Note:
2766   Call `PCFieldSplitRestoreSubKSP()` when the array of `KSP` is no longer needed
2767 
2768   Developer Notes:
2769   There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2770 
2771 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()`
2772 @*/
PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2);
  /* PetscUseMethod() errors (unlike PetscTryMethod()) when pc is not a PCFIELDSPLIT */
  PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2781 
2782 /*@C
2783   PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT`
2784 
2785   Collective
2786 
2787   Input Parameter:
2788 . pc - the preconditioner context
2789 
2790   Output Parameters:
2791 + n      - the number of splits
2792 - subksp - the array of `KSP` contexts
2793 
2794   Level: advanced
2795 
2796   Notes:
2797   After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2798   (not the `KSP` just the array that contains them).
2799 
2800   You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`.
2801 
2802   If the fieldsplit type is of type `PC_COMPOSITE_SCHUR`, it returns (in order)
2803 +  1  - the `KSP` used for the (1,1) block
2804 .  2  - the `KSP` used for the Schur complement (not the one used for the interior Schur solver)
2805 -  3  - the `KSP` used for the (1,1) block in the upper triangular factor (if different from that of the (1,1) block).
2806 
2807   It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`.
2808 
2809   Fortran Note:
2810   Call `PCFieldSplitSchurRestoreSubKSP()` when the array of `KSP` is no longer needed
2811 
2812   Developer Notes:
2813   There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2814 
2815   Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged?
2816 
2817 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()`
2818 @*/
PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2);
  /* PetscUseMethod() errors (unlike PetscTryMethod()) when pc is not a PCFIELDSPLIT */
  PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2827 
2828 /*@
2829   PCFieldSplitSetSchurPre -  Indicates from what operator the preconditioner is constructed for the Schur complement.
2830   The default is the A11 matrix.
2831 
2832   Collective
2833 
2834   Input Parameters:
2835 + pc    - the preconditioner context
2836 . ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default),
2837               `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`,
2838               `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL`
2839 - pre   - matrix to use for preconditioning, or `NULL`
2840 
2841   Options Database Keys:
2842 + -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`. See notes for meaning of various arguments
2843 - -fieldsplit_1_pc_type <pctype>                               - the preconditioner algorithm that is used to construct the preconditioner from the operator
2844 
2845   Level: intermediate
2846 
2847   Notes:
2848   If ptype is
2849 +     a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner
2850   matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix
2851 .     self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix:
2852   The only preconditioners that currently work with this symbolic representation matrix object are `PCLSC` and `PCHPDDM`
2853 .     user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument
2854   to this function).
2855 .     selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation $ Sp = A11 - A10 inv(diag(A00)) A01 $
2856   This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be
2857   lumped before extracting the diagonal using the additional option `-fieldsplit_1_mat_schur_complement_ainv_type lump`
2858 -     full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation
2859   computed internally by `PCFIELDSPLIT` (this is expensive)
2860   useful mostly as a test that the Schur complement approach can work for your problem
2861 
2862   When solving a saddle point problem, where the A11 block is identically zero, using `a11` as the ptype only makes sense
2863   with the additional option `-fieldsplit_1_pc_type none`. Usually for saddle point problems one would use a `ptype` of `self` and
2864   `-fieldsplit_1_pc_type lsc` which uses the least squares commutator to compute a preconditioner for the Schur complement.
2865 
2866   Developer Note:
2867   The name of this function and the option `-pc_fieldsplit_schur_precondition` are inconsistent; precondition should be used everywhere.
2868 
2869 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`,
2870           `MatSchurComplementSetAinvType()`, `PCLSC`, `PCFieldSplitSetSchurFactType()`
2871 @*/
PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscTryMethod() silently does nothing when pc is not a PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2879 
/* Deprecated name kept for backward compatibility; use PCFieldSplitSetSchurPre() instead */
PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  return PCFieldSplitSetSchurPre(pc, ptype, pre);
} /* Deprecated name */
2884 
2885 /*@
2886   PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be
2887   preconditioned.  See `PCFieldSplitSetSchurPre()` for details.
2888 
2889   Logically Collective
2890 
2891   Input Parameter:
2892 . pc - the preconditioner context
2893 
2894   Output Parameters:
2895 + ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`
2896 - pre   - matrix to use for preconditioning (with `PC_FIELDSPLIT_SCHUR_PRE_USER`), or `NULL`
2897 
2898   Level: intermediate
2899 
2900 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC`
2901 @*/
PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscUseMethod() errors (unlike PetscTryMethod()) when pc is not a PCFIELDSPLIT */
  PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2909 
2910 /*@
2911   PCFieldSplitSchurGetS -  extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately
2912 
2913   Not Collective
2914 
2915   Input Parameter:
2916 . pc - the preconditioner context
2917 
2918   Output Parameter:
2919 . S - the Schur complement matrix
2920 
2921   Level: advanced
2922 
2923   Note:
2924   This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`.
2925 
2926 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`,
2927           `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()`
2928 @*/
2929 PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S)
2930 {
2931   const char    *t;
2932   PetscBool      isfs;
2933   PC_FieldSplit *jac;
2934 
2935   PetscFunctionBegin;
2936   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2937   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2938   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2939   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2940   jac = (PC_FieldSplit *)pc->data;
2941   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2942   if (S) *S = jac->schur;
2943   PetscFunctionReturn(PETSC_SUCCESS);
2944 }
2945 
2946 /*@
2947   PCFieldSplitSchurRestoreS -  returns the `MATSCHURCOMPLEMENT` matrix used by this `PC`
2948 
2949   Not Collective
2950 
2951   Input Parameters:
2952 + pc - the preconditioner context
2953 - S  - the Schur complement matrix
2954 
2955   Level: advanced
2956 
2957 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MatSchurComplement`, `PCFieldSplitSchurGetS()`
2958 @*/
2959 PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S)
2960 {
2961   const char    *t;
2962   PetscBool      isfs;
2963   PC_FieldSplit *jac;
2964 
2965   PetscFunctionBegin;
2966   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2967   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2968   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2969   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2970   jac = (PC_FieldSplit *)pc->data;
2971   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2972   PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten");
2973   PetscFunctionReturn(PETSC_SUCCESS);
2974 }
2975 
2976 static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2977 {
2978   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2979 
2980   PetscFunctionBegin;
2981   jac->schurpre = ptype;
2982   if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) {
2983     PetscCall(MatDestroy(&jac->schur_user));
2984     jac->schur_user = pre;
2985     PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
2986   }
2987   PetscFunctionReturn(PETSC_SUCCESS);
2988 }
2989 
2990 static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
2991 {
2992   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2993 
2994   PetscFunctionBegin;
2995   if (ptype) *ptype = jac->schurpre;
2996   if (pre) *pre = jac->schur_user;
2997   PetscFunctionReturn(PETSC_SUCCESS);
2998 }
2999 
3000 /*@
3001   PCFieldSplitSetSchurFactType -  sets which blocks of the approximate block factorization to retain in the preconditioner {cite}`murphy2000note` and {cite}`ipsen2001note`
3002 
3003   Collective
3004 
3005   Input Parameters:
3006 + pc    - the preconditioner context
3007 - ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default
3008 
3009   Options Database Key:
3010 . -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is `full`
3011 
3012   Level: intermediate
3013 
3014   Notes:
3015   The `full` factorization is
3016 
3017   ```{math}
3018   \left(\begin{array}{cc} A & B \\
3019   C & E \\
3020   \end{array}\right) =
3021   \left(\begin{array}{cc} I & 0 \\
3022   C A^{-1} & I \\
3023   \end{array}\right)
3024   \left(\begin{array}{cc} A & 0 \\
3025   0 & S \\
3026   \end{array}\right)
3027   \left(\begin{array}{cc} I & A^{-1}B \\
3028   0 & I \\
3029   \end{array}\right) = L D U,
3030   ```
3031 
3032   where $ S = E - C A^{-1} B $. In practice, the full factorization is applied via block triangular solves with the grouping $L(DU)$. `upper` uses $DU$, `lower` uses $LD$,
3033   and `diag` is the diagonal part with the sign of $S$ flipped (because this makes the preconditioner positive definite for many formulations,
  thus allowing the use of `KSPMINRES`). Sign flipping of $S$ can be turned off with `PCFieldSplitSetSchurScale()`.
3035 
3036   If $A$ and $S$ are solved exactly
3037 +  1 - `full` factorization is a direct solver.
3038 .  2 - The preconditioned operator with `lower` or `upper` has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations.
3039 -  3 - With `diag`, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations.
3040 
3041   If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner
3042   application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice.
3043 
3044   For symmetric problems in which $A$ is positive definite and $S$ is negative definite, `diag` can be used with `KSPMINRES`.
3045 
3046   A flexible method like `KSPFGMRES` or `KSPGCR`, [](sec_flexibleksp), must be used if the fieldsplit preconditioner is nonlinear (e.g., a few iterations of a Krylov method is used to solve with $A$ or $S$).
3047 
3048 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`,
3049           [](sec_flexibleksp), `PCFieldSplitSetSchurPre()`
3050 @*/
PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscTryMethod() is silently a no-op when the method is not composed,
     i.e. when pc is not a PCFIELDSPLIT of type SCHUR (see PCFieldSplitSetType_FieldSplit()) */
  PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3058 
3059 static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype)
3060 {
3061   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3062 
3063   PetscFunctionBegin;
3064   jac->schurfactorization = ftype;
3065   PetscFunctionReturn(PETSC_SUCCESS);
3066 }
3067 
3068 /*@
3069   PCFieldSplitSetSchurScale -  Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`.
3070 
3071   Collective
3072 
3073   Input Parameters:
3074 + pc    - the preconditioner context
3075 - scale - scaling factor for the Schur complement
3076 
3077   Options Database Key:
3078 . -pc_fieldsplit_schur_scale <scale> - default is -1.0
3079 
3080   Level: intermediate
3081 
3082 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurFactType()`
3083 @*/
PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* The scale must be the same on every rank of the PC's communicator */
  PetscValidLogicalCollectiveScalar(pc, scale, 2);
  /* Silently a no-op unless pc is a PCFIELDSPLIT of type SCHUR (only then is the method composed) */
  PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3092 
3093 static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale)
3094 {
3095   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3096 
3097   PetscFunctionBegin;
3098   jac->schurscale = scale;
3099   PetscFunctionReturn(PETSC_SUCCESS);
3100 }
3101 
3102 /*@C
3103   PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement
3104 
3105   Collective
3106 
3107   Input Parameter:
3108 . pc - the preconditioner context
3109 
3110   Output Parameters:
3111 + A00 - the (0,0) block
3112 . A01 - the (0,1) block
3113 . A10 - the (1,0) block
3114 - A11 - the (1,1) block
3115 
3116   Level: advanced
3117 
3118   Note:
3119   Use `NULL` for any unneeded output arguments
3120 
3121 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()`
3122 @*/
3123 PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11)
3124 {
3125   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3126 
3127   PetscFunctionBegin;
3128   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3129   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach.");
3130   if (A00) *A00 = jac->pmat[0];
3131   if (A01) *A01 = jac->B;
3132   if (A10) *A10 = jac->C;
3133   if (A11) *A11 = jac->pmat[1];
3134   PetscFunctionReturn(PETSC_SUCCESS);
3135 }
3136 
3137 /*@
3138   PCFieldSplitSetGKBTol -  Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
3139 
3140   Collective
3141 
3142   Input Parameters:
3143 + pc        - the preconditioner context
3144 - tolerance - the solver tolerance
3145 
3146   Options Database Key:
3147 . -pc_fieldsplit_gkb_tol <tolerance> - default is 1e-5
3148 
3149   Level: intermediate
3150 
3151   Note:
3152   The generalized GKB algorithm {cite}`arioli2013` uses a lower bound estimate of the error in energy norm as stopping criterion.
3153   It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than
3154   this estimate, the stopping criterion is satisfactory in practical cases.
3155 
3156 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()`
3157 @*/
PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* The tolerance must be the same on every rank of the PC's communicator */
  PetscValidLogicalCollectiveReal(pc, tolerance, 2);
  /* Silently a no-op unless pc is a PCFIELDSPLIT of type GKB (only then is the method composed) */
  PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3166 
3167 static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance)
3168 {
3169   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3170 
3171   PetscFunctionBegin;
3172   jac->gkbtol = tolerance;
3173   PetscFunctionReturn(PETSC_SUCCESS);
3174 }
3175 
3176 /*@
3177   PCFieldSplitSetGKBMaxit -  Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
3178 
3179   Collective
3180 
3181   Input Parameters:
3182 + pc    - the preconditioner context
3183 - maxit - the maximum number of iterations
3184 
3185   Options Database Key:
3186 . -pc_fieldsplit_gkb_maxit <maxit> - default is 100
3187 
3188   Level: intermediate
3189 
3190 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()`
3191 @*/
PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* The iteration limit must be the same on every rank of the PC's communicator */
  PetscValidLogicalCollectiveInt(pc, maxit, 2);
  /* Silently a no-op unless pc is a PCFIELDSPLIT of type GKB (only then is the method composed) */
  PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3200 
3201 static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit)
3202 {
3203   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3204 
3205   PetscFunctionBegin;
3206   jac->gkbmaxit = maxit;
3207   PetscFunctionReturn(PETSC_SUCCESS);
3208 }
3209 
3210 /*@
3211   PCFieldSplitSetGKBDelay -  Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization {cite}`arioli2013` in `PCFIELDSPLIT`
3212   preconditioner.
3213 
3214   Collective
3215 
3216   Input Parameters:
3217 + pc    - the preconditioner context
3218 - delay - the delay window in the lower bound estimate
3219 
3220   Options Database Key:
3221 . -pc_fieldsplit_gkb_delay <delay> - default is 5
3222 
3223   Level: intermediate
3224 
3225   Notes:
3226   The algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. The lower bound of the error $ ||u-u^k||_H $
3227   is expressed as a truncated sum. The error at iteration k can only be measured at iteration (k + `delay`), and thus the algorithm needs
3228   at least (`delay` + 1) iterations to stop.
3229 
3230   For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to {cite}`arioli2013`
3231 
3232 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
3233 @*/
PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* The delay window must be the same on every rank of the PC's communicator */
  PetscValidLogicalCollectiveInt(pc, delay, 2);
  /* Silently a no-op unless pc is a PCFIELDSPLIT of type GKB (only then is the method composed) */
  PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3242 
3243 static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay)
3244 {
3245   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3246 
3247   PetscFunctionBegin;
3248   jac->gkbdelay = delay;
3249   PetscFunctionReturn(PETSC_SUCCESS);
3250 }
3251 
3252 /*@
3253   PCFieldSplitSetGKBNu -  Sets the scalar value nu >= 0 in the transformation H = A00 + nu*A01*A01' of the (1,1) block in the
3254   Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
3255 
3256   Collective
3257 
3258   Input Parameters:
3259 + pc - the preconditioner context
3260 - nu - the shift parameter
3261 
3262   Options Database Key:
3263 . -pc_fieldsplit_gkb_nu <nu> - default is 1
3264 
3265   Level: intermediate
3266 
3267   Notes:
3268   This shift is in general done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by choosing `nu` sufficiently large. However,
3269   if `nu` is chosen too large, the matrix H might be badly conditioned and the solution of the linear system $Hx = b$ in the inner loop becomes difficult. It is therefore
3270   necessary to find a good balance in between the convergence of the inner and outer loop.
3271 
3272   For `nu` = 0, no shift is done. In this case A00 has to be positive definite. The matrix N in {cite}`arioli2013` is then chosen as identity.
3273 
3274 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
3275 @*/
PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* The shift must be the same on every rank of the PC's communicator */
  PetscValidLogicalCollectiveReal(pc, nu, 2);
  /* Silently a no-op unless pc is a PCFIELDSPLIT of type GKB (only then is the method composed) */
  PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3284 
3285 static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu)
3286 {
3287   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3288 
3289   PetscFunctionBegin;
3290   jac->gkbnu = nu;
3291   PetscFunctionReturn(PETSC_SUCCESS);
3292 }
3293 
/* Implementation of PCFieldSplitSetType(): installs the apply/view/setup function
   pointers and composed methods appropriate for the chosen composite type. */
static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->type = type;
  /* First clear every type-specific composed method; the branch below re-composes
     only those valid for the newly selected type. This keeps stale methods from a
     previous type from remaining callable. */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));

  if (type == PC_COMPOSITE_SCHUR) {
    /* Schur complement: dedicated apply/view/setup routines plus the Schur-only setters */
    pc->ops->apply          = PCApply_FieldSplit_Schur;
    pc->ops->applytranspose = PCApplyTranspose_FieldSplit_Schur;
    pc->ops->matapply       = PCMatApply_FieldSplit_Schur;
    pc->ops->view           = PCView_FieldSplit_Schur;
    pc->ops->setuponblocks  = PCSetUpOnBlocks_FieldSplit_Schur;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit));
  } else if (type == PC_COMPOSITE_GKB) {
    /* GKB: no transpose or matrix apply available; composes the GKB parameter setters */
    pc->ops->apply          = PCApply_FieldSplit_GKB;
    pc->ops->applytranspose = NULL;
    pc->ops->matapply       = NULL;
    pc->ops->view           = PCView_FieldSplit_GKB;
    pc->ops->setuponblocks  = PCSetUpOnBlocks_FieldSplit_GKB;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit));
  } else {
    /* Additive/multiplicative variants share the generic FieldSplit routines */
    pc->ops->apply          = PCApply_FieldSplit;
    pc->ops->applytranspose = PCApplyTranspose_FieldSplit;
    pc->ops->matapply       = PCMatApply_FieldSplit;
    pc->ops->view           = PCView_FieldSplit;
    pc->ops->setuponblocks  = PCSetUpOnBlocks_FieldSplit;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
3345 
3346 static PetscErrorCode PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs)
3347 {
3348   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3349 
3350   PetscFunctionBegin;
3351   PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" PetscInt_FMT, bs);
3352   PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs);
3353   jac->bs = bs;
3354   PetscFunctionReturn(PETSC_SUCCESS);
3355 }
3356 
3357 static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
3358 {
3359   PC_FieldSplit    *jac           = (PC_FieldSplit *)pc->data;
3360   PC_FieldSplitLink ilink_current = jac->head;
3361   IS                is_owned;
3362 
3363   PetscFunctionBegin;
3364   jac->coordinates_set = PETSC_TRUE; // Internal flag
3365   PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, NULL));
3366 
3367   while (ilink_current) {
3368     // For each IS, embed it to get local coords indces
3369     IS              is_coords;
3370     PetscInt        ndofs_block;
3371     const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block
3372 
3373     // Setting drop to true for safety. It should make no difference.
3374     PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords));
3375     PetscCall(ISGetLocalSize(is_coords, &ndofs_block));
3376     PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration));
3377 
3378     // Allocate coordinates vector and set it directly
3379     PetscCall(PetscMalloc1(ndofs_block * dim, &ilink_current->coords));
3380     for (PetscInt dof = 0; dof < ndofs_block; ++dof) {
3381       for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d];
3382     }
3383     ilink_current->dim   = dim;
3384     ilink_current->ndofs = ndofs_block;
3385     PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration));
3386     PetscCall(ISDestroy(&is_coords));
3387     ilink_current = ilink_current->next;
3388   }
3389   PetscCall(ISDestroy(&is_owned));
3390   PetscFunctionReturn(PETSC_SUCCESS);
3391 }
3392 
3393 /*@
3394   PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
3395 
3396   Collective
3397 
3398   Input Parameters:
3399 + pc   - the preconditioner context
3400 - type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`,
3401          `PC_COMPOSITE_GKB`
3402 
3403   Options Database Key:
. -pc_fieldsplit_type <one of multiplicative, additive, symmetric_multiplicative, special, schur, gkb> - Sets fieldsplit preconditioner type
3405 
3406   Level: intermediate
3407 
3408 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
3409           `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`, `PCFieldSplitSetSchurFactType()`
3410 @*/
PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Dispatch to the composed implementation if present; silently a no-op otherwise */
  PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3418 
3419 /*@
3420   PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
3421 
3422   Not collective
3423 
3424   Input Parameter:
3425 . pc - the preconditioner context
3426 
3427   Output Parameter:
3428 . type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
3429 
3430   Level: intermediate
3431 
3432 .seealso: [](sec_block_matrices), `PC`, `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
3433           `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
3434 @*/
3435 PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type)
3436 {
3437   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3438 
3439   PetscFunctionBegin;
3440   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3441   PetscAssertPointer(type, 2);
3442   *type = jac->type;
3443   PetscFunctionReturn(PETSC_SUCCESS);
3444 }
3445 
3446 /*@
3447   PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3448 
3449   Logically Collective
3450 
3451   Input Parameters:
3452 + pc  - the preconditioner context
3453 - flg - boolean indicating whether to use field splits defined by the `DM`
3454 
3455   Options Database Key:
3456 . -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM`
3457 
3458   Level: intermediate
3459 
3460   Developer Note:
3461   The name should be `PCFieldSplitSetUseDMSplits()`, similar change to options database
3462 
3463 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
3464 @*/
3465 PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg)
3466 {
3467   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3468   PetscBool      isfs;
3469 
3470   PetscFunctionBegin;
3471   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3472   PetscValidLogicalCollectiveBool(pc, flg, 2);
3473   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3474   if (isfs) jac->dm_splits = flg;
3475   PetscFunctionReturn(PETSC_SUCCESS);
3476 }
3477 
3478 /*@
3479   PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3480 
3481   Logically Collective
3482 
3483   Input Parameter:
3484 . pc - the preconditioner context
3485 
3486   Output Parameter:
3487 . flg - boolean indicating whether to use field splits defined by the `DM`
3488 
3489   Level: intermediate
3490 
3491   Developer Note:
3492   The name should be `PCFieldSplitGetUseDMSplits()`
3493 
3494 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
3495 @*/
3496 PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg)
3497 {
3498   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3499   PetscBool      isfs;
3500 
3501   PetscFunctionBegin;
3502   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3503   PetscAssertPointer(flg, 2);
3504   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3505   if (isfs) {
3506     if (flg) *flg = jac->dm_splits;
3507   }
3508   PetscFunctionReturn(PETSC_SUCCESS);
3509 }
3510 
3511 /*@
3512   PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3513 
3514   Logically Collective
3515 
3516   Input Parameter:
3517 . pc - the preconditioner context
3518 
3519   Output Parameter:
3520 . flg - boolean indicating whether to detect fields or not
3521 
3522   Level: intermediate
3523 
3524 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()`
3525 @*/
3526 PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg)
3527 {
3528   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3529 
3530   PetscFunctionBegin;
3531   *flg = jac->detect;
3532   PetscFunctionReturn(PETSC_SUCCESS);
3533 }
3534 
3535 /*@
3536   PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3537 
3538   Logically Collective
3539 
3540   Input Parameter:
3541 . pc - the preconditioner context
3542 
3543   Output Parameter:
3544 . flg - boolean indicating whether to detect fields or not
3545 
3546   Options Database Key:
3547 . -pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point
3548 
3549   Level: intermediate
3550 
3551   Note:
3552   Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`).
3553 
3554 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`
3555 @*/
3556 PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg)
3557 {
3558   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3559 
3560   PetscFunctionBegin;
3561   jac->detect = flg;
3562   if (jac->detect) {
3563     PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
3564     PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
3565   }
3566   PetscFunctionReturn(PETSC_SUCCESS);
3567 }
3568 
3569 /*MC
3570   PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual
3571   collections of variables (that may overlap) called fields or splits. Each field often represents a different continuum variable
3572   represented on a grid, such as velocity, pressure, or temperature.
3573   In the literature these are sometimes called block preconditioners; but should not be confused with `PCBJACOBI`.
3574   See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details.
3575 
3576   Options Database Keys:
3577 +   -pc_fieldsplit_%d_fields <a,b,..>                                                - indicates the fields to be used in the `%d`'th split
3578 .   -pc_fieldsplit_default                                                           - automatically add any fields to additional splits that have not
3579                                                                                        been supplied explicitly by `-pc_fieldsplit_%d_fields`
3580 .   -pc_fieldsplit_block_size <bs>                                                   - size of block that defines fields (i.e. there are bs fields)
3581                                                                                        when the matrix is not of `MatType` `MATNEST`
3582 .   -pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting
3583 .   -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full>                     - default is `a11`; see `PCFieldSplitSetSchurPre()`
3584 .   -pc_fieldsplit_schur_fact_type <diag,lower,upper,full>                           - set factorization type when using `-pc_fieldsplit_type schur`;
3585                                                                                        see `PCFieldSplitSetSchurFactType()`
3586 .   -pc_fieldsplit_dm_splits <true,false> (default is true)                          - Whether to use `DMCreateFieldDecomposition()` for splits
3587 -   -pc_fieldsplit_detect_saddle_point                                               - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver
3588 
3589   Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` .
  The options prefix for the inner solver when using the Golub-Kahan bidiagonalization preconditioner is `-fieldsplit_0_`
3591   For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields.
3592 
3593   To set options on the solvers for all blocks, prepend `-fieldsplit_` to all the `PC`
3594   options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1`.
3595 
3596   To set the options on the solvers separate for each block call `PCFieldSplitGetSubKSP()`
3597   and set the options directly on the resulting `KSP` object
3598 
3599   Level: intermediate
3600 
3601   Notes:
3602   Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries or with a `MATNEST` and `PCFieldSplitSetIS()`
3603   to define a split by an arbitrary collection of entries.
3604 
3605   If no splits are set, the default is used. If a `DM` is associated with the `PC` and it supports
3606   `DMCreateFieldDecomposition()`, then that is used for the default. Otherwise if the matrix is not `MATNEST`, the splits are defined by entries strided by bs,
3607   beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`,
3608   if this is not called the block size defaults to the blocksize of the second matrix passed
3609   to `KSPSetOperators()`/`PCSetOperators()`.
3610 
3611   For the Schur complement preconditioner if
3612   ```{math}
3613     J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right]
3614   ```
3615 
3616   the preconditioner using `full` factorization is logically
3617   ```{math}
3618     \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) A_{01} \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{ksp}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right]
3619       ```
3620   where the action of $\text{ksp}(A_{00})$ is applied using the `KSP` solver with prefix `-fieldsplit_0_`.  $S$ is the Schur complement
3621   ```{math}
3622      S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01}
3623   ```
3624   which is usually dense and not stored explicitly.  The action of $\text{ksp}(S)$ is computed using the `KSP` solver with prefix `-fieldsplit_splitname_` (where `splitname`
3625   was given in providing the SECOND split or 1 if not given). Accordingly, if using `PCFieldSplitGetSubKSP()`, the array of sub-`KSP` contexts will hold two `KSP`s: at its
3626   0th index, the `KSP` associated with `-fieldsplit_0_`, and at its 1st index, the `KSP` corresponding to `-fieldsplit_1_`.
3627   By default, $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$.
3628 
3629   The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above,
3630   `diag` gives
3631   ```{math}
3632     \left[\begin{array}{cc} \text{ksp}(A_{00}) & 0 \\  0 & -\text{ksp}(S) \end{array}\right]
3633   ```
3634   Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$  so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip
3635   can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of
3636   ```{math}
3637     \left[\begin{array}{cc} A_{00} & 0 \\  A_{10} & S \end{array}\right]
3638   ```
3639   where the inverses of $A_{00}$ and $S$ are applied using `KSP`s. The upper factorization is the inverse of
3640   ```{math}
3641     \left[\begin{array}{cc} A_{00} & A_{01} \\  0 & S \end{array}\right]
3642   ```
3643   where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s.
3644 
3645   If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS`
3646   is used automatically for a second submatrix.
3647 
3648   The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1.
3649   Generally it should be used with the `MATAIJ` or `MATNEST` `MatType`
3650 
3651   The forms of these preconditioners are closely related, if not identical, to forms derived as "Distributive Iterations", see,
3652   for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`wesseling2009`.
3653   One can also use `PCFIELDSPLIT` inside a smoother resulting in "Distributive Smoothers".
3654 
3655   See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`.
3656 
3657   The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the
3658   residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables.
3659 
3660   The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape
3661   ```{math}
3662     \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right]
3663   ```
3664   with $A_{00}$ positive semi-definite. The implementation follows {cite}`arioli2013`. Therein, we choose $N := 1/\nu * I$ and the $(1,1)$-block of the matrix is modified to $H = A_{00} + \nu A_{01} A_{01}'$.
3665   A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`.
3666 
3667   Some `PCFIELDSPLIT` variants are called physics-based preconditioners, since the preconditioner takes into account the underlying physics of the
3668   problem. But this nomenclature is not well-defined.
3669 
3670   Developer Note:
3671   The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their
3672   user API.
3673 
3674 .seealso: [](sec_block_matrices), `PC`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`,
3675           `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`,
3676           `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`,
3677           `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()`
3678 M*/
3679 
3680 PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc)
3681 {
3682   PC_FieldSplit *jac;
3683 
3684   PetscFunctionBegin;
3685   PetscCall(PetscNew(&jac));
3686 
3687   jac->bs                 = -1;
3688   jac->type               = PC_COMPOSITE_MULTIPLICATIVE;
3689   jac->schurpre           = PC_FIELDSPLIT_SCHUR_PRE_USER; /* Try user preconditioner first, fall back on diagonal */
3690   jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL;
3691   jac->schurscale         = -1.0;
3692   jac->dm_splits          = PETSC_TRUE;
3693   jac->gkbtol             = 1e-5;
3694   jac->gkbdelay           = 5;
3695   jac->gkbnu              = 1;
3696   jac->gkbmaxit           = 100;
3697 
3698   pc->data = (void *)jac;
3699 
3700   pc->ops->setup           = PCSetUp_FieldSplit;
3701   pc->ops->reset           = PCReset_FieldSplit;
3702   pc->ops->destroy         = PCDestroy_FieldSplit;
3703   pc->ops->setfromoptions  = PCSetFromOptions_FieldSplit;
3704   pc->ops->applyrichardson = NULL;
3705 
3706   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit));
3707   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit));
3708   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit));
3709   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit));
3710   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit));
3711   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit));
3712   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit));
3713 
3714   /* Initialize function pointers */
3715   PetscCall(PCFieldSplitSetType(pc, jac->type));
3716   PetscFunctionReturn(PETSC_SUCCESS);
3717 }
3718