xref: /petsc/src/ksp/pc/impls/fieldsplit/fieldsplit.c (revision c9d0a0b78d8cf8f2c3546ce630ce15a3c73af503)
1 #include <petsc/private/pcimpl.h>  /*I "petscpc.h" I*/
2 #include <petsc/private/kspimpl.h> /*  This is needed to provide the appropriate PETSC_EXTERN for KSP_Solve_FS ....*/
3 #include <petscdm.h>
4 #include <petscdevice.h>
5 #if PetscDefined(HAVE_CUDA)
6   #include <petscdevice_cuda.h>
7 #endif
8 #if PetscDefined(HAVE_HIP)
9   #include <petscdevice_hip.h>
10 #endif
11 
/* Human-readable names for the Schur preconditioner and factorization enums.
   The trailing entries follow the PETSc PetscEnum convention used by options
   processing: the enum type name, the option-value prefix, then a NULL sentinel. */
const char *const PCFieldSplitSchurPreTypes[]  = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL};
const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL};

/* Logging events for the inner solves performed by the fieldsplit algorithms */
PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4;
16 
typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
/* One entry in the linked list of splits. Each split carries its own sub-KSP and
   the index sets describing which part of the full problem it owns. */
struct _PC_FieldSplitLink {
  KSP               ksp;       /* solver associated with this split */
  Vec               x, y, z;   /* work vectors for this split */
  char             *splitname; /* name used in options prefixes and when viewing */
  PetscInt          nfields;   /* number of fields making up this split (when defined by fields) */
  PetscInt         *fields, *fields_col; /* row and column field indices; both are passed to PCFieldSplitSetFields() */
  VecScatter        sctx;      /* scatter between the full vector and this split's vector */
  IS                is, is_col;          /* row and column index sets defining the split */
  PC_FieldSplitLink next, previous;      /* doubly linked list of splits */
  PetscLogEvent     event;               /* event used to log this split's solves */

  /* Used only when setting coordinates with PCSetCoordinates */
  PetscInt   dim;
  PetscInt   ndofs;
  PetscReal *coords;
};
34 
/* Private data for PCFIELDSPLIT: the linked list of splits plus the state needed by
   whichever composition is in use (additive/multiplicative, Schur complement, or GKB) */
typedef struct {
  PCCompositeType type;         /* How the split solves are composed (see PCCompositeType) */
  PetscBool       defaultsplit; /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */
  PetscBool       splitdefined; /* Flag is set after the splits have been defined, to prevent more splits from being added */
  PetscInt        bs;           /* Block size for IS and Mat structures */
  PetscInt        nsplits;      /* Number of field divisions defined */
  Vec            *x, *y, w1, w2; /* Per-split work vector arrays x/y plus whole-system scratch vectors w1/w2 */
  Mat            *mat;    /* The diagonal block for each split */
  Mat            *pmat;   /* The preconditioning diagonal block for each split */
  Mat            *Afield; /* The rows of the matrix associated with each split */
  PetscBool       issetup; /* Whether the splits' index sets/submatrices have been built (see PCSetUp_FieldSplit) */

  /* Only used when Schur complement preconditioning is used */
  Mat                       B;          /* The (0,1) block */
  Mat                       C;          /* The (1,0) block */
  Mat                       schur;      /* The Schur complement S = A11 - A10 A00^{-1} A01, the KSP here, kspinner, is H_1 in [El08] */
  Mat                       schurp;     /* Assembled approximation to S built by MatSchurComplement to be used as a matrix for constructing the preconditioner when solving with S */
  Mat                       schur_user; /* User-provided matrix for constructing the preconditioner for the Schur complement */
  PCFieldSplitSchurPreType  schurpre;   /* Determines which matrix is used for the Schur complement */
  PCFieldSplitSchurFactType schurfactorization;
  KSP                       kspschur;   /* The solver for S */
  KSP                       kspupper;   /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */
  PetscScalar               schurscale; /* Scaling factor for the Schur complement solution with DIAG factorization */

  /* Only used when Golub-Kahan bidiagonalization preconditioning is used */
  Mat          H;           /* The modified matrix H = A00 + nu*A01*A01'              */
  PetscReal    gkbtol;      /* Stopping tolerance for lower bound estimate            */
  PetscInt     gkbdelay;    /* The delay window for the stopping criterion            */
  PetscReal    gkbnu;       /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */
  PetscInt     gkbmaxit;    /* Maximum number of iterations for outer loop            */
  PetscBool    gkbmonitor;  /* Monitor for gkb iterations and the lower bound error   */
  PetscViewer  gkbviewer;   /* Viewer context for gkbmonitor                          */
  Vec          u, v, d, Hu; /* Work vectors for the GKB algorithm                     */
  PetscScalar *vecz;        /* Contains intermediate values, eg for lower bound       */

  PC_FieldSplitLink head;              /* First link in the list of splits */
  PetscBool         isrestrict;       /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */
  PetscBool         suboptionsset;    /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */
  PetscBool         dm_splits;        /* Whether to use DMCreateFieldDecomposition() whenever possible */
  PetscBool         diag_use_amat;    /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         detect;           /* Whether to form 2-way split by finding zero diagonal entries */
  PetscBool         coordinates_set;  /* Whether PCSetCoordinates has been called */
} PC_FieldSplit;
79 
/*
    Note:
    There is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit
    rather than inside PC_FieldSplitLink; it is purely historical. Moving them into the link
    structure would make it possible to add new fields after the PC has already been used.
*/
86 
87 /* This helper is so that setting a user-provided matrix is orthogonal to choosing to use it.  This way the
88 * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices. */
89 static Mat FieldSplitSchurPre(PC_FieldSplit *jac)
90 {
91   switch (jac->schurpre) {
92   case PC_FIELDSPLIT_SCHUR_PRE_SELF:
93     return jac->schur;
94   case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
95     return jac->schurp;
96   case PC_FIELDSPLIT_SCHUR_PRE_A11:
97     return jac->pmat[1];
98   case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */
99   case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */
100   default:
101     return jac->schur_user ? jac->schur_user : jac->pmat[1];
102   }
103 }
104 
105 #include <petscdraw.h>
/* Viewer for PCFIELDSPLIT with non-Schur, non-GKB composition: prints the composition
   type, block size, the fields of each split, and each split's KSP (ASCII), or lays the
   sub-KSPs out side by side (draw). */
static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         isascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for each split is in the following KSP objects:\n"));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        /* Split defined by a list of field indices: print them comma-separated */
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); /* keep the field list on one line */
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      PetscCall(KSPView(ilink->ksp, viewer));
      ilink = ilink->next;
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* Lay the splits out horizontally, centered about the current draw point */
    w  = 2 * PetscMin(1.0 - x, x); /* horizontal room available on both sides of x */
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
163 
/* Viewer for PCFIELDSPLIT with Schur complement composition: reports the factorization
   type, how the Schur preconditioner is being formed, the fields of each split, and
   the A00 / upper-A00 / Schur sub-KSPs (ASCII), or draws the sub-KSPs side by side. */
static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer)
{
  PC_FieldSplit             *jac = (PC_FieldSplit *)pc->data;
  PetscBool                  isascii, isdraw;
  PetscInt                   i, j;
  PC_FieldSplitLink          ilink = jac->head;
  MatSchurComplementAinvType atype;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    /* Describe which matrix the Schur complement preconditioner is built from
       (mirrors the selection logic in FieldSplitSchurPre()) */
    switch (jac->schurpre) {
    case PC_FIELDSPLIT_SCHUR_PRE_SELF:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from S itself\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
      /* jac->schur may not exist yet if PCSetUp() has not run; print nothing in that case */
      if (jac->schur) {
        PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype));
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's "))));
      }
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_A11:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_FULL:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from the exact Schur complement\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_USER:
      /* USER without a user matrix falls back to A11, as in FieldSplitSchurPre() */
      if (jac->schur_user) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from user provided matrix\n"));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      }
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre);
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Split info:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        /* Split defined by a list of field indices: print them comma-separated */
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); /* keep the field list on one line */
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      ilink = ilink->next;
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->head) PetscCall(KSPView(jac->head->ksp, viewer));
    else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    PetscCall(PetscViewerASCIIPopTab(viewer));
    /* Only show kspupper separately when it differs from the A00 solver */
    if (jac->head && jac->kspupper != jac->head->ksp) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->kspschur) {
      PetscCall(KSPView(jac->kspschur, viewer));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    }
    PetscCall(PetscViewerASCIIPopTab(viewer));
    PetscCall(PetscViewerASCIIPopTab(viewer));
  } else if (isdraw && jac->head) {
    PetscDraw draw;
    PetscReal x, y, w, wd, h;
    PetscInt  cnt = 2; /* number of sub-KSPs drawn: A00 and Schur, plus kspupper when distinct */
    char      str[32];

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (jac->kspupper != jac->head->ksp) cnt++;
    w  = 2 * PetscMin(1.0 - x, x); /* horizontal room available on both sides of x */
    wd = w / (cnt + 1);

    /* Header boxes: factorization type, then the source of the Schur preconditioner */
    PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) {
      /* USER without a user matrix falls back to A11; report what is actually used */
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11]));
    } else {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre]));
    }
    PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    x = x - wd * (cnt - 1) / 2.0; /* center the row of sub-KSPs */

    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->head->ksp, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
    if (jac->kspupper != jac->head->ksp) {
      x += wd;
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(jac->kspupper, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
    }
    x += wd;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->kspschur, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
286 
/* Viewer for PCFIELDSPLIT with Golub-Kahan bidiagonalization composition: prints the
   GKB parameters and the solver for H = A00 + nu*A01*A01' (ASCII), or draws the
   sub-KSPs side by side (draw). */
static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         isascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));

    /* GKB-specific parameters */
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for H = A00 + nu*A01*A01' matrix:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));

    /* Only the first split's solver is shown here: GKB uses split 0 for H */
    if (ilink->fields) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields "));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); /* keep the field list on one line */
      for (j = 0; j < ilink->nfields; j++) {
        if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
        PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
      }
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n"));
    }
    PetscCall(KSPView(ilink->ksp, viewer));

    PetscCall(PetscViewerASCIIPopTab(viewer));
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* Lay the splits out horizontally, centered about the current draw point */
    w  = 2 * PetscMin(1.0 - x, x); /* horizontal room available on both sides of x */
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
347 
348 /* Precondition: jac->bs is set to a meaningful value or MATNEST */
349 static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc)
350 {
351   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
352   PetscInt       bs, i, nfields, *ifields, nfields_col, *ifields_col;
353   PetscBool      flg, flg_col, mnest;
354   char           optionname[128], splitname[8], optionname_col[128];
355 
356   PetscFunctionBegin;
357   PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &mnest));
358   if (mnest) {
359     PetscCall(MatNestGetSize(pc->pmat, &bs, NULL));
360   } else {
361     bs = jac->bs;
362   }
363   PetscCall(PetscMalloc2(bs, &ifields, bs, &ifields_col));
364   for (i = 0, flg = PETSC_TRUE;; i++) {
365     PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
366     PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
367     PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i));
368     nfields     = bs;
369     nfields_col = bs;
370     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
371     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col));
372     if (!flg) break;
373     else if (flg && !flg_col) {
374       PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
375       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields));
376     } else {
377       PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
378       PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match");
379       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col));
380     }
381   }
382   if (i > 0) {
383     /* Makes command-line setting of splits take precedence over setting them in code.
384        Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would
385        create new splits, which would probably not be what the user wanted. */
386     jac->splitdefined = PETSC_TRUE;
387   }
388   PetscCall(PetscFree2(ifields, ifields_col));
389   PetscFunctionReturn(PETSC_SUCCESS);
390 }
391 
/* Build the splits when the user has not fully defined them. In order of preference:
   a DM field decomposition, zero-diagonal detection (-pc_fieldsplit_detect_coupling /
   jac->detect), splits named on the command line, or one split per field of the block
   size. If exactly one split was supplied, a second one is created as its complement.
   Fails unless at least two splits result. */
static PetscErrorCode PCFieldSplitSetDefaults(PC pc)
{
  PC_FieldSplit    *jac                = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink              = jac->head;
  PetscBool         fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE;
  PetscInt          i;

  PetscFunctionBegin;
  /*
   Kinda messy, but at least this now uses DMCreateFieldDecomposition().
   Should probably be rewritten.
   */
  if (!ilink) { /* no splits defined yet */
    PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL));
    if (pc->dm && jac->dm_splits && !jac->detect && !coupling) {
      /* Derive splits from the DM's field decomposition */
      PetscInt  numFields, f, i, j; /* NOTE: this i shadows the outer i */
      char    **fieldNames;
      IS       *fields;
      DM       *dms;
      DM        subdm[128];
      PetscBool flg;

      PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms));
      /* Allow the user to prescribe the splits */
      for (i = 0, flg = PETSC_TRUE;; i++) {
        PetscInt ifields[128];
        IS       compField;
        char     optionname[128], splitname[8];
        PetscInt nfields = numFields;

        PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
        PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
        if (!flg) break; /* no option for split i: done reading user splits */
        /* NOTE(review): this bounds numFields (and hence ifields[]), but subdm[i] below is
           indexed by the number of user-specified splits i, which is not checked against
           128 — presumably i <= numFields in practice; verify. */
        PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields);
        PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i]));
        if (nfields == 1) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField));
        } else {
          /* Multi-field split: name it by its index */
          PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
          PetscCall(PCFieldSplitSetIS(pc, splitname, compField));
        }
        PetscCall(ISDestroy(&compField));
        /* Release the per-field names/ISs consumed by this split */
        for (j = 0; j < nfields; ++j) {
          f = ifields[j];
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      }
      if (i == 0) {
        /* No user-prescribed splits: one split per DM field */
        for (f = 0; f < numFields; ++f) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f]));
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      } else {
        /* Replace the per-field DMs with the sub-DMs created for the user splits */
        for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j));
        PetscCall(PetscFree(dms));
        PetscCall(PetscMalloc1(i, &dms));
        for (j = 0; j < i; ++j) dms[j] = subdm[j];
      }
      PetscCall(PetscFree(fieldNames));
      PetscCall(PetscFree(fields));
      if (dms) {
        /* Attach each split's DM to its sub-KSP (ownership passes to the KSP) */
        PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n"));
        for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) {
          const char *prefix;
          PetscCall(PetscObjectGetOptionsPrefix((PetscObject)ilink->ksp, &prefix));
          PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dms[i], prefix));
          PetscCall(KSPSetDM(ilink->ksp, dms[i]));
          PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE));
          PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0));
          PetscCall(DMDestroy(&dms[i]));
        }
        PetscCall(PetscFree(dms));
      }
    } else {
      /* No DM-based decomposition: fall back to block size / detection / options */
      if (jac->bs <= 0) {
        if (pc->pmat) PetscCall(MatGetBlockSize(pc->pmat, &jac->bs));
        else jac->bs = 1;
      }

      if (jac->detect) {
        /* 2-way split: rows with zero diagonal vs. the rest */
        IS       zerodiags, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->diag_use_amat) {
          PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags));
        } else {
          PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags));
        }
        PetscCall(ISComplement(zerodiags, nmin, nmax, &rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags));
        PetscCall(ISDestroy(&zerodiags));
        PetscCall(ISDestroy(&rest));
      } else if (coupling) {
        /* 2-way split: all rows vs. rows with off-block-diagonal entries */
        IS       coupling, rest; /* NOTE: this IS shadows the outer PetscBool coupling */
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->offdiag_use_amat) {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling));
        } else {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling));
        }
        PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest));
        PetscCall(ISSetIdentity(rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", coupling));
        PetscCall(ISDestroy(&coupling));
        PetscCall(ISDestroy(&rest));
      } else {
        PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL));
        if (!fieldsplit_default) {
          /* Allow user to set fields from command line,  if bs was known at the time of PCSetFromOptions_FieldSplit()
           then it is set there. This is not ideal because we should only have options set in XXSetFromOptions(). */
          PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
          if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
        }
        if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) {
          /* Default splitting: one split per field, where the number of fields is the
             block size (or the nest size for MATNEST matrices) */
          Mat       M = pc->pmat;
          PetscBool isnest;
          PetscInt  nf;

          PetscCall(PetscInfo(pc, "Using default splitting of fields\n"));
          PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest));
          if (!isnest) {
            M = pc->mat;
            PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest));
          }
          if (!isnest) nf = jac->bs;
          else PetscCall(MatNestGetSize(M, &nf, NULL));
          for (i = 0; i < nf; i++) {
            char splitname[8];

            PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
            PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i));
          }
          jac->defaultsplit = PETSC_TRUE;
        }
      }
    }
  } else if (jac->nsplits == 1) {
    /* Exactly one split was provided: create the second as its complement */
    IS       is2;
    PetscInt nmin, nmax;

    PetscCheck(ilink->is, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()");
    PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
    PetscCall(ISComplement(ilink->is, nmin, nmax, &is2));
    PetscCall(PCFieldSplitSetIS(pc, "1", is2));
    PetscCall(ISDestroy(&is2));
  }

  PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits);
  PetscFunctionReturn(PETSC_SUCCESS);
}
549 
550 static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu)
551 {
552   Mat       BT, T;
553   PetscReal nrmT, nrmB;
554 
555   PetscFunctionBegin;
556   PetscCall(MatHermitianTranspose(C, MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */
557   PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN));
558   PetscCall(MatNorm(T, NORM_1, &nrmT));
559   PetscCall(MatNorm(B, NORM_1, &nrmB));
560   PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/Hermitian, GKB is not applicable.");
561 
562   /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */
563   /* setting N := 1/nu*I in [Ar13].                                                 */
564   PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT));
565   PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_CURRENT, H)); /* H = A01*A01'          */
566   PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN));        /* H = A00 + nu*A01*A01' */
567 
568   PetscCall(MatDestroy(&BT));
569   PetscCall(MatDestroy(&T));
570   PetscFunctionReturn(PETSC_SUCCESS);
571 }
572 
573 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *option[], const char *value[], PetscBool *flg);
574 
/*
  PCSetUp_FieldSplit - Builds all data needed to apply the field-split preconditioner:
  the index sets defining each split, the diagonal (and, depending on the composite
  type, off-diagonal) submatrices, the per-split work vectors and scatters, and the
  inner KSP solvers (including the Schur-complement or GKB machinery when selected).

  Safe to call repeatedly: on re-setup the submatrices are either reused in place
  (SAME_NONZERO_PATTERN) or destroyed and re-extracted (DIFFERENT_NONZERO_PATTERN).
*/
static PetscErrorCode PCSetUp_FieldSplit(PC pc)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink;
  PetscInt          i, nsplit;
  PetscBool         matnest = PETSC_FALSE;

  PetscFunctionBegin;
  pc->failedreason = PC_NOERROR;
  PetscCall(PCFieldSplitSetDefaults(pc));
  nsplit = jac->nsplits;
  ilink  = jac->head;
  /* MATNEST already carries the ISs that define its blocks; detect it so we can reuse them below */
  if (pc->pmat) PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));

  /* get the matrices for each split */
  if (!jac->issetup) {
    PetscInt rstart, rend, nslots, bs;

    jac->issetup = PETSC_TRUE;

    /* This is done here instead of in PCFieldSplitSetFields() because may not have matrix at that point */
    if (jac->defaultsplit || !ilink->is) {
      if (jac->bs <= 0) jac->bs = nsplit;
    }

    /*  MatCreateSubMatrix() for [S]BAIJ matrices can only work if the indices include entire blocks of the matrix */
    PetscCall(MatGetBlockSize(pc->pmat, &bs));
    if (bs > 1 && (jac->bs <= bs || jac->bs % bs)) {
      PetscBool blk;

      PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &blk, MATBAIJ, MATSBAIJ, MATSEQBAIJ, MATSEQSBAIJ, MATMPIBAIJ, MATMPISBAIJ, NULL));
      PetscCheck(!blk, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Cannot use MATBAIJ with PCFIELDSPLIT and currently set matrix and PC blocksizes");
    }

    if (!matnest) { /* use the matrix blocksize and stride IS to determine the index sets that define the submatrices */
      bs = jac->bs;
      PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend));
      nslots = (rend - rstart) / bs; /* number of full blocks owned by this rank */
      for (i = 0; i < nsplit; i++) {
        if (jac->defaultsplit) {
          /* default split: field i takes every bs-th entry starting at offset i */
          PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is));
          PetscCall(PetscObjectReference((PetscObject)ilink->is));
          ilink->is_col = ilink->is; /* column IS shares the row IS (extra reference taken above) */
        } else if (!ilink->is) {
          /* same_fields: row and column field lists coincide, so one IS can serve both */
          PetscBool same_fields = PETSC_TRUE;

          for (PetscInt k = 0; k < ilink->nfields; k++) {
            if (ilink->fields[k] != ilink->fields_col[k]) same_fields = PETSC_FALSE;
          }

          if (ilink->nfields > 1) {
            /* multiple fields per split: build explicit index lists (ownership passes to the IS) */
            PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col;

            PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii));
            if (!same_fields) PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj));
            for (j = 0; j < nslots; j++) {
              for (k = 0; k < nfields; k++) {
                ii[nfields * j + k] = rstart + bs * j + fields[k];
                if (!same_fields) jj[nfields * j + k] = rstart + bs * j + fields_col[k];
              }
            }
            PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is));
            if (!same_fields) PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col));
            else {
              PetscCall(PetscObjectReference((PetscObject)ilink->is));
              ilink->is_col = ilink->is;
            }
            PetscCall(ISSetBlockSize(ilink->is, nfields));
            PetscCall(ISSetBlockSize(ilink->is_col, nfields));
          } else {
            /* single field per split: a strided IS suffices */
            PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is));
            if (!same_fields) PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col));
            else {
              PetscCall(PetscObjectReference((PetscObject)ilink->is));
              ilink->is_col = ilink->is;
            }
          }
        }
        ilink = ilink->next;
      }
    } else { /* use the IS that define the MATNEST to determine the index sets that define the submatrices */
      IS      *rowis, *colis, *ises = NULL;
      PetscInt mis, nis;

      PetscCall(MatNestGetSize(pc->pmat, &mis, &nis));
      PetscCall(PetscMalloc2(mis, &rowis, nis, &colis));
      PetscCall(MatNestGetISs(pc->pmat, rowis, colis));
      if (!jac->defaultsplit) PetscCall(PetscMalloc1(mis, &ises)); /* scratch used to concatenate ISs when a split spans several nest blocks */

      for (i = 0; i < nsplit; i++) {
        if (jac->defaultsplit) {
          PetscCall(ISDuplicate(rowis[i], &ilink->is));
          PetscCall(PetscObjectReference((PetscObject)ilink->is));
          ilink->is_col = ilink->is;
        } else if (!ilink->is) {
          if (ilink->nfields > 1) {
            for (PetscInt j = 0; j < ilink->nfields; j++) ises[j] = rowis[ilink->fields[j]];
            PetscCall(ISConcatenate(PetscObjectComm((PetscObject)pc), ilink->nfields, ises, &ilink->is));
          } else {
            PetscCall(ISDuplicate(rowis[ilink->fields[0]], &ilink->is));
          }
          PetscCall(PetscObjectReference((PetscObject)ilink->is));
          ilink->is_col = ilink->is;
        }
        ilink = ilink->next;
      }
      PetscCall(PetscFree2(rowis, colis));
      PetscCall(PetscFree(ises));
    }
  }

  ilink = jac->head;
  if (!jac->pmat) {
    /* first setup: extract the diagonal preconditioning blocks and create per-split work vectors */
    Vec xtmp;

    PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL));
    PetscCall(PetscMalloc1(nsplit, &jac->pmat));
    PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y));
    for (i = 0; i < nsplit; i++) {
      MatNullSpace sp;

      /* Check for matrix attached to IS */
      PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i]));
      if (jac->pmat[i]) {
        /* user supplied the block via PetscObjectCompose() on the IS; take our own reference */
        PetscCall(PetscObjectReference((PetscObject)jac->pmat[i]));
        if (jac->type == PC_COMPOSITE_SCHUR) {
          jac->schur_user = jac->pmat[i];

          PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
        }
      } else {
        const char *prefix;
        PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i]));
        PetscCall(MatGetOptionsPrefix(jac->pmat[i], &prefix));
        if (!prefix) {
          /* inherit the split KSP's prefix so -fieldsplit_<name>_mat_... options reach this block */
          PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix));
          PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix));
        }
        PetscCall(MatSetFromOptions(jac->pmat[i]));
        PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view"));
      }
      /* create work vectors for each split */
      PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i]));
      ilink->x = jac->x[i];
      ilink->y = jac->y[i];
      ilink->z = NULL;
      /* compute scatter contexts needed by multiplicative versions and non-default splits */
      PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx));
      PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp));
      if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp));
      ilink = ilink->next;
    }
    PetscCall(VecDestroy(&xtmp));
  } else {
    /* re-setup: reuse or rebuild the blocks, preserving attached null spaces across a rebuild */
    MatReuse      scall;
    MatNullSpace *nullsp = NULL;

    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* save the null spaces before destroying so they can be re-attached to the new matrices */
      PetscCall(MatGetNullSpaces(nsplit, jac->pmat, &nullsp));
      for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i]));
      scall = MAT_INITIAL_MATRIX;
    } else scall = MAT_REUSE_MATRIX;

    for (i = 0; i < nsplit; i++) {
      Mat pmat;

      /* Check for matrix attached to IS */
      PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat));
      if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i]));
      ilink = ilink->next;
    }
    if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->pmat, &nullsp));
  }
  if (jac->diag_use_amat) {
    /* diagonal blocks come from the operator matrix Amat, extracted separately from pmat */
    ilink = jac->head;
    if (!jac->mat) {
      PetscCall(PetscMalloc1(nsplit, &jac->mat));
      for (i = 0; i < nsplit; i++) {
        PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i]));
        ilink = ilink->next;
      }
    } else {
      MatReuse      scall;
      MatNullSpace *nullsp = NULL;

      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        PetscCall(MatGetNullSpaces(nsplit, jac->mat, &nullsp));
        for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i]));
        scall = MAT_INITIAL_MATRIX;
      } else scall = MAT_REUSE_MATRIX;

      for (i = 0; i < nsplit; i++) {
        PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i]));
        ilink = ilink->next;
      }
      if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->mat, &nullsp));
    }
  } else {
    jac->mat = jac->pmat; /* alias: Amat blocks are the Pmat blocks */
  }

  /* Check for null space attached to IS */
  ilink = jac->head;
  for (i = 0; i < nsplit; i++) {
    MatNullSpace sp;

    PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp));
    if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp));
    ilink = ilink->next;
  }

  if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) {
    /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */
    /* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? */
    ilink = jac->head;
    if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) {
      /* special case need where Afield[0] is not needed and only certain columns of Afield[1] are needed since update is only on those rows of the solution */
      if (!jac->Afield) {
        PetscCall(PetscCalloc1(nsplit, &jac->Afield)); /* calloc so Afield[0] stays NULL */
        if (jac->offdiag_use_amat) {
          PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
        } else {
          PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
        }
      } else {
        MatReuse scall;

        if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
          PetscCall(MatDestroy(&jac->Afield[1]));
          scall = MAT_INITIAL_MATRIX;
        } else scall = MAT_REUSE_MATRIX;

        if (jac->offdiag_use_amat) {
          PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
        } else {
          PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
        }
      }
    } else {
      /* general case: full row blocks (all columns) for every split */
      if (!jac->Afield) {
        PetscCall(PetscMalloc1(nsplit, &jac->Afield));
        for (i = 0; i < nsplit; i++) {
          if (jac->offdiag_use_amat) {
            PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
          } else {
            PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
          }
          ilink = ilink->next;
        }
      } else {
        MatReuse scall;
        if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
          for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i]));
          scall = MAT_INITIAL_MATRIX;
        } else scall = MAT_REUSE_MATRIX;

        for (i = 0; i < nsplit; i++) {
          if (jac->offdiag_use_amat) {
            PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i]));
          } else {
            PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, &jac->Afield[i]));
          }
          ilink = ilink->next;
        }
      }
    }
  }

  if (jac->type == PC_COMPOSITE_SCHUR) {
    PetscBool   isset, isspd = PETSC_FALSE, issym = PETSC_FALSE, flg;
    char        lscname[256];
    PetscObject LSC_L;

    PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields");

    /* If pc->mat is SPD, don't scale by -1 the Schur complement */
    PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd));
    if (jac->schurscale == (PetscScalar)-1.0) jac->schurscale = (isset && isspd) ? 1.0 : -1.0;
    PetscCall(MatIsSymmetricKnown(pc->pmat, &isset, &issym));

    /* for SBAIJ only the upper triangle is stored, so A10 must be formed as a transpose of A01 below */
    PetscCall(PetscObjectTypeCompareAny(jac->offdiag_use_amat ? (PetscObject)pc->mat : (PetscObject)pc->pmat, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));

    if (jac->schur) {
      /* re-setup: update the existing Schur complement in place */
      KSP      kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper;
      MatReuse scall;

      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        scall = MAT_INITIAL_MATRIX;
        PetscCall(MatDestroy(&jac->B));
        PetscCall(MatDestroy(&jac->C));
      } else scall = MAT_REUSE_MATRIX;

      PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
      ilink = jac->head;
      PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->is, ilink->next->is, scall, &jac->B));
      if (!flg) PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->next->is, ilink->is, scall, &jac->C));
      else {
        /* SBAIJ: cannot extract the lower block directly; represent C as B^H or B^T */
        PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &isset, &flg));
        if (isset && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
        else PetscCall(MatCreateTranspose(jac->B, &jac->C));
      }
      ilink = ilink->next;
      PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
      if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) {
        PetscCall(MatDestroy(&jac->schurp));
        PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
      } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL && jac->kspupper != jac->head->ksp) {
        PetscCall(MatDestroy(&jac->schur_user));
        PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
      }
      /* only reset operators on solvers that are distinct objects, to avoid redundant setup */
      if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0]));
      if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0]));
      PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
    } else {
      /* first setup: create the Schur complement matrix and its solvers */
      const char  *Dprefix;
      char         schurprefix[256], schurmatprefix[256];
      char         schurtestoption[256];
      MatNullSpace sp;
      KSP          kspt;

      /* extract the A01 and A10 matrices */
      ilink = jac->head;
      PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->is, ilink->next->is, MAT_INITIAL_MATRIX, &jac->B));
      if (!flg) PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->C));
      else {
        PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &isset, &flg));
        if (isset && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
        else PetscCall(MatCreateTranspose(jac->B, &jac->C));
      }
      ilink = ilink->next;
      /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */
      PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur));
      PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT));
      PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
      /* note: ilink now points at the SECOND split, so the Schur matrix gets that split's name */
      PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
      PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix));
      PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt));
      PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix));

      /* Note: this is not true in general */
      PetscCall(MatGetNullSpace(jac->mat[1], &sp));
      if (sp) PetscCall(MatSetNullSpace(jac->schur, sp));

      /* probe the options database: did the user configure a dedicated "inner" solver? */
      PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname));
      PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
      if (flg) {
        DM  dmInner;
        KSP kspInner;
        PC  pcInner;

        PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
        PetscCall(KSPReset(kspInner));
        PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0]));
        PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
        /* Indent this deeper to emphasize the "inner" nature of this solver. */
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2));
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2));
        PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix));

        /* Set DM for new solver */
        PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
        PetscCall(KSPSetDM(kspInner, dmInner));
        PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE));

        /* Defaults to PCKSP as preconditioner */
        PetscCall(KSPGetPC(kspInner, &pcInner));
        PetscCall(PCSetType(pcInner, PCKSP));
        PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp));
      } else {
        /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or
          * PCFieldSplitSetIS_FieldSplit. We don't want KSPPREONLY because it makes the Schur complement inexact,
          * preventing Schur complement reduction to be an accurate solve. Usually when an iterative solver is used for
          * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make
          * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used
          * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. */
        PetscCall(KSPSetType(jac->head->ksp, KSPGMRES));
        PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp));
      }
      PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0]));
      PetscCall(KSPSetFromOptions(jac->head->ksp));
      PetscCall(MatSetFromOptions(jac->schur));

      PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg));
      if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */
        KSP kspInner;
        PC  pcInner;

        PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
        PetscCall(KSPGetPC(kspInner, &pcInner));
        PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg));
        if (flg) {
          KSP ksp;

          PetscCall(PCKSPGetKSP(pcInner, &ksp));
          if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE));
        }
      }
      /* probe the options database: did the user configure a dedicated "upper" solver for the full factorization? */
      PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname));
      PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
      if (flg) {
        DM dmInner;

        PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
        PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper));
        PetscCall(KSPSetNestLevel(jac->kspupper, pc->kspnestlevel));
        PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure));
        PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix));
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1));
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1));
        PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
        PetscCall(KSPSetDM(jac->kspupper, dmInner));
        PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE));
        PetscCall(KSPSetFromOptions(jac->kspupper));
        PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0]));
        PetscCall(VecDuplicate(jac->head->x, &jac->head->z)); /* extra work vector needed only when upper != A00 solver */
      } else {
        jac->kspupper = jac->head->ksp;
        PetscCall(PetscObjectReference((PetscObject)jac->head->ksp));
      }

      if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
      PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur));
      PetscCall(KSPSetNestLevel(jac->kspschur, pc->kspnestlevel));
      PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1));
      if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) {
        PC pcschur;
        PetscCall(KSPGetPC(jac->kspschur, &pcschur));
        PetscCall(PCSetType(pcschur, PCNONE));
        /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */
      } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) {
        if (jac->schurfactorization != PC_FIELDSPLIT_SCHUR_FACT_FULL || jac->kspupper != jac->head->ksp) PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
      }
      PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
      /* the Schur solver answers to the second split's options prefix */
      PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix));
      PetscCall(KSPSetOptionsPrefix(jac->kspschur, Dprefix));
      /* propagate DM */
      {
        DM sdm;
        PetscCall(KSPGetDM(jac->head->next->ksp, &sdm));
        if (sdm) {
          PetscCall(KSPSetDM(jac->kspschur, sdm));
          PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE));
        }
      }
      /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
      /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */
      PetscCall(KSPSetFromOptions(jac->kspschur));
    }
    PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY));
    if (issym) PetscCall(MatSetOption(jac->schur, MAT_SYMMETRIC, PETSC_TRUE));
    if (isspd) PetscCall(MatSetOption(jac->schur, MAT_SPD, PETSC_TRUE));

    /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */
    PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname));
    PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, &LSC_L));
    if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, &LSC_L));
    if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", LSC_L));
    PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname));
    PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, &LSC_L));
    if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, &LSC_L));
    if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", LSC_L));
  } else if (jac->type == PC_COMPOSITE_GKB) {
    PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields");
    ilink = jac->head;
    PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->is, ilink->next->is, MAT_INITIAL_MATRIX, &jac->B));
    /* Create work vectors for GKB algorithm */
    PetscCall(VecDuplicate(ilink->x, &jac->u));
    PetscCall(VecDuplicate(ilink->x, &jac->Hu));
    PetscCall(VecDuplicate(ilink->x, &jac->w2));
    PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->C));
    ilink = ilink->next;
    /* Create work vectors for GKB algorithm */
    PetscCall(VecDuplicate(ilink->x, &jac->v));
    PetscCall(VecDuplicate(ilink->x, &jac->d));
    PetscCall(VecDuplicate(ilink->x, &jac->w1));
    /* H = A00 + nu*A01*A01' is the augmented (0,0) operator the inner solver works with */
    PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu));
    PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz)); /* delayed convergence-monitor history */

    ilink = jac->head;
    PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H));
    if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
    /* Create gkb_monitor context */
    if (jac->gkbmonitor) {
      PetscInt tablevel;
      PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer));
      PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII));
      PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel));
      PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1));
    }
  } else {
    /* set up the individual splits' PCs */
    i     = 0;
    ilink = jac->head;
    while (ilink) {
      PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i]));
      /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
      if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
      i++;
      ilink = ilink->next;
    }
  }

  /* Set coordinates to the sub PC objects whenever these are set */
  if (jac->coordinates_set) {
    PC pc_coords;
    if (jac->type == PC_COMPOSITE_SCHUR) {
      // Head is first block.
      PetscCall(KSPGetPC(jac->head->ksp, &pc_coords));
      PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords));
      // Second one is Schur block, but its KSP object is in kspschur.
      PetscCall(KSPGetPC(jac->kspschur, &pc_coords));
      PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords));
    } else if (jac->type == PC_COMPOSITE_GKB) {
      PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB Fieldpslit preconditioner\n"));
    } else {
      ilink = jac->head;
      while (ilink) {
        PetscCall(KSPGetPC(ilink->ksp, &pc_coords));
        PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords));
        ilink = ilink->next;
      }
    }
  }

  jac->suboptionsset = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1105 
/*
  PCSetUpOnBlocks_FieldSplit_Schur - Runs setup on the inner solvers of the Schur
  field-split (the A00 solver, the optional distinct "upper" solver, and the Schur
  solver), and for the FULL factorization with a shared upper/A00 solver lazily
  allocates the dense "AinvB" workspace attached to the Schur matrix.
*/
static PetscErrorCode PCSetUpOnBlocks_FieldSplit_Schur(PC pc)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head; /* first split: the A00 block */
  KSP               kspA = ilinkA->ksp, kspUpper = jac->kspupper;

  PetscFunctionBegin;
  /* only a distinct upper solver needs its own setup pass */
  if (jac->schurfactorization == PC_FIELDSPLIT_SCHUR_FACT_FULL && kspUpper != kspA) {
    PetscCall(KSPSetUp(kspUpper));
    PetscCall(KSPSetUpOnBlocks(kspUpper));
  }
  PetscCall(KSPSetUp(kspA));
  PetscCall(KSPSetUpOnBlocks(kspA));
  if (jac->schurpre != PC_FIELDSPLIT_SCHUR_PRE_FULL) {
    PetscCall(KSPSetUp(jac->kspschur));
    PetscCall(KSPSetUpOnBlocks(jac->kspschur));
  } else if (kspUpper == kspA && jac->schurfactorization == PC_FIELDSPLIT_SCHUR_FACT_FULL) {
    Mat          A;
    PetscInt     m, M, N;
    VecType      vtype;
    PetscMemType mtype;
    PetscScalar *array;

    PetscCall(MatGetSize(jac->B, &M, &N));
    PetscCall(MatGetLocalSize(jac->B, &m, NULL));
    PetscCall(MatGetVecType(jac->B, &vtype));
    /* get/restore pair only probes where the work vector's data lives (host vs. device) */
    PetscCall(VecGetArrayAndMemType(ilinkA->x, &array, &mtype));
    PetscCall(VecRestoreArrayAndMemType(ilinkA->x, &array));
    PetscCall(PetscObjectQuery((PetscObject)jac->schur, "AinvB", (PetscObject *)&A));
    if (!A) {
      /* allocate the buffer in the same memory space as the work vector */
      if (PetscMemTypeHost(mtype) || (!PetscDefined(HAVE_CUDA) && !PetscDefined(HAVE_HIP))) PetscCall(PetscMalloc1(m * (N + 1), &array));
#if PetscDefined(HAVE_CUDA)
      else if (PetscMemTypeCUDA(mtype)) PetscCallCUDA(cudaMalloc((void **)&array, sizeof(PetscScalar) * m * (N + 1)));
#endif
#if PetscDefined(HAVE_HIP)
      else if (PetscMemTypeHIP(mtype)) PetscCallHIP(hipMalloc((void **)&array, sizeof(PetscScalar) * m * (N + 1)));
#endif
      PetscCall(MatCreateDenseFromVecType(PetscObjectComm((PetscObject)jac->schur), vtype, m, PETSC_DECIDE, M, N + 1, -1, array, &A)); // number of columns of the Schur complement plus one
      PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", (PetscObject)A));
      /* the composition above holds a reference; drop ours so jac->schur owns it */
      PetscCall(MatDestroy(&A));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1150 
1151 static PetscErrorCode PCSetUpOnBlocks_FieldSplit(PC pc)
1152 {
1153   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1154   PC_FieldSplitLink ilink = jac->head;
1155 
1156   PetscFunctionBegin;
1157   while (ilink) {
1158     PetscCall(KSPSetUp(ilink->ksp));
1159     PetscCall(KSPSetUpOnBlocks(ilink->ksp));
1160     ilink = ilink->next;
1161   }
1162   PetscFunctionReturn(PETSC_SUCCESS);
1163 }
1164 
1165 static PetscErrorCode PCSetUpOnBlocks_FieldSplit_GKB(PC pc)
1166 {
1167   PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
1168   PC_FieldSplitLink ilinkA = jac->head;
1169   KSP               ksp    = ilinkA->ksp;
1170 
1171   PetscFunctionBegin;
1172   PetscCall(KSPSetUp(ksp));
1173   PetscCall(KSPSetUpOnBlocks(ksp));
1174   PetscFunctionReturn(PETSC_SUCCESS);
1175 }
1176 
1177 static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y)
1178 {
1179   PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
1180   PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
1181   KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;
1182   Mat               AinvB = NULL;
1183   PetscInt          N, P;
1184 
1185   PetscFunctionBegin;
1186   switch (jac->schurfactorization) {
1187   case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
1188     /* [A00 0; 0 -S], positive definite, suitable for MINRES */
1189     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1190     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1191     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1192     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1193     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1194     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1195     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1196     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1197     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1198     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1199     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1200     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1201     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1202     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1203     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1204     PetscCall(VecScale(ilinkD->y, jac->schurscale));
1205     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1206     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1207     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1208     break;
1209   case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
1210     /* [A00 0; A10 S], suitable for left preconditioning */
1211     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1212     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1213     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1214     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1215     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1216     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1217     PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1218     PetscCall(VecScale(ilinkD->x, -1.));
1219     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1220     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1221     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1222     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1223     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1224     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1225     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1226     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1227     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1228     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1229     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1230     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1231     break;
1232   case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
1233     /* [A00 A01; 0 S], suitable for right preconditioning */
1234     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1235     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1236     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1237     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1238     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1239     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1240     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1241     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1242     PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1243     PetscCall(VecScale(ilinkA->x, -1.));
1244     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1245     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1246     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1247     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1248     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1249     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1250     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1251     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1252     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1253     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1254     break;
1255   case PC_FIELDSPLIT_SCHUR_FACT_FULL:
1256     /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */
1257     PetscCall(MatGetSize(jac->B, NULL, &P));
1258     N = P;
1259     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1260     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1261     PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1262     if (kspUpper == kspA) {
1263       PetscCall(PetscObjectQuery((PetscObject)jac->schur, "AinvB", (PetscObject *)&AinvB));
1264       if (AinvB) {
1265         PetscCall(MatGetSize(AinvB, NULL, &N));
1266         if (N > P) { // first time PCApply_FieldSplit_Schur() is called
1267           PetscMemType mtype;
1268           Vec          c = NULL;
1269           PetscScalar *array;
1270           PetscInt     m, M;
1271 
1272           PetscCall(MatGetSize(jac->B, &M, NULL));
1273           PetscCall(MatGetLocalSize(jac->B, &m, NULL));
1274           PetscCall(MatDenseGetArrayAndMemType(AinvB, &array, &mtype));
1275           if (PetscMemTypeHost(mtype) || (!PetscDefined(HAVE_CUDA) && !PetscDefined(HAVE_HIP))) PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c));
1276 #if PetscDefined(HAVE_CUDA)
1277           else if (PetscMemTypeCUDA(mtype)) PetscCall(VecCreateMPICUDAWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c));
1278 #endif
1279 #if PetscDefined(HAVE_HIP)
1280           else if (PetscMemTypeHIP(mtype)) PetscCall(VecCreateMPIHIPWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c));
1281 #endif
1282           PetscCall(MatDenseRestoreArrayAndMemType(AinvB, &array));
1283           PetscCall(VecCopy(ilinkA->x, c));
1284           PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
1285           PetscCall(KSPSetOperators(jac->kspschur, jac->schur, jac->schur_user));
1286           PetscCall(VecCopy(c, ilinkA->y)); // retrieve the solution as the last column of the composed Mat
1287           PetscCall(VecDestroy(&c));
1288         }
1289       }
1290     }
1291     if (N == P) PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y));
1292     PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y));
1293     PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1294     PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1295     PetscCall(VecScale(ilinkD->x, -1.0));
1296     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1297     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1298 
1299     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1300     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1301     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1302     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1303     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1304     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1305     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1306 
1307     if (kspUpper == kspA) {
1308       if (!AinvB) {
1309         PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y));
1310         PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
1311         PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1312         PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1313         PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1314         PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1315       } else PetscCall(MatMultAdd(AinvB, ilinkD->y, ilinkA->y, ilinkA->y));
1316     } else {
1317       PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1318       PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1319       PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1320       PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1321       PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1322       PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z));
1323       PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z));
1324       PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1325       PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
1326     }
1327     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1328     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1329     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1330   }
1331   PetscFunctionReturn(PETSC_SUCCESS);
1332 }
1333 
/*
  Applies the transpose of the Schur-complement block-factorization preconditioner,
  y = M^{-T} x.

  Mirrors the forward apply with every inner solve replaced by KSPSolveTranspose()
  and every off-diagonal product by MatMultTranspose(). Because transposition swaps
  block-lower and block-upper triangular factors, the UPPER case here performs an
  A00-first sweep (the transpose of [A00 A01; 0 S]) and the LOWER case performs a
  Schur-first sweep (the transpose of [A00 0; A10 S]).

  ilinkA is the first split (the A00 block), ilinkD the second (the Schur block).
  VecScatterBegin/End pairs are deliberately separated with independent work in
  between to overlap communication with computation; do not reorder them.
*/
static PetscErrorCode PCApplyTranspose_FieldSplit_Schur(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
  KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;

  PetscFunctionBegin;
  switch (jac->schurfactorization) {
  case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
    /* [A00 0; 0 -S], positive definite, suitable for MINRES */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    /* ship the A00 part of the result while finishing the Schur-split restriction */
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* the -S block carries the user-selected sign/scaling */
    PetscCall(VecScale(ilinkD->y, jac->schurscale));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* transpose of [A00 A01; 0 S]: A00^{-T} solve first, then back-substitute through A01^T into the Schur solve */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.));
    /* ilinkD->x <- x|_D - B^T A00^{-T} x|_A, overlapped with shipping the A-part of y */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* transpose of [A00 0; A10 S]: Schur solve first, then back-substitute through A10^T into the A00 solve */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
    PetscCall(VecScale(ilinkA->x, -1.));
    /* ilinkA->x <- x|_A - C^T S^{-T} x|_D, overlapped with shipping the Schur part of y */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* transpose of the three-factor LDU splitting: upper^T, then diagonal (S), then lower^T */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspUpper, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.0));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

    /* NOTE(review): kspLower is initialized to kspA above, so this condition is
       always true and the else branch appears unreachable here — confirm whether a
       distinct lower solver was intended for the transpose apply. */
    if (kspLower == kspA) {
      /* fold the lower-factor correction into the pending A00 transpose solve */
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->y));
      PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    } else {
      /* separate solvers: solve with kspA, then apply the lower correction with kspLower */
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
      PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(KSPSolveTranspose(kspLower, ilinkA->x, ilinkA->z));
      PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->z));
      PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
    }
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1452 
1453 #define FieldSplitSplitSolveAdd(ilink, xx, yy) \
1454   ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \
1455                     KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \
1456                     VecScatterEnd(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE)))
1457 
1458 static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y)
1459 {
1460   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1461   PC_FieldSplitLink ilink = jac->head;
1462   PetscInt          cnt, bs;
1463 
1464   PetscFunctionBegin;
1465   if (jac->type == PC_COMPOSITE_ADDITIVE) {
1466     PetscBool matnest;
1467 
1468     PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
1469     if (jac->defaultsplit && !matnest) {
1470       PetscCall(VecGetBlockSize(x, &bs));
1471       PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
1472       PetscCall(VecGetBlockSize(y, &bs));
1473       PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
1474       PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
1475       while (ilink) {
1476         PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1477         PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
1478         PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
1479         PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1480         ilink = ilink->next;
1481       }
1482       PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
1483     } else {
1484       PetscCall(VecSet(y, 0.0));
1485       while (ilink) {
1486         PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
1487         ilink = ilink->next;
1488       }
1489     }
1490   } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) {
1491     PetscCall(VecSet(y, 0.0));
1492     /* solve on first block for first block variables */
1493     PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
1494     PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
1495     PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1496     PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
1497     PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
1498     PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1499     PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
1500     PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
1501 
1502     /* compute the residual only onto second block variables using first block variables */
1503     PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x));
1504     ilink = ilink->next;
1505     PetscCall(VecScale(ilink->x, -1.0));
1506     PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
1507     PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
1508 
1509     /* solve on second block variables */
1510     PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1511     PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
1512     PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
1513     PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1514     PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
1515     PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
1516   } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
1517     if (!jac->w1) {
1518       PetscCall(VecDuplicate(x, &jac->w1));
1519       PetscCall(VecDuplicate(x, &jac->w2));
1520     }
1521     PetscCall(VecSet(y, 0.0));
1522     PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
1523     cnt = 1;
1524     while (ilink->next) {
1525       ilink = ilink->next;
1526       /* compute the residual only over the part of the vector needed */
1527       PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x));
1528       PetscCall(VecScale(ilink->x, -1.0));
1529       PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
1530       PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
1531       PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1532       PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
1533       PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
1534       PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1535       PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
1536       PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
1537     }
1538     if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
1539       cnt -= 2;
1540       while (ilink->previous) {
1541         ilink = ilink->previous;
1542         /* compute the residual only over the part of the vector needed */
1543         PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x));
1544         PetscCall(VecScale(ilink->x, -1.0));
1545         PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
1546         PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
1547         PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1548         PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
1549         PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
1550         PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1551         PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
1552         PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
1553       }
1554     }
1555   } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type);
1556   PetscFunctionReturn(PETSC_SUCCESS);
1557 }
1558 
/*
  Applies the Golub-Kahan bidiagonalization (GKB) preconditioner to a two-split
  saddle-point system, following the algorithm of [Ar13] cited in the comments
  below. The first split (ilinkA) holds the H = A + nu*B*B' block (regularized
  when jac->gkbnu > 0), the second split (ilinkD) the constraint block. The
  iteration builds bidiagonalization vectors u and v/d, accumulates the solution
  in ilinkA->y / ilinkD->y, and stops when the delay-window lower-bound error
  estimate falls below jac->gkbtol or after jac->gkbmaxit iterations.
*/
static PetscErrorCode PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
  KSP               ksp = ilinkA->ksp;
  Vec               u, v, Hu, d, work1, work2;
  PetscScalar       alpha, z, nrmz2, *vecz;
  PetscReal         lowbnd, nu, beta;
  PetscInt          j, iterGKB;

  PetscFunctionBegin;
  /* restrict the input onto the two splits (both scatters overlapped) */
  PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));

  /* local aliases for the persistent work objects held by the PC */
  u     = jac->u;
  v     = jac->v;
  Hu    = jac->Hu;
  d     = jac->d;
  work1 = jac->w1;
  work2 = jac->w2;
  vecz  = jac->vecz; /* sliding window of the last gkbdelay z-coefficients */

  /* Change RHS to comply with matrix regularization H = A + nu*B*B' */
  /* Add q = q + nu*B*b */
  if (jac->gkbnu) {
    nu = jac->gkbnu;
    PetscCall(VecScale(ilinkD->x, jac->gkbnu));
    PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */
  } else {
    /* Situation when no augmented Lagrangian is used. Then we set inner  */
    /* matrix N = I in [Ar13], and thus nu = 1.                           */
    nu = 1;
  }

  /* Transform rhs from [q,tilde{b}] to [0,b] */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y));
  PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1));
  PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x        */

  /* First step of algorithm */
  PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c)*/
  KSPCheckDot(ksp, beta);
  beta = PetscSqrtReal(nu) * beta;
  PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c      */
  PetscCall(MatMult(jac->B, v, work2));          /* u = H^{-1}*B*v      */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(KSPSolve(ksp, work2, u));
  PetscCall(KSPCheckSolve(ksp, pc, u));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u      */
  PetscCall(VecDot(Hu, u, &alpha));
  KSPCheckDot(ksp, alpha);
  PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
  alpha = PetscSqrtReal(PetscAbsScalar(alpha));
  PetscCall(VecScale(u, 1.0 / alpha));
  PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* d = v/alpha         */

  z       = beta / alpha;
  vecz[1] = z;

  /* Computation of first iterate x(1) and p(1) */
  PetscCall(VecAXPY(ilinkA->y, z, u));
  PetscCall(VecCopy(d, ilinkD->y));
  PetscCall(VecScale(ilinkD->y, -z));

  iterGKB = 1;
  lowbnd  = 2 * jac->gkbtol; /* start above the tolerance so the loop can run */
  if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));

  while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) {
    iterGKB += 1;
    PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */
    PetscCall(VecAXPBY(v, nu, -alpha, work1));
    PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = ||v|| / sqrt(nu)   */
    beta = beta / PetscSqrtReal(nu);
    PetscCall(VecScale(v, 1.0 / beta));
    PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */
    PetscCall(MatMult(jac->H, u, Hu));
    PetscCall(VecAXPY(work2, -beta, Hu));
    PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(KSPSolve(ksp, work2, u));
    PetscCall(KSPCheckSolve(ksp, pc, u));
    PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u            */
    PetscCall(VecDot(Hu, u, &alpha));
    KSPCheckDot(ksp, alpha);
    PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
    alpha = PetscSqrtReal(PetscAbsScalar(alpha));
    PetscCall(VecScale(u, 1.0 / alpha));

    z       = -beta / alpha * z; /* z <- beta/alpha*z     */
    vecz[0] = z;

    /* Computation of new iterate x(i+1) and p(i+1) */
    PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */
    PetscCall(VecAXPY(ilinkA->y, z, u));                   /* r = r + z*u          */
    PetscCall(VecAXPY(ilinkD->y, -z, d));                  /* p = p - z*d          */
    PetscCall(MatMult(jac->H, ilinkA->y, Hu));             /* ||u||_H = u'*H*u     */
    PetscCall(VecDot(Hu, ilinkA->y, &nrmz2));

    /* Compute Lower Bound estimate over the last gkbdelay z-values */
    if (iterGKB > jac->gkbdelay) {
      lowbnd = 0.0;
      for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]);
      lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2));
    }

    /* shift the delay window by one so vecz[0] can take the next z */
    for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2];
    if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));
  }

  /* prolong the two split solutions back into the global output vector */
  PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1681 
1682 #define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \
1683   ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \
1684                     KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \
1685                     VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE)))
1686 
/*
   PCApplyTranspose_FieldSplit - Applies the transpose of the fieldsplit preconditioner, y = P^T x.

   Additive composition solves each split (transposed) independently and combines the results;
   the multiplicative variants sweep the splits with the order reversed relative to PCApply_FieldSplit(),
   since the transpose of a product applies the factors in the opposite order.
*/
static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    PetscBool matnest;

    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
    if (jac->defaultsplit && !matnest) {
      /* default strided splits: gather all fields at once, solve every split, scatter everything back */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* general ISs: each split scatters from/into the full vector itself, accumulating with ADD_VALUES */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else {
    /* multiplicative variants need two full-space work vectors for the running residual x - A^T y */
    if (!jac->w1) {
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* symmetric sweep: forward pass over the splits ... */
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->next) {
        ilink = ilink->next;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); /* w2 = x - A^T y */
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
      /* ... then backward pass, re-solving each split with the updated residual */
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    } else {
      /* plain multiplicative transpose: start from the LAST split and walk backwards */
      while (ilink->next) { /* get to last entry in linked list */
        ilink = ilink->next;
      }
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); /* w2 = x - A^T y */
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1754 
/*
   PCReset_FieldSplit - Frees everything created during setup (split list, work vectors,
   sub-matrices, Schur-complement and GKB objects) so the PC can be set up again from scratch.
*/
static PetscErrorCode PCReset_FieldSplit(PC pc)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;

  PetscFunctionBegin;
  /* walk the linked list of splits, destroying each node's solver, vectors, scatters and index sets */
  while (ilink) {
    PetscCall(KSPDestroy(&ilink->ksp));
    PetscCall(VecDestroy(&ilink->x));
    PetscCall(VecDestroy(&ilink->y));
    PetscCall(VecDestroy(&ilink->z));
    PetscCall(VecScatterDestroy(&ilink->sctx));
    PetscCall(ISDestroy(&ilink->is));
    PetscCall(ISDestroy(&ilink->is_col));
    PetscCall(PetscFree(ilink->splitname));
    PetscCall(PetscFree(ilink->fields));
    PetscCall(PetscFree(ilink->fields_col));
    next = ilink->next; /* save before freeing the node itself */
    PetscCall(PetscFree(ilink));
    ilink = next;
  }
  jac->head = NULL;
  PetscCall(PetscFree2(jac->x, jac->y));
  /* jac->mat may alias jac->pmat; only destroy the matrices when the arrays are distinct,
     otherwise just drop the alias and let the jac->pmat destruction below do the work */
  if (jac->mat && jac->mat != jac->pmat) {
    PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat));
  } else if (jac->mat) {
    jac->mat = NULL;
  }
  if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat));
  if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield));
  jac->nsplits = 0;
  PetscCall(VecDestroy(&jac->w1));
  PetscCall(VecDestroy(&jac->w2));
  /* clear the composed "AinvB" object before destroying the Schur matrix so its reference
     is released first -- NOTE(review): presumably this breaks a reference cycle; confirm */
  if (jac->schur) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", NULL));
  PetscCall(MatDestroy(&jac->schur));
  PetscCall(MatDestroy(&jac->schurp));
  PetscCall(MatDestroy(&jac->schur_user));
  PetscCall(KSPDestroy(&jac->kspschur));
  PetscCall(KSPDestroy(&jac->kspupper));
  /* GKB-related objects */
  PetscCall(MatDestroy(&jac->B));
  PetscCall(MatDestroy(&jac->C));
  PetscCall(MatDestroy(&jac->H));
  PetscCall(VecDestroy(&jac->u));
  PetscCall(VecDestroy(&jac->v));
  PetscCall(VecDestroy(&jac->Hu));
  PetscCall(VecDestroy(&jac->d));
  PetscCall(PetscFree(jac->vecz));
  PetscCall(PetscViewerDestroy(&jac->gkbviewer));
  jac->isrestrict = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1806 
1807 static PetscErrorCode PCDestroy_FieldSplit(PC pc)
1808 {
1809   PetscFunctionBegin;
1810   PetscCall(PCReset_FieldSplit(pc));
1811   PetscCall(PetscFree(pc->data));
1812   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
1813   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL));
1814   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL));
1815   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL));
1816   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL));
1817   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL));
1818   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL));
1819   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1820   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
1821   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
1822   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
1823   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
1824   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
1825   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
1826   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
1827   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
1828   PetscFunctionReturn(PETSC_SUCCESS);
1829 }
1830 
/* Processes the options database for PCFIELDSPLIT; called from PCSetFromOptions() */
static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems PetscOptionsObject)
{
  PetscInt        bs;
  PetscBool       flg;
  PC_FieldSplit  *jac = (PC_FieldSplit *)pc->data;
  PCCompositeType ctype;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options");
  PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL));
  PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg));
  if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs));
  /* default the Amat/Pmat extraction flags to the PC-wide useAmat setting before reading the specific options */
  jac->diag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL));
  jac->offdiag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL));
  PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL));
  PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */
  PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg));
  if (flg) PetscCall(PCFieldSplitSetType(pc, ctype));
  /* Only setup fields once */
  if (jac->bs > 0 && jac->nsplits == 0) {
    /* only allow user to set fields from command line.
       otherwise user can set them in PCFieldSplitSetDefaults() */
    PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
    if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
  }
  if (jac->type == PC_COMPOSITE_SCHUR) {
    /* accept the deprecated option name first so the current -pc_fieldsplit_schur_fact_type can still override it */
    PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg));
    if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n"));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL));
    PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL));
  } else if (jac->type == PC_COMPOSITE_GKB) {
    PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitSetGKBTol", jac->gkbtol, &jac->gkbtol, NULL));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitSetGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL));
    PetscCall(PetscOptionsBoundedReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitSetGKBNu", jac->gkbnu, &jac->gkbnu, NULL, 0.0));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", "Maximum allowed number of iterations", "PCFieldSplitSetGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL));
    PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL));
  }
  /*
    In the initial call to this routine the sub-solver data structures do not exist so we cannot call KSPSetFromOptions() on them yet.
    But after the initial setup of ALL the layers of sub-solvers is completed we do want to call KSPSetFromOptions() on the sub-solvers every time it
    is called on the outer solver in case changes were made in the options database

    But even after PCSetUp_FieldSplit() is called all the options inside the inner levels of sub-solvers may still not have been set thus we only call the KSPSetFromOptions()
    if we know that the entire stack of sub-solvers below this have been complete instantiated, we check this by seeing if any solver iterations are complete.
    Without this extra check test p2p1fetidp_olof_full and others fail with incorrect matrix types.

    There could be a negative side effect of calling the KSPSetFromOptions() below.

    If one captured the PetscObjectState of the options database one could skip these calls if the database has not changed from the previous call
  */
  if (jac->issetup) {
    PC_FieldSplitLink ilink = jac->head;
    if (jac->type == PC_COMPOSITE_SCHUR) {
      if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper));
      if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur));
    }
    while (ilink) {
      if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp));
      ilink = ilink->next;
    }
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
1898 
1899 static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
1900 {
1901   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
1902   PC_FieldSplitLink ilink, next = jac->head;
1903   char              prefix[128];
1904   PetscInt          i;
1905   PetscLogEvent     nse;
1906 
1907   PetscFunctionBegin;
1908   if (jac->splitdefined) {
1909     PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
1910     PetscFunctionReturn(PETSC_SUCCESS);
1911   }
1912   for (i = 0; i < n; i++) PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]);
1913   PetscCall(PetscNew(&ilink));
1914   if (splitname) {
1915     PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
1916   } else {
1917     PetscCall(PetscMalloc1(3, &ilink->splitname));
1918     PetscCall(PetscSNPrintf(ilink->splitname, 2, "%" PetscInt_FMT, jac->nsplits));
1919   }
1920   PetscCall(PetscMPIIntCast(jac->nsplits, &nse));
1921   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + nse : KSP_Solve_FS_0 + 4; /* Splits greater than 4 logged in 4th split */
1922   PetscCall(PetscMalloc1(n, &ilink->fields));
1923   PetscCall(PetscArraycpy(ilink->fields, fields, n));
1924   PetscCall(PetscMalloc1(n, &ilink->fields_col));
1925   PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n));
1926 
1927   ilink->nfields = n;
1928   ilink->next    = NULL;
1929   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
1930   PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
1931   PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
1932   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
1933   PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
1934 
1935   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
1936   PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
1937 
1938   if (!next) {
1939     jac->head       = ilink;
1940     ilink->previous = NULL;
1941   } else {
1942     while (next->next) next = next->next;
1943     next->next      = ilink;
1944     ilink->previous = next;
1945   }
1946   jac->nsplits++;
1947   PetscFunctionReturn(PETSC_SUCCESS);
1948 }
1949 
1950 static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1951 {
1952   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1953 
1954   PetscFunctionBegin;
1955   *subksp = NULL;
1956   if (n) *n = 0;
1957   if (jac->type == PC_COMPOSITE_SCHUR) {
1958     PetscInt nn;
1959 
1960     PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()");
1961     PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits);
1962     nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 1 : 0);
1963     PetscCall(PetscMalloc1(nn, subksp));
1964     (*subksp)[0] = jac->head->ksp;
1965     (*subksp)[1] = jac->kspschur;
1966     if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper;
1967     if (n) *n = nn;
1968   }
1969   PetscFunctionReturn(PETSC_SUCCESS);
1970 }
1971 
1972 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp)
1973 {
1974   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1975 
1976   PetscFunctionBegin;
1977   PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()");
1978   PetscCall(PetscMalloc1(jac->nsplits, subksp));
1979   PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp));
1980 
1981   (*subksp)[1] = jac->kspschur;
1982   if (n) *n = jac->nsplits;
1983   PetscFunctionReturn(PETSC_SUCCESS);
1984 }
1985 
1986 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1987 {
1988   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1989   PetscInt          cnt   = 0;
1990   PC_FieldSplitLink ilink = jac->head;
1991 
1992   PetscFunctionBegin;
1993   PetscCall(PetscMalloc1(jac->nsplits, subksp));
1994   while (ilink) {
1995     (*subksp)[cnt++] = ilink->ksp;
1996     ilink            = ilink->next;
1997   }
1998   PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits);
1999   if (n) *n = jac->nsplits;
2000   PetscFunctionReturn(PETSC_SUCCESS);
2001 }
2002 
2003 /*@
2004   PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`.
2005 
2006   Input Parameters:
2007 + pc  - the preconditioner context
2008 - isy - the index set that defines the indices to which the fieldsplit is to be restricted
2009 
2010   Level: advanced
2011 
2012   Developer Notes:
2013   It seems the resulting `IS`s will not cover the entire space, so
2014   how can they define a convergent preconditioner? Needs explaining.
2015 
2016 .seealso: [](sec_block_matrices), `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
2017 @*/
2018 PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy)
2019 {
2020   PetscFunctionBegin;
2021   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2022   PetscValidHeaderSpecific(isy, IS_CLASSID, 2);
2023   PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy));
2024   PetscFunctionReturn(PETSC_SUCCESS);
2025 }
2026 
/*
   PCFieldSplitRestrictIS_FieldSplit - Implementation of PCFieldSplitRestrictIS(); replaces each
   split's IS (and is_col) by its embedding into isy, shifted to global numbering, and recurses
   into any split whose sub-PC is itself a PCFIELDSPLIT.
*/
static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;
  PetscInt          localsize, size, sizez, i;
  const PetscInt   *ind, *indz;
  PetscInt         *indc, *indcz;
  PetscBool         flg;

  PetscFunctionBegin;
  PetscCall(ISGetLocalSize(isy, &localsize));
  /* inclusive scan minus the local size = this rank's global starting offset within isy */
  PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy)));
  size -= localsize;
  while (ilink) {
    IS isrl, isr;
    PC subpc;
    /* positions of this split's entries inside isy (local numbering of the embedding) */
    PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl));
    PetscCall(ISGetLocalSize(isrl, &localsize));
    PetscCall(PetscMalloc1(localsize, &indc));
    PetscCall(ISGetIndices(isrl, &ind));
    PetscCall(PetscArraycpy(indc, ind, localsize));
    PetscCall(ISRestoreIndices(isrl, &ind));
    PetscCall(ISDestroy(&isrl));
    /* shift to global numbering by this rank's offset */
    for (i = 0; i < localsize; i++) *(indc + i) += size;
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr));
    /* isr ends up referenced twice: once as ilink->is and once as ilink->is_col */
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is));
    ilink->is = isr;
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is_col));
    ilink->is_col = isr;
    PetscCall(ISDestroy(&isr)); /* drop the creation reference */
    PetscCall(KSPGetPC(ilink->ksp, &subpc));
    PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg));
    if (flg) {
      /* nested fieldsplit: build the restriction of isy into this split's space and recurse */
      IS       iszl, isz;
      MPI_Comm comm;
      PetscCall(ISGetLocalSize(ilink->is, &localsize));
      comm = PetscObjectComm((PetscObject)ilink->is);
      PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl));
      PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm));
      sizez -= localsize; /* again: exclusive prefix = global offset */
      PetscCall(ISGetLocalSize(iszl, &localsize));
      PetscCall(PetscMalloc1(localsize, &indcz));
      PetscCall(ISGetIndices(iszl, &indz));
      PetscCall(PetscArraycpy(indcz, indz, localsize));
      PetscCall(ISRestoreIndices(iszl, &indz));
      PetscCall(ISDestroy(&iszl));
      for (i = 0; i < localsize; i++) *(indcz + i) += sizez;
      PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz));
      PetscCall(PCFieldSplitRestrictIS(subpc, isz));
      PetscCall(ISDestroy(&isz));
    }
    next  = ilink->next;
    ilink = next;
  }
  jac->isrestrict = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2086 
/*
   PCFieldSplitSetIS_FieldSplit - Implementation of PCFieldSplitSetIS(); appends a new split, defined
   by an explicit index set, to the tail of the linked list of splits. A reference is taken on `is`
   for both the row and column IS of the split.
*/
static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink, next = jac->head;
  char              prefix[128];
  PetscLogEvent     nse;

  PetscFunctionBegin;
  if (jac->splitdefined) {
    PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(PetscNew(&ilink));
  if (splitname) {
    PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
  } else {
    /* auto-generate a name from the split index (up to 7 digits) */
    PetscCall(PetscMalloc1(8, &ilink->splitname));
    PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits));
  }
  PetscCall(PetscMPIIntCast(jac->nsplits, &nse));
  ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + nse : KSP_Solve_FS_0 + 4; /* Splits greater than 4 logged in 4th split */
  /* reference BEFORE destroying the old IS, so this is safe even if is == ilink->is */
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is));
  ilink->is = is;
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is_col));
  ilink->is_col = is;
  ilink->next   = NULL;
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
  PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
  PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
  PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));

  /* sub-solver options live under -<pcprefix>fieldsplit_<splitname>_ */
  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
  PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));

  /* append the new split at the tail of the doubly-linked list */
  if (!next) {
    jac->head       = ilink;
    ilink->previous = NULL;
  } else {
    while (next->next) next = next->next;
    next->next      = ilink;
    ilink->previous = next;
  }
  jac->nsplits++;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2135 
2136 /*@
2137   PCFieldSplitSetFields - Sets the fields that define one particular split in `PCFIELDSPLIT`
2138 
2139   Logically Collective
2140 
2141   Input Parameters:
2142 + pc         - the preconditioner context
2143 . splitname  - name of this split, if `NULL` the number of the split is used
2144 . n          - the number of fields in this split
2145 . fields     - the fields in this split
2146 - fields_col - generally the same as `fields`, if it does not match `fields` then the submatrix that is solved for this set of fields comes from an off-diagonal block
2147                of the matrix and `fields_col` provides the column indices for that block
2148 
2149   Options Database Key:
2150 . -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split
2151 
2152   Level: intermediate
2153 
2154   Notes:
2155   Use `PCFieldSplitSetIS()` to set a  general set of indices as a split.
2156 
2157   If the matrix used to construct the preconditioner is `MATNEST` then field i refers to the `is_row[i]` `IS` passed to `MatCreateNest()`.
2158 
2159   If the matrix used to construct the preconditioner is not `MATNEST` then
2160   `PCFieldSplitSetFields()` is for defining fields as strided blocks (based on the block size provided to the matrix with `MatSetBlockSize()` or
2161   to the `PC` with `PCFieldSplitSetBlockSize()`). For example, if the block
2162   size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean
2163   0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x23x56x8.. x12x45x78x....
2164   where the numbered entries indicate what is in the split.
2165 
2166   This function is called once per split (it creates a new split each time).  Solve options
2167   for this split will be available under the prefix `-fieldsplit_SPLITNAME_`.
2168 
2169   `PCFieldSplitSetIS()` does not support having a `fields_col` different from `fields`
2170 
2171   Developer Notes:
2172   This routine does not actually create the `IS` representing the split, that is delayed
2173   until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be
2174   available when this routine is called.
2175 
2176 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()`,
2177           `MatSetBlockSize()`, `MatCreateNest()`
2178 @*/
2179 PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt fields[], const PetscInt fields_col[])
2180 {
2181   PetscFunctionBegin;
2182   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2183   PetscAssertPointer(splitname, 2);
2184   PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname);
2185   PetscAssertPointer(fields, 4);
2186   PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col));
2187   PetscFunctionReturn(PETSC_SUCCESS);
2188 }
2189 
2190 /*@
2191   PCFieldSplitSetDiagUseAmat - set flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2192   the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2193 
2194   Logically Collective
2195 
2196   Input Parameters:
2197 + pc  - the preconditioner object
2198 - flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2199 
2200   Options Database Key:
2201 . -pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks
2202 
2203   Level: intermediate
2204 
2205 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT`
2206 @*/
2207 PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg)
2208 {
2209   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2210   PetscBool      isfs;
2211 
2212   PetscFunctionBegin;
2213   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2214   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2215   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2216   jac->diag_use_amat = flg;
2217   PetscFunctionReturn(PETSC_SUCCESS);
2218 }
2219 
2220 /*@
2221   PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2222   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2223 
2224   Logically Collective
2225 
2226   Input Parameter:
2227 . pc - the preconditioner object
2228 
2229   Output Parameter:
2230 . flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2231 
2232   Level: intermediate
2233 
2234 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT`
2235 @*/
2236 PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg)
2237 {
2238   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2239   PetscBool      isfs;
2240 
2241   PetscFunctionBegin;
2242   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2243   PetscAssertPointer(flg, 2);
2244   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2245   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2246   *flg = jac->diag_use_amat;
2247   PetscFunctionReturn(PETSC_SUCCESS);
2248 }
2249 
2250 /*@
2251   PCFieldSplitSetOffDiagUseAmat - set flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2252   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2253 
2254   Logically Collective
2255 
2256   Input Parameters:
2257 + pc  - the preconditioner object
2258 - flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2259 
2260   Options Database Key:
2261 . -pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks
2262 
2263   Level: intermediate
2264 
2265 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT`
2266 @*/
2267 PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg)
2268 {
2269   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2270   PetscBool      isfs;
2271 
2272   PetscFunctionBegin;
2273   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2274   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2275   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2276   jac->offdiag_use_amat = flg;
2277   PetscFunctionReturn(PETSC_SUCCESS);
2278 }
2279 
2280 /*@
2281   PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2282   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2283 
2284   Logically Collective
2285 
2286   Input Parameter:
2287 . pc - the preconditioner object
2288 
2289   Output Parameter:
2290 . flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2291 
2292   Level: intermediate
2293 
2294 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT`
2295 @*/
2296 PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg)
2297 {
2298   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2299   PetscBool      isfs;
2300 
2301   PetscFunctionBegin;
2302   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2303   PetscAssertPointer(flg, 2);
2304   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2305   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2306   *flg = jac->offdiag_use_amat;
2307   PetscFunctionReturn(PETSC_SUCCESS);
2308 }
2309 
2310 /*@
2311   PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT`
2312 
2313   Logically Collective
2314 
2315   Input Parameters:
2316 + pc        - the preconditioner context
2317 . splitname - name of this split, if `NULL` the number of the split is used
2318 - is        - the index set that defines the elements in this split
2319 
2320   Level: intermediate
2321 
2322   Notes:
2323   Use `PCFieldSplitSetFields()`, for splits defined by strided `IS` based on the matrix block size or the `is_rows[]` passed into `MATNEST`
2324 
2325   This function is called once per split (it creates a new split each time).  Solve options
2326   for this split will be available under the prefix -fieldsplit_SPLITNAME_.
2327 
2328 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetFields()`
2329 @*/
2330 PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is)
2331 {
2332   PetscFunctionBegin;
2333   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2334   if (splitname) PetscAssertPointer(splitname, 2);
2335   PetscValidHeaderSpecific(is, IS_CLASSID, 3);
2336   PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is));
2337   PetscFunctionReturn(PETSC_SUCCESS);
2338 }
2339 
2340 /*@
2341   PCFieldSplitGetIS - Retrieves the elements for a split as an `IS`
2342 
2343   Logically Collective
2344 
2345   Input Parameters:
2346 + pc        - the preconditioner context
2347 - splitname - name of this split
2348 
2349   Output Parameter:
2350 . is - the index set that defines the elements in this split, or `NULL` if the split is not found
2351 
2352   Level: intermediate
2353 
2354 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`, `PCFieldSplitGetISByIndex()`
2355 @*/
2356 PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is)
2357 {
2358   PetscFunctionBegin;
2359   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2360   PetscAssertPointer(splitname, 2);
2361   PetscAssertPointer(is, 3);
2362   {
2363     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2364     PC_FieldSplitLink ilink = jac->head;
2365     PetscBool         found;
2366 
2367     *is = NULL;
2368     while (ilink) {
2369       PetscCall(PetscStrcmp(ilink->splitname, splitname, &found));
2370       if (found) {
2371         *is = ilink->is;
2372         break;
2373       }
2374       ilink = ilink->next;
2375     }
2376   }
2377   PetscFunctionReturn(PETSC_SUCCESS);
2378 }
2379 
2380 /*@
2381   PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS`
2382 
2383   Logically Collective
2384 
2385   Input Parameters:
2386 + pc    - the preconditioner context
2387 - index - index of this split
2388 
2389   Output Parameter:
2390 . is - the index set that defines the elements in this split
2391 
2392   Level: intermediate
2393 
.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`
2396 @*/
2397 PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is)
2398 {
2399   PetscFunctionBegin;
2400   PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index);
2401   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2402   PetscAssertPointer(is, 3);
2403   {
2404     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2405     PC_FieldSplitLink ilink = jac->head;
2406     PetscInt          i     = 0;
2407     PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits);
2408 
2409     while (i < index) {
2410       ilink = ilink->next;
2411       ++i;
2412     }
2413     PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is));
2414   }
2415   PetscFunctionReturn(PETSC_SUCCESS);
2416 }
2417 
2418 /*@
2419   PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the
2420   fieldsplit preconditioner when calling `PCFieldSplitSetFields()`. If not set the matrix block size is used.
2421 
2422   Logically Collective
2423 
2424   Input Parameters:
2425 + pc - the preconditioner context
2426 - bs - the block size
2427 
2428   Level: intermediate
2429 
2430   Note:
2431   If the matrix is a `MATNEST` then the `is_rows[]` passed to `MatCreateNest()` determines the fields.
2432 
2433 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
2434 @*/
PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, bs, 2); /* bs must match on all MPI processes of the PC's communicator */
  /* Dispatch to PCFieldSplitSetBlockSize_FieldSplit(); a no-op if pc is not a PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2443 
2444 /*@C
2445   PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits
2446 
2447   Collective
2448 
2449   Input Parameter:
2450 . pc - the preconditioner context
2451 
2452   Output Parameters:
2453 + n      - the number of splits
2454 - subksp - the array of `KSP` contexts
2455 
2456   Level: advanced
2457 
2458   Notes:
2459   After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2460   (not the `KSP`, just the array that contains them).
2461 
2462   You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`.
2463 
2464   If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the
2465   Schur complement and the `KSP` object used to iterate over the Schur complement.
2466   To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`.
2467 
2468   If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the
2469   inner linear system defined by the matrix H in each loop.
2470 
2471   Fortran Note:
2472   Call `PCFieldSplitRestoreSubKSP()` when the array of `KSP` is no longer needed
2473 
2474   Developer Notes:
2475   There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2476 
2477 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()`
2478 @*/
PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2); /* n may be NULL if the caller does not need the split count */
  /* PetscUseMethod (unlike PetscTryMethod) raises an error if pc is not a PCFIELDSPLIT */
  PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2487 
2488 /*@C
2489   PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT`
2490 
2491   Collective
2492 
2493   Input Parameter:
2494 . pc - the preconditioner context
2495 
2496   Output Parameters:
2497 + n      - the number of splits
2498 - subksp - the array of `KSP` contexts
2499 
2500   Level: advanced
2501 
2502   Notes:
2503   After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2504   (not the `KSP` just the array that contains them).
2505 
2506   You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`.
2507 
2508   If the fieldsplit type is of type `PC_COMPOSITE_SCHUR`, it returns (in order)
2509 +  1  - the `KSP` used for the (1,1) block
2510 .  2  - the `KSP` used for the Schur complement (not the one used for the interior Schur solver)
2511 -  3  - the `KSP` used for the (1,1) block in the upper triangular factor (if different from that of the (1,1) block).
2512 
2513   It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`.
2514 
2515   Fortran Note:
2516   Call `PCFieldSplitSchurRestoreSubKSP()` when the array of `KSP` is no longer needed
2517 
2518   Developer Notes:
2519   There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2520 
2521   Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged?
2522 
2523 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()`
2524 @*/
PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2); /* n may be NULL if the caller does not need the split count */
  /* PetscUseMethod (unlike PetscTryMethod) raises an error if pc is not a PCFIELDSPLIT */
  PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2533 
2534 /*@
2535   PCFieldSplitSetSchurPre -  Indicates from what operator the preconditioner is constructed for the Schur complement.
2536   The default is the A11 matrix.
2537 
2538   Collective
2539 
2540   Input Parameters:
2541 + pc    - the preconditioner context
2542 . ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default),
2543               `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`,
2544               `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL`
2545 - pre   - matrix to use for preconditioning, or `NULL`
2546 
2547   Options Database Keys:
2548 + -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`. See notes for meaning of various arguments
2549 - -fieldsplit_1_pc_type <pctype>                               - the preconditioner algorithm that is used to construct the preconditioner from the operator
2550 
2551   Level: intermediate
2552 
2553   Notes:
2554   If ptype is
2555 +     a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner
2556   matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix
2557 .     self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix:
2558   The only preconditioners that currently work with this symbolic representation matrix object are `PCLSC` and `PCHPDDM`
2559 .     user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument
2560   to this function).
2561 .     selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation $ Sp = A11 - A10 inv(diag(A00)) A01 $
2562   This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be
2563   lumped before extracting the diagonal using the additional option `-fieldsplit_1_mat_schur_complement_ainv_type lump`
2564 -     full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation
2565   computed internally by `PCFIELDSPLIT` (this is expensive)
2566   useful mostly as a test that the Schur complement approach can work for your problem
2567 
2568   When solving a saddle point problem, where the A11 block is identically zero, using `a11` as the ptype only makes sense
2569   with the additional option `-fieldsplit_1_pc_type none`. Usually for saddle point problems one would use a `ptype` of `self` and
2570   `-fieldsplit_1_pc_type lsc` which uses the least squares commutator to compute a preconditioner for the Schur complement.
2571 
2572   Developer Note:
2573   The name of this function and the option `-pc_fieldsplit_schur_precondition` are inconsistent; precondition should be used everywhere.
2574 
2575 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`,
2576           `MatSchurComplementSetAinvType()`, `PCLSC`, `PCFieldSplitSetSchurFactType()`
2577 @*/
PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Dispatch to PCFieldSplitSetSchurPre_FieldSplit(); the method is only composed when the
     fieldsplit type is PC_COMPOSITE_SCHUR, otherwise this is a no-op */
  PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2585 
/* Deprecated: retained for backward compatibility only, use PCFieldSplitSetSchurPre() instead */
PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  return PCFieldSplitSetSchurPre(pc, ptype, pre);
} /* Deprecated name */
2590 
2591 /*@
2592   PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be
2593   preconditioned.  See `PCFieldSplitSetSchurPre()` for details.
2594 
2595   Logically Collective
2596 
2597   Input Parameter:
2598 . pc - the preconditioner context
2599 
2600   Output Parameters:
2601 + ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`
2602 - pre   - matrix to use for preconditioning (with `PC_FIELDSPLIT_SCHUR_PRE_USER`), or `NULL`
2603 
2604   Level: intermediate
2605 
2606 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC`
2607 @*/
PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscUseMethod errors if pc is not a Schur-type PCFIELDSPLIT (the only type that composes this method) */
  PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2615 
2616 /*@
2617   PCFieldSplitSchurGetS -  extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately
2618 
2619   Not Collective
2620 
2621   Input Parameter:
2622 . pc - the preconditioner context
2623 
2624   Output Parameter:
2625 . S - the Schur complement matrix
2626 
2627   Level: advanced
2628 
2629   Note:
2630   This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`.
2631 
2632 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`,
2633           `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()`
2634 @*/
2635 PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S)
2636 {
2637   const char    *t;
2638   PetscBool      isfs;
2639   PC_FieldSplit *jac;
2640 
2641   PetscFunctionBegin;
2642   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2643   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2644   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2645   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2646   jac = (PC_FieldSplit *)pc->data;
2647   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2648   if (S) *S = jac->schur;
2649   PetscFunctionReturn(PETSC_SUCCESS);
2650 }
2651 
2652 /*@
2653   PCFieldSplitSchurRestoreS -  returns the `MATSCHURCOMPLEMENT` matrix used by this `PC`
2654 
2655   Not Collective
2656 
2657   Input Parameters:
2658 + pc - the preconditioner context
2659 - S  - the Schur complement matrix
2660 
2661   Level: advanced
2662 
2663 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MatSchurComplement`, `PCFieldSplitSchurGetS()`
2664 @*/
2665 PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S)
2666 {
2667   const char    *t;
2668   PetscBool      isfs;
2669   PC_FieldSplit *jac;
2670 
2671   PetscFunctionBegin;
2672   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2673   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2674   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2675   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2676   jac = (PC_FieldSplit *)pc->data;
2677   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2678   PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten");
2679   PetscFunctionReturn(PETSC_SUCCESS);
2680 }
2681 
2682 static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2683 {
2684   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2685 
2686   PetscFunctionBegin;
2687   jac->schurpre = ptype;
2688   if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) {
2689     PetscCall(MatDestroy(&jac->schur_user));
2690     jac->schur_user = pre;
2691     PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
2692   }
2693   PetscFunctionReturn(PETSC_SUCCESS);
2694 }
2695 
static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* Either output may be NULL if the caller is not interested in it */
  if (ptype) *ptype = jac->schurpre;
  if (pre) *pre = jac->schur_user; /* NULL unless a user matrix was set via PCFieldSplitSetSchurPre() */
  PetscFunctionReturn(PETSC_SUCCESS);
}
2705 
2706 /*@
2707   PCFieldSplitSetSchurFactType -  sets which blocks of the approximate block factorization to retain in the preconditioner {cite}`murphy2000note` and {cite}`ipsen2001note`
2708 
2709   Collective
2710 
2711   Input Parameters:
2712 + pc    - the preconditioner context
2713 - ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default
2714 
2715   Options Database Key:
2716 . -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is `full`
2717 
2718   Level: intermediate
2719 
2720   Notes:
2721   The `full` factorization is
2722 
2723   ```{math}
2724   \left(\begin{array}{cc} A & B \\
2725   C & E \\
2726   \end{array}\right) =
2727   \left(\begin{array}{cc} I & 0 \\
2728   C A^{-1} & I \\
2729   \end{array}\right)
2730   \left(\begin{array}{cc} A & 0 \\
2731   0 & S \\
2732   \end{array}\right)
2733   \left(\begin{array}{cc} I & A^{-1}B \\
2734   0 & I \\
2735   \end{array}\right) = L D U,
2736   ```
2737 
2738   where $ S = E - C A^{-1} B $. In practice, the full factorization is applied via block triangular solves with the grouping $L(DU)$. `upper` uses $DU$, `lower` uses $LD$,
2739   and `diag` is the diagonal part with the sign of $S$ flipped (because this makes the preconditioner positive definite for many formulations,
  thus allowing the use of `KSPMINRES`). Sign flipping of $S$ can be turned off with `PCFieldSplitSetSchurScale()`.
2741 
2742   If $A$ and $S$ are solved exactly
2743 +  1 - `full` factorization is a direct solver.
2744 .  2 - The preconditioned operator with `lower` or `upper` has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations.
2745 -  3 - With `diag`, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations.
2746 
2747   If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner
2748   application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice.
2749 
2750   For symmetric problems in which $A$ is positive definite and $S$ is negative definite, `diag` can be used with `KSPMINRES`.
2751 
2752   A flexible method like `KSPFGMRES` or `KSPGCR`, [](sec_flexibleksp), must be used if the fieldsplit preconditioner is nonlinear (e.g., a few iterations of a Krylov method is used to solve with $A$ or $S$).
2753 
2754 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`,
2755           [](sec_flexibleksp), `PCFieldSplitSetSchurPre()`
2756 @*/
PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Dispatch to PCFieldSplitSetSchurFactType_FieldSplit(); the method is only composed when the
     fieldsplit type is PC_COMPOSITE_SCHUR, otherwise this is a no-op */
  PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2764 
/* Type-specific implementation of PCFieldSplitSetSchurFactType(): records which blocks of the
   approximate block factorization to retain */
static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurfactorization = ftype;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2773 
2774 /*@
2775   PCFieldSplitSetSchurScale -  Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`.
2776 
2777   Collective
2778 
2779   Input Parameters:
2780 + pc    - the preconditioner context
2781 - scale - scaling factor for the Schur complement
2782 
2783   Options Database Key:
2784 . -pc_fieldsplit_schur_scale <scale> - default is -1.0
2785 
2786   Level: intermediate
2787 
2788 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurFactType()`
2789 @*/
PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveScalar(pc, scale, 2); /* scale must match on all MPI processes */
  /* Dispatch to PCFieldSplitSetSchurScale_FieldSplit(); a no-op unless the fieldsplit is of Schur type */
  PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2798 
/* Type-specific implementation of PCFieldSplitSetSchurScale(): records the scaling factor applied
   to the Schur complement with PC_FIELDSPLIT_SCHUR_FACT_DIAG */
static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurscale = scale;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2807 
2808 /*@C
2809   PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement
2810 
2811   Collective
2812 
2813   Input Parameter:
2814 . pc - the preconditioner context
2815 
2816   Output Parameters:
2817 + A00 - the (0,0) block
2818 . A01 - the (0,1) block
2819 . A10 - the (1,0) block
2820 - A11 - the (1,1) block
2821 
2822   Level: advanced
2823 
2824   Note:
2825   Use `NULL` for any unneeded output arguments
2826 
2827 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()`
2828 @*/
2829 PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11)
2830 {
2831   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2832 
2833   PetscFunctionBegin;
2834   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2835   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach.");
2836   if (A00) *A00 = jac->pmat[0];
2837   if (A01) *A01 = jac->B;
2838   if (A10) *A10 = jac->C;
2839   if (A11) *A11 = jac->pmat[1];
2840   PetscFunctionReturn(PETSC_SUCCESS);
2841 }
2842 
2843 /*@
2844   PCFieldSplitSetGKBTol -  Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2845 
2846   Collective
2847 
2848   Input Parameters:
2849 + pc        - the preconditioner context
2850 - tolerance - the solver tolerance
2851 
2852   Options Database Key:
2853 . -pc_fieldsplit_gkb_tol <tolerance> - default is 1e-5
2854 
2855   Level: intermediate
2856 
2857   Note:
2858   The generalized GKB algorithm {cite}`arioli2013` uses a lower bound estimate of the error in energy norm as stopping criterion.
2859   It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than
2860   this estimate, the stopping criterion is satisfactory in practical cases.
2861 
2862 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()`
2863 @*/
PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, tolerance, 2); /* tolerance must match on all MPI processes */
  /* Dispatch to PCFieldSplitSetGKBTol_FieldSplit(); a no-op unless the fieldsplit is of GKB type */
  PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2872 
/* Type-specific implementation of PCFieldSplitSetGKBTol(): records the tolerance used by the
   GKB lower-bound error-estimate stopping criterion */
static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbtol = tolerance;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2881 
2882 /*@
2883   PCFieldSplitSetGKBMaxit -  Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2884 
2885   Collective
2886 
2887   Input Parameters:
2888 + pc    - the preconditioner context
2889 - maxit - the maximum number of iterations
2890 
2891   Options Database Key:
2892 . -pc_fieldsplit_gkb_maxit <maxit> - default is 100
2893 
2894   Level: intermediate
2895 
2896 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()`
2897 @*/
PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, maxit, 2); /* maxit must match on all MPI processes */
  /* Dispatch to PCFieldSplitSetGKBMaxit_FieldSplit(); a no-op unless the fieldsplit is of GKB type */
  PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2906 
/* Type-specific implementation of PCFieldSplitSetGKBMaxit(): records the GKB iteration cap */
static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbmaxit = maxit;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2915 
2916 /*@
2917   PCFieldSplitSetGKBDelay -  Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization {cite}`arioli2013` in `PCFIELDSPLIT`
2918   preconditioner.
2919 
2920   Collective
2921 
2922   Input Parameters:
2923 + pc    - the preconditioner context
2924 - delay - the delay window in the lower bound estimate
2925 
2926   Options Database Key:
2927 . -pc_fieldsplit_gkb_delay <delay> - default is 5
2928 
2929   Level: intermediate
2930 
2931   Notes:
2932   The algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. The lower bound of the error $ ||u-u^k||_H $
2933   is expressed as a truncated sum. The error at iteration k can only be measured at iteration (k + `delay`), and thus the algorithm needs
2934   at least (`delay` + 1) iterations to stop.
2935 
2936   For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to {cite}`arioli2013`
2937 
2938 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2939 @*/
PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, delay, 2); /* delay must match on all MPI processes */
  /* Dispatch to PCFieldSplitSetGKBDelay_FieldSplit(); a no-op unless the fieldsplit is of GKB type */
  PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2948 
/* Type-specific implementation of PCFieldSplitSetGKBDelay(): records the delay window of the
   GKB lower-bound error estimate */
static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbdelay = delay;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2957 
2958 /*@
2959   PCFieldSplitSetGKBNu -  Sets the scalar value nu >= 0 in the transformation H = A00 + nu*A01*A01' of the (1,1) block in the
2960   Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2961 
2962   Collective
2963 
2964   Input Parameters:
2965 + pc - the preconditioner context
2966 - nu - the shift parameter
2967 
2968   Options Database Key:
2969 . -pc_fieldsplit_gkb_nu <nu> - default is 1
2970 
2971   Level: intermediate
2972 
2973   Notes:
2974   This shift is in general done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by choosing `nu` sufficiently large. However,
2975   if `nu` is chosen too large, the matrix H might be badly conditioned and the solution of the linear system $Hx = b$ in the inner loop becomes difficult. It is therefore
2976   necessary to find a good balance in between the convergence of the inner and outer loop.
2977 
2978   For `nu` = 0, no shift is done. In this case A00 has to be positive definite. The matrix N in {cite}`arioli2013` is then chosen as identity.
2979 
2980 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2981 @*/
PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, nu, 2); /* nu must match on all MPI processes */
  /* Dispatch to PCFieldSplitSetGKBNu_FieldSplit(); a no-op unless the fieldsplit is of GKB type */
  PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2990 
/* Type-specific implementation of PCFieldSplitSetGKBNu(): records the shift parameter nu used in
   the transformation H = A00 + nu*A01*A01' of the (1,1) block */
static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbnu = nu;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2999 
/* Type-specific implementation of PCFieldSplitSetType(): records the composite type and installs
   the matching apply/view/setup function pointers and composed methods.

   The method table is reset wholesale first, then only the entries that make sense for the new
   type are re-composed; this ordering matters so that switching away from (say) SCHUR removes the
   Schur-only setters. */
static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->type = type;
  /* Clear every type-specific composed method before installing the new set */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));

  if (type == PC_COMPOSITE_SCHUR) {
    /* Schur complement factorization: dedicated ops plus the Schur-only setters */
    pc->ops->apply          = PCApply_FieldSplit_Schur;
    pc->ops->applytranspose = PCApplyTranspose_FieldSplit_Schur;
    pc->ops->view           = PCView_FieldSplit_Schur;
    pc->ops->setuponblocks  = PCSetUpOnBlocks_FieldSplit_Schur;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit));
  } else if (type == PC_COMPOSITE_GKB) {
    /* Golub-Kahan bidiagonalization: GKB-only setters; no transpose application is available */
    pc->ops->apply          = PCApply_FieldSplit_GKB;
    pc->ops->applytranspose = NULL;
    pc->ops->view           = PCView_FieldSplit_GKB;
    pc->ops->setuponblocks  = PCSetUpOnBlocks_FieldSplit_GKB;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit));
  } else {
    /* All remaining composite types (additive, multiplicative, ...) share the generic implementation */
    pc->ops->apply          = PCApply_FieldSplit;
    pc->ops->applytranspose = PCApplyTranspose_FieldSplit;
    pc->ops->view           = PCView_FieldSplit;
    pc->ops->setuponblocks  = PCSetUpOnBlocks_FieldSplit;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
3048 
/* Type-specific implementation of PCFieldSplitSetBlockSize(): validates and records the block size */
static PetscErrorCode PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" PetscInt_FMT, bs);
  /* Once set (jac->bs > 0) the block size is immutable; re-setting the same value is allowed */
  PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs);
  jac->bs = bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3059 
3060 static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
3061 {
3062   PC_FieldSplit    *jac           = (PC_FieldSplit *)pc->data;
3063   PC_FieldSplitLink ilink_current = jac->head;
3064   IS                is_owned;
3065 
3066   PetscFunctionBegin;
3067   jac->coordinates_set = PETSC_TRUE; // Internal flag
3068   PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, NULL));
3069 
3070   while (ilink_current) {
3071     // For each IS, embed it to get local coords indces
3072     IS              is_coords;
3073     PetscInt        ndofs_block;
3074     const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block
3075 
3076     // Setting drop to true for safety. It should make no difference.
3077     PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords));
3078     PetscCall(ISGetLocalSize(is_coords, &ndofs_block));
3079     PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration));
3080 
3081     // Allocate coordinates vector and set it directly
3082     PetscCall(PetscMalloc1(ndofs_block * dim, &ilink_current->coords));
3083     for (PetscInt dof = 0; dof < ndofs_block; ++dof) {
3084       for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d];
3085     }
3086     ilink_current->dim   = dim;
3087     ilink_current->ndofs = ndofs_block;
3088     PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration));
3089     PetscCall(ISDestroy(&is_coords));
3090     ilink_current = ilink_current->next;
3091   }
3092   PetscCall(ISDestroy(&is_owned));
3093   PetscFunctionReturn(PETSC_SUCCESS);
3094 }
3095 
/*@
  PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc   - the preconditioner context
- type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`,
         `PC_COMPOSITE_GKB`

  Options Database Key:
. -pc_fieldsplit_type <one of multiplicative, additive, symmetric_multiplicative, special, schur, gkb> - Sets fieldsplit preconditioner type

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
          `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`, `PCFieldSplitSetSchurFactType()`
@*/
PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Dispatch to PCFieldSplitSetType_FieldSplit() if this PC is a PCFIELDSPLIT; no-op otherwise */
  PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3121 
/*@
  PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT`

  Not collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`,
         `PC_COMPOSITE_GKB`

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
          `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
@*/
PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(type, 2);
  /* NOTE(review): pc->data is read without a type check; assumes pc is a PCFIELDSPLIT — matches sibling getters in this file */
  *type = jac->type;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3148 
3149 /*@
3150   PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3151 
3152   Logically Collective
3153 
3154   Input Parameters:
3155 + pc  - the preconditioner context
3156 - flg - boolean indicating whether to use field splits defined by the `DM`
3157 
3158   Options Database Key:
3159 . -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM`
3160 
3161   Level: intermediate
3162 
3163   Developer Note:
3164   The name should be `PCFieldSplitSetUseDMSplits()`, similar change to options database
3165 
3166 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
3167 @*/
3168 PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg)
3169 {
3170   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3171   PetscBool      isfs;
3172 
3173   PetscFunctionBegin;
3174   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3175   PetscValidLogicalCollectiveBool(pc, flg, 2);
3176   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3177   if (isfs) jac->dm_splits = flg;
3178   PetscFunctionReturn(PETSC_SUCCESS);
3179 }
3180 
3181 /*@
3182   PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3183 
3184   Logically Collective
3185 
3186   Input Parameter:
3187 . pc - the preconditioner context
3188 
3189   Output Parameter:
3190 . flg - boolean indicating whether to use field splits defined by the `DM`
3191 
3192   Level: intermediate
3193 
3194   Developer Note:
3195   The name should be `PCFieldSplitGetUseDMSplits()`
3196 
3197 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
3198 @*/
3199 PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg)
3200 {
3201   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3202   PetscBool      isfs;
3203 
3204   PetscFunctionBegin;
3205   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3206   PetscAssertPointer(flg, 2);
3207   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3208   if (isfs) {
3209     if (flg) *flg = jac->dm_splits;
3210   }
3211   PetscFunctionReturn(PETSC_SUCCESS);
3212 }
3213 
3214 /*@
3215   PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3216 
3217   Logically Collective
3218 
3219   Input Parameter:
3220 . pc - the preconditioner context
3221 
3222   Output Parameter:
3223 . flg - boolean indicating whether to detect fields or not
3224 
3225   Level: intermediate
3226 
3227 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()`
3228 @*/
3229 PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg)
3230 {
3231   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3232 
3233   PetscFunctionBegin;
3234   *flg = jac->detect;
3235   PetscFunctionReturn(PETSC_SUCCESS);
3236 }
3237 
3238 /*@
3239   PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3240 
3241   Logically Collective
3242 
3243   Input Parameter:
3244 . pc - the preconditioner context
3245 
3246   Output Parameter:
3247 . flg - boolean indicating whether to detect fields or not
3248 
3249   Options Database Key:
3250 . -pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point
3251 
3252   Level: intermediate
3253 
3254   Note:
3255   Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`).
3256 
3257 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`
3258 @*/
3259 PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg)
3260 {
3261   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3262 
3263   PetscFunctionBegin;
3264   jac->detect = flg;
3265   if (jac->detect) {
3266     PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
3267     PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
3268   }
3269   PetscFunctionReturn(PETSC_SUCCESS);
3270 }
3271 
3272 /*MC
3273   PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual
3274   collections of variables (that may overlap) called fields or splits. Each field often represents a different continuum variable
3275   represented on a grid, such as velocity, pressure, or temperature.
3276   In the literature these are sometimes called block preconditioners; but should not be confused with `PCBJACOBI`.
3277   See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details.
3278 
3279   Options Database Keys:
3280 +   -pc_fieldsplit_%d_fields <a,b,..>                                                - indicates the fields to be used in the `%d`'th split
3281 .   -pc_fieldsplit_default                                                           - automatically add any fields to additional splits that have not
3282                                                                                        been supplied explicitly by `-pc_fieldsplit_%d_fields`
3283 .   -pc_fieldsplit_block_size <bs>                                                   - size of block that defines fields (i.e. there are bs fields)
3284                                                                                        when the matrix is not of `MatType` `MATNEST`
3285 .   -pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting
3286 .   -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full>                     - default is `a11`; see `PCFieldSplitSetSchurPre()`
3287 .   -pc_fieldsplit_schur_fact_type <diag,lower,upper,full>                           - set factorization type when using `-pc_fieldsplit_type schur`;
3288                                                                                        see `PCFieldSplitSetSchurFactType()`
3289 .   -pc_fieldsplit_dm_splits <true,false> (default is true)                          - Whether to use `DMCreateFieldDecomposition()` for splits
3290 -   -pc_fieldsplit_detect_saddle_point                                               - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver
3291 
3292   Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` .
  The options prefix for the inner solver when using the Golub-Kahan bidiagonalization preconditioner is `-fieldsplit_0_`.
3294   For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields.
3295 
3296   To set options on the solvers for all blocks, prepend `-fieldsplit_` to all the `PC`
3297   options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1`.
3298 
3299   To set the options on the solvers separate for each block call `PCFieldSplitGetSubKSP()`
3300   and set the options directly on the resulting `KSP` object
3301 
3302   Level: intermediate
3303 
3304   Notes:
3305   Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries or with a `MATNEST` and `PCFieldSplitSetIS()`
3306   to define a split by an arbitrary collection of entries.
3307 
3308   If no splits are set, the default is used. If a `DM` is associated with the `PC` and it supports
3309   `DMCreateFieldDecomposition()`, then that is used for the default. Otherwise if the matrix is not `MATNEST`, the splits are defined by entries strided by bs,
3310   beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`,
3311   if this is not called the block size defaults to the blocksize of the second matrix passed
3312   to `KSPSetOperators()`/`PCSetOperators()`.
3313 
3314   For the Schur complement preconditioner if
3315   ```{math}
3316     J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right]
3317   ```
3318 
3319   the preconditioner using `full` factorization is logically
3320   ```{math}
3321     \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) A_{01} \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{ksp}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right]
  ```
3323   where the action of $\text{ksp}(A_{00})$ is applied using the `KSP` solver with prefix `-fieldsplit_0_`.  $S$ is the Schur complement
3324   ```{math}
3325      S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01}
3326   ```
3327   which is usually dense and not stored explicitly.  The action of $\text{ksp}(S)$ is computed using the `KSP` solver with prefix `-fieldsplit_splitname_` (where `splitname`
3328   was given in providing the SECOND split or 1 if not given). Accordingly, if using `PCFieldSplitGetSubKSP()`, the array of sub-`KSP` contexts will hold two `KSP`s: at its
3329   0th index, the `KSP` associated with `-fieldsplit_0_`, and at its 1st index, the `KSP` corresponding to `-fieldsplit_1_`.
3330   By default, $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$.
3331 
3332   The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above,
3333   `diag` gives
3334   ```{math}
3335     \left[\begin{array}{cc} \text{ksp}(A_{00}) & 0 \\  0 & -\text{ksp}(S) \end{array}\right]
3336   ```
3337   Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$  so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip
3338   can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of
3339   ```{math}
3340     \left[\begin{array}{cc} A_{00} & 0 \\  A_{10} & S \end{array}\right]
3341   ```
3342   where the inverses of $A_{00}$ and $S$ are applied using `KSP`s. The upper factorization is the inverse of
3343   ```{math}
3344     \left[\begin{array}{cc} A_{00} & A_{01} \\  0 & S \end{array}\right]
3345   ```
3346   where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s.
3347 
3348   If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS`
3349   is used automatically for a second submatrix.
3350 
3351   The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1.
3352   Generally it should be used with the `MATAIJ` or `MATNEST` `MatType`
3353 
3354   The forms of these preconditioners are closely related, if not identical, to forms derived as "Distributive Iterations", see,
3355   for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`wesseling2009`.
3356   One can also use `PCFIELDSPLIT` inside a smoother resulting in "Distributive Smoothers".
3357 
3358   See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`.
3359 
3360   The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the
3361   residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables.
3362 
3363   The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape
3364   ```{math}
3365     \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right]
3366   ```
  with $A_{00}$ positive semi-definite. The implementation follows {cite}`arioli2013`. Therein, we choose $N := 1/\nu * I$ and the $(1,1)$-block of the matrix is modified to $H = A_{00} + \nu A_{01} A_{01}'$.
3368   A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`.
3369 
3370   Some `PCFIELDSPLIT` variants are called physics-based preconditioners, since the preconditioner takes into account the underlying physics of the
3371   problem. But this nomenclature is not well-defined.
3372 
3373   Developer Note:
3374   The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their
3375   user API.
3376 
3377 .seealso: [](sec_block_matrices), `PC`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`,
3378           `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`,
3379           `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`,
3380           `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()`
3381 M*/
3382 
3383 PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc)
3384 {
3385   PC_FieldSplit *jac;
3386 
3387   PetscFunctionBegin;
3388   PetscCall(PetscNew(&jac));
3389 
3390   jac->bs                 = -1;
3391   jac->type               = PC_COMPOSITE_MULTIPLICATIVE;
3392   jac->schurpre           = PC_FIELDSPLIT_SCHUR_PRE_USER; /* Try user preconditioner first, fall back on diagonal */
3393   jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL;
3394   jac->schurscale         = -1.0;
3395   jac->dm_splits          = PETSC_TRUE;
3396   jac->gkbtol             = 1e-5;
3397   jac->gkbdelay           = 5;
3398   jac->gkbnu              = 1;
3399   jac->gkbmaxit           = 100;
3400 
3401   pc->data = (void *)jac;
3402 
3403   pc->ops->setup           = PCSetUp_FieldSplit;
3404   pc->ops->reset           = PCReset_FieldSplit;
3405   pc->ops->destroy         = PCDestroy_FieldSplit;
3406   pc->ops->setfromoptions  = PCSetFromOptions_FieldSplit;
3407   pc->ops->applyrichardson = NULL;
3408 
3409   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit));
3410   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit));
3411   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit));
3412   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit));
3413   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit));
3414   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit));
3415   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit));
3416 
3417   /* Initialize function pointers */
3418   PetscCall(PCFieldSplitSetType(pc, jac->type));
3419   PetscFunctionReturn(PETSC_SUCCESS);
3420 }
3421