xref: /petsc/src/ksp/pc/impls/fieldsplit/fieldsplit.c (revision 97276fddab9e156c6aa2d4453777ea460476b6f0)
1 #include <petsc/private/pcimpl.h>  /*I "petscpc.h" I*/
2 #include <petsc/private/kspimpl.h> /*  This is needed to provide the appropriate PETSC_EXTERN for KSP_Solve_FS ....*/
3 #include <petscdm.h>
4 #include <petscdevice.h>
5 #if PetscDefined(HAVE_CUDA)
6   #include <petscdevice_cuda.h>
7 #endif
8 #if PetscDefined(HAVE_HIP)
9   #include <petscdevice_hip.h>
10 #endif
11 
12 const char *const PCFieldSplitSchurPreTypes[]  = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL};
13 const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL};
14 
15 PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4;
16 
typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
/* One entry in the linked list of splits; each split owns its solver, index
   sets, and work vectors. */
struct _PC_FieldSplitLink {
  KSP               ksp;                 /* solver for this split's block */
  Vec               x, y, z;             /* work vectors; presumably split-local rhs/solution/scratch — confirm in the apply routines (not in view) */
  char             *splitname;           /* name of the split, used for options prefixes and viewing */
  PetscInt          nfields;             /* number of entries in fields[]/fields_col[] */
  PetscInt         *fields, *fields_col; /* row and column field indices within a block (NULL when split is defined by an IS) */
  VecScatter        sctx;                /* scatter between the full vector and this split; usage not shown in this chunk */
  IS                is, is_col;          /* row/column index sets selecting this split from the full matrix */
  PC_FieldSplitLink next, previous;      /* doubly-linked list of splits, head stored in PC_FieldSplit */
  PetscLogEvent     event;               /* per-split logging event; presumably registered at split creation — confirm */

  /* Used only when setting coordinates with PCSetCoordinates */
  PetscInt   dim;    /* spatial dimension of the coordinates */
  PetscInt   ndofs;  /* number of dofs with coordinates in this split */
  PetscReal *coords; /* coordinate array for this split's dofs */
};
34 
/* Private data for PCFIELDSPLIT; holds the list of splits plus state specific
   to the Schur-complement and Golub-Kahan (GKB) composition types. */
typedef struct {
  PCCompositeType type;
  PetscBool       defaultsplit; /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */
  PetscBool       splitdefined; /* Flag is set after the splits have been defined, to prevent more splits from being added */
  PetscInt        bs;           /* Block size for IS and Mat structures */
  PetscInt        nsplits;      /* Number of field divisions defined */
  Vec            *x, *y, w1, w2; /* work vectors; presumably per-split (x, y) and global scratch (w1, w2) — confirm in apply routines (not shown here) */
  Mat            *mat;    /* The diagonal block for each split */
  Mat            *pmat;   /* The preconditioning diagonal block for each split */
  Mat            *Afield; /* The rows of the matrix associated with each split */
  PetscBool       issetup; /* set by PCSetUp_FieldSplit() once index sets/submatrices have been created */

  /* Only used when Schur complement preconditioning is used */
  Mat                       B;          /* The (0,1) block */
  Mat                       C;          /* The (1,0) block */
  Mat                       schur;      /* The Schur complement S = A11 - A10 A00^{-1} A01, the KSP here, kspinner, is H_1 in [El08] */
  Mat                       schurp;     /* Assembled approximation to S built by MatSchurComplement to be used as a preconditioning matrix when solving with S */
  Mat                       schur_user; /* User-provided preconditioning matrix for the Schur complement */
  PCFieldSplitSchurPreType  schurpre;   /* Determines which preconditioning matrix is used for the Schur complement */
  PCFieldSplitSchurFactType schurfactorization;
  KSP                       kspschur;   /* The solver for S */
  KSP                       kspupper;   /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */
  PetscScalar               schurscale; /* Scaling factor for the Schur complement solution with DIAG factorization */

  /* Only used when Golub-Kahan bidiagonalization preconditioning is used */
  Mat          H;           /* The modified matrix H = A00 + nu*A01*A01'              */
  PetscReal    gkbtol;      /* Stopping tolerance for lower bound estimate            */
  PetscInt     gkbdelay;    /* The delay window for the stopping criterion            */
  PetscReal    gkbnu;       /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */
  PetscInt     gkbmaxit;    /* Maximum number of iterations for outer loop            */
  PetscBool    gkbmonitor;  /* Monitor for gkb iterations and the lower bound error   */
  PetscViewer  gkbviewer;   /* Viewer context for gkbmonitor                          */
  Vec          u, v, d, Hu; /* Work vectors for the GKB algorithm                     */
  PetscScalar *vecz;        /* Contains intermediate values, eg for lower bound       */

  PC_FieldSplitLink head;             /* first entry of the linked list of splits */
  PetscBool         isrestrict;       /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */
  PetscBool         suboptionsset;    /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */
  PetscBool         dm_splits;        /* Whether to use DMCreateFieldDecomposition() whenever possible */
  PetscBool         diag_use_amat;    /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         detect;           /* Whether to form 2-way split by finding zero diagonal entries */
  PetscBool         coordinates_set;  /* Whether PCSetCoordinates has been called */
} PC_FieldSplit;
79 
80 /*
81     Note:
82     there is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit instead of
83    inside PC_FieldSplitLink, just historical. If you want to be able to add new fields after already using the
84    PC you could change this.
85 */
86 
87 /* This helper is so that setting a user-provided preconditioning matrix is orthogonal to choosing to use it.  This way the
88 * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices. */
89 static Mat FieldSplitSchurPre(PC_FieldSplit *jac)
90 {
91   switch (jac->schurpre) {
92   case PC_FIELDSPLIT_SCHUR_PRE_SELF:
93     return jac->schur;
94   case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
95     return jac->schurp;
96   case PC_FIELDSPLIT_SCHUR_PRE_A11:
97     return jac->pmat[1];
98   case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */
99   case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */
100   default:
101     return jac->schur_user ? jac->schur_user : jac->pmat[1];
102   }
103 }
104 
105 #include <petscdraw.h>
/* Viewer for PCFIELDSPLIT with additive/multiplicative composition: for ASCII
   viewers prints the composition type, each split's field list (or "Defined by
   IS"), and each split's KSP; for draw viewers lays the sub-KSP drawings out
   side by side around the current point. */
static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for each split is in the following KSP objects:\n"));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        /* Split defined by an explicit field list: print comma-separated field indices */
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); /* suppress tabbing so the field list stays on one line */
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      PetscCall(KSPView(ilink->ksp, viewer));
      ilink = ilink->next;
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    /* Center the nsplits sub-drawings horizontally around the current point,
       each wd wide; a viewer is ASCII or draw, never both, so ilink is still
       at jac->head here. */
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
163 
/* Viewer for PCFIELDSPLIT with Schur-complement composition: reports the
   factorization type, which matrix preconditions the Schur complement, the
   split definitions, and the A00 / upper-A00 / Schur KSPs (ASCII), or draws a
   boxed summary plus the sub-KSPs (draw viewer). */
static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer)
{
  PC_FieldSplit             *jac = (PC_FieldSplit *)pc->data;
  PetscBool                  iascii, isdraw;
  PetscInt                   i, j;
  PC_FieldSplitLink          ilink = jac->head;
  MatSchurComplementAinvType atype;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    /* Describe where the Schur-complement preconditioning matrix comes from
       (mirrors the selection logic in FieldSplitSchurPre()) */
    switch (jac->schurpre) {
    case PC_FIELDSPLIT_SCHUR_PRE_SELF:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from S itself\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
      /* jac->schur may not exist yet (before setup), hence the guard */
      if (jac->schur) {
        PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype));
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's "))));
      }
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_A11:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_FULL:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from the exact Schur complement\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_USER:
      /* USER without a user matrix falls back to A11, report accordingly */
      if (jac->schur_user) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from user provided matrix\n"));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      }
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre);
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Split info:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      ilink = ilink->next;
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->head) {
      PetscCall(KSPView(jac->head->ksp, viewer));
    } else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    PetscCall(PetscViewerASCIIPopTab(viewer));
    /* A distinct upper solver exists only when kspupper differs from the A00 solver */
    if (jac->head && jac->kspupper != jac->head->ksp) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->kspschur) {
      PetscCall(KSPView(jac->kspschur, viewer));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    }
    PetscCall(PetscViewerASCIIPopTab(viewer));
    PetscCall(PetscViewerASCIIPopTab(viewer));
  } else if (isdraw && jac->head) {
    PetscDraw draw;
    PetscReal x, y, w, wd, h;
    PetscInt  cnt = 2; /* number of sub-KSP drawings: A00 and Schur, plus upper A00 if distinct */
    char      str[32];

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (jac->kspupper != jac->head->ksp) cnt++;
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (cnt + 1);

    /* Boxed header lines: factorization type, then the Schur preconditioner source */
    PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11]));
    } else {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre]));
    }
    PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    x = x - wd * (cnt - 1) / 2.0;

    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->head->ksp, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
    if (jac->kspupper != jac->head->ksp) {
      x += wd;
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(jac->kspupper, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
    }
    x += wd;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->kspschur, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
287 
/* Viewer for PCFIELDSPLIT with Golub-Kahan bidiagonalization composition:
   prints GKB parameters and the solver for H = A00 + nu*A01*A01' (ASCII), or
   draws the sub-KSPs side by side (draw viewer). */
static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));

    PetscCall(PetscViewerASCIIPrintf(viewer, "  Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for H = A00 + nu*A01*A01' matrix:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));

    /* Only split 0 (the one whose block defines H) is reported in ASCII mode */
    if (ilink->fields) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields "));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      for (j = 0; j < ilink->nfields; j++) {
        if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
        PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
      }
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n"));
    }
    PetscCall(KSPView(ilink->ksp, viewer));

    PetscCall(PetscViewerASCIIPopTab(viewer));
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    /* Draw mode shows every split's KSP, laid out horizontally around the current point */
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
348 
349 /* Precondition: jac->bs is set to a meaningful value or MATNEST */
350 static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc)
351 {
352   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
353   PetscInt       bs, i, nfields, *ifields, nfields_col, *ifields_col;
354   PetscBool      flg, flg_col, mnest;
355   char           optionname[128], splitname[8], optionname_col[128];
356 
357   PetscFunctionBegin;
358   PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &mnest));
359   if (mnest) {
360     PetscCall(MatNestGetSize(pc->pmat, &bs, NULL));
361   } else {
362     bs = jac->bs;
363   }
364   PetscCall(PetscMalloc2(bs, &ifields, bs, &ifields_col));
365   for (i = 0, flg = PETSC_TRUE;; i++) {
366     PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
367     PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
368     PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i));
369     nfields     = bs;
370     nfields_col = bs;
371     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
372     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col));
373     if (!flg) break;
374     else if (flg && !flg_col) {
375       PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
376       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields));
377     } else {
378       PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
379       PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match");
380       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col));
381     }
382   }
383   if (i > 0) {
384     /* Makes command-line setting of splits take precedence over setting them in code.
385        Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would
386        create new splits, which would probably not be what the user wanted. */
387     jac->splitdefined = PETSC_TRUE;
388   }
389   PetscCall(PetscFree2(ifields, ifields_col));
390   PetscFunctionReturn(PETSC_SUCCESS);
391 }
392 
/* Determine the splits when the user has not fully specified them: try, in
   order, a DM field decomposition, zero-diagonal detection, off-block-diagonal
   coupling detection, command-line options, and finally one split per block
   component.  If exactly one split was given, its complement becomes split 1.
   Guarantees at least two splits on success. */
static PetscErrorCode PCFieldSplitSetDefaults(PC pc)
{
  PC_FieldSplit    *jac                = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink              = jac->head;
  PetscBool         fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE;
  PetscInt          i;

  PetscFunctionBegin;
  /*
   Kinda messy, but at least this now uses DMCreateFieldDecomposition().
   Should probably be rewritten.
   */
  if (!ilink) {
    /* No splits defined yet: derive them */
    PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL));
    if (pc->dm && jac->dm_splits && !jac->detect && !coupling) {
      /* Use the DM's field decomposition; the user may still regroup fields via options */
      PetscInt  numFields, f, i, j; /* inner i shadows the outer one for the option scan */
      char    **fieldNames;
      IS       *fields;
      DM       *dms;
      DM        subdm[128]; /* NOTE(review): fixed capacity; only numFields <= 128 is checked, assumes the option scan index i also stays below 128 — confirm */
      PetscBool flg;

      PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms));
      /* Allow the user to prescribe the splits */
      for (i = 0, flg = PETSC_TRUE;; i++) {
        PetscInt ifields[128];
        IS       compField;
        char     optionname[128], splitname[8];
        PetscInt nfields = numFields;

        PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
        PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
        if (!flg) break;
        PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields);
        PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i]));
        if (nfields == 1) {
          /* Single field: reuse the DM-provided field name for the split */
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField));
        } else {
          PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
          PetscCall(PCFieldSplitSetIS(pc, splitname, compField));
        }
        PetscCall(ISDestroy(&compField));
        /* Release the per-field names/ISs consumed by this combined split */
        for (j = 0; j < nfields; ++j) {
          f = ifields[j];
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      }
      if (i == 0) {
        /* No option-prescribed splits: one split per DM field */
        for (f = 0; f < numFields; ++f) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f]));
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      } else {
        /* Replace the per-field DMs with the sub-DMs created above */
        for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j));
        PetscCall(PetscFree(dms));
        PetscCall(PetscMalloc1(i, &dms));
        for (j = 0; j < i; ++j) dms[j] = subdm[j];
      }
      PetscCall(PetscFree(fieldNames));
      PetscCall(PetscFree(fields));
      if (dms) {
        /* Attach each split's DM to its KSP (inactive, so the KSP does not build operators from it) */
        PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n"));
        for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) {
          const char *prefix;
          PetscCall(PetscObjectGetOptionsPrefix((PetscObject)ilink->ksp, &prefix));
          PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dms[i], prefix));
          PetscCall(KSPSetDM(ilink->ksp, dms[i]));
          PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE));
          PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0));
          PetscCall(DMDestroy(&dms[i]));
        }
        PetscCall(PetscFree(dms));
      }
    } else {
      /* No usable DM decomposition: fall back to block-size / detection / options based splits */
      if (jac->bs <= 0) {
        if (pc->pmat) {
          PetscCall(MatGetBlockSize(pc->pmat, &jac->bs));
        } else jac->bs = 1;
      }

      if (jac->detect) {
        /* Two-way split: zero-diagonal rows vs the rest (saddle-point detection) */
        IS       zerodiags, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->diag_use_amat) {
          PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags));
        } else {
          PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags));
        }
        PetscCall(ISComplement(zerodiags, nmin, nmax, &rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags));
        PetscCall(ISDestroy(&zerodiags));
        PetscCall(ISDestroy(&rest));
      } else if (coupling) {
        /* Two-way split: rows with off-block-diagonal entries vs all rows */
        IS       coupling, rest; /* shadows the outer PetscBool coupling */
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->offdiag_use_amat) {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling));
        } else {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling));
        }
        PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest));
        PetscCall(ISSetIdentity(rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", coupling));
        PetscCall(ISDestroy(&coupling));
        PetscCall(ISDestroy(&rest));
      } else {
        PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL));
        if (!fieldsplit_default) {
          /* Allow user to set fields from command line,  if bs was known at the time of PCSetFromOptions_FieldSplit()
           then it is set there. This is not ideal because we should only have options set in XXSetFromOptions(). */
          PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
          if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
        }
        if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) {
          /* Last resort: one split per block component (or per MATNEST row block) */
          Mat       M = pc->pmat;
          PetscBool isnest;
          PetscInt  nf;

          PetscCall(PetscInfo(pc, "Using default splitting of fields\n"));
          PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest));
          if (!isnest) {
            M = pc->mat;
            PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest));
          }
          if (!isnest) nf = jac->bs;
          else PetscCall(MatNestGetSize(M, &nf, NULL));
          for (i = 0; i < nf; i++) {
            char splitname[8];

            PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
            PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i));
          }
          jac->defaultsplit = PETSC_TRUE;
        }
      }
    }
  } else if (jac->nsplits == 1) {
    /* One split given: its complement in the row ownership range becomes split "1" */
    IS       is2;
    PetscInt nmin, nmax;

    PetscCheck(ilink->is, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()");
    PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
    PetscCall(ISComplement(ilink->is, nmin, nmax, &is2));
    PetscCall(PCFieldSplitSetIS(pc, "1", is2));
    PetscCall(ISDestroy(&is2));
  }

  PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits);
  PetscFunctionReturn(PETSC_SUCCESS);
}
551 
552 static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu)
553 {
554   Mat       BT, T;
555   PetscReal nrmT, nrmB;
556 
557   PetscFunctionBegin;
558   PetscCall(MatHermitianTranspose(C, MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */
559   PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN));
560   PetscCall(MatNorm(T, NORM_1, &nrmT));
561   PetscCall(MatNorm(B, NORM_1, &nrmB));
562   PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/hermitian, GKB is not applicable.");
563 
564   /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */
565   /* setting N := 1/nu*I in [Ar13].                                                 */
566   PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT));
567   PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_CURRENT, H)); /* H = A01*A01'          */
568   PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN));        /* H = A00 + nu*A01*A01' */
569 
570   PetscCall(MatDestroy(&BT));
571   PetscCall(MatDestroy(&T));
572   PetscFunctionReturn(PETSC_SUCCESS);
573 }
574 
575 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *option[], const char *value[], PetscBool *flg);
576 
577 static PetscErrorCode PCSetUp_FieldSplit(PC pc)
578 {
579   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
580   PC_FieldSplitLink ilink;
581   PetscInt          i, nsplit;
582   PetscBool         sorted, sorted_col, matnest = PETSC_FALSE;
583 
584   PetscFunctionBegin;
585   pc->failedreason = PC_NOERROR;
586   PetscCall(PCFieldSplitSetDefaults(pc));
587   nsplit = jac->nsplits;
588   ilink  = jac->head;
589   if (pc->pmat) PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
590 
591   /* get the matrices for each split */
592   if (!jac->issetup) {
593     PetscInt rstart, rend, nslots, bs;
594 
595     jac->issetup = PETSC_TRUE;
596 
597     /* This is done here instead of in PCFieldSplitSetFields() because may not have matrix at that point */
598     if (jac->defaultsplit || !ilink->is) {
599       if (jac->bs <= 0) jac->bs = nsplit;
600     }
601 
602     /*  MatCreateSubMatrix() for [S]BAIJ matrices can only work if the indices include entire blocks of the matrix */
603     PetscCall(MatGetBlockSize(pc->pmat, &bs));
604     if (bs > 1 && (jac->bs <= bs || jac->bs % bs)) {
605       PetscBool blk;
606 
607       PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &blk, MATBAIJ, MATSBAIJ, MATSEQBAIJ, MATSEQSBAIJ, MATMPIBAIJ, MATMPISBAIJ, NULL));
608       PetscCheck(!blk, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Cannot use MATBAIJ with PCFIELDSPLIT and currently set matrix and PC blocksizes");
609     }
610 
611     if (!matnest) { /* use the matrix blocksize and stride IS to determine the index sets that define the submatrices */
612       bs = jac->bs;
613       PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend));
614       nslots = (rend - rstart) / bs;
615       for (i = 0; i < nsplit; i++) {
616         if (jac->defaultsplit) {
617           PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is));
618           PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
619         } else if (!ilink->is) {
620           if (ilink->nfields > 1) {
621             PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col;
622 
623             PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii));
624             PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj));
625             for (j = 0; j < nslots; j++) {
626               for (k = 0; k < nfields; k++) {
627                 ii[nfields * j + k] = rstart + bs * j + fields[k];
628                 jj[nfields * j + k] = rstart + bs * j + fields_col[k];
629               }
630             }
631             PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is));
632             PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col));
633             PetscCall(ISSetBlockSize(ilink->is, nfields));
634             PetscCall(ISSetBlockSize(ilink->is_col, nfields));
635           } else {
636             PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is));
637             PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col));
638           }
639         }
640         PetscCall(ISSorted(ilink->is, &sorted));
641         if (ilink->is_col) PetscCall(ISSorted(ilink->is_col, &sorted_col));
642         PetscCheck(sorted && sorted_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Fields must be sorted when creating split");
643         ilink = ilink->next;
644       }
645     } else { /* use the IS that define the MATNEST to determine the index sets that define the submatrices */
646       IS      *rowis, *colis, *ises = NULL;
647       PetscInt mis, nis;
648 
649       PetscCall(MatNestGetSize(pc->pmat, &mis, &nis));
650       PetscCall(PetscMalloc2(mis, &rowis, nis, &colis));
651       PetscCall(MatNestGetISs(pc->pmat, rowis, colis));
652       if (!jac->defaultsplit) PetscCall(PetscMalloc1(mis, &ises));
653 
654       for (i = 0; i < nsplit; i++) {
655         if (jac->defaultsplit) {
656           PetscCall(ISDuplicate(rowis[i], &ilink->is));
657           PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
658         } else if (!ilink->is) {
659           if (ilink->nfields > 1) {
660             for (PetscInt j = 0; j < ilink->nfields; j++) ises[j] = rowis[ilink->fields[j]];
661             PetscCall(ISConcatenate(PetscObjectComm((PetscObject)pc), ilink->nfields, ises, &ilink->is));
662           } else {
663             PetscCall(ISDuplicate(rowis[ilink->fields[0]], &ilink->is));
664           }
665           PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
666         }
667         ilink = ilink->next;
668       }
669       PetscCall(PetscFree2(rowis, colis));
670       PetscCall(PetscFree(ises));
671     }
672   }
673 
674   ilink = jac->head;
675   if (!jac->pmat) {
676     Vec xtmp;
677 
678     PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL));
679     PetscCall(PetscMalloc1(nsplit, &jac->pmat));
680     PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y));
681     for (i = 0; i < nsplit; i++) {
682       MatNullSpace sp;
683 
684       /* Check for preconditioning matrix attached to IS */
685       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i]));
686       if (jac->pmat[i]) {
687         PetscCall(PetscObjectReference((PetscObject)jac->pmat[i]));
688         if (jac->type == PC_COMPOSITE_SCHUR) {
689           jac->schur_user = jac->pmat[i];
690 
691           PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
692         }
693       } else {
694         const char *prefix;
695         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i]));
696         PetscCall(MatGetOptionsPrefix(jac->pmat[i], &prefix));
697         if (!prefix) {
698           PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix));
699           PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix));
700         }
701         PetscCall(MatSetFromOptions(jac->pmat[i]));
702         PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view"));
703       }
704       /* create work vectors for each split */
705       PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i]));
706       ilink->x = jac->x[i];
707       ilink->y = jac->y[i];
708       ilink->z = NULL;
709       /* compute scatter contexts needed by multiplicative versions and non-default splits */
710       PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx));
711       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp));
712       if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp));
713       ilink = ilink->next;
714     }
715     PetscCall(VecDestroy(&xtmp));
716   } else {
717     MatReuse      scall;
718     MatNullSpace *nullsp = NULL;
719 
720     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
721       PetscCall(MatGetNullSpaces(nsplit, jac->pmat, &nullsp));
722       for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i]));
723       scall = MAT_INITIAL_MATRIX;
724     } else scall = MAT_REUSE_MATRIX;
725 
726     for (i = 0; i < nsplit; i++) {
727       Mat pmat;
728 
729       /* Check for preconditioning matrix attached to IS */
730       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat));
731       if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i]));
732       ilink = ilink->next;
733     }
734     if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->pmat, &nullsp));
735   }
736   if (jac->diag_use_amat) {
737     ilink = jac->head;
738     if (!jac->mat) {
739       PetscCall(PetscMalloc1(nsplit, &jac->mat));
740       for (i = 0; i < nsplit; i++) {
741         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i]));
742         ilink = ilink->next;
743       }
744     } else {
745       MatReuse      scall;
746       MatNullSpace *nullsp = NULL;
747 
748       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
749         PetscCall(MatGetNullSpaces(nsplit, jac->mat, &nullsp));
750         for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i]));
751         scall = MAT_INITIAL_MATRIX;
752       } else scall = MAT_REUSE_MATRIX;
753 
754       for (i = 0; i < nsplit; i++) {
755         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i]));
756         ilink = ilink->next;
757       }
758       if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->mat, &nullsp));
759     }
760   } else {
761     jac->mat = jac->pmat;
762   }
763 
764   /* Check for null space attached to IS */
765   ilink = jac->head;
766   for (i = 0; i < nsplit; i++) {
767     MatNullSpace sp;
768 
769     PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp));
770     if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp));
771     ilink = ilink->next;
772   }
773 
774   if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) {
775     /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */
776     /* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? */
777     ilink = jac->head;
778     if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) {
779       /* special case need where Afield[0] is not needed and only certain columns of Afield[1] are needed since update is only on those rows of the solution */
780       if (!jac->Afield) {
781         PetscCall(PetscCalloc1(nsplit, &jac->Afield));
782         if (jac->offdiag_use_amat) {
783           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
784         } else {
785           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
786         }
787       } else {
788         MatReuse scall;
789 
790         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
791           PetscCall(MatDestroy(&jac->Afield[1]));
792           scall = MAT_INITIAL_MATRIX;
793         } else scall = MAT_REUSE_MATRIX;
794 
795         if (jac->offdiag_use_amat) {
796           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
797         } else {
798           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
799         }
800       }
801     } else {
802       if (!jac->Afield) {
803         PetscCall(PetscMalloc1(nsplit, &jac->Afield));
804         for (i = 0; i < nsplit; i++) {
805           if (jac->offdiag_use_amat) {
806             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
807           } else {
808             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
809           }
810           ilink = ilink->next;
811         }
812       } else {
813         MatReuse scall;
814         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
815           for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i]));
816           scall = MAT_INITIAL_MATRIX;
817         } else scall = MAT_REUSE_MATRIX;
818 
819         for (i = 0; i < nsplit; i++) {
820           if (jac->offdiag_use_amat) {
821             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i]));
822           } else {
823             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, &jac->Afield[i]));
824           }
825           ilink = ilink->next;
826         }
827       }
828     }
829   }
830 
831   if (jac->type == PC_COMPOSITE_SCHUR) {
832     IS          ccis;
833     PetscBool   isset, isspd;
834     PetscInt    rstart, rend;
835     char        lscname[256];
836     PetscObject LSC_L;
837     PetscBool   set, flg;
838 
839     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields");
840 
841     /* If pc->mat is SPD, don't scale by -1 the Schur complement */
842     if (jac->schurscale == (PetscScalar)-1.0) {
843       PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd));
844       jac->schurscale = (isset && isspd) ? 1.0 : -1.0;
845     }
846 
847     /* When extracting off-diagonal submatrices, we take complements from this range */
848     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
849     PetscCall(PetscObjectTypeCompareAny(jac->offdiag_use_amat ? (PetscObject)pc->mat : (PetscObject)pc->pmat, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
850 
851     if (jac->schur) {
852       KSP      kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper;
853       MatReuse scall;
854 
855       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
856         scall = MAT_INITIAL_MATRIX;
857         PetscCall(MatDestroy(&jac->B));
858         PetscCall(MatDestroy(&jac->C));
859       } else scall = MAT_REUSE_MATRIX;
860 
861       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
862       ilink = jac->head;
863       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
864       if (jac->offdiag_use_amat) {
865         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->B));
866       } else {
867         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->B));
868       }
869       PetscCall(ISDestroy(&ccis));
870       if (!flg) {
871         ilink = ilink->next;
872         PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
873         if (jac->offdiag_use_amat) {
874           PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->C));
875         } else {
876           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->C));
877         }
878         PetscCall(ISDestroy(&ccis));
879       } else {
880         PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &set, &flg));
881         if (set && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
882         else PetscCall(MatCreateTranspose(jac->B, &jac->C));
883       }
884       PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
885       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) {
886         PetscCall(MatDestroy(&jac->schurp));
887         PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
888       } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) {
889         PetscCall(MatDestroy(&jac->schur_user));
890         if (jac->kspupper == jac->head->ksp) {
891           Mat AinvB;
892 
893           PetscCall(PetscObjectQuery((PetscObject)jac->schur, "AinvB", (PetscObject *)&AinvB));
894           PetscCall(MatDestroy(&AinvB));
895           PetscCall(MatCreate(PetscObjectComm((PetscObject)jac->schur), &AinvB));
896           PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", (PetscObject)AinvB));
897         }
898         PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
899       }
900       if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0]));
901       if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0]));
902       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
903     } else {
904       const char  *Dprefix;
905       char         schurprefix[256], schurmatprefix[256];
906       char         schurtestoption[256];
907       MatNullSpace sp;
908       KSP          kspt;
909 
910       /* extract the A01 and A10 matrices */
911       ilink = jac->head;
912       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
913       if (jac->offdiag_use_amat) {
914         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
915       } else {
916         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
917       }
918       PetscCall(ISDestroy(&ccis));
919       ilink = ilink->next;
920       if (!flg) {
921         PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
922         if (jac->offdiag_use_amat) {
923           PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
924         } else {
925           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
926         }
927         PetscCall(ISDestroy(&ccis));
928       } else {
929         PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &set, &flg));
930         if (set && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
931         else PetscCall(MatCreateTranspose(jac->B, &jac->C));
932       }
933       /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */
934       PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur));
935       PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT));
936       PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
937       PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
938       PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix));
939       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt));
940       PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix));
941 
942       /* Note: this is not true in general */
943       PetscCall(MatGetNullSpace(jac->mat[1], &sp));
944       if (sp) PetscCall(MatSetNullSpace(jac->schur, sp));
945 
946       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname));
947       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
948       if (flg) {
949         DM  dmInner;
950         KSP kspInner;
951         PC  pcInner;
952 
953         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
954         PetscCall(KSPReset(kspInner));
955         PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0]));
956         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
957         /* Indent this deeper to emphasize the "inner" nature of this solver. */
958         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2));
959         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2));
960         PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix));
961 
962         /* Set DM for new solver */
963         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
964         PetscCall(KSPSetDM(kspInner, dmInner));
965         PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE));
966 
967         /* Defaults to PCKSP as preconditioner */
968         PetscCall(KSPGetPC(kspInner, &pcInner));
969         PetscCall(PCSetType(pcInner, PCKSP));
970         PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp));
971       } else {
972         /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or
973           * PCFieldSplitSetIS_FieldSplit. We don't want KSPPREONLY because it makes the Schur complement inexact,
974           * preventing Schur complement reduction to be an accurate solve. Usually when an iterative solver is used for
975           * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make
976           * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used
977           * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. */
978         PetscCall(KSPSetType(jac->head->ksp, KSPGMRES));
979         PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp));
980       }
981       PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0]));
982       PetscCall(KSPSetFromOptions(jac->head->ksp));
983       PetscCall(MatSetFromOptions(jac->schur));
984 
985       PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg));
986       if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */
987         KSP kspInner;
988         PC  pcInner;
989 
990         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
991         PetscCall(KSPGetPC(kspInner, &pcInner));
992         PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg));
993         if (flg) {
994           KSP ksp;
995 
996           PetscCall(PCKSPGetKSP(pcInner, &ksp));
997           if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE));
998         }
999       }
1000       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname));
1001       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
1002       if (flg) {
1003         DM dmInner;
1004 
1005         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
1006         PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper));
1007         PetscCall(KSPSetNestLevel(jac->kspupper, pc->kspnestlevel));
1008         PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure));
1009         PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix));
1010         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1));
1011         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1));
1012         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
1013         PetscCall(KSPSetDM(jac->kspupper, dmInner));
1014         PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE));
1015         PetscCall(KSPSetFromOptions(jac->kspupper));
1016         PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0]));
1017         PetscCall(VecDuplicate(jac->head->x, &jac->head->z));
1018       } else {
1019         jac->kspupper = jac->head->ksp;
1020         PetscCall(PetscObjectReference((PetscObject)jac->head->ksp));
1021       }
1022 
1023       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
1024       PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur));
1025       PetscCall(KSPSetNestLevel(jac->kspschur, pc->kspnestlevel));
1026       PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure));
1027       PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1));
1028       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) {
1029         PC pcschur;
1030         PetscCall(KSPGetPC(jac->kspschur, &pcschur));
1031         PetscCall(PCSetType(pcschur, PCNONE));
1032         /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */
1033       } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) {
1034         if (jac->schurfactorization == PC_FIELDSPLIT_SCHUR_FACT_FULL && jac->kspupper == jac->head->ksp) {
1035           Mat AinvB;
1036 
1037           PetscCall(MatCreate(PetscObjectComm((PetscObject)jac->schur), &AinvB));
1038           PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", (PetscObject)AinvB));
1039         }
1040         PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
1041       }
1042       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
1043       PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix));
1044       PetscCall(KSPSetOptionsPrefix(jac->kspschur, Dprefix));
1045       /* propagate DM */
1046       {
1047         DM sdm;
1048         PetscCall(KSPGetDM(jac->head->next->ksp, &sdm));
1049         if (sdm) {
1050           PetscCall(KSPSetDM(jac->kspschur, sdm));
1051           PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE));
1052         }
1053       }
1054       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
1055       /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */
1056       PetscCall(KSPSetFromOptions(jac->kspschur));
1057     }
1058     PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY));
1059     PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY));
1060 
1061     /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */
1062     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname));
1063     PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
1064     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
1065     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", (PetscObject)LSC_L));
1066     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname));
1067     PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
1068     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
1069     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", (PetscObject)LSC_L));
1070   } else if (jac->type == PC_COMPOSITE_GKB) {
1071     IS       ccis;
1072     PetscInt rstart, rend;
1073 
1074     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields");
1075 
1076     ilink = jac->head;
1077 
1078     /* When extracting off-diagonal submatrices, we take complements from this range */
1079     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
1080 
1081     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1082     if (jac->offdiag_use_amat) {
1083       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1084     } else {
1085       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1086     }
1087     PetscCall(ISDestroy(&ccis));
1088     /* Create work vectors for GKB algorithm */
1089     PetscCall(VecDuplicate(ilink->x, &jac->u));
1090     PetscCall(VecDuplicate(ilink->x, &jac->Hu));
1091     PetscCall(VecDuplicate(ilink->x, &jac->w2));
1092     ilink = ilink->next;
1093     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1094     if (jac->offdiag_use_amat) {
1095       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1096     } else {
1097       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1098     }
1099     PetscCall(ISDestroy(&ccis));
1100     /* Create work vectors for GKB algorithm */
1101     PetscCall(VecDuplicate(ilink->x, &jac->v));
1102     PetscCall(VecDuplicate(ilink->x, &jac->d));
1103     PetscCall(VecDuplicate(ilink->x, &jac->w1));
1104     PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu));
1105     PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz));
1106 
1107     ilink = jac->head;
1108     PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H));
1109     if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1110     /* Create gkb_monitor context */
1111     if (jac->gkbmonitor) {
1112       PetscInt tablevel;
1113       PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer));
1114       PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII));
1115       PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel));
1116       PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel));
1117       PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1));
1118     }
1119   } else {
1120     /* set up the individual splits' PCs */
1121     i     = 0;
1122     ilink = jac->head;
1123     while (ilink) {
1124       PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i]));
1125       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
1126       if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1127       i++;
1128       ilink = ilink->next;
1129     }
1130   }
1131 
1132   /* Set coordinates to the sub PC objects whenever these are set */
1133   if (jac->coordinates_set) {
1134     PC pc_coords;
1135     if (jac->type == PC_COMPOSITE_SCHUR) {
1136       // Head is first block.
1137       PetscCall(KSPGetPC(jac->head->ksp, &pc_coords));
1138       PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords));
1139       // Second one is Schur block, but its KSP object is in kspschur.
1140       PetscCall(KSPGetPC(jac->kspschur, &pc_coords));
1141       PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords));
1142     } else if (jac->type == PC_COMPOSITE_GKB) {
1143       PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB Fieldpslit preconditioner\n"));
1144     } else {
1145       ilink = jac->head;
1146       while (ilink) {
1147         PetscCall(KSPGetPC(ilink->ksp, &pc_coords));
1148         PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords));
1149         ilink = ilink->next;
1150       }
1151     }
1152   }
1153 
1154   jac->suboptionsset = PETSC_TRUE;
1155   PetscFunctionReturn(PETSC_SUCCESS);
1156 }
1157 
/* Scatter the global vector xx into the split's work vector, solve with the split's
   KSP (bracketed by the per-split log event), and scatter-ADD the solution back
   into the global vector yy. The || chain short-circuits at the first nonzero
   PetscErrorCode, which becomes the macro's value. NOTE: relies on a variable
   named `pc` being in scope at the expansion site (used by KSPCheckSolve()). */
#define FieldSplitSplitSolveAdd(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \
                    KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE)))
1162 
1163 static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y)
1164 {
1165   PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
1166   PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
1167   KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;
1168   Mat               AinvB = NULL;
1169   PetscInt          N     = -1;
1170 
1171   PetscFunctionBegin;
1172   switch (jac->schurfactorization) {
1173   case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
1174     /* [A00 0; 0 -S], positive definite, suitable for MINRES */
1175     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1176     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1177     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1178     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1179     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1180     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1181     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1182     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1183     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1184     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1185     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1186     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1187     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1188     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1189     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1190     PetscCall(VecScale(ilinkD->y, jac->schurscale));
1191     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1192     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1193     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1194     break;
1195   case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
1196     /* [A00 0; A10 S], suitable for left preconditioning */
1197     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1198     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1199     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1200     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1201     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1202     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1203     PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1204     PetscCall(VecScale(ilinkD->x, -1.));
1205     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1206     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1207     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1208     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1209     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1210     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1211     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1212     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1213     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1214     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1215     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1216     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1217     break;
1218   case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
1219     /* [A00 A01; 0 S], suitable for right preconditioning */
1220     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1221     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1222     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1223     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1224     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1225     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1226     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1227     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1228     PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1229     PetscCall(VecScale(ilinkA->x, -1.));
1230     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1231     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1232     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1233     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1234     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1235     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1236     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1237     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1238     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1239     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1240     break;
1241   case PC_FIELDSPLIT_SCHUR_FACT_FULL:
1242     /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */
1243     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1244     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1245     PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1246     if (kspUpper == kspA) {
1247       PetscCall(PetscObjectQuery((PetscObject)jac->schur, "AinvB", (PetscObject *)&AinvB));
1248       if (AinvB) {
1249         PetscCall(MatGetSize(AinvB, NULL, &N));
1250         if (N == -1) { // first time PCApply_FieldSplit_Schur() is called
1251           Mat                A;
1252           VecType            vtype;
1253           PetscMemType       mtype;
1254           Vec                column, c;
1255           const PetscScalar *read;
1256           PetscScalar       *array;
1257           PetscInt           m, M, N;
1258 
1259           PetscCall(MatGetSize(jac->B, &M, &N));
1260           PetscCall(MatGetLocalSize(jac->B, &m, NULL));
1261           PetscCall(MatGetVecType(jac->B, &vtype));
1262           PetscCall(VecGetArrayReadAndMemType(ilinkA->x, &read, &mtype));
1263           if (PetscMemTypeHost(mtype) || (!PetscDefined(HAVE_CUDA) && !PetscDefined(HAVE_HIP))) {
1264             PetscCall(PetscMalloc1(m * (N + 1), &array));
1265             PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * N, &c));
1266           }
1267 #if PetscDefined(HAVE_CUDA)
1268           else if (PetscMemTypeCUDA(mtype)) {
1269             PetscCallCUDA(cudaMalloc((void **)&array, sizeof(PetscScalar) * m * (N + 1)));
1270             PetscCall(VecCreateMPICUDAWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * N, &c));
1271           }
1272 #endif
1273 #if PetscDefined(HAVE_HIP)
1274           else if (PetscMemTypeHIP(mtype)) {
1275             PetscCallHIP(hipMalloc((void **)&array, sizeof(PetscScalar) * m * (N + 1)));
1276             PetscCall(VecCreateMPIHIPWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * N, &c));
1277           }
1278 #endif
1279           PetscCall(VecRestoreArrayReadAndMemType(ilinkA->x, &read));
1280           PetscCall(MatCreateDenseFromVecType(PetscObjectComm((PetscObject)jac->schur), vtype, m, PETSC_DECIDE, M, N + 1, -1, array, &A)); // number of columns of the Schur complement plus one
1281           PetscCall(MatDenseGetColumnVecWrite(A, N, &column));
1282           PetscCall(VecCopy(ilinkA->x, column));
1283           PetscCall(MatDenseRestoreColumnVecWrite(A, N, &column));
1284           PetscCall(MatHeaderReplace(AinvB, &A));
1285           PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
1286           PetscCall(KSPSetOperators(jac->kspschur, jac->schur, jac->schur_user));
1287           PetscCall(VecCopy(c, ilinkA->y)); // retrieve the solution as the last column of the composed Mat
1288           PetscCall(VecDestroy(&c));
1289         }
1290       }
1291     }
1292     if (!AinvB || N != -1) PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y));
1293     PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y));
1294     PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1295     PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1296     PetscCall(VecScale(ilinkD->x, -1.0));
1297     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1298     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1299 
1300     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1301     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1302     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1303     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1304     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1305     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1306     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1307 
1308     if (kspUpper == kspA) {
1309       if (!AinvB) {
1310         PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y));
1311         PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
1312         PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1313         PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1314         PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1315         PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1316       } else PetscCall(MatMultAdd(AinvB, ilinkD->y, ilinkA->y, ilinkA->y));
1317     } else {
1318       PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1319       PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1320       PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1321       PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1322       PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1323       PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z));
1324       PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z));
1325       PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1326       PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
1327     }
1328     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1329     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1330     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1331   }
1332   PetscFunctionReturn(PETSC_SUCCESS);
1333 }
1334 
1335 static PetscErrorCode PCApplyTranspose_FieldSplit_Schur(PC pc, Vec x, Vec y)
1336 {
1337   PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
1338   PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
1339   KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;
1340 
1341   PetscFunctionBegin;
1342   switch (jac->schurfactorization) {
1343   case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
1344     /* [A00 0; 0 -S], positive definite, suitable for MINRES */
1345     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1346     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1347     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1348     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1349     PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
1350     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1351     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1352     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1353     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1354     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1355     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1356     PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
1357     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1358     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1359     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1360     PetscCall(VecScale(ilinkD->y, jac->schurscale));
1361     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1362     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1363     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1364     break;
1365   case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
1366     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1367     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1368     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1369     PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
1370     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1371     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1372     PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
1373     PetscCall(VecScale(ilinkD->x, -1.));
1374     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1375     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1376     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1377     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1378     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1379     PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
1380     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1381     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1382     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1383     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1384     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1385     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1386     break;
1387   case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
1388     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1389     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1390     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1391     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1392     PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
1393     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1394     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1395     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1396     PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
1397     PetscCall(VecScale(ilinkA->x, -1.));
1398     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1399     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1400     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1401     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1402     PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
1403     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1404     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1405     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1406     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1407     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1408     break;
1409   case PC_FIELDSPLIT_SCHUR_FACT_FULL:
1410     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1411     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1412     PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
1413     PetscCall(KSPSolveTranspose(kspUpper, ilinkA->x, ilinkA->y));
1414     PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->y));
1415     PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
1416     PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
1417     PetscCall(VecScale(ilinkD->x, -1.0));
1418     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1419     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1420 
1421     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1422     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1423     PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
1424     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1425     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1426     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1427     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1428 
1429     if (kspLower == kspA) {
1430       PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->y));
1431       PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
1432       PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1433       PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
1434       PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1435       PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1436     } else {
1437       PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1438       PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
1439       PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1440       PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
1441       PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
1442       PetscCall(KSPSolveTranspose(kspLower, ilinkA->x, ilinkA->z));
1443       PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->z));
1444       PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
1445       PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
1446     }
1447     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1448     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1449     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1450   }
1451   PetscFunctionReturn(PETSC_SUCCESS);
1452 }
1453 
/*
  PCApply_FieldSplit - Applies the fieldsplit preconditioner y <- M^{-1} x for the
  ADDITIVE (block Jacobi), MULTIPLICATIVE (block Gauss-Seidel), and
  SYMMETRIC_MULTIPLICATIVE composition types; other types error out.

  The VecScatterBegin/End pairs are interleaved with solves on purpose so that
  communication can overlap computation - preserve the ordering when editing.
*/
static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head; /* head of the linked list of splits */
  PetscInt          cnt, bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    PetscBool matnest;

    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
    if (jac->defaultsplit && !matnest) {
      /* default interlaced splits on a non-nest matrix: one strided gather/scatter
         handles all splits at once instead of per-split VecScatters */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* general splits: each split gathers its piece, solves, and adds back into y */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) {
    /* two-split Gauss-Seidel specialization: uses jac->Afield[1] to update only the
       second block's residual instead of a full-matrix MatMult */
    PetscCall(VecSet(y, 0.0));
    /* solve on first block for first block variables */
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));

    /* compute the residual only onto second block variables using first block variables */
    PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x));
    ilink = ilink->next;
    PetscCall(VecScale(ilink->x, -1.0));
    /* ADD_VALUES: ilink->x already holds -A10*y1, so this forms x2 - A10*y1 */
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));

    /* solve on second block variables */
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
    /* general Gauss-Seidel sweep; symmetric variant adds a backward sweep afterwards */
    if (!jac->w1) {
      /* lazily create work vectors on first application */
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
    cnt = 1; /* index into jac->Afield for the per-split residual rows */
    while (ilink->next) {
      ilink = ilink->next;
      /* compute the residual only over the part of the vector needed */
      PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x));
      PetscCall(VecScale(ilink->x, -1.0));
      PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
      PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
      PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    }
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* backward sweep over the splits, skipping the last one (just solved above) */
      cnt -= 2;
      while (ilink->previous) {
        ilink = ilink->previous;
        /* compute the residual only over the part of the vector needed */
        PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x));
        PetscCall(VecScale(ilink->x, -1.0));
        PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      }
    }
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1554 
/*
  PCApply_FieldSplit_GKB - Applies the Golub-Kahan bidiagonalization (GKB)
  fieldsplit preconditioner for a 2x2 block system (see the [Ar13] reference
  mentioned in the comments below): an inner iteration that builds bidiagonal
  recurrences with H = A + nu*B*B' and the off-diagonal block B, stopping when a
  delayed lower-bound estimate of the error drops below jac->gkbtol or
  jac->gkbmaxit iterations are reached.

  NOTE(review): each KSPSolve with `ksp` applies H^{-1}; `ksp` is the solver of the
  first split (ilinkA) and is assumed to have been set up with the augmented
  operator H - confirm against the setup routine (outside this chunk).
*/
static PetscErrorCode PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; /* primal (A) and dual (D) splits */
  KSP               ksp = ilinkA->ksp;
  Vec               u, v, Hu, d, work1, work2;
  PetscScalar       alpha, z, nrmz2, *vecz;
  PetscReal         lowbnd, nu, beta;
  PetscInt          j, iterGKB;

  PetscFunctionBegin;
  /* gather both halves of the rhs into the split work vectors */
  PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));

  /* local aliases for the work objects created at setup time */
  u     = jac->u;
  v     = jac->v;
  Hu    = jac->Hu;
  d     = jac->d;
  work1 = jac->w1;
  work2 = jac->w2;
  vecz  = jac->vecz; /* circular-ish window of the last gkbdelay z values */

  /* Change RHS to comply with matrix regularization H = A + nu*B*B' */
  /* Add q = q + nu*B*b */
  if (jac->gkbnu) {
    nu = jac->gkbnu;
    PetscCall(VecScale(ilinkD->x, jac->gkbnu));
    PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */
  } else {
    /* Situation when no augmented Lagrangian is used. Then we set inner  */
    /* matrix N = I in [Ar13], and thus nu = 1.                           */
    nu = 1;
  }

  /* Transform rhs from [q,tilde{b}] to [0,b] */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y));
  PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1));
  PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x        */

  /* First step of algorithm */
  PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c)*/
  KSPCheckDot(ksp, beta);
  beta = PetscSqrtReal(nu) * beta;
  PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c      */
  PetscCall(MatMult(jac->B, v, work2));          /* u = H^{-1}*B*v      */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(KSPSolve(ksp, work2, u));
  PetscCall(KSPCheckSolve(ksp, pc, u));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u      */
  PetscCall(VecDot(Hu, u, &alpha));
  KSPCheckDot(ksp, alpha);
  PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
  alpha = PetscSqrtReal(PetscAbsScalar(alpha)); /* normalize u in the H-inner product */
  PetscCall(VecScale(u, 1.0 / alpha));
  PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* v = nu/beta *c      */

  z       = beta / alpha;
  vecz[1] = z;

  /* Computation of first iterate x(1) and p(1) */
  PetscCall(VecAXPY(ilinkA->y, z, u));
  PetscCall(VecCopy(d, ilinkD->y));
  PetscCall(VecScale(ilinkD->y, -z));

  iterGKB = 1;
  lowbnd  = 2 * jac->gkbtol; /* force at least one pass through the loop */
  if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));

  while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) {
    iterGKB += 1;
    PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */
    PetscCall(VecAXPBY(v, nu, -alpha, work1));
    PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = sqrt(nu)*v'*v      */
    beta = beta / PetscSqrtReal(nu);
    PetscCall(VecScale(v, 1.0 / beta));
    PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */
    PetscCall(MatMult(jac->H, u, Hu));
    PetscCall(VecAXPY(work2, -beta, Hu));
    PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(KSPSolve(ksp, work2, u));
    PetscCall(KSPCheckSolve(ksp, pc, u));
    PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u            */
    PetscCall(VecDot(Hu, u, &alpha));
    KSPCheckDot(ksp, alpha);
    PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
    alpha = PetscSqrtReal(PetscAbsScalar(alpha));
    PetscCall(VecScale(u, 1.0 / alpha));

    z       = -beta / alpha * z; /* z <- beta/alpha*z     */
    vecz[0] = z;

    /* Computation of new iterate x(i+1) and p(i+1) */
    PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */
    PetscCall(VecAXPY(ilinkA->y, z, u));                   /* r = r + z*u          */
    PetscCall(VecAXPY(ilinkD->y, -z, d));                  /* p = p - z*d          */
    PetscCall(MatMult(jac->H, ilinkA->y, Hu));             /* ||u||_H = u'*H*u     */
    PetscCall(VecDot(Hu, ilinkA->y, &nrmz2));

    /* Compute Lower Bound estimate */
    if (iterGKB > jac->gkbdelay) {
      /* relative error estimate from the last gkbdelay z values (delayed stopping test) */
      lowbnd = 0.0;
      for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]);
      lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2));
    }

    /* shift the z history window up by one so vecz[0] is free for the next iterate */
    for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2];
    if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));
  }

  /* scatter both solution halves back into the global vector y */
  PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1677 
/* Scatter xx into split `ilink`, apply KSPSolveTranspose(), and scatter-ADD the result
   back into yy.  Written as a single ||-chained expression (not a do-while macro or a
   function) so the whole sequence yields one PetscErrorCode that can be wrapped in
   PetscCall(); each call short-circuits on the first failure.  Relies on a variable
   `pc` being in scope at the use site for KSPCheckSolve().  Note ilink->y holds the
   gathered input and ilink->x the transpose-solve output here. */
#define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \
                    KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE)))
1682 
/*
  PCApplyTranspose_FieldSplit - Applies the transpose of the fieldsplit preconditioner, y = P^T x

  Additive: every split is solved with KSPSolveTranspose() independently and the results are summed.
  Multiplicative: the splits are visited in the reverse of the forward order (the transpose of the
  forward product); the symmetric-multiplicative variant sweeps forward and then backward.
*/
static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    PetscBool matnest;

    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
    if (jac->defaultsplit && !matnest) {
      /* default split on a strided (non-MATNEST) layout: gather all fields at once, solve each split, scatter back */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* general additive case: scatter x into each split, solve, and add the contributions into y */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else {
    /* work vectors for forming the residual between consecutive split solves; created lazily and
       kept on the PC (freed in PCReset_FieldSplit()) */
    if (!jac->w1) {
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* forward sweep over the splits ... */
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->next) {
        ilink = ilink->next;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); /* w2 = x - A^T y, the current residual */
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
      /* ... then the backward sweep */
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    } else {
      /* transpose of plain multiplicative: apply the splits in reverse order */
      while (ilink->next) { /* get to last entry in linked list */
        ilink = ilink->next;
      }
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1750 
/*
  PCReset_FieldSplit - Frees everything created during PCSetUp_FieldSplit() and by the split
  definitions, returning the PC to its pre-setup state so it can be set up again.
*/
static PetscErrorCode PCReset_FieldSplit(PC pc)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;
  Mat               AinvB;

  PetscFunctionBegin;
  /* walk the linked list of splits, destroying each link and everything it owns */
  while (ilink) {
    PetscCall(KSPDestroy(&ilink->ksp));
    PetscCall(VecDestroy(&ilink->x));
    PetscCall(VecDestroy(&ilink->y));
    PetscCall(VecDestroy(&ilink->z));
    PetscCall(VecScatterDestroy(&ilink->sctx));
    PetscCall(ISDestroy(&ilink->is));
    PetscCall(ISDestroy(&ilink->is_col));
    PetscCall(PetscFree(ilink->splitname));
    PetscCall(PetscFree(ilink->fields));
    PetscCall(PetscFree(ilink->fields_col));
    next = ilink->next;
    PetscCall(PetscFree(ilink));
    ilink = next;
  }
  jac->head = NULL;
  PetscCall(PetscFree2(jac->x, jac->y));
  /* when jac->mat aliases jac->pmat the matrices are destroyed below through pmat; just drop the alias */
  if (jac->mat && jac->mat != jac->pmat) {
    PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat));
  } else if (jac->mat) {
    jac->mat = NULL;
  }
  if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat));
  if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield));
  jac->nsplits = 0;
  PetscCall(VecDestroy(&jac->w1));
  PetscCall(VecDestroy(&jac->w2));
  if (jac->schur) {
    /* NOTE(review): this looks like it drops an extra reference held on the workspace matrix
       composed on the Schur complement before removing the composition — confirm against the
       site where "AinvB" is composed */
    PetscCall(PetscObjectQuery((PetscObject)jac->schur, "AinvB", (PetscObject *)&AinvB));
    PetscCall(MatDestroy(&AinvB));
    PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", NULL));
  }
  PetscCall(MatDestroy(&jac->schur));
  PetscCall(MatDestroy(&jac->schurp));
  PetscCall(MatDestroy(&jac->schur_user));
  PetscCall(KSPDestroy(&jac->kspschur));
  PetscCall(KSPDestroy(&jac->kspupper));
  PetscCall(MatDestroy(&jac->B));
  PetscCall(MatDestroy(&jac->C));
  PetscCall(MatDestroy(&jac->H));
  PetscCall(VecDestroy(&jac->u));
  PetscCall(VecDestroy(&jac->v));
  PetscCall(VecDestroy(&jac->Hu));
  PetscCall(VecDestroy(&jac->d));
  PetscCall(PetscFree(jac->vecz));
  PetscCall(PetscViewerDestroy(&jac->gkbviewer));
  jac->isrestrict = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1807 
1808 static PetscErrorCode PCDestroy_FieldSplit(PC pc)
1809 {
1810   PetscFunctionBegin;
1811   PetscCall(PCReset_FieldSplit(pc));
1812   PetscCall(PetscFree(pc->data));
1813   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
1814   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL));
1815   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL));
1816   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL));
1817   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL));
1818   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL));
1819   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL));
1820   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1821 
1822   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
1823   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
1824   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
1825   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
1826   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1827   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
1828   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
1829   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
1830   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
1831   PetscFunctionReturn(PETSC_SUCCESS);
1832 }
1833 
/* Process runtime options for the PCFIELDSPLIT preconditioner (split definition, composition
   type, and the Schur/GKB sub-options for those composite types) */
static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PetscInt        bs;
  PetscBool       flg;
  PC_FieldSplit  *jac = (PC_FieldSplit *)pc->data;
  PCCompositeType ctype;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options");
  PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL));
  PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg));
  if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs));
  /* the *_use_amat flags default to the PC-wide useAmat setting each time options are processed */
  jac->diag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL));
  jac->offdiag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL));
  PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL));
  PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */
  PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg));
  if (flg) PetscCall(PCFieldSplitSetType(pc, ctype));
  /* Only setup fields once */
  if ((jac->bs > 0) && (jac->nsplits == 0)) {
    /* only allow user to set fields from command line.
       otherwise user can set them in PCFieldSplitSetDefaults() */
    PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
    if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
  }
  if (jac->type == PC_COMPOSITE_SCHUR) {
    /* -pc_fieldsplit_schur_factorization_type is the deprecated spelling of -pc_fieldsplit_schur_fact_type */
    PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg));
    if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n"));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL));
    PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL));
  } else if (jac->type == PC_COMPOSITE_GKB) {
    PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitSetGKBTol", jac->gkbtol, &jac->gkbtol, NULL));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitSetGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL));
    PetscCall(PetscOptionsBoundedReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitSetGKBNu", jac->gkbnu, &jac->gkbnu, NULL, 0.0));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", "Maximum allowed number of iterations", "PCFieldSplitSetGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL));
    PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL));
  }
  /*
    In the initial call to this routine the sub-solver data structures do not exist so we cannot call KSPSetFromOptions() on them yet.
    But after the initial setup of ALL the layers of sub-solvers is completed we do want to call KSPSetFromOptions() on the sub-solvers every time it
    is called on the outer solver in case changes were made in the options database

    But even after PCSetUp_FieldSplit() is called all the options inside the inner levels of sub-solvers may still not have been set thus we only call the KSPSetFromOptions()
    if we know that the entire stack of sub-solvers below this have been complete instantiated, we check this by seeing if any solver iterations are complete.
    Without this extra check test p2p1fetidp_olof_full and others fail with incorrect matrix types.

    There could be a negative side effect of calling the KSPSetFromOptions() below.

    If one captured the PetscObjectState of the options database one could skip these calls if the database has not changed from the previous call
  */
  if (jac->issetup) {
    PC_FieldSplitLink ilink = jac->head;
    if (jac->type == PC_COMPOSITE_SCHUR) {
      if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper));
      if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur));
    }
    while (ilink) {
      if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp));
      ilink = ilink->next;
    }
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
1901 
1902 static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
1903 {
1904   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
1905   PC_FieldSplitLink ilink, next = jac->head;
1906   char              prefix[128];
1907   PetscInt          i;
1908 
1909   PetscFunctionBegin;
1910   if (jac->splitdefined) {
1911     PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
1912     PetscFunctionReturn(PETSC_SUCCESS);
1913   }
1914   for (i = 0; i < n; i++) { PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]); }
1915   PetscCall(PetscNew(&ilink));
1916   if (splitname) {
1917     PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
1918   } else {
1919     PetscCall(PetscMalloc1(3, &ilink->splitname));
1920     PetscCall(PetscSNPrintf(ilink->splitname, 2, "%" PetscInt_FMT, jac->nsplits));
1921   }
1922   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split great than 4 gets logged in the 4th split */
1923   PetscCall(PetscMalloc1(n, &ilink->fields));
1924   PetscCall(PetscArraycpy(ilink->fields, fields, n));
1925   PetscCall(PetscMalloc1(n, &ilink->fields_col));
1926   PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n));
1927 
1928   ilink->nfields = n;
1929   ilink->next    = NULL;
1930   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
1931   PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
1932   PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
1933   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
1934   PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
1935 
1936   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
1937   PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
1938 
1939   if (!next) {
1940     jac->head       = ilink;
1941     ilink->previous = NULL;
1942   } else {
1943     while (next->next) next = next->next;
1944     next->next      = ilink;
1945     ilink->previous = next;
1946   }
1947   jac->nsplits++;
1948   PetscFunctionReturn(PETSC_SUCCESS);
1949 }
1950 
1951 static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1952 {
1953   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1954 
1955   PetscFunctionBegin;
1956   *subksp = NULL;
1957   if (n) *n = 0;
1958   if (jac->type == PC_COMPOSITE_SCHUR) {
1959     PetscInt nn;
1960 
1961     PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()");
1962     PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits);
1963     nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 1 : 0);
1964     PetscCall(PetscMalloc1(nn, subksp));
1965     (*subksp)[0] = jac->head->ksp;
1966     (*subksp)[1] = jac->kspschur;
1967     if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper;
1968     if (n) *n = nn;
1969   }
1970   PetscFunctionReturn(PETSC_SUCCESS);
1971 }
1972 
1973 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp)
1974 {
1975   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1976 
1977   PetscFunctionBegin;
1978   PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()");
1979   PetscCall(PetscMalloc1(jac->nsplits, subksp));
1980   PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp));
1981 
1982   (*subksp)[1] = jac->kspschur;
1983   if (n) *n = jac->nsplits;
1984   PetscFunctionReturn(PETSC_SUCCESS);
1985 }
1986 
1987 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1988 {
1989   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1990   PetscInt          cnt   = 0;
1991   PC_FieldSplitLink ilink = jac->head;
1992 
1993   PetscFunctionBegin;
1994   PetscCall(PetscMalloc1(jac->nsplits, subksp));
1995   while (ilink) {
1996     (*subksp)[cnt++] = ilink->ksp;
1997     ilink            = ilink->next;
1998   }
1999   PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits);
2000   if (n) *n = jac->nsplits;
2001   PetscFunctionReturn(PETSC_SUCCESS);
2002 }
2003 
/*@
  PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`.

  Input Parameters:
+ pc  - the preconditioner context
- isy - the index set that defines the indices to which the fieldsplit is to be restricted

  Level: advanced

  Note:
  Each split's `IS` is replaced by its embedding into `isy`, renumbered to the restricted space;
  nested `PCFIELDSPLIT` sub-preconditioners are restricted recursively.

  Developer Notes:
  It seems the resulting `IS`s will not cover the entire space, so
  how can they define a convergent preconditioner? Needs explaining.

.seealso: [](sec_block_matrices), `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(isy, IS_CLASSID, 2);
  PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2027 
/*
  PCFieldSplitRestrictIS_FieldSplit - Replaces each split's IS (and is_col) by its embedding into
  isy, shifted so the indices are numbered within the restricted space; recurses into any split
  whose sub-PC is itself a PCFIELDSPLIT.
*/
static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;
  PetscInt          localsize, size, sizez, i;
  const PetscInt   *ind, *indz;
  PetscInt         *indc, *indcz;
  PetscBool         flg;

  PetscFunctionBegin;
  PetscCall(ISGetLocalSize(isy, &localsize));
  /* inclusive prefix sum minus the local size = this rank's global starting offset within isy */
  PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy)));
  size -= localsize;
  while (ilink) {
    IS isrl, isr;
    PC subpc;
    /* local positions of this split's indices inside isy */
    PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl));
    PetscCall(ISGetLocalSize(isrl, &localsize));
    PetscCall(PetscMalloc1(localsize, &indc));
    PetscCall(ISGetIndices(isrl, &ind));
    PetscCall(PetscArraycpy(indc, ind, localsize));
    PetscCall(ISRestoreIndices(isrl, &ind));
    PetscCall(ISDestroy(&isrl));
    /* shift local positions by the rank offset to get global indices in the restricted space */
    for (i = 0; i < localsize; i++) *(indc + i) += size;
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr));
    /* isr is created with one reference; take one extra reference for each of ilink->is and
       ilink->is_col, then drop the creation reference at the end */
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is));
    ilink->is = isr;
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is_col));
    ilink->is_col = isr;
    PetscCall(ISDestroy(&isr));
    PetscCall(KSPGetPC(ilink->ksp, &subpc));
    PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg));
    if (flg) {
      /* nested fieldsplit: build the embedding of isy into this split and restrict recursively */
      IS       iszl, isz;
      MPI_Comm comm;
      PetscCall(ISGetLocalSize(ilink->is, &localsize));
      comm = PetscObjectComm((PetscObject)ilink->is);
      PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl));
      PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm));
      sizez -= localsize;
      PetscCall(ISGetLocalSize(iszl, &localsize));
      PetscCall(PetscMalloc1(localsize, &indcz));
      PetscCall(ISGetIndices(iszl, &indz));
      PetscCall(PetscArraycpy(indcz, indz, localsize));
      PetscCall(ISRestoreIndices(iszl, &indz));
      PetscCall(ISDestroy(&iszl));
      for (i = 0; i < localsize; i++) *(indcz + i) += sizez;
      PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz));
      PetscCall(PCFieldSplitRestrictIS(subpc, isz));
      PetscCall(ISDestroy(&isz));
    }
    next  = ilink->next;
    ilink = next;
  }
  jac->isrestrict = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2087 
/*
  PCFieldSplitSetIS_FieldSplit - Appends a new split, defined by an explicit IS, to the linked
  list of splits. The PC takes its own references to is (used for both rows and columns), and the
  new link gets its own KSP (default KSPPREONLY) with options prefix <pcprefix>fieldsplit_<splitname>_.
*/
static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink, next = jac->head;
  char              prefix[128];

  PetscFunctionBegin;
  if (jac->splitdefined) {
    PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(PetscNew(&ilink));
  if (splitname) {
    PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
  } else {
    /* unnamed split: use the split number as the name */
    PetscCall(PetscMalloc1(8, &ilink->splitname));
    PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits));
  }
  ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* any split greater than 4 gets logged in the 4th split's event */
  /* take a reference before destroying the old IS, in case is == ilink->is */
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is));
  ilink->is = is;
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is_col));
  ilink->is_col = is;
  ilink->next   = NULL;
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
  PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
  PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
  PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));

  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
  PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));

  /* append the new link at the tail of the list */
  if (!next) {
    jac->head       = ilink;
    ilink->previous = NULL;
  } else {
    while (next->next) next = next->next;
    next->next      = ilink;
    ilink->previous = next;
  }
  jac->nsplits++;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2134 
2135 /*@
2136   PCFieldSplitSetFields - Sets the fields that define one particular split in `PCFIELDSPLIT`
2137 
2138   Logically Collective
2139 
2140   Input Parameters:
2141 + pc         - the preconditioner context
2142 . splitname  - name of this split, if `NULL` the number of the split is used
2143 . n          - the number of fields in this split
2144 . fields     - the fields in this split
2145 - fields_col - generally the same as `fields`, if it does not match `fields` then the submatrix that is solved for this set of fields comes from an off-diagonal block
2146                of the matrix and `fields_col` provides the column indices for that block
2147 
2148   Options Database Key:
2149 . -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split
2150 
2151   Level: intermediate
2152 
2153   Notes:
2154   Use `PCFieldSplitSetIS()` to set a  general set of indices as a split.
2155 
2156   If the matrix used to construct the preconditioner is `MATNEST` then field i refers to the `is_row[i]` `IS` passed to `MatCreateNest()`.
2157 
2158   If the matrix used to construct the preconditioner is not `MATNEST` then
2159   `PCFieldSplitSetFields()` is for defining fields as strided blocks (based on the block size provided to the matrix with `MatSetBlocksize()` or
2160   to the `PC` with `PCFieldSplitSetBlockSize()`). For example, if the block
2161   size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean
2162   0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x1x3x5x7.. x12x45x78x....
2163   where the numbered entries indicate what is in the split.
2164 
2165   This function is called once per split (it creates a new split each time).  Solve options
2166   for this split will be available under the prefix `-fieldsplit_SPLITNAME_`.
2167 
2168   `PCFieldSplitSetIS()` does not support having a `fields_col` different from `fields`
2169 
2170   Developer Notes:
2171   This routine does not actually create the `IS` representing the split, that is delayed
2172   until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be
2173   available when this routine is called.
2174 
2175 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()`,
2176           `MatSetBlocksize()`, `MatCreateNest()`
2177 @*/
2178 PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt fields[], const PetscInt fields_col[])
2179 {
2180   PetscFunctionBegin;
2181   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2182   PetscAssertPointer(splitname, 2);
2183   PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname);
2184   PetscAssertPointer(fields, 4);
2185   PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col));
2186   PetscFunctionReturn(PETSC_SUCCESS);
2187 }
2188 
2189 /*@
2190   PCFieldSplitSetDiagUseAmat - set flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2191   the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2192 
2193   Logically Collective
2194 
2195   Input Parameters:
2196 + pc  - the preconditioner object
2197 - flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2198 
2199   Options Database Key:
2200 . -pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks
2201 
2202   Level: intermediate
2203 
2204 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT`
2205 @*/
2206 PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg)
2207 {
2208   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2209   PetscBool      isfs;
2210 
2211   PetscFunctionBegin;
2212   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2213   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2214   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2215   jac->diag_use_amat = flg;
2216   PetscFunctionReturn(PETSC_SUCCESS);
2217 }
2218 
2219 /*@
2220   PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2221   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2222 
2223   Logically Collective
2224 
2225   Input Parameter:
2226 . pc - the preconditioner object
2227 
2228   Output Parameter:
2229 . flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2230 
2231   Level: intermediate
2232 
2233 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT`
2234 @*/
2235 PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg)
2236 {
2237   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2238   PetscBool      isfs;
2239 
2240   PetscFunctionBegin;
2241   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2242   PetscAssertPointer(flg, 2);
2243   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2244   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2245   *flg = jac->diag_use_amat;
2246   PetscFunctionReturn(PETSC_SUCCESS);
2247 }
2248 
2249 /*@
2250   PCFieldSplitSetOffDiagUseAmat - set flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2251   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2252 
2253   Logically Collective
2254 
2255   Input Parameters:
2256 + pc  - the preconditioner object
2257 - flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2258 
2259   Options Database Key:
2260 . -pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks
2261 
2262   Level: intermediate
2263 
2264 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT`
2265 @*/
2266 PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg)
2267 {
2268   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2269   PetscBool      isfs;
2270 
2271   PetscFunctionBegin;
2272   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2273   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2274   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2275   jac->offdiag_use_amat = flg;
2276   PetscFunctionReturn(PETSC_SUCCESS);
2277 }
2278 
2279 /*@
2280   PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2281   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2282 
2283   Logically Collective
2284 
2285   Input Parameter:
2286 . pc - the preconditioner object
2287 
2288   Output Parameter:
2289 . flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2290 
2291   Level: intermediate
2292 
2293 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT`
2294 @*/
2295 PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg)
2296 {
2297   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2298   PetscBool      isfs;
2299 
2300   PetscFunctionBegin;
2301   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2302   PetscAssertPointer(flg, 2);
2303   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2304   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2305   *flg = jac->offdiag_use_amat;
2306   PetscFunctionReturn(PETSC_SUCCESS);
2307 }
2308 
/*@
  PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT`

  Logically Collective

  Input Parameters:
+ pc        - the preconditioner context
. splitname - name of this split, if `NULL` the number of the split is used
- is        - the index set that defines the elements in this split

  Level: intermediate

  Notes:
  Use `PCFieldSplitSetFields()`, for splits defined by strided `IS` based on the matrix block size or the `is_rows[]` passed into `MATNEST`

  This function is called once per split (it creates a new split each time).  Solve options
  for this split will be available under the prefix -fieldsplit_SPLITNAME_.

  The `PC` takes its own reference to `is`, so the caller may destroy it after this call.

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetFields()`
@*/
PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (splitname) PetscAssertPointer(splitname, 2);
  PetscValidHeaderSpecific(is, IS_CLASSID, 3);
  PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2338 
2339 /*@
2340   PCFieldSplitGetIS - Retrieves the elements for a split as an `IS`
2341 
2342   Logically Collective
2343 
2344   Input Parameters:
2345 + pc        - the preconditioner context
2346 - splitname - name of this split
2347 
2348   Output Parameter:
2349 . is - the index set that defines the elements in this split, or `NULL` if the split is not found
2350 
2351   Level: intermediate
2352 
2353 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`, `PCFieldSplitGetISByIndex()`
2354 @*/
2355 PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is)
2356 {
2357   PetscFunctionBegin;
2358   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2359   PetscAssertPointer(splitname, 2);
2360   PetscAssertPointer(is, 3);
2361   {
2362     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2363     PC_FieldSplitLink ilink = jac->head;
2364     PetscBool         found;
2365 
2366     *is = NULL;
2367     while (ilink) {
2368       PetscCall(PetscStrcmp(ilink->splitname, splitname, &found));
2369       if (found) {
2370         *is = ilink->is;
2371         break;
2372       }
2373       ilink = ilink->next;
2374     }
2375   }
2376   PetscFunctionReturn(PETSC_SUCCESS);
2377 }
2378 
2379 /*@
2380   PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS`
2381 
2382   Logically Collective
2383 
2384   Input Parameters:
2385 + pc    - the preconditioner context
2386 - index - index of this split
2387 
2388   Output Parameter:
2389 . is - the index set that defines the elements in this split
2390 
2391   Level: intermediate
2392 
.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`
2394 
2395 @*/
2396 PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is)
2397 {
2398   PetscFunctionBegin;
2399   PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index);
2400   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2401   PetscAssertPointer(is, 3);
2402   {
2403     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2404     PC_FieldSplitLink ilink = jac->head;
2405     PetscInt          i     = 0;
2406     PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits);
2407 
2408     while (i < index) {
2409       ilink = ilink->next;
2410       ++i;
2411     }
2412     PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is));
2413   }
2414   PetscFunctionReturn(PETSC_SUCCESS);
2415 }
2416 
2417 /*@
2418   PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the
2419   fieldsplit preconditioner when calling `PCFieldSplitSetFields()`. If not set the matrix block size is used.
2420 
2421   Logically Collective
2422 
2423   Input Parameters:
2424 + pc - the preconditioner context
2425 - bs - the block size
2426 
2427   Level: intermediate
2428 
2429   Note:
2430   If the matrix is a `MATNEST` then the `is_rows[]` passed to `MatCreateNest()` determines the fields.
2431 
2432 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
2433 @*/
PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, bs, 2); /* bs must be the same on every MPI process */
  /* Dispatch to PCFieldSplitSetBlockSize_FieldSplit(); a no-op for non-fieldsplit PCs */
  PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2442 
2443 /*@C
2444   PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits
2445 
2446   Collective
2447 
2448   Input Parameter:
2449 . pc - the preconditioner context
2450 
2451   Output Parameters:
2452 + n      - the number of splits
2453 - subksp - the array of `KSP` contexts
2454 
2455   Level: advanced
2456 
2457   Notes:
2458   After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2459   (not the `KSP`, just the array that contains them).
2460 
2461   You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`.
2462 
2463   If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the
2464   Schur complement and the `KSP` object used to iterate over the Schur complement.
2465   To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`.
2466 
2467   If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the
2468   inner linear system defined by the matrix H in each loop.
2469 
2470   Fortran Notes:
2471   You must pass in a `KSP` array that is large enough to contain all the `KSP`s.
2472   You can call `PCFieldSplitGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
2473   `KSP` array must be.
2474 
2475   Developer Notes:
2476   There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2477 
2478   The Fortran interface could be modernized to return directly the array of values.
2479 
2480 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()`
2481 @*/
PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2); /* n is optional; pass NULL when only the KSP array is wanted */
  /* PetscUseMethod (unlike PetscTryMethod) errors if the method is not composed, i.e. if pc is not a PCFIELDSPLIT */
  PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2490 
2491 /*@C
2492   PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT`
2493 
2494   Collective
2495 
2496   Input Parameter:
2497 . pc - the preconditioner context
2498 
2499   Output Parameters:
2500 + n      - the number of splits
2501 - subksp - the array of `KSP` contexts
2502 
2503   Level: advanced
2504 
2505   Notes:
2506   After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2507   (not the `KSP` just the array that contains them).
2508 
2509   You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`.
2510 
2511   If the fieldsplit type is of type `PC_COMPOSITE_SCHUR`, it returns (in order)
2512 +  1  - the `KSP` used for the (1,1) block
2513 .  2  - the `KSP` used for the Schur complement (not the one used for the interior Schur solver)
2514 -  3  - the `KSP` used for the (1,1) block in the upper triangular factor (if different from that of the (1,1) block).
2515 
2516   It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`.
2517 
2518   Fortran Notes:
2519   You must pass in a `KSP` array that is large enough to contain all the local `KSP`s.
2520   You can call `PCFieldSplitSchurGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
2521   `KSP` array must be.
2522 
2523   Developer Notes:
2524   There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2525 
2526   Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged?
2527 
2528   The Fortran interface should be modernized to return directly the array of values.
2529 
2530 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()`
2531 @*/
PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2); /* n is optional; pass NULL when only the KSP array is wanted */
  /* PetscUseMethod errors if the method is not composed, i.e. if pc is not a PCFIELDSPLIT */
  PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2540 
2541 /*@
2542   PCFieldSplitSetSchurPre -  Indicates from what operator the preconditioner is constructed for the Schur complement.
2543   The default is the A11 matrix.
2544 
2545   Collective
2546 
2547   Input Parameters:
2548 + pc    - the preconditioner context
2549 . ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default),
2550               `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`,
2551               `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL`
2552 - pre   - matrix to use for preconditioning, or `NULL`
2553 
2554   Options Database Keys:
2555 + -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`. See notes for meaning of various arguments
2556 - -fieldsplit_1_pc_type <pctype>                               - the preconditioner algorithm that is used to construct the preconditioner from the operator
2557 
2558   Level: intermediate
2559 
2560   Notes:
2561   If ptype is
2562 +     a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner
2563   matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix
2564 .     self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix:
2565   The only preconditioners that currently work with this symbolic representation matrix object are `PCLSC` and `PCHPDDM`
2566 .     user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument
2567   to this function).
2568 .     selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation $ Sp = A11 - A10 inv(diag(A00)) A01 $
2569   This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be
2570   lumped before extracting the diagonal using the additional option `-fieldsplit_1_mat_schur_complement_ainv_type lump`
2571 -     full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation
2572   computed internally by `PCFIELDSPLIT` (this is expensive)
2573   useful mostly as a test that the Schur complement approach can work for your problem
2574 
2575   When solving a saddle point problem, where the A11 block is identically zero, using `a11` as the ptype only makes sense
2576   with the additional option `-fieldsplit_1_pc_type none`. Usually for saddle point problems one would use a `ptype` of `self` and
2577   `-fieldsplit_1_pc_type lsc` which uses the least squares commutator to compute a preconditioner for the Schur complement.
2578 
2579   Developer Note:
2580   The name of this function and the option `-pc_fieldsplit_schur_precondition` are inconsistent; precondition should be used everywhere.
2581 
2582 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`,
2583           `MatSchurComplementSetAinvType()`, `PCLSC`, `PCFieldSplitSetSchurFactType()`
2584 @*/
PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Dispatch to PCFieldSplitSetSchurPre_FieldSplit(); only composed when the fieldsplit type is SCHUR */
  PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2592 
2593 PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2594 {
2595   return PCFieldSplitSetSchurPre(pc, ptype, pre);
2596 } /* Deprecated name */
2597 
2598 /*@
2599   PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be
2600   preconditioned.  See `PCFieldSplitSetSchurPre()` for details.
2601 
2602   Logically Collective
2603 
2604   Input Parameter:
2605 . pc - the preconditioner context
2606 
2607   Output Parameters:
2608 + ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`
2609 - pre   - matrix to use for preconditioning (with `PC_FIELDSPLIT_SCHUR_PRE_USER`), or `NULL`
2610 
2611   Level: intermediate
2612 
2613 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC`
2614 @*/
PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscUseMethod errors if the method is not composed, i.e. if pc is not a Schur-type PCFIELDSPLIT */
  PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2622 
2623 /*@
2624   PCFieldSplitSchurGetS -  extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately
2625 
2626   Not Collective
2627 
2628   Input Parameter:
2629 . pc - the preconditioner context
2630 
2631   Output Parameter:
2632 . S - the Schur complement matrix
2633 
2634   Level: advanced
2635 
2636   Note:
2637   This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`.
2638 
2639 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`,
2640           `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()`
2641 @*/
2642 PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S)
2643 {
2644   const char    *t;
2645   PetscBool      isfs;
2646   PC_FieldSplit *jac;
2647 
2648   PetscFunctionBegin;
2649   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2650   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2651   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2652   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2653   jac = (PC_FieldSplit *)pc->data;
2654   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2655   if (S) *S = jac->schur;
2656   PetscFunctionReturn(PETSC_SUCCESS);
2657 }
2658 
2659 /*@
2660   PCFieldSplitSchurRestoreS -  returns the `MATSCHURCOMPLEMENT` matrix used by this `PC`
2661 
2662   Not Collective
2663 
2664   Input Parameters:
2665 + pc - the preconditioner context
2666 - S  - the Schur complement matrix
2667 
2668   Level: advanced
2669 
2670 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MatSchurComplement`, `PCFieldSplitSchurGetS()`
2671 @*/
2672 PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S)
2673 {
2674   const char    *t;
2675   PetscBool      isfs;
2676   PC_FieldSplit *jac;
2677 
2678   PetscFunctionBegin;
2679   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2680   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2681   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2682   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2683   jac = (PC_FieldSplit *)pc->data;
2684   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2685   PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten");
2686   PetscFunctionReturn(PETSC_SUCCESS);
2687 }
2688 
2689 static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2690 {
2691   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2692 
2693   PetscFunctionBegin;
2694   jac->schurpre = ptype;
2695   if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) {
2696     PetscCall(MatDestroy(&jac->schur_user));
2697     jac->schur_user = pre;
2698     PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
2699   }
2700   PetscFunctionReturn(PETSC_SUCCESS);
2701 }
2702 
/* Implementation of PCFieldSplitGetSchurPre(): returns the recorded Schur preconditioning
   choice and (borrowed, possibly NULL) user matrix; either output may be NULL. */
static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  if (ptype) *ptype = jac->schurpre;
  if (pre) *pre = jac->schur_user;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2712 
2713 /*@
2714   PCFieldSplitSetSchurFactType -  sets which blocks of the approximate block factorization to retain in the preconditioner {cite}`murphy2000note` and {cite}`ipsen2001note`
2715 
2716   Collective
2717 
2718   Input Parameters:
2719 + pc    - the preconditioner context
2720 - ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default
2721 
2722   Options Database Key:
2723 . -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is `full`
2724 
2725   Level: intermediate
2726 
2727   Notes:
2728   The FULL factorization is
2729 
2730   ```{math}
2731   \left(\begin{array}{cc} A & B \\
2732   C & E \\
2733   \end{array}\right) =
2734   \left(\begin{array}{cc} 1 & 0 \\
2735   C*A^{-1} & I \\
2736   \end{array}\right)
2737   \left(\begin{array}{cc} A & 0 \\
2738   0 & S \\
2739   \end{array}\right)
2740   \left(\begin{array}{cc} I & A^{-1}B \\
2741   0 & I \\
2742   \end{array}\right) = L D U.
2743   ```
2744 
2745   where $ S = E - C*A^{-1}*B $. In practice, the full factorization is applied via block triangular solves with the grouping $L*(D*U)$. UPPER uses $D*U$, LOWER uses $L*D$,
2746   and DIAG is the diagonal part with the sign of $ S $ flipped (because this makes the preconditioner positive definite for many formulations,
  thus allowing the use of `KSPMINRES`). Sign flipping of $ S $ can be turned off with `PCFieldSplitSetSchurScale()`.
2748 
2749   If $A$ and $S$ are solved exactly
2750 +  1 - FULL factorization is a direct solver.
2751 .  2 - The preconditioned operator with LOWER or UPPER has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations.
2752 -  3 -  With DIAG, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations.
2753 
2754   If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner
2755   application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice.
2756 
2757   For symmetric problems in which $A$ is positive definite and $S$ is negative definite, DIAG can be used with `KSPMINRES`.
2758 
2759   A flexible method like `KSPFGMRES` or `KSPGCR`, [](sec_flexibleksp), must be used if the fieldsplit preconditioner is nonlinear (e.g. a few iterations of a Krylov method is used to solve with A or S).
2760 
2761 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`,
2762           [](sec_flexibleksp), `PCFieldSplitSetSchurPre()`
2763 @*/
PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Dispatch to PCFieldSplitSetSchurFactType_FieldSplit(); only composed when the fieldsplit type is SCHUR */
  PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2771 
/* Implementation of PCFieldSplitSetSchurFactType(): records which factors (DIAG/LOWER/UPPER/FULL)
   of the block factorization are retained when the preconditioner is applied. */
static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurfactorization = ftype;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2780 
2781 /*@
2782   PCFieldSplitSetSchurScale -  Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`.
2783 
2784   Collective
2785 
2786   Input Parameters:
2787 + pc    - the preconditioner context
2788 - scale - scaling factor for the Schur complement
2789 
2790   Options Database Key:
2791 . -pc_fieldsplit_schur_scale <scale> - default is -1.0
2792 
2793   Level: intermediate
2794 
2795 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurFactType()`
2796 @*/
PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveScalar(pc, scale, 2); /* scale must be the same on every MPI process */
  /* Dispatch to PCFieldSplitSetSchurScale_FieldSplit(); only composed when the fieldsplit type is SCHUR */
  PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2805 
/* Implementation of PCFieldSplitSetSchurScale(): records the scaling applied to the Schur
   complement block for the DIAG factorization (default -1.0, i.e. sign flip). */
static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurscale = scale;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2814 
2815 /*@C
2816   PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement
2817 
2818   Collective
2819 
2820   Input Parameter:
2821 . pc - the preconditioner context
2822 
2823   Output Parameters:
2824 + A00 - the (0,0) block
2825 . A01 - the (0,1) block
2826 . A10 - the (1,0) block
2827 - A11 - the (1,1) block
2828 
2829   Level: advanced
2830 
2831   Note:
2832   Use `NULL` for any unneeded output arguments
2833 
2834 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()`
2835 @*/
2836 PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11)
2837 {
2838   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2839 
2840   PetscFunctionBegin;
2841   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2842   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach.");
2843   if (A00) *A00 = jac->pmat[0];
2844   if (A01) *A01 = jac->B;
2845   if (A10) *A10 = jac->C;
2846   if (A11) *A11 = jac->pmat[1];
2847   PetscFunctionReturn(PETSC_SUCCESS);
2848 }
2849 
2850 /*@
2851   PCFieldSplitSetGKBTol -  Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2852 
2853   Collective
2854 
2855   Input Parameters:
2856 + pc        - the preconditioner context
2857 - tolerance - the solver tolerance
2858 
2859   Options Database Key:
2860 . -pc_fieldsplit_gkb_tol <tolerance> - default is 1e-5
2861 
2862   Level: intermediate
2863 
2864   Note:
2865   The generalized GKB algorithm {cite}`arioli2013` uses a lower bound estimate of the error in energy norm as stopping criterion.
2866   It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than
2867   this estimate, the stopping criterion is satisfactory in practical cases.
2868 
2869 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()`
2870 @*/
PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, tolerance, 2); /* tolerance must be the same on every MPI process */
  /* Dispatch to PCFieldSplitSetGKBTol_FieldSplit(); only composed when the fieldsplit type is GKB */
  PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2879 
/* Implementation of PCFieldSplitSetGKBTol(): stores the GKB stopping tolerance,
   consumed by the GKB apply routine. */
static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbtol = tolerance;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2888 
2889 /*@
2890   PCFieldSplitSetGKBMaxit -  Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2891 
2892   Collective
2893 
2894   Input Parameters:
2895 + pc    - the preconditioner context
2896 - maxit - the maximum number of iterations
2897 
2898   Options Database Key:
2899 . -pc_fieldsplit_gkb_maxit <maxit> - default is 100
2900 
2901   Level: intermediate
2902 
2903 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()`
2904 @*/
PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, maxit, 2); /* maxit must be the same on every MPI process */
  /* Dispatch to PCFieldSplitSetGKBMaxit_FieldSplit(); only composed when the fieldsplit type is GKB */
  PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2913 
/* Implementation of PCFieldSplitSetGKBMaxit(): stores the GKB iteration cap,
   consumed by the GKB apply routine. */
static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbmaxit = maxit;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2922 
2923 /*@
2924   PCFieldSplitSetGKBDelay -  Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization {cite}`arioli2013` in `PCFIELDSPLIT`
2925   preconditioner.
2926 
2927   Collective
2928 
2929   Input Parameters:
2930 + pc    - the preconditioner context
2931 - delay - the delay window in the lower bound estimate
2932 
2933   Options Database Key:
2934 . -pc_fieldsplit_gkb_delay <delay> - default is 5
2935 
2936   Level: intermediate
2937 
2938   Notes:
2939   The algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. The lower bound of the error $ ||u-u^k||_H $
2940   is expressed as a truncated sum. The error at iteration k can only be measured at iteration (k + `delay`), and thus the algorithm needs
2941   at least (`delay` + 1) iterations to stop.
2942 
2943   For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to {cite}`arioli2013`
2944 
2945 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2946 @*/
PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, delay, 2); /* delay must be the same on every MPI process */
  /* Dispatch to PCFieldSplitSetGKBDelay_FieldSplit(); only composed when the fieldsplit type is GKB */
  PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2955 
/* Implementation of PCFieldSplitSetGKBDelay(): stores the delay window for the GKB
   lower-bound error estimate, consumed by the GKB apply routine. */
static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbdelay = delay;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2964 
2965 /*@
2966   PCFieldSplitSetGKBNu -  Sets the scalar value nu >= 0 in the transformation H = A00 + nu*A01*A01' of the (1,1) block in the
2967   Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2968 
2969   Collective
2970 
2971   Input Parameters:
2972 + pc - the preconditioner context
2973 - nu - the shift parameter
2974 
2975   Options Database Key:
2976 . -pc_fieldsplit_gkb_nu <nu> - default is 1
2977 
2978   Level: intermediate
2979 
2980   Notes:
2981   This shift is in general done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by choosing `nu` sufficiently large. However,
2982   if `nu` is chosen too large, the matrix H might be badly conditioned and the solution of the linear system $Hx = b$ in the inner loop becomes difficult. It is therefore
2983   necessary to find a good balance in between the convergence of the inner and outer loop.
2984 
2985   For `nu` = 0, no shift is done. In this case A00 has to be positive definite. The matrix N in {cite}`arioli2013` is then chosen as identity.
2986 
2987 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2988 @*/
PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, nu, 2); /* nu must be the same on every MPI process */
  /* Dispatch to PCFieldSplitSetGKBNu_FieldSplit(); only composed when the fieldsplit type is GKB.
     Note: the man page requires nu >= 0 but no range check is performed here */
  PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2997 
/* Implementation of PCFieldSplitSetGKBNu(): stores the shift parameter nu used in the
   transformation H = A00 + nu*A01*A01' (see man page), consumed by the GKB setup/apply. */
static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbnu = nu;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3006 
3007 static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type)
3008 {
3009   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3010 
3011   PetscFunctionBegin;
3012   jac->type = type;
3013   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
3014   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
3015   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
3016   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
3017   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
3018   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
3019   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
3020   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
3021   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
3022 
3023   if (type == PC_COMPOSITE_SCHUR) {
3024     pc->ops->apply          = PCApply_FieldSplit_Schur;
3025     pc->ops->applytranspose = PCApplyTranspose_FieldSplit_Schur;
3026     pc->ops->view           = PCView_FieldSplit_Schur;
3027 
3028     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur));
3029     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit));
3030     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit));
3031     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit));
3032     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit));
3033   } else if (type == PC_COMPOSITE_GKB) {
3034     pc->ops->apply = PCApply_FieldSplit_GKB;
3035     pc->ops->view  = PCView_FieldSplit_GKB;
3036 
3037     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
3038     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit));
3039     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit));
3040     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit));
3041     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit));
3042   } else {
3043     pc->ops->apply = PCApply_FieldSplit;
3044     pc->ops->view  = PCView_FieldSplit;
3045 
3046     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
3047   }
3048   PetscFunctionReturn(PETSC_SUCCESS);
3049 }
3050 
/* Implementation of PCFieldSplitSetBlockSize(): records the block size used to interpret
   field indices. The block size may be set once; resetting it to a different value errors. */
static PetscErrorCode PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" PetscInt_FMT, bs);
  /* jac->bs <= 0 means "not yet set"; re-setting to the same value is allowed */
  PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs);
  jac->bs = bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3061 
3062 static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
3063 {
3064   PC_FieldSplit    *jac           = (PC_FieldSplit *)pc->data;
3065   PC_FieldSplitLink ilink_current = jac->head;
3066   IS                is_owned;
3067 
3068   PetscFunctionBegin;
3069   jac->coordinates_set = PETSC_TRUE; // Internal flag
3070   PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, NULL));
3071 
3072   while (ilink_current) {
3073     // For each IS, embed it to get local coords indces
3074     IS              is_coords;
3075     PetscInt        ndofs_block;
3076     const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block
3077 
3078     // Setting drop to true for safety. It should make no difference.
3079     PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords));
3080     PetscCall(ISGetLocalSize(is_coords, &ndofs_block));
3081     PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration));
3082 
3083     // Allocate coordinates vector and set it directly
3084     PetscCall(PetscMalloc1(ndofs_block * dim, &ilink_current->coords));
3085     for (PetscInt dof = 0; dof < ndofs_block; ++dof) {
3086       for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d];
3087     }
3088     ilink_current->dim   = dim;
3089     ilink_current->ndofs = ndofs_block;
3090     PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration));
3091     PetscCall(ISDestroy(&is_coords));
3092     ilink_current = ilink_current->next;
3093   }
3094   PetscCall(ISDestroy(&is_owned));
3095   PetscFunctionReturn(PETSC_SUCCESS);
3096 }
3097 
/*@
  PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc   - the preconditioner context
- type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`,
         `PC_COMPOSITE_GKB`

  Options Database Key:
. -pc_fieldsplit_type <one of multiplicative, additive, symmetric_multiplicative, special, schur, gkb> - Sets fieldsplit preconditioner type

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
          `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`, `PC_COMPOSITE_GKB`, `PCFieldSplitSetSchurFactType()`
@*/
PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3123 
3124 /*@
3125   PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
3126 
3127   Not collective
3128 
3129   Input Parameter:
3130 . pc - the preconditioner context
3131 
3132   Output Parameter:
3133 . type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
3134 
3135   Level: intermediate
3136 
3137 .seealso: [](sec_block_matrices), `PC`, `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
3138           `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
3139 @*/
3140 PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type)
3141 {
3142   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3143 
3144   PetscFunctionBegin;
3145   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3146   PetscAssertPointer(type, 2);
3147   *type = jac->type;
3148   PetscFunctionReturn(PETSC_SUCCESS);
3149 }
3150 
3151 /*@
3152   PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3153 
3154   Logically Collective
3155 
3156   Input Parameters:
3157 + pc  - the preconditioner context
3158 - flg - boolean indicating whether to use field splits defined by the `DM`
3159 
3160   Options Database Key:
3161 . -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM`
3162 
3163   Level: intermediate
3164 
3165   Developer Note:
3166   The name should be `PCFieldSplitSetUseDMSplits()`, similar change to options database
3167 
3168 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldsplitSetIS()`
3169 @*/
3170 PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg)
3171 {
3172   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3173   PetscBool      isfs;
3174 
3175   PetscFunctionBegin;
3176   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3177   PetscValidLogicalCollectiveBool(pc, flg, 2);
3178   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3179   if (isfs) jac->dm_splits = flg;
3180   PetscFunctionReturn(PETSC_SUCCESS);
3181 }
3182 
3183 /*@
3184   PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3185 
3186   Logically Collective
3187 
3188   Input Parameter:
3189 . pc - the preconditioner context
3190 
3191   Output Parameter:
3192 . flg - boolean indicating whether to use field splits defined by the `DM`
3193 
3194   Level: intermediate
3195 
3196   Developer Note:
3197   The name should be `PCFieldSplitGetUseDMSplits()`
3198 
3199 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldsplitSetIS()`
3200 @*/
3201 PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg)
3202 {
3203   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3204   PetscBool      isfs;
3205 
3206   PetscFunctionBegin;
3207   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3208   PetscAssertPointer(flg, 2);
3209   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3210   if (isfs) {
3211     if (flg) *flg = jac->dm_splits;
3212   }
3213   PetscFunctionReturn(PETSC_SUCCESS);
3214 }
3215 
3216 /*@
3217   PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3218 
3219   Logically Collective
3220 
3221   Input Parameter:
3222 . pc - the preconditioner context
3223 
3224   Output Parameter:
3225 . flg - boolean indicating whether to detect fields or not
3226 
3227   Level: intermediate
3228 
3229 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()`
3230 @*/
3231 PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg)
3232 {
3233   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3234 
3235   PetscFunctionBegin;
3236   *flg = jac->detect;
3237   PetscFunctionReturn(PETSC_SUCCESS);
3238 }
3239 
3240 /*@
3241   PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3242 
3243   Logically Collective
3244 
3245   Input Parameter:
3246 . pc - the preconditioner context
3247 
3248   Output Parameter:
3249 . flg - boolean indicating whether to detect fields or not
3250 
3251   Options Database Key:
3252 . -pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point
3253 
3254   Level: intermediate
3255 
3256   Note:
3257   Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`).
3258 
3259 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`
3260 @*/
3261 PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg)
3262 {
3263   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3264 
3265   PetscFunctionBegin;
3266   jac->detect = flg;
3267   if (jac->detect) {
3268     PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
3269     PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
3270   }
3271   PetscFunctionReturn(PETSC_SUCCESS);
3272 }
3273 
3274 /*MC
3275   PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual
3276   collections of variables (that may overlap) called splits. See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details.
3277 
3278   Options Database Keys:
3279 +   -pc_fieldsplit_%d_fields <a,b,..>                                                - indicates the fields to be used in the `%d`'th split
3280 .   -pc_fieldsplit_default                                                           - automatically add any fields to additional splits that have not
3281                                                                                        been supplied explicitly by `-pc_fieldsplit_%d_fields`
3282 .   -pc_fieldsplit_block_size <bs>                                                   - size of block that defines fields (i.e. there are bs fields)
3283                                                                                        when the matrix is not of `MatType` `MATNEST`
3284 .   -pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting
3285 .   -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full>                     - default is `a11`; see `PCFieldSplitSetSchurPre()`
3286 .   -pc_fieldsplit_schur_fact_type <diag,lower,upper,full>                           - set factorization type when using `-pc_fieldsplit_type schur`;
3287                                                                                        see `PCFieldSplitSetSchurFactType()`
3288 .   -pc_fieldsplit_dm_splits <true,false> (default is true)                          - Whether to use `DMCreateFieldDecomposition()` for splits
3289 -   -pc_fieldsplit_detect_saddle_point                                               - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver
3290 
3291   Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` .
  The options prefix for the inner solver when using the Golub-Kahan bidiagonalization preconditioner is `-fieldsplit_0_`.
3293   For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields.
3294 
3295   To set options on the solvers for each block append `-fieldsplit_` to all the `PC`
3296   options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1`
3297 
3298   To set the options on the solvers separate for each block call `PCFieldSplitGetSubKSP()`
3299   and set the options directly on the resulting `KSP` object
3300 
3301   Level: intermediate
3302 
3303   Notes:
3304   Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries or with a `MATNEST` and `PCFieldSplitSetIS()`
3305   to define a split by an arbitrary collection of entries.
3306 
3307   If no splits are set, the default is used. If a `DM` is associated with the `PC` and it supports
3308   `DMCreateFieldDecomposition()`, then that is used for the default. Otherwise if the matrix is not `MATNEST`, the splits are defined by entries strided by bs,
3309   beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`,
3310   if this is not called the block size defaults to the blocksize of the second matrix passed
3311   to `KSPSetOperators()`/`PCSetOperators()`.
3312 
3313   For the Schur complement preconditioner if
3314   ```{math}
3315     J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right]
3316   ```
3317 
3318   the preconditioner using `full` factorization is logically
3319   ```{math}
3320     \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) A_{01} \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right]
  ```
3322   where the action of $\text{inv}(A_{00})$ is applied using the KSP solver with prefix `-fieldsplit_0_`.  $S$ is the Schur complement
3323   ```{math}
3324      S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01}
3325   ```
3326   which is usually dense and not stored explicitly.  The action of $\text{ksp}(S)$ is computed using the KSP solver with prefix `-fieldsplit_splitname_` (where `splitname` was given
3327   in providing the SECOND split or 1 if not given). For `PCFieldSplitGetSubKSP()` when field number is 0,
3328   it returns the `KSP` associated with `-fieldsplit_0_` while field number 1 gives `-fieldsplit_1_` KSP. By default
3329   $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$.
3330 
3331   The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above,
3332   `diag` gives
3333   ```{math}
3334     \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\  0 & -\text{ksp}(S) \end{array}\right]
3335   ```
3336   Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$  so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip
3337   can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of
3338   ```{math}
3339     \left[\begin{array}{cc} A_{00} & 0 \\  A_{10} & S \end{array}\right]
3340   ```
  where the inverses of $A_{00}$ and $S$ are applied using `KSP`s. The `upper` factorization is the inverse of
3342   ```{math}
3343     \left[\begin{array}{cc} A_{00} & A_{01} \\  0 & S \end{array}\right]
3344   ```
3345   where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s.
3346 
3347   If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS`
3348   is used automatically for a second submatrix.
3349 
3350   The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1.
3351   Generally it should be used with the `MATAIJ` or `MATNEST` `MatType`
3352 
3353   The forms of these preconditioners are closely related, if not identical, to forms derived as "Distributive Iterations", see,
3354   for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`wesseling2009`.
3355   One can also use `PCFIELDSPLIT` inside a smoother resulting in "Distributive Smoothers".
3356 
3357   See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`.
3358 
3359   The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the
3360   residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables.
3361 
3362   The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape
3363   ```{math}
3364     \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right]
3365   ```
  with $A_{00}$ positive semi-definite. The implementation follows {cite}`arioli2013`. Therein, we choose $N := 1/\nu * I$ and the $(1,1)$-block of the matrix is modified to $H = A_{00} + \nu A_{01} A_{01}'$.
3367   A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`.
3368 
3369   Developer Note:
3370   The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their
3371   user API.
3372 
3373 .seealso: [](sec_block_matrices), `PC`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`,
3374           `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`,
3375           `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`,
3376           `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()`
3377 M*/
3378 
3379 PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc)
3380 {
3381   PC_FieldSplit *jac;
3382 
3383   PetscFunctionBegin;
3384   PetscCall(PetscNew(&jac));
3385 
3386   jac->bs                 = -1;
3387   jac->nsplits            = 0;
3388   jac->type               = PC_COMPOSITE_MULTIPLICATIVE;
3389   jac->schurpre           = PC_FIELDSPLIT_SCHUR_PRE_USER; /* Try user preconditioner first, fall back on diagonal */
3390   jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL;
3391   jac->schurscale         = -1.0;
3392   jac->dm_splits          = PETSC_TRUE;
3393   jac->detect             = PETSC_FALSE;
3394   jac->gkbtol             = 1e-5;
3395   jac->gkbdelay           = 5;
3396   jac->gkbnu              = 1;
3397   jac->gkbmaxit           = 100;
3398   jac->gkbmonitor         = PETSC_FALSE;
3399   jac->coordinates_set    = PETSC_FALSE;
3400 
3401   pc->data = (void *)jac;
3402 
3403   pc->ops->apply           = PCApply_FieldSplit;
3404   pc->ops->applytranspose  = PCApplyTranspose_FieldSplit;
3405   pc->ops->setup           = PCSetUp_FieldSplit;
3406   pc->ops->reset           = PCReset_FieldSplit;
3407   pc->ops->destroy         = PCDestroy_FieldSplit;
3408   pc->ops->setfromoptions  = PCSetFromOptions_FieldSplit;
3409   pc->ops->view            = PCView_FieldSplit;
3410   pc->ops->applyrichardson = NULL;
3411 
3412   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit));
3413   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
3414   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit));
3415   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit));
3416   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit));
3417   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit));
3418   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit));
3419   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit));
3420   PetscFunctionReturn(PETSC_SUCCESS);
3421 }
3422