xref: /petsc/src/ksp/pc/impls/fieldsplit/fieldsplit.c (revision 7371636706d0e957dcf5cdcbd64be01474eb50f1)
1 #include <petsc/private/pcimpl.h>  /*I "petscpc.h" I*/
2 #include <petsc/private/kspimpl.h> /*  This is needed to provide the appropriate PETSC_EXTERN for KSP_Solve_FS ....*/
3 #include <petscdm.h>
4 #include <petscdevice.h>
5 #if PetscDefined(HAVE_CUDA)
6   #include <petscdevice_cuda.h>
7 #endif
8 #if PetscDefined(HAVE_HIP)
9   #include <petscdevice_hip.h>
10 #endif
11 
/* Human-readable names for the Schur-complement enums; the trailing entries are the enum
   type name and option prefix used by the options/viewer machinery, terminated by NULL */
const char *const PCFieldSplitSchurPreTypes[]  = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL};
const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL};

/* Logging events for the inner solves performed by the fieldsplit variants */
PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4;
16 
/* One node per split, kept in a doubly-linked list rooted at PC_FieldSplit.head */
typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
struct _PC_FieldSplitLink {
  KSP               ksp;                 /* Solver for this split's diagonal block */
  Vec               x, y, z;             /* Work vectors for this split */
  char             *splitname;           /* Name used for options prefixes and viewing */
  PetscInt          nfields;             /* Number of entries in fields[]/fields_col[] */
  PetscInt         *fields, *fields_col; /* Row and column field indices (strided-split definition) */
  VecScatter        sctx;                /* Scatter between the global vector and this split's vector */
  IS                is, is_col;          /* Row and column index sets defining the split */
  PC_FieldSplitLink next, previous;      /* List links */
  PetscLogEvent     event;               /* Event logged around this split's solve */

  /* Used only when setting coordinates with PCSetCoordinates */
  PetscInt   dim;
  PetscInt   ndofs;
  PetscReal *coords;
};
34 
/* Private data for PCFIELDSPLIT; holds the list of splits plus state for the
   Schur-complement and Golub-Kahan bidiagonalization variants */
typedef struct {
  PCCompositeType type;         /* How the split solves are combined (additive, multiplicative, Schur, ...) */
  PetscBool       defaultsplit; /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */
  PetscBool       splitdefined; /* Flag is set after the splits have been defined, to prevent more splits from being added */
  PetscInt        bs;           /* Block size for IS and Mat structures */
  PetscInt        nsplits;      /* Number of field divisions defined */
  Vec            *x, *y, w1, w2; /* Per-split work vectors (x, y) and scratch vectors (w1, w2) */
  Mat            *mat;    /* The diagonal block for each split */
  Mat            *pmat;   /* The preconditioning diagonal block for each split */
  Mat            *Afield; /* The rows of the matrix associated with each split */
  PetscBool       issetup; /* Set once PCSetUp_FieldSplit() has created the IS/submatrix structures */

  /* Only used when Schur complement preconditioning is used */
  Mat                       B;          /* The (0,1) block */
  Mat                       C;          /* The (1,0) block */
  Mat                       schur;      /* The Schur complement S = A11 - A10 A00^{-1} A01, the KSP here, kspinner, is H_1 in [El08] */
  Mat                       schurp;     /* Assembled approximation to S built by MatSchurComplement to be used as a preconditioning matrix when solving with S */
  Mat                       schur_user; /* User-provided preconditioning matrix for the Schur complement */
  PCFieldSplitSchurPreType  schurpre;   /* Determines which preconditioning matrix is used for the Schur complement */
  PCFieldSplitSchurFactType schurfactorization; /* Which block factorization is applied (DIAG, LOWER, UPPER, FULL) */
  KSP                       kspschur;   /* The solver for S */
  KSP                       kspupper;   /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */
  PetscScalar               schurscale; /* Scaling factor for the Schur complement solution with DIAG factorization */

  /* Only used when Golub-Kahan bidiagonalization preconditioning is used */
  Mat          H;           /* The modified matrix H = A00 + nu*A01*A01'              */
  PetscReal    gkbtol;      /* Stopping tolerance for lower bound estimate            */
  PetscInt     gkbdelay;    /* The delay window for the stopping criterion            */
  PetscReal    gkbnu;       /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */
  PetscInt     gkbmaxit;    /* Maximum number of iterations for outer loop            */
  PetscBool    gkbmonitor;  /* Monitor for gkb iterations and the lower bound error   */
  PetscViewer  gkbviewer;   /* Viewer context for gkbmonitor                          */
  Vec          u, v, d, Hu; /* Work vectors for the GKB algorithm                     */
  PetscScalar *vecz;        /* Contains intermediate values, eg for lower bound       */

  PC_FieldSplitLink head;   /* Head of the linked list of splits */
  PetscBool         isrestrict;       /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */
  PetscBool         suboptionsset;    /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */
  PetscBool         dm_splits;        /* Whether to use DMCreateFieldDecomposition() whenever possible */
  PetscBool         diag_use_amat;    /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         detect;           /* Whether to form 2-way split by finding zero diagonal entries */
  PetscBool         coordinates_set;  /* Whether PCSetCoordinates has been called */
} PC_FieldSplit;
79 
/*
    Note:
    There is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit instead of
    inside PC_FieldSplitLink; the layout is purely historical. If you want to be able to add new fields
    after the PC has already been used, you could change this.
*/
86 
87 /* This helper is so that setting a user-provided preconditioning matrix is orthogonal to choosing to use it.  This way the
88 * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices. */
89 static Mat FieldSplitSchurPre(PC_FieldSplit *jac)
90 {
91   switch (jac->schurpre) {
92   case PC_FIELDSPLIT_SCHUR_PRE_SELF:
93     return jac->schur;
94   case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
95     return jac->schurp;
96   case PC_FIELDSPLIT_SCHUR_PRE_A11:
97     return jac->pmat[1];
98   case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */
99   case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */
100   default:
101     return jac->schur_user ? jac->schur_user : jac->pmat[1];
102   }
103 }
104 
105 #include <petscdraw.h>
/* View the additive/multiplicative fieldsplit configuration: composition type, block size,
   each split's defining fields (or IS), and each split's KSP. Supports ASCII and draw viewers. */
static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  /* A viewer has exactly one type, so at most one of these flags is set */
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for each split is in the following KSP objects:\n"));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        /* Split defined by explicit field numbers: print them comma-separated on one line */
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      PetscCall(KSPView(ilink->ksp, viewer));
      ilink = ilink->next;
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* Lay the split KSP drawings side by side, centered around the current point */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
163 
/* View the Schur-complement fieldsplit configuration: factorization type, which matrix the
   Schur preconditioner is built from, the splits, and the A00/upper/Schur KSP solvers. */
static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer)
{
  PC_FieldSplit             *jac = (PC_FieldSplit *)pc->data;
  PetscBool                  iascii, isdraw;
  PetscInt                   i, j;
  PC_FieldSplitLink          ilink = jac->head;
  MatSchurComplementAinvType atype;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    /* Report which matrix FieldSplitSchurPre() will select for the Schur preconditioner */
    switch (jac->schurpre) {
    case PC_FIELDSPLIT_SCHUR_PRE_SELF:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from S itself\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
      if (jac->schur) {
        PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype));
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's "))));
      }
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_A11:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_FULL:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from the exact Schur complement\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_USER:
      /* USER falls back to A11 when no user matrix has been provided */
      if (jac->schur_user) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from user provided matrix\n"));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      }
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre);
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Split info:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      ilink = ilink->next;
    }
    /* The inner solvers may not exist until PCSetUp has run; print a placeholder if so */
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->head) {
      PetscCall(KSPView(jac->head->ksp, viewer));
    } else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    PetscCall(PetscViewerASCIIPopTab(viewer));
    /* Only shown when a distinct A00 solver is used in the upper triangular factor */
    if (jac->head && jac->kspupper != jac->head->ksp) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->kspschur) {
      PetscCall(KSPView(jac->kspschur, viewer));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    }
    PetscCall(PetscViewerASCIIPopTab(viewer));
    PetscCall(PetscViewerASCIIPopTab(viewer));
  } else if (isdraw && jac->head) {
    PetscDraw draw;
    PetscReal x, y, w, wd, h;
    PetscInt  cnt = 2; /* number of KSPs to draw: A00 and Schur, plus upper A00 if distinct */
    char      str[32];

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (jac->kspupper != jac->head->ksp) cnt++;
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (cnt + 1);

    PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    /* Mirror the ASCII behavior: USER without a user matrix is reported as A11 */
    if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11]));
    } else {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre]));
    }
    PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    x = x - wd * (cnt - 1) / 2.0;

    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->head->ksp, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
    if (jac->kspupper != jac->head->ksp) {
      x += wd;
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(jac->kspupper, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
    }
    x += wd;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->kspschur, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
287 
/* View the Golub-Kahan bidiagonalization fieldsplit configuration: composition info, the GKB
   tolerance/delay/maxit parameters, and the solver for H = A00 + nu*A01*A01'. */
static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));

    PetscCall(PetscViewerASCIIPrintf(viewer, "  Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for H = A00 + nu*A01*A01' matrix:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));

    /* GKB only uses the first split's solver (for H); print how split 0 was defined */
    if (ilink->fields) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields "));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      for (j = 0; j < ilink->nfields; j++) {
        if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
        PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
      }
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n"));
    }
    PetscCall(KSPView(ilink->ksp, viewer));

    PetscCall(PetscViewerASCIIPopTab(viewer));
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* Lay the split KSP drawings side by side, centered around the current point */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
348 
349 /* Precondition: jac->bs is set to a meaningful value or MATNEST */
350 static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc)
351 {
352   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
353   PetscInt       bs, i, nfields, *ifields, nfields_col, *ifields_col;
354   PetscBool      flg, flg_col, mnest;
355   char           optionname[128], splitname[8], optionname_col[128];
356 
357   PetscFunctionBegin;
358   PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &mnest));
359   if (mnest) {
360     PetscCall(MatNestGetSize(pc->pmat, &bs, NULL));
361   } else {
362     bs = jac->bs;
363   }
364   PetscCall(PetscMalloc2(bs, &ifields, bs, &ifields_col));
365   for (i = 0, flg = PETSC_TRUE;; i++) {
366     PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
367     PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
368     PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i));
369     nfields     = bs;
370     nfields_col = bs;
371     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
372     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col));
373     if (!flg) break;
374     else if (flg && !flg_col) {
375       PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
376       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields));
377     } else {
378       PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
379       PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match");
380       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col));
381     }
382   }
383   if (i > 0) {
384     /* Makes command-line setting of splits take precedence over setting them in code.
385        Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would
386        create new splits, which would probably not be what the user wanted. */
387     jac->splitdefined = PETSC_TRUE;
388   }
389   PetscCall(PetscFree2(ifields, ifields_col));
390   PetscFunctionReturn(PETSC_SUCCESS);
391 }
392 
/* Ensure at least two splits are defined before setup, trying in order: a DM field
   decomposition, zero-diagonal detection (-pc_fieldsplit_detect_coupling / detect),
   command-line splits, and finally one split per block of the matrix block size or MATNEST.
   If exactly one split was given by the user, the second is built as its complement. */
static PetscErrorCode PCFieldSplitSetDefaults(PC pc)
{
  PC_FieldSplit    *jac                = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink              = jac->head;
  PetscBool         fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE;
  PetscInt          i;

  PetscFunctionBegin;
  /*
   Kinda messy, but at least this now uses DMCreateFieldDecomposition().
   Should probably be rewritten.
   */
  if (!ilink) {
    /* No splits defined yet: derive them */
    PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL));
    if (pc->dm && jac->dm_splits && !jac->detect && !coupling) {
      /* Case 1: use the DM's field decomposition */
      PetscInt  numFields, f, i, j; /* note: shadows the outer i */
      char    **fieldNames;
      IS       *fields;
      DM       *dms;
      DM        subdm[128];
      PetscBool flg;

      PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms));
      /* Allow the user to prescribe the splits */
      for (i = 0, flg = PETSC_TRUE;; i++) {
        PetscInt ifields[128];
        IS       compField;
        char     optionname[128], splitname[8];
        PetscInt nfields = numFields;

        PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
        PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
        if (!flg) break;
        PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields);
        PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i]));
        if (nfields == 1) {
          /* Single-field split keeps the DM's field name */
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField));
        } else {
          PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
          PetscCall(PCFieldSplitSetIS(pc, splitname, compField));
        }
        PetscCall(ISDestroy(&compField));
        /* The names/ISs of fields consumed by this split are no longer needed */
        for (j = 0; j < nfields; ++j) {
          f = ifields[j];
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      }
      if (i == 0) {
        /* No command-line splits: one split per DM field */
        for (f = 0; f < numFields; ++f) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f]));
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      } else {
        /* Replace the per-field DMs with the sub-DMs built for the prescribed splits */
        for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j));
        PetscCall(PetscFree(dms));
        PetscCall(PetscMalloc1(i, &dms));
        for (j = 0; j < i; ++j) dms[j] = subdm[j];
      }
      PetscCall(PetscFree(fieldNames));
      PetscCall(PetscFree(fields));
      if (dms) {
        /* Attach each split's DM to its KSP (inactive: the KSP does not build operators from it) */
        PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n"));
        for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) {
          const char *prefix;
          PetscCall(PetscObjectGetOptionsPrefix((PetscObject)ilink->ksp, &prefix));
          PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dms[i], prefix));
          PetscCall(KSPSetDM(ilink->ksp, dms[i]));
          PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE));
          PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0));
          PetscCall(DMDestroy(&dms[i]));
        }
        PetscCall(PetscFree(dms));
      }
    } else {
      if (jac->bs <= 0) {
        if (pc->pmat) {
          PetscCall(MatGetBlockSize(pc->pmat, &jac->bs));
        } else jac->bs = 1;
      }

      if (jac->detect) {
        /* Case 2: 2-way split from zero diagonal entries (split 1) and the rest (split 0) */
        IS       zerodiags, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->diag_use_amat) {
          PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags));
        } else {
          PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags));
        }
        PetscCall(ISComplement(zerodiags, nmin, nmax, &rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags));
        PetscCall(ISDestroy(&zerodiags));
        PetscCall(ISDestroy(&rest));
      } else if (coupling) {
        /* Case 3: 2-way split from off-block-diagonal coupling entries (split 1) vs. all rows (split 0) */
        IS       coupling, rest; /* note: shadows the outer PetscBool coupling */
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->offdiag_use_amat) {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling));
        } else {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling));
        }
        PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest));
        PetscCall(ISSetIdentity(rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", coupling));
        PetscCall(ISDestroy(&coupling));
        PetscCall(ISDestroy(&rest));
      } else {
        PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL));
        if (!fieldsplit_default) {
          /* Allow user to set fields from command line,  if bs was known at the time of PCSetFromOptions_FieldSplit()
           then it is set there. This is not ideal because we should only have options set in XXSetFromOptions(). */
          PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
          if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
        }
        if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) {
          /* Case 4 (fallback): one split per block, with the count taken from the
             MATNEST structure (Pmat preferred, then Amat) or the block size */
          Mat       M = pc->pmat;
          PetscBool isnest;
          PetscInt  nf;

          PetscCall(PetscInfo(pc, "Using default splitting of fields\n"));
          PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest));
          if (!isnest) {
            M = pc->mat;
            PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest));
          }
          if (!isnest) nf = jac->bs;
          else PetscCall(MatNestGetSize(M, &nf, NULL));
          for (i = 0; i < nf; i++) {
            char splitname[8];

            PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
            PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i));
          }
          jac->defaultsplit = PETSC_TRUE;
        }
      }
    }
  } else if (jac->nsplits == 1) {
    /* Exactly one user-defined split: build the second as its complement */
    IS       is2;
    PetscInt nmin, nmax;

    PetscCheck(ilink->is, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()");
    PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
    PetscCall(ISComplement(ilink->is, nmin, nmax, &is2));
    PetscCall(PCFieldSplitSetIS(pc, "1", is2));
    PetscCall(ISDestroy(&is2));
  }

  PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits);
  PetscFunctionReturn(PETSC_SUCCESS);
}
551 
552 static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu)
553 {
554   Mat       BT, T;
555   PetscReal nrmT, nrmB;
556 
557   PetscFunctionBegin;
558   PetscCall(MatHermitianTranspose(C, MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */
559   PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN));
560   PetscCall(MatNorm(T, NORM_1, &nrmT));
561   PetscCall(MatNorm(B, NORM_1, &nrmB));
562   PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/hermitian, GKB is not applicable.");
563 
564   /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */
565   /* setting N := 1/nu*I in [Ar13].                                                 */
566   PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT));
567   PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_CURRENT, H)); /* H = A01*A01'          */
568   PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN));        /* H = A00 + nu*A01*A01' */
569 
570   PetscCall(MatDestroy(&BT));
571   PetscCall(MatDestroy(&T));
572   PetscFunctionReturn(PETSC_SUCCESS);
573 }
574 
575 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *option[], const char *value[], PetscBool *flg);
576 
/*
  PCSetUp_FieldSplit - Builds everything PCFIELDSPLIT needs before it can apply:
  the index sets defining each split, the diagonal (and, as needed, off-diagonal)
  submatrices of Amat/Pmat, per-split work vectors and scatters, and the composite
  machinery for the Schur-complement and GKB variants.

  The routine distinguishes a first-time setup (!jac->issetup / !jac->pmat /
  !jac->schur) from a re-setup, where submatrices are either reused in place
  (SAME_NONZERO_PATTERN) or destroyed and re-extracted (DIFFERENT_NONZERO_PATTERN).
*/
static PetscErrorCode PCSetUp_FieldSplit(PC pc)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink;
  PetscInt          i, nsplit;
  PetscBool         sorted, sorted_col, matnest = PETSC_FALSE;

  PetscFunctionBegin;
  pc->failedreason = PC_NOERROR;
  PetscCall(PCFieldSplitSetDefaults(pc));
  nsplit = jac->nsplits;
  ilink  = jac->head;
  if (pc->pmat) PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));

  /* get the matrices for each split */
  if (!jac->issetup) {
    PetscInt rstart, rend, nslots, bs;

    jac->issetup = PETSC_TRUE;

    /* This is done here instead of in PCFieldSplitSetFields() because may not have matrix at that point */
    if (jac->defaultsplit || !ilink->is) {
      if (jac->bs <= 0) jac->bs = nsplit;
    }

    /*  MatCreateSubMatrix() for [S]BAIJ matrices can only work if the indices include entire blocks of the matrix */
    PetscCall(MatGetBlockSize(pc->pmat, &bs));
    if (bs > 1 && (jac->bs <= bs || jac->bs % bs)) {
      PetscBool blk;

      PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &blk, MATBAIJ, MATSBAIJ, MATSEQBAIJ, MATSEQSBAIJ, MATMPIBAIJ, MATMPISBAIJ, NULL));
      PetscCheck(!blk, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Cannot use MATBAIJ with PCFIELDSPLIT and currently set matrix and PC blocksizes");
    }

    if (!matnest) { /* use the matrix blocksize and stride IS to determine the index sets that define the submatrices */
      bs = jac->bs;
      PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend));
      nslots = (rend - rstart) / bs;
      for (i = 0; i < nsplit; i++) {
        if (jac->defaultsplit) {
          /* default split: field i takes every nsplit-th entry starting at rstart+i */
          PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is));
          PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
        } else if (!ilink->is) {
          if (ilink->nfields > 1) {
            /* multiple fields per split: build explicit (general) index lists interleaved by block */
            PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col;

            PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii));
            PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj));
            for (j = 0; j < nslots; j++) {
              for (k = 0; k < nfields; k++) {
                ii[nfields * j + k] = rstart + bs * j + fields[k];
                jj[nfields * j + k] = rstart + bs * j + fields_col[k];
              }
            }
            PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is));
            PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col));
            PetscCall(ISSetBlockSize(ilink->is, nfields));
            PetscCall(ISSetBlockSize(ilink->is_col, nfields));
          } else {
            /* single field: a strided IS suffices */
            PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is));
            PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col));
          }
        }
        PetscCall(ISSorted(ilink->is, &sorted));
        if (ilink->is_col) PetscCall(ISSorted(ilink->is_col, &sorted_col));
        /* NOTE(review): sorted_col is only assigned when ilink->is_col is non-NULL; if a
           user-supplied split has no is_col this check reads an uninitialized value — confirm */
        PetscCheck(sorted && sorted_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Fields must be sorted when creating split");
        ilink = ilink->next;
      }
    } else { /* use the IS that define the MATNEST to determine the index sets that define the submatrices */
      IS      *rowis, *colis, *ises = NULL;
      PetscInt mis, nis;

      PetscCall(MatNestGetSize(pc->pmat, &mis, &nis));
      PetscCall(PetscMalloc2(mis, &rowis, nis, &colis));
      PetscCall(MatNestGetISs(pc->pmat, rowis, colis));
      if (!jac->defaultsplit) PetscCall(PetscMalloc1(mis, &ises));

      for (i = 0; i < nsplit; i++) {
        if (jac->defaultsplit) {
          PetscCall(ISDuplicate(rowis[i], &ilink->is));
          PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
        } else if (!ilink->is) {
          if (ilink->nfields > 1) {
            /* merge the nest ISs of all requested fields into one IS for this split */
            for (PetscInt j = 0; j < ilink->nfields; j++) ises[j] = rowis[ilink->fields[j]];
            PetscCall(ISConcatenate(PetscObjectComm((PetscObject)pc), ilink->nfields, ises, &ilink->is));
          } else {
            PetscCall(ISDuplicate(rowis[ilink->fields[0]], &ilink->is));
          }
          PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
        }
        ilink = ilink->next;
      }
      PetscCall(PetscFree2(rowis, colis));
      PetscCall(PetscFree(ises));
    }
  }

  /* Extract (first setup) or refresh (re-setup) the diagonal blocks of Pmat and the
     per-split work vectors/scatters */
  ilink = jac->head;
  if (!jac->pmat) {
    Vec xtmp;

    PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL));
    PetscCall(PetscMalloc1(nsplit, &jac->pmat));
    PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y));
    for (i = 0; i < nsplit; i++) {
      MatNullSpace sp;

      /* Check for preconditioning matrix attached to IS */
      PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i]));
      if (jac->pmat[i]) {
        PetscCall(PetscObjectReference((PetscObject)jac->pmat[i]));
        if (jac->type == PC_COMPOSITE_SCHUR) {
          jac->schur_user = jac->pmat[i];

          PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
        }
      } else {
        const char *prefix;
        PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i]));
        PetscCall(MatGetOptionsPrefix(jac->pmat[i], &prefix));
        if (!prefix) {
          /* inherit the split KSP's prefix so -fieldsplit_<name>_... options reach the submatrix */
          PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix));
          PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix));
        }
        PetscCall(MatSetFromOptions(jac->pmat[i]));
        PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view"));
      }
      /* create work vectors for each split */
      PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i]));
      ilink->x = jac->x[i];
      ilink->y = jac->y[i];
      ilink->z = NULL;
      /* compute scatter contexts needed by multiplicative versions and non-default splits */
      PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx));
      PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp));
      if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp));
      ilink = ilink->next;
    }
    PetscCall(VecDestroy(&xtmp));
  } else {
    MatReuse      scall;
    MatNullSpace *nullsp = NULL;

    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* preserve attached null spaces across destruction/re-extraction of the blocks */
      PetscCall(MatGetNullSpaces(nsplit, jac->pmat, &nullsp));
      for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i]));
      scall = MAT_INITIAL_MATRIX;
    } else scall = MAT_REUSE_MATRIX;

    for (i = 0; i < nsplit; i++) {
      Mat pmat;

      /* Check for preconditioning matrix attached to IS */
      PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat));
      if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i]));
      ilink = ilink->next;
    }
    if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->pmat, &nullsp));
  }
  /* Optionally extract diagonal blocks of Amat too (-pc_fieldsplit_diag_use_amat);
     otherwise jac->mat simply aliases jac->pmat */
  if (jac->diag_use_amat) {
    ilink = jac->head;
    if (!jac->mat) {
      PetscCall(PetscMalloc1(nsplit, &jac->mat));
      for (i = 0; i < nsplit; i++) {
        PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i]));
        ilink = ilink->next;
      }
    } else {
      MatReuse      scall;
      MatNullSpace *nullsp = NULL;

      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        PetscCall(MatGetNullSpaces(nsplit, jac->mat, &nullsp));
        for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i]));
        scall = MAT_INITIAL_MATRIX;
      } else scall = MAT_REUSE_MATRIX;

      for (i = 0; i < nsplit; i++) {
        PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i]));
        ilink = ilink->next;
      }
      if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->mat, &nullsp));
    }
  } else {
    jac->mat = jac->pmat;
  }

  /* Check for null space attached to IS */
  ilink = jac->head;
  for (i = 0; i < nsplit; i++) {
    MatNullSpace sp;

    PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp));
    if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp));
    ilink = ilink->next;
  }

  if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) {
    /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */
    /* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? */
    ilink = jac->head;
    if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) {
      /* special case need where Afield[0] is not needed and only certain columns of Afield[1] are needed since update is only on those rows of the solution */
      if (!jac->Afield) {
        PetscCall(PetscCalloc1(nsplit, &jac->Afield));
        if (jac->offdiag_use_amat) {
          PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
        } else {
          PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
        }
      } else {
        MatReuse scall;

        if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
          PetscCall(MatDestroy(&jac->Afield[1]));
          scall = MAT_INITIAL_MATRIX;
        } else scall = MAT_REUSE_MATRIX;

        if (jac->offdiag_use_amat) {
          PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
        } else {
          PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
        }
      }
    } else {
      if (!jac->Afield) {
        PetscCall(PetscMalloc1(nsplit, &jac->Afield));
        for (i = 0; i < nsplit; i++) {
          if (jac->offdiag_use_amat) {
            PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
          } else {
            PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
          }
          ilink = ilink->next;
        }
      } else {
        MatReuse scall;
        if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
          for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i]));
          scall = MAT_INITIAL_MATRIX;
        } else scall = MAT_REUSE_MATRIX;

        for (i = 0; i < nsplit; i++) {
          if (jac->offdiag_use_amat) {
            PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i]));
          } else {
            PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, &jac->Afield[i]));
          }
          ilink = ilink->next;
        }
      }
    }
  }

  /* Schur-complement variant: wire up S = A11 - A10 A00^{-1} A01 and its solvers */
  if (jac->type == PC_COMPOSITE_SCHUR) {
    IS          ccis;
    PetscBool   isset, isspd;
    PetscInt    rstart, rend;
    char        lscname[256];
    PetscObject LSC_L;
    PetscBool   set, flg;

    PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields");

    /* If pc->mat is SPD, don't scale by -1 the Schur complement */
    if (jac->schurscale == (PetscScalar)-1.0) {
      PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd));
      jac->schurscale = (isset && isspd) ? 1.0 : -1.0;
    }

    /* When extracting off-diagonal submatrices, we take complements from this range */
    PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
    /* flg: matrix is symmetric-storage (SBAIJ), so A10 cannot be extracted directly */
    PetscCall(PetscObjectTypeCompareAny(jac->offdiag_use_amat ? (PetscObject)pc->mat : (PetscObject)pc->pmat, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));

    if (jac->schur) {
      /* re-setup path: update the existing Schur complement in place */
      KSP      kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper;
      MatReuse scall;

      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        scall = MAT_INITIAL_MATRIX;
        PetscCall(MatDestroy(&jac->B));
        PetscCall(MatDestroy(&jac->C));
      } else scall = MAT_REUSE_MATRIX;

      PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
      ilink = jac->head;
      PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
      if (jac->offdiag_use_amat) {
        PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->B));
      } else {
        PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->B));
      }
      PetscCall(ISDestroy(&ccis));
      if (!flg) {
        ilink = ilink->next;
        PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
        if (jac->offdiag_use_amat) {
          PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->C));
        } else {
          PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->C));
        }
        PetscCall(ISDestroy(&ccis));
      } else {
        /* symmetric storage: A10 is (Hermitian-)transpose of A01 */
        PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &set, &flg));
        if (set && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
        else PetscCall(MatCreateTranspose(jac->B, &jac->C));
      }
      PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
      if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) {
        PetscCall(MatDestroy(&jac->schurp));
        PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
      } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) {
        PetscCall(MatDestroy(&jac->schur_user));
        if (jac->kspupper == jac->head->ksp) {
          Mat AinvB;

          /* placeholder composed object signals that A00^{-1} A01 should be cached */
          PetscCall(MatCreate(PetscObjectComm((PetscObject)jac->schur), &AinvB));
          PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", (PetscObject)AinvB));
          PetscCall(MatDestroy(&AinvB));
        }
        PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
      }
      if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0]));
      if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0]));
      PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
    } else {
      /* first-time setup: create the Schur complement matrix and its solvers */
      const char  *Dprefix;
      char         schurprefix[256], schurmatprefix[256];
      char         schurtestoption[256];
      MatNullSpace sp;
      KSP          kspt;

      /* extract the A01 and A10 matrices */
      ilink = jac->head;
      PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
      if (jac->offdiag_use_amat) {
        PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
      } else {
        PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
      }
      PetscCall(ISDestroy(&ccis));
      ilink = ilink->next;
      if (!flg) {
        PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
        if (jac->offdiag_use_amat) {
          PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
        } else {
          PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
        }
        PetscCall(ISDestroy(&ccis));
      } else {
        PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &set, &flg));
        if (set && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
        else PetscCall(MatCreateTranspose(jac->B, &jac->C));
      }
      /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */
      PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur));
      PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT));
      PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
      PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
      PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix));
      PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt));
      PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix));

      /* Note: this is not true in general */
      PetscCall(MatGetNullSpace(jac->mat[1], &sp));
      if (sp) PetscCall(MatSetNullSpace(jac->schur, sp));

      /* probe the options database: did the user request a dedicated "inner" solver? */
      PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname));
      PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
      if (flg) {
        DM  dmInner;
        KSP kspInner;
        PC  pcInner;

        PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
        PetscCall(KSPReset(kspInner));
        PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0]));
        PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
        /* Indent this deeper to emphasize the "inner" nature of this solver. */
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2));
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2));
        PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix));

        /* Set DM for new solver */
        PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
        PetscCall(KSPSetDM(kspInner, dmInner));
        PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE));

        /* Defaults to PCKSP as preconditioner */
        PetscCall(KSPGetPC(kspInner, &pcInner));
        PetscCall(PCSetType(pcInner, PCKSP));
        PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp));
      } else {
        /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or
          * PCFieldSplitSetIS_FieldSplit. We don't want KSPPREONLY because it makes the Schur complement inexact,
          * preventing Schur complement reduction to be an accurate solve. Usually when an iterative solver is used for
          * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make
          * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used
          * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. */
        PetscCall(KSPSetType(jac->head->ksp, KSPGMRES));
        PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp));
      }
      PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0]));
      PetscCall(KSPSetFromOptions(jac->head->ksp));
      PetscCall(MatSetFromOptions(jac->schur));

      PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg));
      if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */
        KSP kspInner;
        PC  pcInner;

        PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
        PetscCall(KSPGetPC(kspInner, &pcInner));
        PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg));
        if (flg) {
          KSP ksp;

          PetscCall(PCKSPGetKSP(pcInner, &ksp));
          if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE));
        }
      }
      /* probe the options database: did the user request a dedicated "upper" solver? */
      PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname));
      PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
      if (flg) {
        DM dmInner;

        PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
        PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper));
        PetscCall(KSPSetNestLevel(jac->kspupper, pc->kspnestlevel));
        PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure));
        PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix));
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1));
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1));
        PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
        PetscCall(KSPSetDM(jac->kspupper, dmInner));
        PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE));
        PetscCall(KSPSetFromOptions(jac->kspupper));
        PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0]));
        PetscCall(VecDuplicate(jac->head->x, &jac->head->z));
      } else {
        /* no dedicated upper solver: reuse the A00 solver */
        jac->kspupper = jac->head->ksp;
        PetscCall(PetscObjectReference((PetscObject)jac->head->ksp));
      }

      if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
      PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur));
      PetscCall(KSPSetNestLevel(jac->kspschur, pc->kspnestlevel));
      PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1));
      if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) {
        PC pcschur;
        PetscCall(KSPGetPC(jac->kspschur, &pcschur));
        PetscCall(PCSetType(pcschur, PCNONE));
        /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */
      } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) {
        if (jac->schurfactorization == PC_FIELDSPLIT_SCHUR_FACT_FULL && jac->kspupper == jac->head->ksp) {
          Mat AinvB;

          PetscCall(MatCreate(PetscObjectComm((PetscObject)jac->schur), &AinvB));
          PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", (PetscObject)AinvB));
          PetscCall(MatDestroy(&AinvB));
        }
        PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
      }
      PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
      /* the Schur solver inherits the second split's options prefix */
      PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix));
      PetscCall(KSPSetOptionsPrefix(jac->kspschur, Dprefix));
      /* propagate DM */
      {
        DM sdm;
        PetscCall(KSPGetDM(jac->head->next->ksp, &sdm));
        if (sdm) {
          PetscCall(KSPSetDM(jac->kspschur, sdm));
          PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE));
        }
      }
      /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
      /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */
      PetscCall(KSPSetFromOptions(jac->kspschur));
    }
    PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY));

    /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */
    PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname));
    PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
    if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
    if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", (PetscObject)LSC_L));
    PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname));
    PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
    if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
    if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", (PetscObject)LSC_L));
  } else if (jac->type == PC_COMPOSITE_GKB) {
    /* Golub-Kahan bidiagonalization variant: build H = A00 + nu*A01*A01' and work vectors */
    IS       ccis;
    PetscInt rstart, rend;

    PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields");

    ilink = jac->head;

    /* When extracting off-diagonal submatrices, we take complements from this range */
    PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));

    PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
    if (jac->offdiag_use_amat) {
      PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
    } else {
      PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
    }
    PetscCall(ISDestroy(&ccis));
    /* Create work vectors for GKB algorithm */
    PetscCall(VecDuplicate(ilink->x, &jac->u));
    PetscCall(VecDuplicate(ilink->x, &jac->Hu));
    PetscCall(VecDuplicate(ilink->x, &jac->w2));
    ilink = ilink->next;
    PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
    if (jac->offdiag_use_amat) {
      PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
    } else {
      PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
    }
    PetscCall(ISDestroy(&ccis));
    /* Create work vectors for GKB algorithm */
    PetscCall(VecDuplicate(ilink->x, &jac->v));
    PetscCall(VecDuplicate(ilink->x, &jac->d));
    PetscCall(VecDuplicate(ilink->x, &jac->w1));
    PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu));
    PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz));

    ilink = jac->head;
    PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H));
    if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
    /* Create gkb_monitor context */
    if (jac->gkbmonitor) {
      PetscInt tablevel;
      PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer));
      PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII));
      PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel));
      PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1));
    }
  } else {
    /* set up the individual splits' PCs */
    i     = 0;
    ilink = jac->head;
    while (ilink) {
      PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i]));
      /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
      if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
      i++;
      ilink = ilink->next;
    }
  }

  /* Set coordinates to the sub PC objects whenever these are set */
  if (jac->coordinates_set) {
    PC pc_coords;
    if (jac->type == PC_COMPOSITE_SCHUR) {
      // Head is first block.
      PetscCall(KSPGetPC(jac->head->ksp, &pc_coords));
      PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords));
      // Second one is Schur block, but its KSP object is in kspschur.
      PetscCall(KSPGetPC(jac->kspschur, &pc_coords));
      PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords));
    } else if (jac->type == PC_COMPOSITE_GKB) {
      PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB Fieldpslit preconditioner\n"));
    } else {
      ilink = jac->head;
      while (ilink) {
        PetscCall(KSPGetPC(ilink->ksp, &pc_coords));
        PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords));
        ilink = ilink->next;
      }
    }
  }

  jac->suboptionsset = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1157 
/* Scatter the split's portion of xx in, solve with the split's KSP, and scatter-ADD
   the result back into yy.  The || chaining short-circuits on the first nonzero
   PetscErrorCode, so the whole macro evaluates to the first failure (or success).
   Note: references `pc` from the caller's scope (used by KSPCheckSolve). */
#define FieldSplitSplitSolveAdd(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \
                    KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE)))
1162 
1163 static PetscErrorCode PCSetUpOnBlocks_FieldSplit_Schur(PC pc)
1164 {
1165   PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
1166   PC_FieldSplitLink ilinkA = jac->head;
1167   KSP               kspA = ilinkA->ksp, kspUpper = jac->kspupper;
1168 
1169   PetscFunctionBegin;
1170   if (jac->schurfactorization == PC_FIELDSPLIT_SCHUR_FACT_FULL && kspUpper != kspA) {
1171     PetscCall(KSPSetUp(kspUpper));
1172     PetscCall(KSPSetUpOnBlocks(kspUpper));
1173   }
1174   PetscCall(KSPSetUp(kspA));
1175   PetscCall(KSPSetUpOnBlocks(kspA));
1176   // TODO, fix AinvB dependency
1177   if (jac->schurpre != PC_FIELDSPLIT_SCHUR_PRE_FULL) {
1178     PetscCall(KSPSetUp(jac->kspschur));
1179     PetscCall(KSPSetUpOnBlocks(jac->kspschur));
1180   }
1181   PetscFunctionReturn(PETSC_SUCCESS);
1182 }
1183 
1184 static PetscErrorCode PCSetUpOnBlocks_FieldSplit(PC pc)
1185 {
1186   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1187   PC_FieldSplitLink ilink = jac->head;
1188 
1189   PetscFunctionBegin;
1190   while (ilink) {
1191     PetscCall(KSPSetUp(ilink->ksp));
1192     PetscCall(KSPSetUpOnBlocks(ilink->ksp));
1193     ilink = ilink->next;
1194   }
1195   PetscFunctionReturn(PETSC_SUCCESS);
1196 }
1197 
1198 static PetscErrorCode PCSetUpOnBlocks_FieldSplit_GKB(PC pc)
1199 {
1200   PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
1201   PC_FieldSplitLink ilinkA = jac->head;
1202   KSP               ksp    = ilinkA->ksp;
1203 
1204   PetscFunctionBegin;
1205   PetscCall(KSPSetUp(ksp));
1206   PetscCall(KSPSetUpOnBlocks(ksp));
1207   PetscFunctionReturn(PETSC_SUCCESS);
1208 }
1209 
/*
  Apply the Schur-complement fieldsplit preconditioner: y = M^{-1} x, where M is
  the DIAG, LOWER, UPPER, or FULL factorization of the 2x2 block system selected
  by jac->schurfactorization. The first split (ilinkA) is the A00 block, the
  second (ilinkD) the Schur-complement block. Sub-vectors are moved between the
  full-space x, y and the per-split work vectors with VecScatters, whose
  begin/end pairs are deliberately interleaved with solves to overlap
  communication and computation — do not reorder them.
*/
static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
  KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;
  Mat               AinvB = NULL; /* Mat composed with jac->schur under key "AinvB" (FULL factorization only) */
  PetscInt          N     = -1;   /* column count of AinvB; stays -1 until AinvB has been assembled */

  PetscFunctionBegin;
  switch (jac->schurfactorization) {
  case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
    /* [A00 0; 0 -S], positive definite, suitable for MINRES */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    /* the D-block gather was started before the A00 solve; complete it only now */
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScale(ilinkD->y, jac->schurscale)); /* accounts for the -S sign convention */
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* [A00 0; A10 S], suitable for left preconditioning */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    /* D rhs: x1 - A10 * A00^{-1} x0, built as ADD_VALUES scatter on top of -C*y0 */
    PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* [A00 A01; 0 S], suitable for right preconditioning */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* A rhs: x0 - A01 * y1, built as ADD_VALUES scatter on top of -B*y1 */
    PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
    PetscCall(VecScale(ilinkA->x, -1.));
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
    if (kspUpper == kspA) {
      /* an "AinvB" Mat may have been composed with jac->schur; if its column count
         is still -1 this is the first application and it must be assembled */
      PetscCall(PetscObjectQuery((PetscObject)jac->schur, "AinvB", (PetscObject *)&AinvB));
      if (AinvB) {
        PetscCall(MatGetSize(AinvB, NULL, &N));
        if (N == -1) { // first time PCApply_FieldSplit_Schur() is called; TODO: Move to PCSetUpOnBlocks?
          Mat                A;
          VecType            vtype;
          PetscMemType       mtype;
          Vec                column, c;
          const PetscScalar *read;
          PetscScalar       *array;
          PetscInt           m, M, N; /* note: shadows the outer N, which keeps its -1 */

          PetscCall(MatGetSize(jac->B, &M, &N));
          PetscCall(MatGetLocalSize(jac->B, &m, NULL));
          PetscCall(MatGetVecType(jac->B, &vtype));
          /* allocate m x (N+1) storage in the same memory space (host/CUDA/HIP) as
             ilinkA->x; the extra last column (starting at array + m*N) is wrapped
             in the Vec c */
          PetscCall(VecGetArrayReadAndMemType(ilinkA->x, &read, &mtype));
          if (PetscMemTypeHost(mtype) || (!PetscDefined(HAVE_CUDA) && !PetscDefined(HAVE_HIP))) {
            PetscCall(PetscMalloc1(m * (N + 1), &array));
            PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * N, &c));
          }
#if PetscDefined(HAVE_CUDA)
          else if (PetscMemTypeCUDA(mtype)) {
            PetscCallCUDA(cudaMalloc((void **)&array, sizeof(PetscScalar) * m * (N + 1)));
            PetscCall(VecCreateMPICUDAWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * N, &c));
          }
#endif
#if PetscDefined(HAVE_HIP)
          else if (PetscMemTypeHIP(mtype)) {
            PetscCallHIP(hipMalloc((void **)&array, sizeof(PetscScalar) * m * (N + 1)));
            PetscCall(VecCreateMPIHIPWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * N, &c));
          }
#endif
          PetscCall(VecRestoreArrayReadAndMemType(ilinkA->x, &read));
          PetscCall(MatCreateDenseFromVecType(PetscObjectComm((PetscObject)jac->schur), vtype, m, PETSC_DECIDE, M, N + 1, -1, array, &A)); // number of columns of the Schur complement plus one
          /* stash the current rhs in the extra last column so the A00 solve for it is
             done together with the explicit-operator computation */
          PetscCall(MatDenseGetColumnVecWrite(A, N, &column));
          PetscCall(VecCopy(ilinkA->x, column));
          PetscCall(MatDenseRestoreColumnVecWrite(A, N, &column));
          PetscCall(MatHeaderReplace(AinvB, &A));
          PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
          PetscCall(KSPSetOperators(jac->kspschur, jac->schur, jac->schur_user));
          PetscCall(VecCopy(c, ilinkA->y)); // retrieve the solution as the last column of the composed Mat
          PetscCall(VecDestroy(&c));
        }
      }
    }
    /* skip the lower solve only on the very first call with AinvB, where ilinkA->y
       was just extracted from the last column above */
    if (!AinvB || N != -1) PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.0));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

    if (kspUpper == kspA) {
      if (!AinvB) {
        /* y0 = A00^{-1} (x0 - A01 y1) */
        PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y));
        PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
        PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
        PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
        PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
        PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      } else PetscCall(MatMultAdd(AinvB, ilinkD->y, ilinkA->y, ilinkA->y)); /* reuse the explicit AinvB product instead of a second A00 solve */
    } else {
      /* distinct upper solver: y0 = A00^{-1} x0 - kspUpper^{-1} (A01 y1) */
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
      PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
      PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z));
      PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z));
      PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
      PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
    }
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1381 
/*
  Apply the transpose of the Schur-complement fieldsplit preconditioner:
  y = M^{-T} x. All inner solves use KSPSolveTranspose() and all off-diagonal
  products use MatMultTranspose(); since the transpose of an upper factor is a
  lower factor, the UPPER and LOWER cases here mirror the LOWER and UPPER cases
  of PCApply_FieldSplit_Schur(). Scatter begin/end pairs are interleaved with
  solves to overlap communication — do not reorder them.
*/
static PetscErrorCode PCApplyTranspose_FieldSplit_Schur(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
  KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;

  PetscFunctionBegin;
  switch (jac->schurfactorization) {
  case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
    /* [A00 0; 0 -S], positive definite, suitable for MINRES */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScale(ilinkD->y, jac->schurscale)); /* accounts for the -S sign convention */
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* transpose of the UPPER factor: A00-block solve first, then the Schur block */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* transpose of the LOWER factor: Schur-block solve first, then the A00 block */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
    PetscCall(VecScale(ilinkA->x, -1.));
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* transpose of the full three-factor product, applied factor by factor */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspUpper, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.0));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

    if (kspLower == kspA) {
      /* shared solver: fold the correction into the rhs before one transposed A00 solve */
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->y));
      PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    } else {
      /* distinct lower solver: solve the two contributions separately and combine */
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
      PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(KSPSolveTranspose(kspLower, ilinkA->x, ilinkA->z));
      PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->z));
      PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
    }
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1500 
/*
  Apply the (non-Schur, non-GKB) fieldsplit preconditioner: y = M^{-1} x, where
  the splits are composed additively (block Jacobi), multiplicatively (block
  Gauss-Seidel; a streamlined path exists for exactly two splits), or
  symmetric-multiplicatively (a forward then a backward sweep). Any other
  composition type is an error here.
*/
static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          cnt, bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    PetscBool matnest;

    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
    if (jac->defaultsplit && !matnest) {
      /* default interlaced splits: use cheap strided gather/scatter instead of VecScatters */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* general splits: accumulate each split's solve into y via its scatter */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) {
    PetscCall(VecSet(y, 0.0));
    /* solve on first block for first block variables */
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));

    /* compute the residual only onto second block variables using first block variables */
    PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x));
    ilink = ilink->next;
    PetscCall(VecScale(ilink->x, -1.0));
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));

    /* solve on second block variables */
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
    if (!jac->w1) {
      /* lazily create the full-space work vectors */
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    /* forward sweep over the splits */
    PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
    cnt = 1;
    while (ilink->next) {
      ilink = ilink->next;
      /* compute the residual only over the part of the vector needed */
      PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x));
      PetscCall(VecScale(ilink->x, -1.0));
      PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
      PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
      PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    }
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* backward sweep, skipping the last split which was just solved */
      cnt -= 2;
      while (ilink->previous) {
        ilink = ilink->previous;
        /* compute the residual only over the part of the vector needed */
        PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x));
        PetscCall(VecScale(ilink->x, -1.0));
        PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      }
    }
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1601 
/*
  Apply the Golub-Kahan bidiagonalization (GKB) fieldsplit preconditioner
  (the inline comments cite [Ar13]). Each iteration performs one solve with the
  (0,0)-block solver `ksp` on the (optionally nu-regularized) operator H, builds
  the bidiagonalization vectors u, v, and updates the two solution pieces
  ilinkA->y, ilinkD->y. Iteration stops when the lower-bound error estimate
  (computed from the last jac->gkbdelay z-values) drops below jac->gkbtol or
  after jac->gkbmaxit iterations.
*/
static PetscErrorCode PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
  KSP               ksp = ilinkA->ksp;
  Vec               u, v, Hu, d, work1, work2;
  PetscScalar       alpha, z, nrmz2, *vecz; /* vecz: rolling window of the last gkbdelay z-values */
  PetscReal         lowbnd, nu, beta;
  PetscInt          j, iterGKB;

  PetscFunctionBegin;
  PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));

  /* work vectors were allocated at setup and live on jac */
  u     = jac->u;
  v     = jac->v;
  Hu    = jac->Hu;
  d     = jac->d;
  work1 = jac->w1;
  work2 = jac->w2;
  vecz  = jac->vecz;

  /* Change RHS to comply with matrix regularization H = A + nu*B*B' */
  /* Add q = q + nu*B*b */
  if (jac->gkbnu) {
    nu = jac->gkbnu;
    PetscCall(VecScale(ilinkD->x, jac->gkbnu));
    PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */
  } else {
    /* Situation when no augmented Lagrangian is used. Then we set inner  */
    /* matrix N = I in [Ar13], and thus nu = 1.                           */
    nu = 1;
  }

  /* Transform rhs from [q,tilde{b}] to [0,b] */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y));
  PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1));
  PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x        */

  /* First step of algorithm */
  PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c)*/
  KSPCheckDot(ksp, beta);
  beta = PetscSqrtReal(nu) * beta;
  PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c      */
  PetscCall(MatMult(jac->B, v, work2));          /* u = H^{-1}*B*v      */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(KSPSolve(ksp, work2, u));
  PetscCall(KSPCheckSolve(ksp, pc, u));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u      */
  PetscCall(VecDot(Hu, u, &alpha));
  KSPCheckDot(ksp, alpha);
  PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
  alpha = PetscSqrtReal(PetscAbsScalar(alpha));
  PetscCall(VecScale(u, 1.0 / alpha));
  PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* v = nu/beta *c      */

  z       = beta / alpha;
  vecz[1] = z;

  /* Computation of first iterate x(1) and p(1) */
  PetscCall(VecAXPY(ilinkA->y, z, u));
  PetscCall(VecCopy(d, ilinkD->y));
  PetscCall(VecScale(ilinkD->y, -z));

  iterGKB = 1;
  lowbnd  = 2 * jac->gkbtol; /* force at least one pass through the loop below */
  if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));

  while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) {
    iterGKB += 1;
    PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */
    PetscCall(VecAXPBY(v, nu, -alpha, work1));
    PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = sqrt(nu)*v'*v      */
    beta = beta / PetscSqrtReal(nu);
    PetscCall(VecScale(v, 1.0 / beta));
    PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */
    PetscCall(MatMult(jac->H, u, Hu));
    PetscCall(VecAXPY(work2, -beta, Hu));
    PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(KSPSolve(ksp, work2, u));
    PetscCall(KSPCheckSolve(ksp, pc, u));
    PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u            */
    PetscCall(VecDot(Hu, u, &alpha));
    KSPCheckDot(ksp, alpha);
    PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
    alpha = PetscSqrtReal(PetscAbsScalar(alpha));
    PetscCall(VecScale(u, 1.0 / alpha));

    z       = -beta / alpha * z; /* z <- beta/alpha*z     */
    vecz[0] = z;

    /* Computation of new iterate x(i+1) and p(i+1) */
    PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */
    PetscCall(VecAXPY(ilinkA->y, z, u));                   /* r = r + z*u          */
    PetscCall(VecAXPY(ilinkD->y, -z, d));                  /* p = p - z*d          */
    PetscCall(MatMult(jac->H, ilinkA->y, Hu));             /* ||u||_H = u'*H*u     */
    PetscCall(VecDot(Hu, ilinkA->y, &nrmz2));

    /* Compute Lower Bound estimate */
    if (iterGKB > jac->gkbdelay) {
      lowbnd = 0.0;
      for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]);
      lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2));
    }

    /* shift the rolling window of z-values by one */
    for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2];
    if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));
  }

  /* scatter the two solution pieces back into the full-space vector y */
  PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1724 
/* Gather xx into the split, run the split's transposed solve, and accumulate the
   result back into yy (transpose counterpart of FieldSplitSplitSolveAdd). The
   calls are chained with || so the first one returning a nonzero error code
   short-circuits the rest, and that code is the macro's value. NOTE(review):
   the expansion references a variable `pc`, which must be in scope at the call
   site. */
#define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \
                    KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE)))
1729 
/* Apply the transpose of the fieldsplit preconditioner: y = M^{-T} x.
   Mirrors PCApply_FieldSplit but uses KSPSolveTranspose()/MatMultTranspose(), and for
   the (non-symmetric) multiplicative variant visits the splits in REVERSE list order,
   since transposing a product of factors reverses their order. */
static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    PetscBool matnest;

    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
    if (jac->defaultsplit && !matnest) {
      /* Default (strided) splits of a non-nest matrix: gather every field of the
         blocked vector at once, solve each split, then scatter all back in one shot */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* General splits: each split scatters its piece in/out of y independently */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else {
    /* Multiplicative variants need two work vectors for the running residual x - A^T y */
    if (!jac->w1) {
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* The symmetric sweep (head -> tail -> head) is its own transpose,
         so the traversal order matches the forward apply */
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->next) {
        ilink = ilink->next;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); /* w2 = x - A^T y (current residual) */
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    } else {
      /* Plain multiplicative: transpose reverses the factors, so sweep tail -> head */
      while (ilink->next) { /* get to last entry in linked list */
        ilink = ilink->next;
      }
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1797 
/* Release everything built during PCSetUp_FieldSplit() so the PC can be set up again
   from scratch; also invoked by PCDestroy_FieldSplit() */
static PetscErrorCode PCReset_FieldSplit(PC pc)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;

  PetscFunctionBegin;
  /* Walk the linked list of splits, releasing each link's solver, work vectors,
     scatter, index sets, names, and finally the link itself */
  while (ilink) {
    PetscCall(KSPDestroy(&ilink->ksp));
    PetscCall(VecDestroy(&ilink->x));
    PetscCall(VecDestroy(&ilink->y));
    PetscCall(VecDestroy(&ilink->z));
    PetscCall(VecScatterDestroy(&ilink->sctx));
    PetscCall(ISDestroy(&ilink->is));
    PetscCall(ISDestroy(&ilink->is_col));
    PetscCall(PetscFree(ilink->splitname));
    PetscCall(PetscFree(ilink->fields));
    PetscCall(PetscFree(ilink->fields_col));
    next = ilink->next; /* save before freeing the link */
    PetscCall(PetscFree(ilink));
    ilink = next;
  }
  jac->head = NULL;
  PetscCall(PetscFree2(jac->x, jac->y));
  /* jac->mat may alias jac->pmat; only destroy the array when it is distinct,
     otherwise just drop the alias and let the jac->pmat destroy below do the work */
  if (jac->mat && jac->mat != jac->pmat) {
    PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat));
  } else if (jac->mat) {
    jac->mat = NULL;
  }
  if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat));
  if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield));
  jac->nsplits = 0;
  PetscCall(VecDestroy(&jac->w1));
  PetscCall(VecDestroy(&jac->w2));
  /* detach the composed "AinvB" object before dropping our reference to the Schur complement */
  if (jac->schur) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", NULL));
  PetscCall(MatDestroy(&jac->schur));
  PetscCall(MatDestroy(&jac->schurp));
  PetscCall(MatDestroy(&jac->schur_user));
  PetscCall(KSPDestroy(&jac->kspschur));
  PetscCall(KSPDestroy(&jac->kspupper));
  /* GKB work objects */
  PetscCall(MatDestroy(&jac->B));
  PetscCall(MatDestroy(&jac->C));
  PetscCall(MatDestroy(&jac->H));
  PetscCall(VecDestroy(&jac->u));
  PetscCall(VecDestroy(&jac->v));
  PetscCall(VecDestroy(&jac->Hu));
  PetscCall(VecDestroy(&jac->d));
  PetscCall(PetscFree(jac->vecz));
  PetscCall(PetscViewerDestroy(&jac->gkbviewer));
  jac->isrestrict = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1849 
1850 static PetscErrorCode PCDestroy_FieldSplit(PC pc)
1851 {
1852   PetscFunctionBegin;
1853   PetscCall(PCReset_FieldSplit(pc));
1854   PetscCall(PetscFree(pc->data));
1855   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
1856   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL));
1857   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL));
1858   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL));
1859   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL));
1860   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL));
1861   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL));
1862   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1863 
1864   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
1865   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
1866   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
1867   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
1868   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1869   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
1870   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
1871   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
1872   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
1873   PetscFunctionReturn(PETSC_SUCCESS);
1874 }
1875 
/* Process the options database for PCFIELDSPLIT: block size, composition type,
   per-split field definitions, Schur/GKB parameters, and (once the solver hierarchy
   exists) re-propagation of options to the sub-KSPs */
static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PetscInt        bs;
  PetscBool       flg;
  PC_FieldSplit  *jac = (PC_FieldSplit *)pc->data;
  PCCompositeType ctype;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options");
  PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL));
  PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg));
  if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs));
  /* default the Amat-vs-Pmat choices to the PC-wide useAmat flag before reading the overrides */
  jac->diag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL));
  jac->offdiag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL));
  PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL));
  PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */
  PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg));
  if (flg) PetscCall(PCFieldSplitSetType(pc, ctype));
  /* Only setup fields once */
  if ((jac->bs > 0) && (jac->nsplits == 0)) {
    /* only allow user to set fields from command line.
       otherwise user can set them in PCFieldSplitSetDefaults() */
    PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
    if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
  }
  if (jac->type == PC_COMPOSITE_SCHUR) {
    /* deprecated spelling is still honored (read first), then overridden by the current option name */
    PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg));
    if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n"));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL));
    PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL));
  } else if (jac->type == PC_COMPOSITE_GKB) {
    PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitSetGKBTol", jac->gkbtol, &jac->gkbtol, NULL));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitSetGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL));
    PetscCall(PetscOptionsBoundedReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitSetGKBNu", jac->gkbnu, &jac->gkbnu, NULL, 0.0));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", "Maximum allowed number of iterations", "PCFieldSplitSetGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL));
    PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL));
  }
  /*
    In the initial call to this routine the sub-solver data structures do not exist so we cannot call KSPSetFromOptions() on them yet.
    But after the initial setup of ALL the layers of sub-solvers is completed we do want to call KSPSetFromOptions() on the sub-solvers every time it
    is called on the outer solver in case changes were made in the options database

    But even after PCSetUp_FieldSplit() is called all the options inside the inner levels of sub-solvers may still not have been set thus we only call the KSPSetFromOptions()
    if we know that the entire stack of sub-solvers below this have been complete instantiated, we check this by seeing if any solver iterations are complete.
    Without this extra check test p2p1fetidp_olof_full and others fail with incorrect matrix types.

    There could be a negative side effect of calling the KSPSetFromOptions() below.

    If one captured the PetscObjectState of the options database one could skip these calls if the database has not changed from the previous call
  */
  if (jac->issetup) {
    PC_FieldSplitLink ilink = jac->head;
    if (jac->type == PC_COMPOSITE_SCHUR) {
      /* totalits > 0 is the "fully instantiated" check described in the comment above */
      if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper));
      if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur));
    }
    while (ilink) {
      if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp));
      ilink = ilink->next;
    }
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
1943 
1944 static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
1945 {
1946   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
1947   PC_FieldSplitLink ilink, next = jac->head;
1948   char              prefix[128];
1949   PetscInt          i;
1950 
1951   PetscFunctionBegin;
1952   if (jac->splitdefined) {
1953     PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
1954     PetscFunctionReturn(PETSC_SUCCESS);
1955   }
1956   for (i = 0; i < n; i++) { PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]); }
1957   PetscCall(PetscNew(&ilink));
1958   if (splitname) {
1959     PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
1960   } else {
1961     PetscCall(PetscMalloc1(3, &ilink->splitname));
1962     PetscCall(PetscSNPrintf(ilink->splitname, 2, "%" PetscInt_FMT, jac->nsplits));
1963   }
1964   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split great than 4 gets logged in the 4th split */
1965   PetscCall(PetscMalloc1(n, &ilink->fields));
1966   PetscCall(PetscArraycpy(ilink->fields, fields, n));
1967   PetscCall(PetscMalloc1(n, &ilink->fields_col));
1968   PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n));
1969 
1970   ilink->nfields = n;
1971   ilink->next    = NULL;
1972   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
1973   PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
1974   PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
1975   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
1976   PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
1977 
1978   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
1979   PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
1980 
1981   if (!next) {
1982     jac->head       = ilink;
1983     ilink->previous = NULL;
1984   } else {
1985     while (next->next) next = next->next;
1986     next->next      = ilink;
1987     ilink->previous = next;
1988   }
1989   jac->nsplits++;
1990   PetscFunctionReturn(PETSC_SUCCESS);
1991 }
1992 
1993 static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1994 {
1995   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1996 
1997   PetscFunctionBegin;
1998   *subksp = NULL;
1999   if (n) *n = 0;
2000   if (jac->type == PC_COMPOSITE_SCHUR) {
2001     PetscInt nn;
2002 
2003     PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()");
2004     PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits);
2005     nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 1 : 0);
2006     PetscCall(PetscMalloc1(nn, subksp));
2007     (*subksp)[0] = jac->head->ksp;
2008     (*subksp)[1] = jac->kspschur;
2009     if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper;
2010     if (n) *n = nn;
2011   }
2012   PetscFunctionReturn(PETSC_SUCCESS);
2013 }
2014 
/* Schur-mode variant of PCFieldSplitGetSubKSP(): slot 0 is the KSP used to apply
   A00^{-1} inside the Schur complement matrix, slot 1 the Schur complement solver.
   The caller is responsible for freeing the returned array. */
static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()");
  PetscCall(PetscMalloc1(jac->nsplits, subksp));
  /* fills (*subksp)[0] with the inner A00 solver embedded in the Schur complement */
  PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp));

  (*subksp)[1] = jac->kspschur;
  if (n) *n = jac->nsplits;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2028 
2029 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
2030 {
2031   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2032   PetscInt          cnt   = 0;
2033   PC_FieldSplitLink ilink = jac->head;
2034 
2035   PetscFunctionBegin;
2036   PetscCall(PetscMalloc1(jac->nsplits, subksp));
2037   while (ilink) {
2038     (*subksp)[cnt++] = ilink->ksp;
2039     ilink            = ilink->next;
2040   }
2041   PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits);
2042   if (n) *n = jac->nsplits;
2043   PetscFunctionReturn(PETSC_SUCCESS);
2044 }
2045 
2046 /*@
2047   PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`.
2048 
2049   Input Parameters:
2050 + pc  - the preconditioner context
2051 - isy - the index set that defines the indices to which the fieldsplit is to be restricted
2052 
2053   Level: advanced
2054 
2055   Developer Notes:
2056   It seems the resulting `IS`s will not cover the entire space, so
2057   how can they define a convergent preconditioner? Needs explaining.
2058 
2059 .seealso: [](sec_block_matrices), `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
2060 @*/
2061 PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy)
2062 {
2063   PetscFunctionBegin;
2064   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2065   PetscValidHeaderSpecific(isy, IS_CLASSID, 2);
2066   PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy));
2067   PetscFunctionReturn(PETSC_SUCCESS);
2068 }
2069 
/* Implementation of PCFieldSplitRestrictIS(): replace every split's IS (and is_col)
   with its embedding inside isy, renumbered into isy's global numbering, and recurse
   into any split whose sub-PC is itself a PCFIELDSPLIT */
static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;
  PetscInt          localsize, size, sizez, i;
  const PetscInt   *ind, *indz;
  PetscInt         *indc, *indcz;
  PetscBool         flg;

  PetscFunctionBegin;
  /* exclusive prefix sum: `size` becomes this rank's starting offset in isy's global numbering */
  PetscCall(ISGetLocalSize(isy, &localsize));
  PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy)));
  size -= localsize;
  while (ilink) {
    IS isrl, isr;
    PC subpc;
    /* positions of this split's indices within isy (local numbering) */
    PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl));
    PetscCall(ISGetLocalSize(isrl, &localsize));
    PetscCall(PetscMalloc1(localsize, &indc));
    PetscCall(ISGetIndices(isrl, &ind));
    PetscCall(PetscArraycpy(indc, ind, localsize));
    PetscCall(ISRestoreIndices(isrl, &ind));
    PetscCall(ISDestroy(&isrl));
    /* shift local positions by the rank offset to obtain global indices */
    for (i = 0; i < localsize; i++) *(indc + i) += size;
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr));
    /* take a reference before each assignment so ilink->is and ilink->is_col each hold their own */
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is));
    ilink->is = isr;
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is_col));
    ilink->is_col = isr;
    PetscCall(ISDestroy(&isr)); /* drop the creation reference */
    PetscCall(KSPGetPC(ilink->ksp, &subpc));
    PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg));
    if (flg) {
      /* nested fieldsplit: build the reverse embedding (isy inside this split) and restrict recursively */
      IS       iszl, isz;
      MPI_Comm comm;
      PetscCall(ISGetLocalSize(ilink->is, &localsize));
      comm = PetscObjectComm((PetscObject)ilink->is);
      PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl));
      PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm));
      sizez -= localsize;
      PetscCall(ISGetLocalSize(iszl, &localsize));
      PetscCall(PetscMalloc1(localsize, &indcz));
      PetscCall(ISGetIndices(iszl, &indz));
      PetscCall(PetscArraycpy(indcz, indz, localsize));
      PetscCall(ISRestoreIndices(iszl, &indz));
      PetscCall(ISDestroy(&iszl));
      for (i = 0; i < localsize; i++) *(indcz + i) += sizez;
      PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz));
      PetscCall(PCFieldSplitRestrictIS(subpc, isz));
      PetscCall(ISDestroy(&isz));
    }
    next  = ilink->next;
    ilink = next;
  }
  jac->isrestrict = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2129 
/* Implementation of PCFieldSplitSetIS(): append a new split, defined by an arbitrary
   index set, to the tail of the linked list of splits.
   splitname may be NULL, in which case the split number is used as the name. */
static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink, next = jac->head;
  char              prefix[128];

  PetscFunctionBegin;
  if (jac->splitdefined) {
    PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(PetscNew(&ilink));
  if (splitname) {
    PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
  } else {
    /* name the split by its number */
    PetscCall(PetscMalloc1(8, &ilink->splitname));
    PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits));
  }
  ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split greater than 4 gets logged in the 4th split */
  /* reference before destroy so the code is safe even if `is` already equals ilink->is */
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is));
  ilink->is = is;
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is_col));
  ilink->is_col = is;
  ilink->next   = NULL;
  /* each split gets its own KSP, defaulting to preonly (apply the inner PC once) */
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
  PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
  PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
  PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));

  /* options for this split live under -<pcprefix>fieldsplit_<splitname>_ */
  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
  PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));

  /* append to the tail of the doubly-linked list of splits */
  if (!next) {
    jac->head       = ilink;
    ilink->previous = NULL;
  } else {
    while (next->next) next = next->next;
    next->next      = ilink;
    ilink->previous = next;
  }
  jac->nsplits++;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2176 
2177 /*@
2178   PCFieldSplitSetFields - Sets the fields that define one particular split in `PCFIELDSPLIT`
2179 
2180   Logically Collective
2181 
2182   Input Parameters:
2183 + pc         - the preconditioner context
2184 . splitname  - name of this split, if `NULL` the number of the split is used
2185 . n          - the number of fields in this split
2186 . fields     - the fields in this split
2187 - fields_col - generally the same as `fields`, if it does not match `fields` then the submatrix that is solved for this set of fields comes from an off-diagonal block
2188                of the matrix and `fields_col` provides the column indices for that block
2189 
2190   Options Database Key:
2191 . -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split
2192 
2193   Level: intermediate
2194 
2195   Notes:
2196   Use `PCFieldSplitSetIS()` to set a  general set of indices as a split.
2197 
2198   If the matrix used to construct the preconditioner is `MATNEST` then field i refers to the `is_row[i]` `IS` passed to `MatCreateNest()`.
2199 
2200   If the matrix used to construct the preconditioner is not `MATNEST` then
2201   `PCFieldSplitSetFields()` is for defining fields as strided blocks (based on the block size provided to the matrix with `MatSetBlocksize()` or
2202   to the `PC` with `PCFieldSplitSetBlockSize()`). For example, if the block
2203   size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean
2204   0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x1x3x5x7.. x12x45x78x....
2205   where the numbered entries indicate what is in the split.
2206 
2207   This function is called once per split (it creates a new split each time).  Solve options
2208   for this split will be available under the prefix `-fieldsplit_SPLITNAME_`.
2209 
2210   `PCFieldSplitSetIS()` does not support having a `fields_col` different from `fields`
2211 
2212   Developer Notes:
2213   This routine does not actually create the `IS` representing the split, that is delayed
2214   until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be
2215   available when this routine is called.
2216 
2217 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()`,
2218           `MatSetBlocksize()`, `MatCreateNest()`
2219 @*/
2220 PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt fields[], const PetscInt fields_col[])
2221 {
2222   PetscFunctionBegin;
2223   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2224   PetscAssertPointer(splitname, 2);
2225   PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname);
2226   PetscAssertPointer(fields, 4);
2227   PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col));
2228   PetscFunctionReturn(PETSC_SUCCESS);
2229 }
2230 
2231 /*@
2232   PCFieldSplitSetDiagUseAmat - set flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2233   the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2234 
2235   Logically Collective
2236 
2237   Input Parameters:
2238 + pc  - the preconditioner object
2239 - flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2240 
2241   Options Database Key:
2242 . -pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks
2243 
2244   Level: intermediate
2245 
2246 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT`
2247 @*/
2248 PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg)
2249 {
2250   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2251   PetscBool      isfs;
2252 
2253   PetscFunctionBegin;
2254   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2255   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2256   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2257   jac->diag_use_amat = flg;
2258   PetscFunctionReturn(PETSC_SUCCESS);
2259 }
2260 
2261 /*@
2262   PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2263   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2264 
2265   Logically Collective
2266 
2267   Input Parameter:
2268 . pc - the preconditioner object
2269 
2270   Output Parameter:
2271 . flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2272 
2273   Level: intermediate
2274 
2275 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT`
2276 @*/
2277 PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg)
2278 {
2279   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2280   PetscBool      isfs;
2281 
2282   PetscFunctionBegin;
2283   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2284   PetscAssertPointer(flg, 2);
2285   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2286   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2287   *flg = jac->diag_use_amat;
2288   PetscFunctionReturn(PETSC_SUCCESS);
2289 }
2290 
2291 /*@
2292   PCFieldSplitSetOffDiagUseAmat - set flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2293   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2294 
2295   Logically Collective
2296 
2297   Input Parameters:
2298 + pc  - the preconditioner object
2299 - flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2300 
2301   Options Database Key:
2302 . -pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks
2303 
2304   Level: intermediate
2305 
2306 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT`
2307 @*/
2308 PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg)
2309 {
2310   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2311   PetscBool      isfs;
2312 
2313   PetscFunctionBegin;
2314   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2315   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2316   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2317   jac->offdiag_use_amat = flg;
2318   PetscFunctionReturn(PETSC_SUCCESS);
2319 }
2320 
2321 /*@
2322   PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2323   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2324 
2325   Logically Collective
2326 
2327   Input Parameter:
2328 . pc - the preconditioner object
2329 
2330   Output Parameter:
2331 . flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2332 
2333   Level: intermediate
2334 
2335 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT`
2336 @*/
2337 PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg)
2338 {
2339   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2340   PetscBool      isfs;
2341 
2342   PetscFunctionBegin;
2343   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2344   PetscAssertPointer(flg, 2);
2345   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2346   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2347   *flg = jac->offdiag_use_amat;
2348   PetscFunctionReturn(PETSC_SUCCESS);
2349 }
2350 
2351 /*@
2352   PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT`
2353 
2354   Logically Collective
2355 
2356   Input Parameters:
2357 + pc        - the preconditioner context
2358 . splitname - name of this split, if `NULL` the number of the split is used
2359 - is        - the index set that defines the elements in this split
2360 
2361   Level: intermediate
2362 
2363   Notes:
2364   Use `PCFieldSplitSetFields()`, for splits defined by strided `IS` based on the matrix block size or the `is_rows[]` passed into `MATNEST`
2365 
2366   This function is called once per split (it creates a new split each time).  Solve options
2367   for this split will be available under the prefix -fieldsplit_SPLITNAME_.
2368 
2369 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetFields()`
2370 @*/
PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* splitname may be NULL, in which case the split number is used as its name */
  if (splitname) PetscAssertPointer(splitname, 2);
  PetscValidHeaderSpecific(is, IS_CLASSID, 3);
  /* Forward to the type-specific implementation; a no-op if the method is not composed on pc */
  PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2380 
2381 /*@
2382   PCFieldSplitGetIS - Retrieves the elements for a split as an `IS`
2383 
2384   Logically Collective
2385 
2386   Input Parameters:
2387 + pc        - the preconditioner context
2388 - splitname - name of this split
2389 
2390   Output Parameter:
2391 . is - the index set that defines the elements in this split, or `NULL` if the split is not found
2392 
2393   Level: intermediate
2394 
2395 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`, `PCFieldSplitGetISByIndex()`
2396 @*/
2397 PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is)
2398 {
2399   PetscFunctionBegin;
2400   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2401   PetscAssertPointer(splitname, 2);
2402   PetscAssertPointer(is, 3);
2403   {
2404     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2405     PC_FieldSplitLink ilink = jac->head;
2406     PetscBool         found;
2407 
2408     *is = NULL;
2409     while (ilink) {
2410       PetscCall(PetscStrcmp(ilink->splitname, splitname, &found));
2411       if (found) {
2412         *is = ilink->is;
2413         break;
2414       }
2415       ilink = ilink->next;
2416     }
2417   }
2418   PetscFunctionReturn(PETSC_SUCCESS);
2419 }
2420 
2421 /*@
2422   PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS`
2423 
2424   Logically Collective
2425 
2426   Input Parameters:
2427 + pc    - the preconditioner context
2428 - index - index of this split
2429 
2430   Output Parameter:
2431 . is - the index set that defines the elements in this split
2432 
2433   Level: intermediate
2434 
.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`
2437 @*/
2438 PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is)
2439 {
2440   PetscFunctionBegin;
2441   PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index);
2442   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2443   PetscAssertPointer(is, 3);
2444   {
2445     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2446     PC_FieldSplitLink ilink = jac->head;
2447     PetscInt          i     = 0;
2448     PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits);
2449 
2450     while (i < index) {
2451       ilink = ilink->next;
2452       ++i;
2453     }
2454     PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is));
2455   }
2456   PetscFunctionReturn(PETSC_SUCCESS);
2457 }
2458 
2459 /*@
2460   PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the
2461   fieldsplit preconditioner when calling `PCFieldSplitSetFields()`. If not set the matrix block size is used.
2462 
2463   Logically Collective
2464 
2465   Input Parameters:
2466 + pc - the preconditioner context
2467 - bs - the block size
2468 
2469   Level: intermediate
2470 
2471   Note:
2472   If the matrix is a `MATNEST` then the `is_rows[]` passed to `MatCreateNest()` determines the fields.
2473 
2474 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
2475 @*/
PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, bs, 2); /* bs must be identical on all MPI processes */
  /* Forward to the type-specific implementation; a no-op if the method is not composed on pc */
  PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2484 
2485 /*@C
2486   PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits
2487 
2488   Collective
2489 
2490   Input Parameter:
2491 . pc - the preconditioner context
2492 
2493   Output Parameters:
2494 + n      - the number of splits
2495 - subksp - the array of `KSP` contexts
2496 
2497   Level: advanced
2498 
2499   Notes:
2500   After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2501   (not the `KSP`, just the array that contains them).
2502 
2503   You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`.
2504 
2505   If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the
2506   Schur complement and the `KSP` object used to iterate over the Schur complement.
2507   To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`.
2508 
2509   If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the
2510   inner linear system defined by the matrix H in each loop.
2511 
2512   Fortran Notes:
2513   You must pass in a `KSP` array that is large enough to contain all the `KSP`s.
2514   You can call `PCFieldSplitGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
2515   `KSP` array must be.
2516 
2517   Developer Notes:
2518   There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2519 
2520   The Fortran interface could be modernized to return directly the array of values.
2521 
2522 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()`
2523 @*/
PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2); /* n is optional; callers may pass NULL */
  /* PetscUseMethod errors if the method is not composed, i.e. if pc is not a PCFIELDSPLIT */
  PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2532 
2533 /*@C
2534   PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT`
2535 
2536   Collective
2537 
2538   Input Parameter:
2539 . pc - the preconditioner context
2540 
2541   Output Parameters:
2542 + n      - the number of splits
2543 - subksp - the array of `KSP` contexts
2544 
2545   Level: advanced
2546 
2547   Notes:
2548   After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2549   (not the `KSP` just the array that contains them).
2550 
2551   You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`.
2552 
  If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns (in order)
2554 +  1  - the `KSP` used for the (1,1) block
2555 .  2  - the `KSP` used for the Schur complement (not the one used for the interior Schur solver)
2556 -  3  - the `KSP` used for the (1,1) block in the upper triangular factor (if different from that of the (1,1) block).
2557 
2558   It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`.
2559 
2560   Fortran Notes:
2561   You must pass in a `KSP` array that is large enough to contain all the local `KSP`s.
2562   You can call `PCFieldSplitSchurGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
2563   `KSP` array must be.
2564 
2565   Developer Notes:
2566   There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2567 
2568   Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged?
2569 
2570   The Fortran interface should be modernized to return directly the array of values.
2571 
2572 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()`
2573 @*/
PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2); /* n is optional; callers may pass NULL */
  /* PetscUseMethod errors if the method is not composed, i.e. if pc is not a PCFIELDSPLIT */
  PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2582 
2583 /*@
2584   PCFieldSplitSetSchurPre -  Indicates from what operator the preconditioner is constructed for the Schur complement.
2585   The default is the A11 matrix.
2586 
2587   Collective
2588 
2589   Input Parameters:
2590 + pc    - the preconditioner context
2591 . ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default),
2592               `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`,
2593               `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL`
2594 - pre   - matrix to use for preconditioning, or `NULL`
2595 
2596   Options Database Keys:
2597 + -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`. See notes for meaning of various arguments
2598 - -fieldsplit_1_pc_type <pctype>                               - the preconditioner algorithm that is used to construct the preconditioner from the operator
2599 
2600   Level: intermediate
2601 
2602   Notes:
2603   If ptype is
2604 +     a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner
2605   matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix
2606 .     self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix:
2607   The only preconditioners that currently work with this symbolic representation matrix object are `PCLSC` and `PCHPDDM`
2608 .     user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument
2609   to this function).
2610 .     selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation $ Sp = A11 - A10 inv(diag(A00)) A01 $
2611   This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be
2612   lumped before extracting the diagonal using the additional option `-fieldsplit_1_mat_schur_complement_ainv_type lump`
2613 -     full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation
2614   computed internally by `PCFIELDSPLIT` (this is expensive)
2615   useful mostly as a test that the Schur complement approach can work for your problem
2616 
2617   When solving a saddle point problem, where the A11 block is identically zero, using `a11` as the ptype only makes sense
2618   with the additional option `-fieldsplit_1_pc_type none`. Usually for saddle point problems one would use a `ptype` of `self` and
2619   `-fieldsplit_1_pc_type lsc` which uses the least squares commutator to compute a preconditioner for the Schur complement.
2620 
2621   Developer Note:
2622   The name of this function and the option `-pc_fieldsplit_schur_precondition` are inconsistent; precondition should be used everywhere.
2623 
2624 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`,
2625           `MatSchurComplementSetAinvType()`, `PCLSC`, `PCFieldSplitSetSchurFactType()`
2626 @*/
2627 PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2628 {
2629   PetscFunctionBegin;
2630   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2631   PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre));
2632   PetscFunctionReturn(PETSC_SUCCESS);
2633 }
2634 
/* Deprecated alias kept for backward compatibility; use PCFieldSplitSetSchurPre() instead */
PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  return PCFieldSplitSetSchurPre(pc, ptype, pre);
} /* Deprecated name */
2639 
2640 /*@
2641   PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be
2642   preconditioned.  See `PCFieldSplitSetSchurPre()` for details.
2643 
2644   Logically Collective
2645 
2646   Input Parameter:
2647 . pc - the preconditioner context
2648 
2649   Output Parameters:
2650 + ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`
2651 - pre   - matrix to use for preconditioning (with `PC_FIELDSPLIT_SCHUR_PRE_USER`), or `NULL`
2652 
2653   Level: intermediate
2654 
2655 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC`
2656 @*/
PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscUseMethod errors if the method is not composed, i.e. if pc is not a Schur PCFIELDSPLIT */
  PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2664 
2665 /*@
2666   PCFieldSplitSchurGetS -  extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately
2667 
2668   Not Collective
2669 
2670   Input Parameter:
2671 . pc - the preconditioner context
2672 
2673   Output Parameter:
2674 . S - the Schur complement matrix
2675 
2676   Level: advanced
2677 
2678   Note:
2679   This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`.
2680 
2681 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`,
2682           `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()`
2683 @*/
2684 PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S)
2685 {
2686   const char    *t;
2687   PetscBool      isfs;
2688   PC_FieldSplit *jac;
2689 
2690   PetscFunctionBegin;
2691   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2692   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2693   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2694   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2695   jac = (PC_FieldSplit *)pc->data;
2696   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2697   if (S) *S = jac->schur;
2698   PetscFunctionReturn(PETSC_SUCCESS);
2699 }
2700 
2701 /*@
2702   PCFieldSplitSchurRestoreS -  returns the `MATSCHURCOMPLEMENT` matrix used by this `PC`
2703 
2704   Not Collective
2705 
2706   Input Parameters:
2707 + pc - the preconditioner context
2708 - S  - the Schur complement matrix
2709 
2710   Level: advanced
2711 
2712 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MatSchurComplement`, `PCFieldSplitSchurGetS()`
2713 @*/
2714 PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S)
2715 {
2716   const char    *t;
2717   PetscBool      isfs;
2718   PC_FieldSplit *jac;
2719 
2720   PetscFunctionBegin;
2721   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2722   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2723   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2724   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2725   jac = (PC_FieldSplit *)pc->data;
2726   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2727   PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten");
2728   PetscFunctionReturn(PETSC_SUCCESS);
2729 }
2730 
2731 static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2732 {
2733   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2734 
2735   PetscFunctionBegin;
2736   jac->schurpre = ptype;
2737   if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) {
2738     PetscCall(MatDestroy(&jac->schur_user));
2739     jac->schur_user = pre;
2740     PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
2741   }
2742   PetscFunctionReturn(PETSC_SUCCESS);
2743 }
2744 
static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* Both outputs are optional; fill only those the caller requested */
  if (ptype) *ptype = jac->schurpre;
  if (pre) *pre = jac->schur_user; /* borrowed reference; may be NULL if no user matrix was set */
  PetscFunctionReturn(PETSC_SUCCESS);
}
2754 
2755 /*@
2756   PCFieldSplitSetSchurFactType -  sets which blocks of the approximate block factorization to retain in the preconditioner {cite}`murphy2000note` and {cite}`ipsen2001note`
2757 
2758   Collective
2759 
2760   Input Parameters:
2761 + pc    - the preconditioner context
2762 - ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default
2763 
2764   Options Database Key:
2765 . -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is `full`
2766 
2767   Level: intermediate
2768 
2769   Notes:
2770   The FULL factorization is
2771 
2772   ```{math}
2773   \left(\begin{array}{cc} A & B \\
2774   C & E \\
2775   \end{array}\right) =
2776   \left(\begin{array}{cc} 1 & 0 \\
2777   C*A^{-1} & I \\
2778   \end{array}\right)
2779   \left(\begin{array}{cc} A & 0 \\
2780   0 & S \\
2781   \end{array}\right)
2782   \left(\begin{array}{cc} I & A^{-1}B \\
2783   0 & I \\
2784   \end{array}\right) = L D U.
2785   ```
2786 
2787   where $ S = E - C*A^{-1}*B $. In practice, the full factorization is applied via block triangular solves with the grouping $L*(D*U)$. UPPER uses $D*U$, LOWER uses $L*D$,
2788   and DIAG is the diagonal part with the sign of $ S $ flipped (because this makes the preconditioner positive definite for many formulations,
  thus allowing the use of `KSPMINRES`). Sign flipping of $ S $ can be turned off with `PCFieldSplitSetSchurScale()`.
2790 
2791   If $A$ and $S$ are solved exactly
2792 +  1 - FULL factorization is a direct solver.
2793 .  2 - The preconditioned operator with LOWER or UPPER has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations.
2794 -  3 -  With DIAG, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations.
2795 
2796   If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner
2797   application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice.
2798 
2799   For symmetric problems in which $A$ is positive definite and $S$ is negative definite, DIAG can be used with `KSPMINRES`.
2800 
2801   A flexible method like `KSPFGMRES` or `KSPGCR`, [](sec_flexibleksp), must be used if the fieldsplit preconditioner is nonlinear (e.g. a few iterations of a Krylov method is used to solve with A or S).
2802 
2803 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`,
2804           [](sec_flexibleksp), `PCFieldSplitSetSchurPre()`
2805 @*/
2806 PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype)
2807 {
2808   PetscFunctionBegin;
2809   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2810   PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype));
2811   PetscFunctionReturn(PETSC_SUCCESS);
2812 }
2813 
static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* Record which blocks of the approximate block factorization to apply (DIAG/LOWER/UPPER/FULL) */
  jac->schurfactorization = ftype;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2822 
2823 /*@
2824   PCFieldSplitSetSchurScale -  Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`.
2825 
2826   Collective
2827 
2828   Input Parameters:
2829 + pc    - the preconditioner context
2830 - scale - scaling factor for the Schur complement
2831 
2832   Options Database Key:
2833 . -pc_fieldsplit_schur_scale <scale> - default is -1.0
2834 
2835   Level: intermediate
2836 
2837 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurFactType()`
2838 @*/
PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveScalar(pc, scale, 2); /* scale must be identical on all MPI processes */
  /* Forward to the type-specific implementation; a no-op if the method is not composed on pc */
  PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2847 
static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* Scaling applied to the Schur complement block for the DIAG factorization (default -1.0) */
  jac->schurscale = scale;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2856 
2857 /*@C
2858   PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement
2859 
2860   Collective
2861 
2862   Input Parameter:
2863 . pc - the preconditioner context
2864 
2865   Output Parameters:
2866 + A00 - the (0,0) block
2867 . A01 - the (0,1) block
2868 . A10 - the (1,0) block
2869 - A11 - the (1,1) block
2870 
2871   Level: advanced
2872 
2873   Note:
2874   Use `NULL` for any unneeded output arguments
2875 
2876 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()`
2877 @*/
2878 PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11)
2879 {
2880   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2881 
2882   PetscFunctionBegin;
2883   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2884   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach.");
2885   if (A00) *A00 = jac->pmat[0];
2886   if (A01) *A01 = jac->B;
2887   if (A10) *A10 = jac->C;
2888   if (A11) *A11 = jac->pmat[1];
2889   PetscFunctionReturn(PETSC_SUCCESS);
2890 }
2891 
2892 /*@
2893   PCFieldSplitSetGKBTol -  Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2894 
2895   Collective
2896 
2897   Input Parameters:
2898 + pc        - the preconditioner context
2899 - tolerance - the solver tolerance
2900 
2901   Options Database Key:
2902 . -pc_fieldsplit_gkb_tol <tolerance> - default is 1e-5
2903 
2904   Level: intermediate
2905 
2906   Note:
2907   The generalized GKB algorithm {cite}`arioli2013` uses a lower bound estimate of the error in energy norm as stopping criterion.
2908   It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than
2909   this estimate, the stopping criterion is satisfactory in practical cases.
2910 
2911 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()`
2912 @*/
PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, tolerance, 2); /* tolerance must be identical on all MPI processes */
  /* Forward to the type-specific implementation; a no-op if the method is not composed on pc */
  PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2921 
static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* Stopping tolerance for the GKB iteration's lower-bound error estimate */
  jac->gkbtol = tolerance;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2930 
2931 /*@
2932   PCFieldSplitSetGKBMaxit -  Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2933 
2934   Collective
2935 
2936   Input Parameters:
2937 + pc    - the preconditioner context
2938 - maxit - the maximum number of iterations
2939 
2940   Options Database Key:
2941 . -pc_fieldsplit_gkb_maxit <maxit> - default is 100
2942 
2943   Level: intermediate
2944 
2945 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()`
2946 @*/
PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, maxit, 2); /* maxit must be identical on all MPI processes */
  /* Forward to the type-specific implementation; a no-op if the method is not composed on pc */
  PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2955 
static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* Cap on the number of outer GKB iterations */
  jac->gkbmaxit = maxit;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2964 
2965 /*@
2966   PCFieldSplitSetGKBDelay -  Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization {cite}`arioli2013` in `PCFIELDSPLIT`
2967   preconditioner.
2968 
2969   Collective
2970 
2971   Input Parameters:
2972 + pc    - the preconditioner context
2973 - delay - the delay window in the lower bound estimate
2974 
2975   Options Database Key:
2976 . -pc_fieldsplit_gkb_delay <delay> - default is 5
2977 
2978   Level: intermediate
2979 
2980   Notes:
2981   The algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. The lower bound of the error $ ||u-u^k||_H $
2982   is expressed as a truncated sum. The error at iteration k can only be measured at iteration (k + `delay`), and thus the algorithm needs
2983   at least (`delay` + 1) iterations to stop.
2984 
2985   For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to {cite}`arioli2013`
2986 
2987 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2988 @*/
PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, delay, 2); /* delay must be identical on all MPI processes */
  /* Forward to the type-specific implementation; a no-op if the method is not composed on pc */
  PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2997 
static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* Delay window used by the truncated-sum lower-bound error estimate */
  jac->gkbdelay = delay;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3006 
3007 /*@
3008   PCFieldSplitSetGKBNu -  Sets the scalar value nu >= 0 in the transformation H = A00 + nu*A01*A01' of the (1,1) block in the
3009   Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
3010 
3011   Collective
3012 
3013   Input Parameters:
3014 + pc - the preconditioner context
3015 - nu - the shift parameter
3016 
3017   Options Database Key:
3018 . -pc_fieldsplit_gkb_nu <nu> - default is 1
3019 
3020   Level: intermediate
3021 
3022   Notes:
3023   This shift is in general done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by choosing `nu` sufficiently large. However,
3024   if `nu` is chosen too large, the matrix H might be badly conditioned and the solution of the linear system $Hx = b$ in the inner loop becomes difficult. It is therefore
3025   necessary to find a good balance in between the convergence of the inner and outer loop.
3026 
3027   For `nu` = 0, no shift is done. In this case A00 has to be positive definite. The matrix N in {cite}`arioli2013` is then chosen as identity.
3028 
3029 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
3030 @*/
PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, nu, 2); /* nu must be identical on all MPI processes */
  /* Forward to the type-specific implementation; a no-op if the method is not composed on pc */
  PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3039 
static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* Shift parameter for the transformed (1,1) block H = A00 + nu*A01*A01' (nu == 0 means no shift) */
  jac->gkbnu = nu;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3048 
static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->type = type;
  /* Clear every type-specific composed method; the subset valid for the new
     composite type is re-composed below. This prevents stale Schur/GKB setters
     from being callable after switching to a type that does not support them. */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));

  if (type == PC_COMPOSITE_SCHUR) {
    /* Schur complement: install Schur-specific apply/view hooks and setters */
    pc->ops->apply          = PCApply_FieldSplit_Schur;
    pc->ops->applytranspose = PCApplyTranspose_FieldSplit_Schur;
    pc->ops->view           = PCView_FieldSplit_Schur;
    pc->ops->setuponblocks  = PCSetUpOnBlocks_FieldSplit_Schur;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit));
  } else if (type == PC_COMPOSITE_GKB) {
    /* Golub-Kahan bidiagonalization: note no applytranspose is installed for GKB */
    pc->ops->apply         = PCApply_FieldSplit_GKB;
    pc->ops->view          = PCView_FieldSplit_GKB;
    pc->ops->setuponblocks = PCSetUpOnBlocks_FieldSplit_GKB;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit));
  } else {
    /* Additive/multiplicative and other composite types share the generic hooks */
    pc->ops->apply         = PCApply_FieldSplit;
    pc->ops->view          = PCView_FieldSplit;
    pc->ops->setuponblocks = PCSetUpOnBlocks_FieldSplit;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
3095 
3096 static PetscErrorCode PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs)
3097 {
3098   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3099 
3100   PetscFunctionBegin;
3101   PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" PetscInt_FMT, bs);
3102   PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs);
3103   jac->bs = bs;
3104   PetscFunctionReturn(PETSC_SUCCESS);
3105 }
3106 
3107 static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
3108 {
3109   PC_FieldSplit    *jac           = (PC_FieldSplit *)pc->data;
3110   PC_FieldSplitLink ilink_current = jac->head;
3111   IS                is_owned;
3112 
3113   PetscFunctionBegin;
3114   jac->coordinates_set = PETSC_TRUE; // Internal flag
3115   PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, NULL));
3116 
3117   while (ilink_current) {
3118     // For each IS, embed it to get local coords indces
3119     IS              is_coords;
3120     PetscInt        ndofs_block;
3121     const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block
3122 
3123     // Setting drop to true for safety. It should make no difference.
3124     PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords));
3125     PetscCall(ISGetLocalSize(is_coords, &ndofs_block));
3126     PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration));
3127 
3128     // Allocate coordinates vector and set it directly
3129     PetscCall(PetscMalloc1(ndofs_block * dim, &ilink_current->coords));
3130     for (PetscInt dof = 0; dof < ndofs_block; ++dof) {
3131       for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d];
3132     }
3133     ilink_current->dim   = dim;
3134     ilink_current->ndofs = ndofs_block;
3135     PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration));
3136     PetscCall(ISDestroy(&is_coords));
3137     ilink_current = ilink_current->next;
3138   }
3139   PetscCall(ISDestroy(&is_owned));
3140   PetscFunctionReturn(PETSC_SUCCESS);
3141 }
3142 
/*@
  PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc   - the preconditioner context
- type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`,
         `PC_COMPOSITE_GKB`

  Options Database Key:
. -pc_fieldsplit_type <one of multiplicative, additive, symmetric_multiplicative, special, schur, gkb> - Sets fieldsplit preconditioner type

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
          `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`, `PCFieldSplitSetSchurFactType()`
@*/
PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3168 
3169 /*@
3170   PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
3171 
3172   Not collective
3173 
3174   Input Parameter:
3175 . pc - the preconditioner context
3176 
3177   Output Parameter:
3178 . type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
3179 
3180   Level: intermediate
3181 
3182 .seealso: [](sec_block_matrices), `PC`, `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
3183           `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
3184 @*/
3185 PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type)
3186 {
3187   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3188 
3189   PetscFunctionBegin;
3190   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3191   PetscAssertPointer(type, 2);
3192   *type = jac->type;
3193   PetscFunctionReturn(PETSC_SUCCESS);
3194 }
3195 
3196 /*@
3197   PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3198 
3199   Logically Collective
3200 
3201   Input Parameters:
3202 + pc  - the preconditioner context
3203 - flg - boolean indicating whether to use field splits defined by the `DM`
3204 
3205   Options Database Key:
3206 . -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM`
3207 
3208   Level: intermediate
3209 
3210   Developer Note:
3211   The name should be `PCFieldSplitSetUseDMSplits()`, similar change to options database
3212 
3213 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldsplitSetIS()`
3214 @*/
3215 PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg)
3216 {
3217   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3218   PetscBool      isfs;
3219 
3220   PetscFunctionBegin;
3221   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3222   PetscValidLogicalCollectiveBool(pc, flg, 2);
3223   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3224   if (isfs) jac->dm_splits = flg;
3225   PetscFunctionReturn(PETSC_SUCCESS);
3226 }
3227 
3228 /*@
3229   PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3230 
3231   Logically Collective
3232 
3233   Input Parameter:
3234 . pc - the preconditioner context
3235 
3236   Output Parameter:
3237 . flg - boolean indicating whether to use field splits defined by the `DM`
3238 
3239   Level: intermediate
3240 
3241   Developer Note:
3242   The name should be `PCFieldSplitGetUseDMSplits()`
3243 
3244 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldsplitSetIS()`
3245 @*/
3246 PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg)
3247 {
3248   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3249   PetscBool      isfs;
3250 
3251   PetscFunctionBegin;
3252   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3253   PetscAssertPointer(flg, 2);
3254   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3255   if (isfs) {
3256     if (flg) *flg = jac->dm_splits;
3257   }
3258   PetscFunctionReturn(PETSC_SUCCESS);
3259 }
3260 
3261 /*@
3262   PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3263 
3264   Logically Collective
3265 
3266   Input Parameter:
3267 . pc - the preconditioner context
3268 
3269   Output Parameter:
3270 . flg - boolean indicating whether to detect fields or not
3271 
3272   Level: intermediate
3273 
3274 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()`
3275 @*/
3276 PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg)
3277 {
3278   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3279 
3280   PetscFunctionBegin;
3281   *flg = jac->detect;
3282   PetscFunctionReturn(PETSC_SUCCESS);
3283 }
3284 
/*@
  PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.

  Logically Collective

  Input Parameters:
+ pc  - the preconditioner context
- flg - boolean indicating whether to detect fields or not

  Options Database Key:
. -pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point

  Level: intermediate

  Note:
  Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`).

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`
@*/
PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->detect = flg;
  if (jac->detect) {
    PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
    PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
3318 
3319 /*MC
3320   PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual
3321   collections of variables (that may overlap) called splits. See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details.
3322 
3323   Options Database Keys:
3324 +   -pc_fieldsplit_%d_fields <a,b,..>                                                - indicates the fields to be used in the `%d`'th split
3325 .   -pc_fieldsplit_default                                                           - automatically add any fields to additional splits that have not
3326                                                                                        been supplied explicitly by `-pc_fieldsplit_%d_fields`
3327 .   -pc_fieldsplit_block_size <bs>                                                   - size of block that defines fields (i.e. there are bs fields)
3328                                                                                        when the matrix is not of `MatType` `MATNEST`
3329 .   -pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting
3330 .   -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full>                     - default is `a11`; see `PCFieldSplitSetSchurPre()`
3331 .   -pc_fieldsplit_schur_fact_type <diag,lower,upper,full>                           - set factorization type when using `-pc_fieldsplit_type schur`;
3332                                                                                        see `PCFieldSplitSetSchurFactType()`
3333 .   -pc_fieldsplit_dm_splits <true,false> (default is true)                          - Whether to use `DMCreateFieldDecomposition()` for splits
3334 -   -pc_fieldsplit_detect_saddle_point                                               - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver
3335 
3336   Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` .
  The options prefix for the inner solver when using the Golub-Kahan bidiagonalization preconditioner is `-fieldsplit_0_`
3338   For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields.
3339 
3340   To set options on the solvers for each block append `-fieldsplit_` to all the `PC`
3341   options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1`
3342 
3343   To set the options on the solvers separate for each block call `PCFieldSplitGetSubKSP()`
3344   and set the options directly on the resulting `KSP` object
3345 
3346   Level: intermediate
3347 
3348   Notes:
3349   Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries or with a `MATNEST` and `PCFieldSplitSetIS()`
3350   to define a split by an arbitrary collection of entries.
3351 
3352   If no splits are set, the default is used. If a `DM` is associated with the `PC` and it supports
3353   `DMCreateFieldDecomposition()`, then that is used for the default. Otherwise if the matrix is not `MATNEST`, the splits are defined by entries strided by bs,
3354   beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`,
3355   if this is not called the block size defaults to the blocksize of the second matrix passed
3356   to `KSPSetOperators()`/`PCSetOperators()`.
3357 
3358   For the Schur complement preconditioner if
3359   ```{math}
3360     J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right]
3361   ```
3362 
3363   the preconditioner using `full` factorization is logically
3364   ```{math}
3365     \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) A_{01} \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right]
  ```
3367   where the action of $\text{inv}(A_{00})$ is applied using the KSP solver with prefix `-fieldsplit_0_`.  $S$ is the Schur complement
3368   ```{math}
3369      S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01}
3370   ```
3371   which is usually dense and not stored explicitly.  The action of $\text{ksp}(S)$ is computed using the KSP solver with prefix `-fieldsplit_splitname_` (where `splitname` was given
3372   in providing the SECOND split or 1 if not given). For `PCFieldSplitGetSubKSP()` when field number is 0,
3373   it returns the `KSP` associated with `-fieldsplit_0_` while field number 1 gives `-fieldsplit_1_` KSP. By default
3374   $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$.
3375 
3376   The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above,
3377   `diag` gives
3378   ```{math}
3379     \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\  0 & -\text{ksp}(S) \end{array}\right]
3380   ```
3381   Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$  so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip
3382   can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of
3383   ```{math}
3384     \left[\begin{array}{cc} A_{00} & 0 \\  A_{10} & S \end{array}\right]
3385   ```
  where the inverses of $A_{00}$ and $S$ are applied using KSPs. The upper factorization is the inverse of
3387   ```{math}
3388     \left[\begin{array}{cc} A_{00} & A_{01} \\  0 & S \end{array}\right]
3389   ```
3390   where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s.
3391 
3392   If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS`
3393   is used automatically for a second submatrix.
3394 
3395   The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1.
3396   Generally it should be used with the `MATAIJ` or `MATNEST` `MatType`
3397 
3398   The forms of these preconditioners are closely related, if not identical, to forms derived as "Distributive Iterations", see,
3399   for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`wesseling2009`.
3400   One can also use `PCFIELDSPLIT` inside a smoother resulting in "Distributive Smoothers".
3401 
3402   See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`.
3403 
3404   The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the
3405   residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables.
3406 
3407   The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape
3408   ```{math}
3409     \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right]
3410   ```
  with $A_{00}$ positive semi-definite. The implementation follows {cite}`arioli2013`. Therein, we choose $N := 1/\nu I$ and the $(1,1)$-block of the matrix is modified to $H = A_{00} + \nu A_{01} A_{01}'$.
3412   A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`.
3413 
3414   Developer Note:
3415   The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their
3416   user API.
3417 
3418 .seealso: [](sec_block_matrices), `PC`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`,
3419           `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`,
3420           `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`,
3421           `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()`
3422 M*/
3423 
3424 PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc)
3425 {
3426   PC_FieldSplit *jac;
3427 
3428   PetscFunctionBegin;
3429   PetscCall(PetscNew(&jac));
3430 
3431   jac->bs                 = -1;
3432   jac->nsplits            = 0;
3433   jac->type               = PC_COMPOSITE_MULTIPLICATIVE;
3434   jac->schurpre           = PC_FIELDSPLIT_SCHUR_PRE_USER; /* Try user preconditioner first, fall back on diagonal */
3435   jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL;
3436   jac->schurscale         = -1.0;
3437   jac->dm_splits          = PETSC_TRUE;
3438   jac->detect             = PETSC_FALSE;
3439   jac->gkbtol             = 1e-5;
3440   jac->gkbdelay           = 5;
3441   jac->gkbnu              = 1;
3442   jac->gkbmaxit           = 100;
3443   jac->gkbmonitor         = PETSC_FALSE;
3444   jac->coordinates_set    = PETSC_FALSE;
3445 
3446   pc->data = (void *)jac;
3447 
3448   pc->ops->apply           = PCApply_FieldSplit;
3449   pc->ops->applytranspose  = PCApplyTranspose_FieldSplit;
3450   pc->ops->setup           = PCSetUp_FieldSplit;
3451   pc->ops->reset           = PCReset_FieldSplit;
3452   pc->ops->destroy         = PCDestroy_FieldSplit;
3453   pc->ops->setfromoptions  = PCSetFromOptions_FieldSplit;
3454   pc->ops->view            = PCView_FieldSplit;
3455   pc->ops->applyrichardson = NULL;
3456 
3457   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit));
3458   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
3459   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit));
3460   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit));
3461   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit));
3462   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit));
3463   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit));
3464   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit));
3465   PetscFunctionReturn(PETSC_SUCCESS);
3466 }
3467