xref: /petsc/src/ksp/pc/impls/fieldsplit/fieldsplit.c (revision bcd3bd92eda2d5998e2f14c4bbfb33bd936bdc3e)
1 #include <petsc/private/pcimpl.h>  /*I "petscpc.h" I*/
2 #include <petsc/private/kspimpl.h> /*  This is needed to provide the appropriate PETSC_EXTERN for KSP_Solve_FS ....*/
3 #include <petscdm.h>
4 
/* Human-readable names for the PCFieldSplitSchurPreType and PCFieldSplitSchurFactType enums;
   the trailing entries (type name, enum prefix, NULL) follow the PETSc PetscEnum convention
   used by options processing and viewers */
const char *const PCFieldSplitSchurPreTypes[]  = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL};
const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL};

/* Logging events for the sub-solves performed by the fieldsplit preconditioner */
PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4;
9 
typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
/* One node in the doubly-linked list of splits; each split owns its solver, index sets, and work vectors */
struct _PC_FieldSplitLink {
  KSP               ksp;                   /* Solver for this split's block */
  Vec               x, y, z;              /* Work vectors for this split */
  char             *splitname;            /* Name of the split (used for options prefixes and viewing) */
  PetscInt          nfields;              /* Number of entries in fields/fields_col */
  PetscInt         *fields, *fields_col;  /* Row/column field indices within a block, when the split is defined by fields rather than an IS */
  VecScatter        sctx;                 /* Scatter between the full vector and this split's subvector */
  IS                is, is_col;           /* Row/column index sets selecting this split from the full system */
  PC_FieldSplitLink next, previous;       /* Neighbors in the list of splits */
  PetscLogEvent     event;                /* Logging event for this split's solve */

  /* Used only when setting coordinates with PCSetCoordinates */
  PetscInt   dim;
  PetscInt   ndofs;
  PetscReal *coords;
};
27 
/* Private context for the PCFIELDSPLIT preconditioner */
typedef struct {
  PCCompositeType type;         /* How the splits are composed (additive, multiplicative, Schur, GKB, ...) */
  PetscBool       defaultsplit; /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */
  PetscBool       splitdefined; /* Flag is set after the splits have been defined, to prevent more splits from being added */
  PetscInt        bs;           /* Block size for IS and Mat structures */
  PetscInt        nsplits;      /* Number of field divisions defined */
  Vec            *x, *y, w1, w2; /* Per-split work vectors (x, y) and scratch vectors */
  Mat            *mat;    /* The diagonal block for each split */
  Mat            *pmat;   /* The preconditioning diagonal block for each split */
  Mat            *Afield; /* The rows of the matrix associated with each split */
  PetscBool       issetup; /* Whether the ISs and submatrices have been created (see PCSetUp_FieldSplit) */

  /* Only used when Schur complement preconditioning is used */
  Mat                       B;          /* The (0,1) block */
  Mat                       C;          /* The (1,0) block */
  Mat                       schur;      /* The Schur complement S = A11 - A10 A00^{-1} A01, the KSP here, kspinner, is H_1 in [El08] */
  Mat                       schurp;     /* Assembled approximation to S built by MatSchurComplement to be used as a preconditioning matrix when solving with S */
  Mat                       schur_user; /* User-provided preconditioning matrix for the Schur complement */
  PCFieldSplitSchurPreType  schurpre;   /* Determines which preconditioning matrix is used for the Schur complement */
  PCFieldSplitSchurFactType schurfactorization; /* DIAG, LOWER, UPPER, or FULL block factorization (see PCFieldSplitSchurFactTypes) */
  KSP                       kspschur;   /* The solver for S */
  KSP                       kspupper;   /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */
  PetscScalar               schurscale; /* Scaling factor for the Schur complement solution with DIAG factorization */

  /* Only used when Golub-Kahan bidiagonalization preconditioning is used */
  Mat          H;           /* The modified matrix H = A00 + nu*A01*A01'              */
  PetscReal    gkbtol;      /* Stopping tolerance for lower bound estimate            */
  PetscInt     gkbdelay;    /* The delay window for the stopping criterion            */
  PetscReal    gkbnu;       /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */
  PetscInt     gkbmaxit;    /* Maximum number of iterations for outer loop            */
  PetscBool    gkbmonitor;  /* Monitor for gkb iterations and the lower bound error   */
  PetscViewer  gkbviewer;   /* Viewer context for gkbmonitor                          */
  Vec          u, v, d, Hu; /* Work vectors for the GKB algorithm                     */
  PetscScalar *vecz;        /* Contains intermediate values, eg for lower bound       */

  PC_FieldSplitLink head;             /* Head of the linked list of splits */
  PetscBool         isrestrict;       /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */
  PetscBool         suboptionsset;    /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */
  PetscBool         dm_splits;        /* Whether to use DMCreateFieldDecomposition() whenever possible */
  PetscBool         diag_use_amat;    /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         detect;           /* Whether to form 2-way split by finding zero diagonal entries */
  PetscBool         coordinates_set;  /* Whether PCSetCoordinates has been called */
} PC_FieldSplit;
72 
/*
    Note:
    There is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit rather than
    inside PC_FieldSplitLink; the layout is purely historical. If you want to be able to add new fields
    after the PC has already been used, you could change this.
*/
79 
80 /* This helper is so that setting a user-provided preconditioning matrix is orthogonal to choosing to use it.  This way the
81 * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices. */
82 static Mat FieldSplitSchurPre(PC_FieldSplit *jac)
83 {
84   switch (jac->schurpre) {
85   case PC_FIELDSPLIT_SCHUR_PRE_SELF:
86     return jac->schur;
87   case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
88     return jac->schurp;
89   case PC_FIELDSPLIT_SCHUR_PRE_A11:
90     return jac->pmat[1];
91   case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */
92   case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */
93   default:
94     return jac->schur_user ? jac->schur_user : jac->pmat[1];
95   }
96 }
97 
98 #include <petscdraw.h>
#include <petscdraw.h>
/*
  PCView_FieldSplit - Viewer for PCFIELDSPLIT with additive/multiplicative composition.

  For an ASCII viewer, prints the composition type, block size, Amat/Pmat usage flags,
  each split's field list (or "Defined by IS"), and each split's KSP.
  For a draw viewer, draws the splits' KSPs side by side.
*/
static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for each split is in the following KSP objects:\n"));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        /* Tabs disabled so the comma-separated field list stays on one line */
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      PetscCall(KSPView(ilink->ksp, viewer));
      ilink = ilink->next;
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    /* NOTE(review): ilink is not reset here; this is only correct because a viewer
       matches at most one of ASCII/draw, so the ASCII loop above did not run -- confirm */
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* Center the row of nsplits KSP boxes around the current point */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
156 
/*
  PCView_FieldSplit_Schur - Viewer for PCFIELDSPLIT when a Schur complement factorization is used.

  For an ASCII viewer, prints the factorization type, the source of the Schur
  preconditioning matrix (see FieldSplitSchurPre()), the field composition of each
  split, and the inner KSPs for A00, the (possibly distinct) upper A00, and S.
  For a draw viewer, draws boxed labels plus the inner KSPs.
*/
static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer)
{
  PC_FieldSplit             *jac = (PC_FieldSplit *)pc->data;
  PetscBool                  iascii, isdraw;
  PetscInt                   i, j;
  PC_FieldSplitLink          ilink = jac->head;
  MatSchurComplementAinvType atype;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    switch (jac->schurpre) {
    case PC_FIELDSPLIT_SCHUR_PRE_SELF:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from S itself\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
      /* Only printable once jac->schur exists, since the A00 inverse type is queried from it */
      if (jac->schur) {
        PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype));
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's "))));
      }
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_A11:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_FULL:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from the exact Schur complement\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_USER:
      /* USER without a user matrix falls back to A11 (matches FieldSplitSchurPre()) */
      if (jac->schur_user) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from user provided matrix\n"));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      }
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre);
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Split info:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      ilink = ilink->next;
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->head) {
      PetscCall(KSPView(jac->head->ksp, viewer));
    } else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    PetscCall(PetscViewerASCIIPopTab(viewer));
    /* A separate upper-A00 solver is only reported when it differs from the A00 solver */
    if (jac->head && jac->kspupper != jac->head->ksp) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor \n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01 \n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->kspschur) {
      PetscCall(KSPView(jac->kspschur, viewer));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    }
    PetscCall(PetscViewerASCIIPopTab(viewer));
    PetscCall(PetscViewerASCIIPopTab(viewer));
  } else if (isdraw && jac->head) {
    PetscDraw draw;
    PetscReal x, y, w, wd, h;
    PetscInt  cnt = 2; /* number of KSP boxes to draw: A00 and S, plus the upper A00 if distinct */
    char      str[32];

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (jac->kspupper != jac->head->ksp) cnt++;
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (cnt + 1);

    PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    /* Label reports A11 when USER was requested without a user-provided matrix */
    if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11]));
    } else {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre]));
    }
    PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    x = x - wd * (cnt - 1) / 2.0;

    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->head->ksp, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
    if (jac->kspupper != jac->head->ksp) {
      x += wd;
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(jac->kspupper, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
    }
    x += wd;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->kspschur, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
280 
/*
  PCView_FieldSplit_GKB - Viewer for PCFIELDSPLIT with Golub-Kahan bidiagonalization.

  For an ASCII viewer, prints the composition info, the GKB parameters
  (tolerance, delay, max iterations), and the solver for the augmented
  operator H = A00 + nu*A01*A01' (split 0 only).
  For a draw viewer, draws every split's KSP side by side.
*/
static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));

    PetscCall(PetscViewerASCIIPrintf(viewer, "  Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for H = A00 + nu*A01*A01' matrix:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));

    /* Only split 0 (the H solver) is detailed in the ASCII output */
    if (ilink->fields) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields "));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      for (j = 0; j < ilink->nfields; j++) {
        if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
        PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
      }
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n"));
    }
    PetscCall(KSPView(ilink->ksp, viewer));

    PetscCall(PetscViewerASCIIPopTab(viewer));
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* Center the row of nsplits KSP boxes around the current point */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
341 
342 /* Precondition: jac->bs is set to a meaningful value */
343 static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc)
344 {
345   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
346   PetscInt       i, nfields, *ifields, nfields_col, *ifields_col;
347   PetscBool      flg, flg_col;
348   char           optionname[128], splitname[8], optionname_col[128];
349 
350   PetscFunctionBegin;
351   PetscCall(PetscMalloc1(jac->bs, &ifields));
352   PetscCall(PetscMalloc1(jac->bs, &ifields_col));
353   for (i = 0, flg = PETSC_TRUE;; i++) {
354     PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
355     PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
356     PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i));
357     nfields     = jac->bs;
358     nfields_col = jac->bs;
359     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
360     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col));
361     if (!flg) break;
362     else if (flg && !flg_col) {
363       PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
364       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields));
365     } else {
366       PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
367       PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match");
368       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col));
369     }
370   }
371   if (i > 0) {
372     /* Makes command-line setting of splits take precedence over setting them in code.
373        Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would
374        create new splits, which would probably not be what the user wanted. */
375     jac->splitdefined = PETSC_TRUE;
376   }
377   PetscCall(PetscFree(ifields));
378   PetscCall(PetscFree(ifields_col));
379   PetscFunctionReturn(PETSC_SUCCESS);
380 }
381 
/*
  PCFieldSplitSetDefaults - Ensure at least two splits are defined before setup.

  Sources of splits, tried when none were set explicitly (jac->head == NULL):
    - the DM's field decomposition (possibly combined via -pc_fieldsplit_<i>_fields),
    - zero-diagonal detection (jac->detect),
    - off-block-diagonal coupling detection (-pc_fieldsplit_detect_coupling),
    - runtime -pc_fieldsplit_<i>_fields options,
    - MATNEST index sets or the default one-field-per-block-component splitting.
  If exactly one split was supplied, its complement is added as split "1".
  Errors out unless at least two splits result.
*/
static PetscErrorCode PCFieldSplitSetDefaults(PC pc)
{
  PC_FieldSplit    *jac                = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink              = jac->head;
  PetscBool         fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE;
  PetscInt          i;

  PetscFunctionBegin;
  /*
   Kinda messy, but at least this now uses DMCreateFieldDecomposition().
   Should probably be rewritten.
   */
  if (!ilink) {
    PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL));
    if (pc->dm && jac->dm_splits && !jac->detect && !coupling) {
      /* Derive the splits from the DM's field decomposition */
      PetscInt  numFields, f, i, j;
      char    **fieldNames;
      IS       *fields;
      DM       *dms;
      DM        subdm[128];
      PetscBool flg;

      PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms));
      /* Allow the user to prescribe the splits */
      for (i = 0, flg = PETSC_TRUE;; i++) {
        PetscInt ifields[128];
        IS       compField;
        char     optionname[128], splitname[8];
        PetscInt nfields = numFields;

        /* NOTE(review): subdm[] holds 128 entries but i is never checked against that
           bound below; the PetscCheck only bounds numFields. More than 128 prescribed
           splits would overflow subdm -- confirm this is acceptable */
        PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
        PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
        if (!flg) break;
        PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields);
        PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i]));
        if (nfields == 1) {
          /* Single-field split keeps its DM field name */
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField));
        } else {
          /* Combined split is named by its index */
          PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
          PetscCall(PCFieldSplitSetIS(pc, splitname, compField));
        }
        PetscCall(ISDestroy(&compField));
        /* Release the per-field names/ISs consumed by this combined split */
        for (j = 0; j < nfields; ++j) {
          f = ifields[j];
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      }
      if (i == 0) {
        /* No prescription on the command line: one split per DM field */
        for (f = 0; f < numFields; ++f) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f]));
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      } else {
        /* Replace the per-field DMs with the sub-DMs of the prescribed combinations */
        for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j));
        PetscCall(PetscFree(dms));
        PetscCall(PetscMalloc1(i, &dms));
        for (j = 0; j < i; ++j) dms[j] = subdm[j];
      }
      PetscCall(PetscFree(fieldNames));
      PetscCall(PetscFree(fields));
      if (dms) {
        PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n"));
        /* Attach each split's DM to its KSP and propagate the compute-operators callback */
        for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) {
          const char *prefix;
          PetscCall(PetscObjectGetOptionsPrefix((PetscObject)(ilink->ksp), &prefix));
          PetscCall(PetscObjectSetOptionsPrefix((PetscObject)(dms[i]), prefix));
          PetscCall(KSPSetDM(ilink->ksp, dms[i]));
          PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE));
          {
            PetscErrorCode (*func)(KSP, Mat, Mat, void *);
            void *ctx;

            PetscCall(DMKSPGetComputeOperators(pc->dm, &func, &ctx));
            PetscCall(DMKSPSetComputeOperators(dms[i], func, ctx));
          }
          PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0));
          PetscCall(DMDestroy(&dms[i]));
        }
        PetscCall(PetscFree(dms));
      }
    } else {
      /* No usable DM decomposition: derive splits from the matrix */
      if (jac->bs <= 0) {
        if (pc->pmat) {
          PetscCall(MatGetBlockSize(pc->pmat, &jac->bs));
        } else jac->bs = 1;
      }

      if (jac->detect) {
        /* 2-way split: rows with zero diagonal ("1") vs the rest ("0") */
        IS       zerodiags, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->diag_use_amat) {
          PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags));
        } else {
          PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags));
        }
        PetscCall(ISComplement(zerodiags, nmin, nmax, &rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags));
        PetscCall(ISDestroy(&zerodiags));
        PetscCall(ISDestroy(&rest));
      } else if (coupling) {
        /* 2-way split: off-block-diagonal coupling rows ("1") vs all local rows ("0").
           NOTE(review): this IS shadows the PetscBool 'coupling' declared above */
        IS       coupling, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->offdiag_use_amat) {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling));
        } else {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling));
        }
        PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest));
        PetscCall(ISSetIdentity(rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", coupling));
        PetscCall(ISDestroy(&coupling));
        PetscCall(ISDestroy(&rest));
      } else {
        PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL));
        if (!fieldsplit_default) {
          /* Allow user to set fields from command line,  if bs was known at the time of PCSetFromOptions_FieldSplit()
           then it is set there. This is not ideal because we should only have options set in XXSetFromOptions(). */
          PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
          if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
        }
        if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) {
          Mat       M = pc->pmat;
          PetscBool isnest;

          PetscCall(PetscInfo(pc, "Using default splitting of fields\n"));
          /* Prefer Pmat's nest structure; fall back to Amat's */
          PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest));
          if (!isnest) {
            M = pc->mat;
            PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest));
          }
          if (isnest) {
            /* Use the nest matrix's own row index sets as the splits */
            IS      *fields;
            PetscInt nf;

            PetscCall(MatNestGetSize(M, &nf, NULL));
            PetscCall(PetscMalloc1(nf, &fields));
            PetscCall(MatNestGetISs(M, fields, NULL));
            for (i = 0; i < nf; i++) PetscCall(PCFieldSplitSetIS(pc, NULL, fields[i]));
            PetscCall(PetscFree(fields));
          } else {
            /* One split per component of the block size */
            for (i = 0; i < jac->bs; i++) {
              char splitname[8];
              PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
              PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i));
            }
            jac->defaultsplit = PETSC_TRUE;
          }
        }
      }
    }
  } else if (jac->nsplits == 1) {
    /* Exactly one split given: synthesize the second as its complement */
    IS       is2;
    PetscInt nmin, nmax;

    PetscCheck(ilink->is, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()");
    PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
    PetscCall(ISComplement(ilink->is, nmin, nmax, &is2));
    PetscCall(PCFieldSplitSetIS(pc, "1", is2));
    PetscCall(ISDestroy(&is2));
  }

  PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits);
  PetscFunctionReturn(PETSC_SUCCESS);
}
554 
555 static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu)
556 {
557   Mat       BT, T;
558   PetscReal nrmT, nrmB;
559 
560   PetscFunctionBegin;
561   PetscCall(MatHermitianTranspose(C, MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */
562   PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN));
563   PetscCall(MatNorm(T, NORM_1, &nrmT));
564   PetscCall(MatNorm(B, NORM_1, &nrmB));
565   PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/hermitian, GKB is not applicable.");
566 
567   /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */
568   /* setting N := 1/nu*I in [Ar13].                                                 */
569   PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT));
570   PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_DEFAULT, H)); /* H = A01*A01'          */
571   PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN));        /* H = A00 + nu*A01*A01' */
572 
573   PetscCall(MatDestroy(&BT));
574   PetscCall(MatDestroy(&T));
575   PetscFunctionReturn(PETSC_SUCCESS);
576 }
577 
578 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *value[], PetscBool *flg);
579 
580 static PetscErrorCode PCSetUp_FieldSplit(PC pc)
581 {
582   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
583   PC_FieldSplitLink ilink;
584   PetscInt          i, nsplit;
585   PetscBool         sorted, sorted_col;
586 
587   PetscFunctionBegin;
588   pc->failedreason = PC_NOERROR;
589   PetscCall(PCFieldSplitSetDefaults(pc));
590   nsplit = jac->nsplits;
591   ilink  = jac->head;
592 
593   /* get the matrices for each split */
594   if (!jac->issetup) {
595     PetscInt rstart, rend, nslots, bs;
596 
597     jac->issetup = PETSC_TRUE;
598 
599     /* This is done here instead of in PCFieldSplitSetFields() because may not have matrix at that point */
600     if (jac->defaultsplit || !ilink->is) {
601       if (jac->bs <= 0) jac->bs = nsplit;
602     }
603 
604     /*  MatCreateSubMatrix() for [S]BAIJ matrices can only work if the indices include entire blocks of the matrix */
605     PetscCall(MatGetBlockSize(pc->pmat, &bs));
606     if (bs > 1 && (jac->bs <= bs || jac->bs % bs)) {
607       PetscBool blk;
608 
609       PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &blk, MATBAIJ, MATSBAIJ, MATSEQBAIJ, MATSEQSBAIJ, MATMPIBAIJ, MATMPISBAIJ, NULL));
610       PetscCheck(!blk, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Cannot use MATBAIJ with PCFIELDSPLIT and currently set matrix and PC blocksizes");
611     }
612 
613     bs = jac->bs;
614     PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend));
615     nslots = (rend - rstart) / bs;
616     for (i = 0; i < nsplit; i++) {
617       if (jac->defaultsplit) {
618         PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is));
619         PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
620       } else if (!ilink->is) {
621         if (ilink->nfields > 1) {
622           PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col;
623           PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii));
624           PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj));
625           for (j = 0; j < nslots; j++) {
626             for (k = 0; k < nfields; k++) {
627               ii[nfields * j + k] = rstart + bs * j + fields[k];
628               jj[nfields * j + k] = rstart + bs * j + fields_col[k];
629             }
630           }
631           PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is));
632           PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col));
633           PetscCall(ISSetBlockSize(ilink->is, nfields));
634           PetscCall(ISSetBlockSize(ilink->is_col, nfields));
635         } else {
636           PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is));
637           PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col));
638         }
639       }
640       PetscCall(ISSorted(ilink->is, &sorted));
641       if (ilink->is_col) PetscCall(ISSorted(ilink->is_col, &sorted_col));
642       PetscCheck(sorted && sorted_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Fields must be sorted when creating split");
643       ilink = ilink->next;
644     }
645   }
646 
647   ilink = jac->head;
648   if (!jac->pmat) {
649     Vec xtmp;
650 
651     PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL));
652     PetscCall(PetscMalloc1(nsplit, &jac->pmat));
653     PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y));
654     for (i = 0; i < nsplit; i++) {
655       MatNullSpace sp;
656 
657       /* Check for preconditioning matrix attached to IS */
658       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i]));
659       if (jac->pmat[i]) {
660         PetscCall(PetscObjectReference((PetscObject)jac->pmat[i]));
661         if (jac->type == PC_COMPOSITE_SCHUR) {
662           jac->schur_user = jac->pmat[i];
663 
664           PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
665         }
666       } else {
667         const char *prefix;
668         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i]));
669         PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix));
670         PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix));
671         PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view"));
672       }
673       /* create work vectors for each split */
674       PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i]));
675       ilink->x = jac->x[i];
676       ilink->y = jac->y[i];
677       ilink->z = NULL;
678       /* compute scatter contexts needed by multiplicative versions and non-default splits */
679       PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx));
680       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp));
681       if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp));
682       ilink = ilink->next;
683     }
684     PetscCall(VecDestroy(&xtmp));
685   } else {
686     MatReuse scall;
687     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
688       for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i]));
689       scall = MAT_INITIAL_MATRIX;
690     } else scall = MAT_REUSE_MATRIX;
691 
692     for (i = 0; i < nsplit; i++) {
693       Mat pmat;
694 
695       /* Check for preconditioning matrix attached to IS */
696       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat));
697       if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i]));
698       ilink = ilink->next;
699     }
700   }
701   if (jac->diag_use_amat) {
702     ilink = jac->head;
703     if (!jac->mat) {
704       PetscCall(PetscMalloc1(nsplit, &jac->mat));
705       for (i = 0; i < nsplit; i++) {
706         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i]));
707         ilink = ilink->next;
708       }
709     } else {
710       MatReuse scall;
711       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
712         for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i]));
713         scall = MAT_INITIAL_MATRIX;
714       } else scall = MAT_REUSE_MATRIX;
715 
716       for (i = 0; i < nsplit; i++) {
717         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i]));
718         ilink = ilink->next;
719       }
720     }
721   } else {
722     jac->mat = jac->pmat;
723   }
724 
725   /* Check for null space attached to IS */
726   ilink = jac->head;
727   for (i = 0; i < nsplit; i++) {
728     MatNullSpace sp;
729 
730     PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp));
731     if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp));
732     ilink = ilink->next;
733   }
734 
735   if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) {
736     /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */
737     /* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? */
738     ilink = jac->head;
739     if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) {
740       /* special case need where Afield[0] is not needed and only certain columns of Afield[1] are needed since update is only on those rows of the solution */
741       if (!jac->Afield) {
742         PetscCall(PetscCalloc1(nsplit, &jac->Afield));
743         if (jac->offdiag_use_amat) {
744           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
745         } else {
746           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
747         }
748       } else {
749         MatReuse scall;
750 
751         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
752           PetscCall(MatDestroy(&jac->Afield[1]));
753           scall = MAT_INITIAL_MATRIX;
754         } else scall = MAT_REUSE_MATRIX;
755 
756         if (jac->offdiag_use_amat) {
757           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
758         } else {
759           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
760         }
761       }
762     } else {
763       if (!jac->Afield) {
764         PetscCall(PetscMalloc1(nsplit, &jac->Afield));
765         for (i = 0; i < nsplit; i++) {
766           if (jac->offdiag_use_amat) {
767             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
768           } else {
769             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
770           }
771           ilink = ilink->next;
772         }
773       } else {
774         MatReuse scall;
775         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
776           for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i]));
777           scall = MAT_INITIAL_MATRIX;
778         } else scall = MAT_REUSE_MATRIX;
779 
780         for (i = 0; i < nsplit; i++) {
781           if (jac->offdiag_use_amat) {
782             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i]));
783           } else {
784             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, &jac->Afield[i]));
785           }
786           ilink = ilink->next;
787         }
788       }
789     }
790   }
791 
792   if (jac->type == PC_COMPOSITE_SCHUR) {
793     IS          ccis;
794     PetscBool   isset, isspd;
795     PetscInt    rstart, rend;
796     char        lscname[256];
797     PetscObject LSC_L;
798 
799     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields");
800 
801     /* If pc->mat is SPD, don't scale by -1 the Schur complement */
802     if (jac->schurscale == (PetscScalar)-1.0) {
803       PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd));
804       jac->schurscale = (isset && isspd) ? 1.0 : -1.0;
805     }
806 
807     /* When extracting off-diagonal submatrices, we take complements from this range */
808     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
809 
810     if (jac->schur) {
811       KSP      kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper;
812       MatReuse scall;
813 
814       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
815         scall = MAT_INITIAL_MATRIX;
816         PetscCall(MatDestroy(&jac->B));
817         PetscCall(MatDestroy(&jac->C));
818       } else scall = MAT_REUSE_MATRIX;
819 
820       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
821       ilink = jac->head;
822       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
823       if (jac->offdiag_use_amat) {
824         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->B));
825       } else {
826         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->B));
827       }
828       PetscCall(ISDestroy(&ccis));
829       ilink = ilink->next;
830       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
831       if (jac->offdiag_use_amat) {
832         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->C));
833       } else {
834         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->C));
835       }
836       PetscCall(ISDestroy(&ccis));
837       PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
838       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) {
839         PetscCall(MatDestroy(&jac->schurp));
840         PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
841       }
842       if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0]));
843       if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0]));
844       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
845     } else {
846       const char  *Dprefix;
847       char         schurprefix[256], schurmatprefix[256];
848       char         schurtestoption[256];
849       MatNullSpace sp;
850       PetscBool    flg;
851       KSP          kspt;
852 
853       /* extract the A01 and A10 matrices */
854       ilink = jac->head;
855       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
856       if (jac->offdiag_use_amat) {
857         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
858       } else {
859         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
860       }
861       PetscCall(ISDestroy(&ccis));
862       ilink = ilink->next;
863       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
864       if (jac->offdiag_use_amat) {
865         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
866       } else {
867         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
868       }
869       PetscCall(ISDestroy(&ccis));
870 
871       /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */
872       PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur));
873       PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT));
874       PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
875       PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
876       PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix));
877       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt));
878       PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix));
879 
880       /* Note: this is not true in general */
881       PetscCall(MatGetNullSpace(jac->mat[1], &sp));
882       if (sp) PetscCall(MatSetNullSpace(jac->schur, sp));
883 
884       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname));
885       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, &flg));
886       if (flg) {
887         DM  dmInner;
888         KSP kspInner;
889         PC  pcInner;
890 
891         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
892         PetscCall(KSPReset(kspInner));
893         PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0]));
894         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
895         /* Indent this deeper to emphasize the "inner" nature of this solver. */
896         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2));
897         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2));
898         PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix));
899 
900         /* Set DM for new solver */
901         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
902         PetscCall(KSPSetDM(kspInner, dmInner));
903         PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE));
904 
905         /* Defaults to PCKSP as preconditioner */
906         PetscCall(KSPGetPC(kspInner, &pcInner));
907         PetscCall(PCSetType(pcInner, PCKSP));
908         PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp));
909       } else {
910         /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or
911           * PCFieldSplitSetIS_FieldSplit. We don't want KSPPREONLY because it makes the Schur complement inexact,
912           * preventing Schur complement reduction to be an accurate solve. Usually when an iterative solver is used for
913           * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make
914           * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used
915           * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. */
916         PetscCall(KSPSetType(jac->head->ksp, KSPGMRES));
917         PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp));
918       }
919       PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0]));
920       PetscCall(KSPSetFromOptions(jac->head->ksp));
921       PetscCall(MatSetFromOptions(jac->schur));
922 
923       PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg));
924       if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */
925         KSP kspInner;
926         PC  pcInner;
927 
928         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
929         PetscCall(KSPGetPC(kspInner, &pcInner));
930         PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg));
931         if (flg) {
932           KSP ksp;
933 
934           PetscCall(PCKSPGetKSP(pcInner, &ksp));
935           if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE));
936         }
937       }
938       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname));
939       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, &flg));
940       if (flg) {
941         DM dmInner;
942 
943         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
944         PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper));
945         PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure));
946         PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix));
947         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1));
948         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1));
949         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
950         PetscCall(KSPSetDM(jac->kspupper, dmInner));
951         PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE));
952         PetscCall(KSPSetFromOptions(jac->kspupper));
953         PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0]));
954         PetscCall(VecDuplicate(jac->head->x, &jac->head->z));
955       } else {
956         jac->kspupper = jac->head->ksp;
957         PetscCall(PetscObjectReference((PetscObject)jac->head->ksp));
958       }
959 
960       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
961       PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur));
962       PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure));
963       PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1));
964       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) {
965         PC pcschur;
966         PetscCall(KSPGetPC(jac->kspschur, &pcschur));
967         PetscCall(PCSetType(pcschur, PCNONE));
968         /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */
969       } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) {
970         PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
971       }
972       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
973       PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix));
974       PetscCall(KSPSetOptionsPrefix(jac->kspschur, Dprefix));
975       /* propagate DM */
976       {
977         DM sdm;
978         PetscCall(KSPGetDM(jac->head->next->ksp, &sdm));
979         if (sdm) {
980           PetscCall(KSPSetDM(jac->kspschur, sdm));
981           PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE));
982         }
983       }
984       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
985       /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */
986       PetscCall(KSPSetFromOptions(jac->kspschur));
987     }
988     PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY));
989     PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY));
990 
991     /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */
992     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname));
993     PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
994     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
995     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", (PetscObject)LSC_L));
996     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname));
997     PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
998     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
999     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", (PetscObject)LSC_L));
1000   } else if (jac->type == PC_COMPOSITE_GKB) {
1001     IS       ccis;
1002     PetscInt rstart, rend;
1003 
1004     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields");
1005 
1006     ilink = jac->head;
1007 
1008     /* When extracting off-diagonal submatrices, we take complements from this range */
1009     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
1010 
1011     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1012     if (jac->offdiag_use_amat) {
1013       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1014     } else {
1015       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1016     }
1017     PetscCall(ISDestroy(&ccis));
1018     /* Create work vectors for GKB algorithm */
1019     PetscCall(VecDuplicate(ilink->x, &jac->u));
1020     PetscCall(VecDuplicate(ilink->x, &jac->Hu));
1021     PetscCall(VecDuplicate(ilink->x, &jac->w2));
1022     ilink = ilink->next;
1023     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1024     if (jac->offdiag_use_amat) {
1025       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1026     } else {
1027       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1028     }
1029     PetscCall(ISDestroy(&ccis));
1030     /* Create work vectors for GKB algorithm */
1031     PetscCall(VecDuplicate(ilink->x, &jac->v));
1032     PetscCall(VecDuplicate(ilink->x, &jac->d));
1033     PetscCall(VecDuplicate(ilink->x, &jac->w1));
1034     PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu));
1035     PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz));
1036 
1037     ilink = jac->head;
1038     PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H));
1039     if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1040     /* Create gkb_monitor context */
1041     if (jac->gkbmonitor) {
1042       PetscInt tablevel;
1043       PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer));
1044       PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII));
1045       PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel));
1046       PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel));
1047       PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1));
1048     }
1049   } else {
1050     /* set up the individual splits' PCs */
1051     i     = 0;
1052     ilink = jac->head;
1053     while (ilink) {
1054       PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i]));
1055       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
1056       if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1057       i++;
1058       ilink = ilink->next;
1059     }
1060   }
1061 
1062   /* Set coordinates to the sub PC objects whenever these are set */
1063   if (jac->coordinates_set) {
1064     PC pc_coords;
1065     if (jac->type == PC_COMPOSITE_SCHUR) {
1066       // Head is first block.
1067       PetscCall(KSPGetPC(jac->head->ksp, &pc_coords));
1068       PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords));
1069       // Second one is Schur block, but its KSP object is in kspschur.
1070       PetscCall(KSPGetPC(jac->kspschur, &pc_coords));
1071       PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords));
1072     } else if (jac->type == PC_COMPOSITE_GKB) {
1073       PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB Fieldpslit preconditioner"));
1074     } else {
1075       ilink = jac->head;
1076       while (ilink) {
1077         PetscCall(KSPGetPC(ilink->ksp, &pc_coords));
1078         PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords));
1079         ilink = ilink->next;
1080       }
1081     }
1082   }
1083 
1084   jac->suboptionsset = PETSC_TRUE;
1085   PetscFunctionReturn(PETSC_SUCCESS);
1086 }
1087 
/* Gather the split's portion of the global vector xx into ilink->x, solve with the
   split's KSP into ilink->y, and scatter-ADD the result back into the global vector yy.
   The calls are chained with || so that the whole macro is a single PetscErrorCode
   expression: each call returns 0 on success, so the first failing call short-circuits
   the remainder and its error code becomes the macro's value (a PetscCall()-style
   check usable in expression context).  Note: references `pc` from the enclosing
   scope via KSPCheckSolve. */
#define FieldSplitSplitSolveAdd(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \
                    KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE)))
1092 
/*
  PCApply_FieldSplit_Schur - Applies the Schur-complement fieldsplit preconditioner
  y = M^{-1} x, where M is one of four block factorizations (DIAG, LOWER, UPPER, FULL)
  selected by jac->schurfactorization.  ilinkA is the first (A00) split, ilinkD the
  second (Schur) split; jac->kspschur solves with the Schur complement S.

  NOTE: VecScatterBegin/VecScatterEnd pairs are deliberately interleaved with solves
  and mat-vecs so that scatter communication overlaps computation — the statement
  order in each case is intentional and must not be "tidied up".
*/
static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
  /* kspLower aliases kspA; kspUpper differs only when a separate "upper" KSP was configured in PCSetUp */
  KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;

  PetscFunctionBegin;
  switch (jac->schurfactorization) {
  case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
    /* [A00 0; 0 -S], positive definite, suitable for MINRES */
    /* start both forward scatters, then overlap the A00 solve with the D-split scatter */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* schurscale is -1 unless the operator is known SPD (set in PCSetUp_FieldSplit) */
    PetscCall(VecScale(ilinkD->y, jac->schurscale));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* [A00 0; A10 S], suitable for left preconditioning */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    /* form x_D - A10 y_A: negate the mat-vec, then scatter-ADD x into it */
    PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* [A00 A01; 0 S], suitable for right preconditioning */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* form x_A - A01 y_D before the A00 solve */
    PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
    PetscCall(VecScale(ilinkA->x, -1.));
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.0));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

    if (kspUpper == kspA) {
      /* same KSP for the upper solve: fold A01 y_D into the residual and solve once */
      PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y));
      PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    } else {
      /* separate upper KSP: two solves, using ilinkA->z (allocated in PCSetUp) as scratch */
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
      PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
      PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z));
      PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z));
      PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
      PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
    }
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1206 
/*
   PCApplyTranspose_FieldSplit_Schur - Applies the transpose of the Schur-complement
   block-factorization preconditioner, y = M^{-T} x.

   Each case mirrors the corresponding case of the forward apply routine with
   KSPSolve() replaced by KSPSolveTranspose() and the off-diagonal blocks
   jac->B (A01) and jac->C (A10) applied through MatMultTranspose() with their
   roles exchanged, since (L D U)^T = U^T D^T L^T.

   The VecScatterBegin()/VecScatterEnd() pairs are deliberately interleaved with
   the solves and matrix-vector products so that communication overlaps
   computation; do not reorder them.
*/
static PetscErrorCode PCApplyTranspose_FieldSplit_Schur(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; /* A00 split and Schur-complement split */
  KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;

  PetscFunctionBegin;
  switch (jac->schurfactorization) {
  case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
    /* [A00 0; 0 -S], positive definite, suitable for MINRES */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScale(ilinkD->y, jac->schurscale)); /* scale set with PCFieldSplitSetSchurScale() */
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* transpose of the UPPER factorization: A00^{-T} first, then B^T feeds the Schur solve */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* transpose of the LOWER factorization: Schur solve first, then C^T feeds A00^{-T} */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
    PetscCall(VecScale(ilinkA->x, -1.));
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* transpose of the FULL factorization: apply U^{-T} (via kspUpper), then S^{-T}, then L^{-T} */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspUpper, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.0));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

    /* NOTE(review): kspLower is initialized to kspA above, so this test is always
       true and the else branch is currently unreachable; the structure mirrors the
       forward apply routine, which distinguishes kspUpper from kspA -- confirm intent */
    if (kspLower == kspA) {
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->y));
      PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    } else {
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
      PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(KSPSolveTranspose(kspLower, ilinkA->x, ilinkA->z));
      PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->z));
      PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
    }
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1317 
/*
   PCApply_FieldSplit - Applies the fieldsplit preconditioner, y = M^{-1} x, for
   the ADDITIVE, MULTIPLICATIVE, and SYMMETRIC_MULTIPLICATIVE compositions.
   (The SCHUR and GKB compositions have their own apply routines.)

   For the default (strided) split the sub-vectors are gathered/scattered with
   VecStrideGatherAll()/VecStrideScatterAll(); otherwise each split's scatter is
   used.  The two-split multiplicative case is special-cased to compute the
   residual only on the second block via jac->Afield[1].
*/
static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          cnt, bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    if (jac->defaultsplit) {
      /* sanity-check the block sizes of x and y against the fieldsplit block size */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) {
    PetscCall(VecSet(y, 0.0));
    /* solve on first block for first block variables */
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));

    /* compute the residual only onto second block variables using first block variables */
    PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x));
    ilink = ilink->next;
    PetscCall(VecScale(ilink->x, -1.0));
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));

    /* solve on second block variables */
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
    if (!jac->w1) {
      /* lazily create the work vectors used by the multiplicative sweeps */
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
    cnt = 1;
    while (ilink->next) {
      ilink = ilink->next;
      /* compute the residual only over the part of the vector needed */
      PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x));
      PetscCall(VecScale(ilink->x, -1.0));
      PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
      PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
      PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    }
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* backward sweep over the splits for the symmetric variant */
      cnt -= 2;
      while (ilink->previous) {
        ilink = ilink->previous;
        /* compute the residual only over the part of the vector needed */
        PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x));
        PetscCall(VecScale(ilink->x, -1.0));
        PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      }
    }
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1415 
/*
   PCApply_FieldSplit_GKB - Applies the Golub-Kahan bidiagonalization (GKB)
   preconditioner for a two-split system with blocks H (= A + nu*B*B') and B
   (see the [Ar13] reference mentioned in the comments below).

   The iteration stops when the lower-bound error estimate drops below
   jac->gkbtol or after jac->gkbmaxit iterations; the estimate is formed over a
   sliding window of the last jac->gkbdelay values of z (stored in jac->vecz).
*/
static PetscErrorCode PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; /* H split and second split */
  KSP               ksp = ilinkA->ksp;
  Vec               u, v, Hu, d, work1, work2;
  PetscScalar       alpha, z, nrmz2, *vecz;
  PetscReal         lowbnd, nu, beta;
  PetscInt          j, iterGKB;

  PetscFunctionBegin;
  /* gather both sub-vectors of the right-hand side */
  PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));

  /* local aliases for the work objects created at setup time */
  u     = jac->u;
  v     = jac->v;
  Hu    = jac->Hu;
  d     = jac->d;
  work1 = jac->w1;
  work2 = jac->w2;
  vecz  = jac->vecz;

  /* Change RHS to comply with matrix regularization H = A + nu*B*B' */
  /* Add q = q + nu*B*b */
  if (jac->gkbnu) {
    nu = jac->gkbnu;
    PetscCall(VecScale(ilinkD->x, jac->gkbnu));
    PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */
  } else {
    /* Situation when no augmented Lagrangian is used. Then we set inner  */
    /* matrix N = I in [Ar13], and thus nu = 1.                           */
    nu = 1;
  }

  /* Transform rhs from [q,tilde{b}] to [0,b] */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y));
  PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1));
  PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x        */

  /* First step of algorithm */
  PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c)*/
  KSPCheckDot(ksp, beta);
  beta = PetscSqrtReal(nu) * beta;
  PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c      */
  PetscCall(MatMult(jac->B, v, work2));          /* u = H^{-1}*B*v      */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(KSPSolve(ksp, work2, u));
  PetscCall(KSPCheckSolve(ksp, pc, u));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u      */
  PetscCall(VecDot(Hu, u, &alpha));
  KSPCheckDot(ksp, alpha);
  PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
  alpha = PetscSqrtReal(PetscAbsScalar(alpha));
  PetscCall(VecScale(u, 1.0 / alpha));
  PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* v = nu/beta *c      */

  z       = beta / alpha;
  vecz[1] = z;

  /* Computation of first iterate x(1) and p(1) */
  PetscCall(VecAXPY(ilinkA->y, z, u));
  PetscCall(VecCopy(d, ilinkD->y));
  PetscCall(VecScale(ilinkD->y, -z));

  iterGKB = 1;
  lowbnd  = 2 * jac->gkbtol; /* ensure the loop is entered at least once */
  if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));

  while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) {
    iterGKB += 1;
    PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */
    PetscCall(VecAXPBY(v, nu, -alpha, work1));
    PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = sqrt(nu)*v'*v      */
    beta = beta / PetscSqrtReal(nu);
    PetscCall(VecScale(v, 1.0 / beta));
    PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */
    PetscCall(MatMult(jac->H, u, Hu));
    PetscCall(VecAXPY(work2, -beta, Hu));
    PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(KSPSolve(ksp, work2, u));
    PetscCall(KSPCheckSolve(ksp, pc, u));
    PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u            */
    PetscCall(VecDot(Hu, u, &alpha));
    KSPCheckDot(ksp, alpha);
    PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
    alpha = PetscSqrtReal(PetscAbsScalar(alpha));
    PetscCall(VecScale(u, 1.0 / alpha));

    z       = -beta / alpha * z; /* z <- beta/alpha*z     */
    vecz[0] = z;

    /* Computation of new iterate x(i+1) and p(i+1) */
    PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */
    PetscCall(VecAXPY(ilinkA->y, z, u));                   /* r = r + z*u          */
    PetscCall(VecAXPY(ilinkD->y, -z, d));                  /* p = p - z*d          */
    PetscCall(MatMult(jac->H, ilinkA->y, Hu));             /* ||u||_H = u'*H*u     */
    PetscCall(VecDot(Hu, ilinkA->y, &nrmz2));

    /* Compute Lower Bound estimate */
    if (iterGKB > jac->gkbdelay) {
      lowbnd = 0.0;
      for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]);
      lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2));
    }

    /* shift the window of saved z values used by the lower-bound estimate */
    for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2];
    if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));
  }

  /* scatter both solution pieces back into the global y */
  PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

  PetscFunctionReturn(PETSC_SUCCESS);
}
1539 
/* FieldSplitSplitSolveAddTranspose - scatter the split's part of xx forward,
   apply the transpose of the split's KSP, and ADD the result back into yy.
   Implemented as a single expression chained with || so that the first
   failing PetscErrorCode short-circuits the rest.  NOTE: it references a
   variable `pc` that must be in scope at the expansion site (for
   KSPCheckSolve()). */
#define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \
                    KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE)))
1544 
/*
   PCApplyTranspose_FieldSplit - Applies the transpose of the fieldsplit
   preconditioner, y = M^{-T} x, for the additive and (symmetric) multiplicative
   compositions.

   For the non-symmetric multiplicative case the splits are visited in reverse
   order (last to first), since transposing a product reverses the factors; the
   symmetric case sweeps forward then backward as in the forward apply.
*/
static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    if (jac->defaultsplit) {
      /* sanity-check the block sizes of x and y against the fieldsplit block size */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else {
    if (!jac->w1) {
      /* lazily create the residual work vectors */
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* forward sweep, then backward sweep, updating the global residual in between */
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->next) {
        ilink = ilink->next;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    } else {
      while (ilink->next) { /* get to last entry in linked list */
        ilink = ilink->next;
      }
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1609 
1610 static PetscErrorCode PCReset_FieldSplit(PC pc)
1611 {
1612   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1613   PC_FieldSplitLink ilink = jac->head, next;
1614 
1615   PetscFunctionBegin;
1616   while (ilink) {
1617     PetscCall(KSPDestroy(&ilink->ksp));
1618     PetscCall(VecDestroy(&ilink->x));
1619     PetscCall(VecDestroy(&ilink->y));
1620     PetscCall(VecDestroy(&ilink->z));
1621     PetscCall(VecScatterDestroy(&ilink->sctx));
1622     PetscCall(ISDestroy(&ilink->is));
1623     PetscCall(ISDestroy(&ilink->is_col));
1624     PetscCall(PetscFree(ilink->splitname));
1625     PetscCall(PetscFree(ilink->fields));
1626     PetscCall(PetscFree(ilink->fields_col));
1627     next = ilink->next;
1628     PetscCall(PetscFree(ilink));
1629     ilink = next;
1630   }
1631   jac->head = NULL;
1632   PetscCall(PetscFree2(jac->x, jac->y));
1633   if (jac->mat && jac->mat != jac->pmat) {
1634     PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat));
1635   } else if (jac->mat) {
1636     jac->mat = NULL;
1637   }
1638   if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat));
1639   if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield));
1640   jac->nsplits = 0;
1641   PetscCall(VecDestroy(&jac->w1));
1642   PetscCall(VecDestroy(&jac->w2));
1643   PetscCall(MatDestroy(&jac->schur));
1644   PetscCall(MatDestroy(&jac->schurp));
1645   PetscCall(MatDestroy(&jac->schur_user));
1646   PetscCall(KSPDestroy(&jac->kspschur));
1647   PetscCall(KSPDestroy(&jac->kspupper));
1648   PetscCall(MatDestroy(&jac->B));
1649   PetscCall(MatDestroy(&jac->C));
1650   PetscCall(MatDestroy(&jac->H));
1651   PetscCall(VecDestroy(&jac->u));
1652   PetscCall(VecDestroy(&jac->v));
1653   PetscCall(VecDestroy(&jac->Hu));
1654   PetscCall(VecDestroy(&jac->d));
1655   PetscCall(PetscFree(jac->vecz));
1656   PetscCall(PetscViewerDestroy(&jac->gkbviewer));
1657   jac->isrestrict = PETSC_FALSE;
1658   PetscFunctionReturn(PETSC_SUCCESS);
1659 }
1660 
1661 static PetscErrorCode PCDestroy_FieldSplit(PC pc)
1662 {
1663   PetscFunctionBegin;
1664   PetscCall(PCReset_FieldSplit(pc));
1665   PetscCall(PetscFree(pc->data));
1666   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
1667   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL));
1668   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL));
1669   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL));
1670   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL));
1671   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL));
1672   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL));
1673   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1674 
1675   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
1676   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
1677   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
1678   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
1679   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1680   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
1681   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
1682   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
1683   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
1684   PetscFunctionReturn(PETSC_SUCCESS);
1685 }
1686 
1687 static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems *PetscOptionsObject)
1688 {
1689   PetscInt        bs;
1690   PetscBool       flg;
1691   PC_FieldSplit  *jac = (PC_FieldSplit *)pc->data;
1692   PCCompositeType ctype;
1693 
1694   PetscFunctionBegin;
1695   PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options");
1696   PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL));
1697   PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg));
1698   if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs));
1699   jac->diag_use_amat = pc->useAmat;
1700   PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL));
1701   jac->offdiag_use_amat = pc->useAmat;
1702   PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL));
1703   PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL));
1704   PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */
1705   PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg));
1706   if (flg) PetscCall(PCFieldSplitSetType(pc, ctype));
1707   /* Only setup fields once */
1708   if ((jac->bs > 0) && (jac->nsplits == 0)) {
1709     /* only allow user to set fields from command line if bs is already known.
1710        otherwise user can set them in PCFieldSplitSetDefaults() */
1711     PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
1712     if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
1713   }
1714   if (jac->type == PC_COMPOSITE_SCHUR) {
1715     PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg));
1716     if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n"));
1717     PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL));
1718     PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL));
1719     PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL));
1720   } else if (jac->type == PC_COMPOSITE_GKB) {
1721     PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitGKBTol", jac->gkbtol, &jac->gkbtol, NULL));
1722     PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL));
1723     PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitGKBNu", jac->gkbnu, &jac->gkbnu, NULL));
1724     PetscCheck(jac->gkbnu >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "nu cannot be less than 0: value %g", (double)jac->gkbnu);
1725     PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", "Maximum allowed number of iterations", "PCFieldSplitGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL));
1726     PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL));
1727   }
1728   /*
1729     In the initial call to this routine the sub-solver data structures do not exist so we cannot call KSPSetFromOptions() on them yet.
1730     But after the initial setup of ALL the layers of sub-solvers is completed we do want to call KSPSetFromOptions() on the sub-solvers every time it
1731     is called on the outer solver in case changes were made in the options database
1732 
1733     But even after PCSetUp_FieldSplit() is called all the options inside the inner levels of sub-solvers may still not have been set thus we only call the KSPSetFromOptions()
1734     if we know that the entire stack of sub-solvers below this have been complete instantiated, we check this by seeing if any solver iterations are complete.
1735     Without this extra check test p2p1fetidp_olof_full and others fail with incorrect matrix types.
1736 
1737     There could be a negative side effect of calling the KSPSetFromOptions() below.
1738 
1739     If one captured the PetscObjectState of the options database one could skip these calls if the database has not changed from the previous call
1740   */
1741   if (jac->issetup) {
1742     PC_FieldSplitLink ilink = jac->head;
1743     if (jac->type == PC_COMPOSITE_SCHUR) {
1744       if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper));
1745       if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur));
1746     }
1747     while (ilink) {
1748       if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp));
1749       ilink = ilink->next;
1750     }
1751   }
1752   PetscOptionsHeadEnd();
1753   PetscFunctionReturn(PETSC_SUCCESS);
1754 }
1755 
1756 static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
1757 {
1758   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
1759   PC_FieldSplitLink ilink, next = jac->head;
1760   char              prefix[128];
1761   PetscInt          i;
1762 
1763   PetscFunctionBegin;
1764   if (jac->splitdefined) {
1765     PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
1766     PetscFunctionReturn(PETSC_SUCCESS);
1767   }
1768   for (i = 0; i < n; i++) {
1769     PetscCheck(fields[i] < jac->bs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", fields[i], jac->bs);
1770     PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]);
1771   }
1772   PetscCall(PetscNew(&ilink));
1773   if (splitname) {
1774     PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
1775   } else {
1776     PetscCall(PetscMalloc1(3, &ilink->splitname));
1777     PetscCall(PetscSNPrintf(ilink->splitname, 2, "%" PetscInt_FMT, jac->nsplits));
1778   }
1779   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split great than 4 gets logged in the 4th split */
1780   PetscCall(PetscMalloc1(n, &ilink->fields));
1781   PetscCall(PetscArraycpy(ilink->fields, fields, n));
1782   PetscCall(PetscMalloc1(n, &ilink->fields_col));
1783   PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n));
1784 
1785   ilink->nfields = n;
1786   ilink->next    = NULL;
1787   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
1788   PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
1789   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
1790   PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
1791 
1792   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
1793   PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
1794 
1795   if (!next) {
1796     jac->head       = ilink;
1797     ilink->previous = NULL;
1798   } else {
1799     while (next->next) next = next->next;
1800     next->next      = ilink;
1801     ilink->previous = next;
1802   }
1803   jac->nsplits++;
1804   PetscFunctionReturn(PETSC_SUCCESS);
1805 }
1806 
1807 static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1808 {
1809   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1810 
1811   PetscFunctionBegin;
1812   *subksp = NULL;
1813   if (n) *n = 0;
1814   if (jac->type == PC_COMPOSITE_SCHUR) {
1815     PetscInt nn;
1816 
1817     PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()");
1818     PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits);
1819     nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 1 : 0);
1820     PetscCall(PetscMalloc1(nn, subksp));
1821     (*subksp)[0] = jac->head->ksp;
1822     (*subksp)[1] = jac->kspschur;
1823     if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper;
1824     if (n) *n = nn;
1825   }
1826   PetscFunctionReturn(PETSC_SUCCESS);
1827 }
1828 
/*
  PCFieldSplitGetSubKSP_FieldSplit_Schur - PCFieldSplitGetSubKSP() implementation for the
  Schur composite type: entry 0 is the KSP held inside the Schur complement matrix and
  entry 1 is the outer Schur complement solver.

  The output array is allocated here and must be freed by the caller with PetscFree().
*/
static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()");
  PetscCall(PetscMalloc1(jac->nsplits, subksp));
  /* Fills (*subksp)[0] with the solver stored in the Schur complement matrix */
  PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp));

  (*subksp)[1] = jac->kspschur;
  if (n) *n = jac->nsplits;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1842 
1843 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1844 {
1845   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1846   PetscInt          cnt   = 0;
1847   PC_FieldSplitLink ilink = jac->head;
1848 
1849   PetscFunctionBegin;
1850   PetscCall(PetscMalloc1(jac->nsplits, subksp));
1851   while (ilink) {
1852     (*subksp)[cnt++] = ilink->ksp;
1853     ilink            = ilink->next;
1854   }
1855   PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits);
1856   if (n) *n = jac->nsplits;
1857   PetscFunctionReturn(PETSC_SUCCESS);
1858 }
1859 
/*@C
  PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`.

  Collective

  Input Parameters:
+ pc  - the preconditioner context
- isy - the index set that defines the indices to which the fieldsplit is to be restricted

  Level: advanced

  Developer Notes:
  It seems the resulting `IS`s will not cover the entire space, so
  how can they define a convergent preconditioner? Needs explaining.

.seealso: [](sec_block_matrices), `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(isy, IS_CLASSID, 2);
  /* No-op when pc is not a PCFIELDSPLIT (PetscTryMethod) */
  PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1883 
/*
  PCFieldSplitRestrictIS_FieldSplit - Implementation of PCFieldSplitRestrictIS(): replaces the
  IS of every split by its embedding into isy (shifted to a global numbering relative to isy),
  and recursively restricts any nested PCFIELDSPLIT sub-preconditioners.
*/
static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;
  PetscInt          localsize, size, sizez, i;
  const PetscInt   *ind, *indz;
  PetscInt         *indc, *indcz;
  PetscBool         flg;

  PetscFunctionBegin;
  /* Exclusive prefix sum over the communicator: after the subtraction, `size` is the global
     offset of this rank's portion of isy */
  PetscCall(ISGetLocalSize(isy, &localsize));
  PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy)));
  size -= localsize;
  while (ilink) {
    IS isrl, isr;
    PC subpc;
    /* Positions of ilink->is within isy (local numbering) */
    PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl));
    PetscCall(ISGetLocalSize(isrl, &localsize));
    PetscCall(PetscMalloc1(localsize, &indc));
    PetscCall(ISGetIndices(isrl, &ind));
    PetscCall(PetscArraycpy(indc, ind, localsize));
    PetscCall(ISRestoreIndices(isrl, &ind));
    PetscCall(ISDestroy(&isrl));
    /* Shift the local embedded indices by this rank's global offset within isy */
    for (i = 0; i < localsize; i++) *(indc + i) += size;
    /* isr takes ownership of indc (PETSC_OWN_POINTER) */
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr));
    /* Reference before destroying the old IS so isr survives if the two happen to coincide;
       both ilink->is and ilink->is_col end up holding their own reference */
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is));
    ilink->is = isr;
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is_col));
    ilink->is_col = isr;
    /* Drop the creation reference; the two stored references remain */
    PetscCall(ISDestroy(&isr));
    PetscCall(KSPGetPC(ilink->ksp, &subpc));
    PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg));
    if (flg) {
      /* Nested fieldsplit: build the reverse embedding (isy into this split's new IS) and
         recurse so the inner splits are restricted consistently */
      IS       iszl, isz;
      MPI_Comm comm;
      PetscCall(ISGetLocalSize(ilink->is, &localsize));
      comm = PetscObjectComm((PetscObject)ilink->is);
      PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl));
      /* Global offset of this rank's portion of ilink->is (exclusive prefix sum again) */
      PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm));
      sizez -= localsize;
      PetscCall(ISGetLocalSize(iszl, &localsize));
      PetscCall(PetscMalloc1(localsize, &indcz));
      PetscCall(ISGetIndices(iszl, &indz));
      PetscCall(PetscArraycpy(indcz, indz, localsize));
      PetscCall(ISRestoreIndices(iszl, &indz));
      PetscCall(ISDestroy(&iszl));
      for (i = 0; i < localsize; i++) *(indcz + i) += sizez;
      PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz));
      PetscCall(PCFieldSplitRestrictIS(subpc, isz));
      PetscCall(ISDestroy(&isz));
    }
    next  = ilink->next;
    ilink = next;
  }
  jac->isrestrict = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1943 
/*
  PCFieldSplitSetIS_FieldSplit - Implementation of PCFieldSplitSetIS(); appends a new split,
  defined by an arbitrary index set, to the tail of the linked list of splits.

  The given IS is referenced (not copied), so the caller keeps ownership of its own reference.
  The new split gets its own KSP (default KSPPREONLY) with options prefix
  <pcprefix>fieldsplit_<splitname>_.
*/
static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink, next = jac->head;
  char              prefix[128];

  PetscFunctionBegin;
  if (jac->splitdefined) {
    PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(PetscNew(&ilink));
  if (splitname) {
    PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
  } else {
    /* Unnamed split: use the split number as its name */
    PetscCall(PetscMalloc1(8, &ilink->splitname));
    PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits));
  }
  ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split greater than 4 gets logged in the 4th split */
  /* Reference the incoming IS before destroying any previous one (ilink comes from PetscNew()
     so ilink->is is NULL here and the ISDestroy() is a no-op; the order is kept for safety) */
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is));
  ilink->is = is;
  /* The column IS is the same object; it holds its own reference */
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is_col));
  ilink->is_col = is;
  ilink->next   = NULL;
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
  PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
  PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));

  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
  PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));

  /* Append the new link at the tail of the split list */
  if (!next) {
    jac->head       = ilink;
    ilink->previous = NULL;
  } else {
    while (next->next) next = next->next;
    next->next      = ilink;
    ilink->previous = next;
  }
  jac->nsplits++;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1989 
1990 /*@C
1991   PCFieldSplitSetFields - Sets the fields that define one particular split in `PCFIELDSPLIT`
1992 
1993   Logically Collective
1994 
1995   Input Parameters:
1996 + pc         - the preconditioner context
1997 . splitname  - name of this split, if `NULL` the number of the split is used
1998 . n          - the number of fields in this split
1999 . fields     - the fields in this split
2000 - fields_col - generally the same as fields, if it does not match fields then the matrix block that is solved for this set of fields comes from an off-diagonal block
2001                  of the matrix and fields_col provides the column indices for that block
2002 
2003   Level: intermediate
2004 
2005   Notes:
2006   Use `PCFieldSplitSetIS()` to set a  general set of indices as a split.
2007 
2008   `PCFieldSplitSetFields()` is for defining fields as strided blocks. For example, if the block
2009   size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean
2010   0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x1x3x5x7.. x12x45x78x....
2011   where the numbered entries indicate what is in the split.
2012 
2013   This function is called once per split (it creates a new split each time).  Solve options
2014   for this split will be available under the prefix `-fieldsplit_SPLITNAME_`.
2015 
2016   `PCFieldSplitSetIS()` does not support having a fields_col different from fields
2017 
2018   Developer Notes:
2019   This routine does not actually create the `IS` representing the split, that is delayed
2020   until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be
2021   available when this routine is called.
2022 
2023 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()`
2024 @*/
2025 PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
2026 {
2027   PetscFunctionBegin;
2028   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2029   PetscAssertPointer(splitname, 2);
2030   PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname);
2031   PetscAssertPointer(fields, 4);
2032   PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col));
2033   PetscFunctionReturn(PETSC_SUCCESS);
2034 }
2035 
2036 /*@
2037   PCFieldSplitSetDiagUseAmat - set flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2038   the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.
2039 
2040   Logically Collective
2041 
2042   Input Parameters:
2043 + pc  - the preconditioner object
2044 - flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2045 
2046   Options Database Key:
2047 . -pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks
2048 
2049   Level: intermediate
2050 
2051 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT`
2052 @*/
2053 PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg)
2054 {
2055   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2056   PetscBool      isfs;
2057 
2058   PetscFunctionBegin;
2059   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2060   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2061   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2062   jac->diag_use_amat = flg;
2063   PetscFunctionReturn(PETSC_SUCCESS);
2064 }
2065 
2066 /*@
2067   PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2068   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.
2069 
2070   Logically Collective
2071 
2072   Input Parameter:
2073 . pc - the preconditioner object
2074 
2075   Output Parameter:
2076 . flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2077 
2078   Level: intermediate
2079 
2080 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT`
2081 @*/
2082 PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg)
2083 {
2084   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2085   PetscBool      isfs;
2086 
2087   PetscFunctionBegin;
2088   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2089   PetscAssertPointer(flg, 2);
2090   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2091   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2092   *flg = jac->diag_use_amat;
2093   PetscFunctionReturn(PETSC_SUCCESS);
2094 }
2095 
2096 /*@
2097   PCFieldSplitSetOffDiagUseAmat - set flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2098   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.
2099 
2100   Logically Collective
2101 
2102   Input Parameters:
2103 + pc  - the preconditioner object
2104 - flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2105 
2106   Options Database Key:
2107 . -pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks
2108 
2109   Level: intermediate
2110 
2111 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT`
2112 @*/
2113 PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg)
2114 {
2115   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2116   PetscBool      isfs;
2117 
2118   PetscFunctionBegin;
2119   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2120   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2121   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2122   jac->offdiag_use_amat = flg;
2123   PetscFunctionReturn(PETSC_SUCCESS);
2124 }
2125 
2126 /*@
2127   PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2128   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.
2129 
2130   Logically Collective
2131 
2132   Input Parameter:
2133 . pc - the preconditioner object
2134 
2135   Output Parameter:
2136 . flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2137 
2138   Level: intermediate
2139 
2140 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT`
2141 @*/
2142 PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg)
2143 {
2144   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2145   PetscBool      isfs;
2146 
2147   PetscFunctionBegin;
2148   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2149   PetscAssertPointer(flg, 2);
2150   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2151   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2152   *flg = jac->offdiag_use_amat;
2153   PetscFunctionReturn(PETSC_SUCCESS);
2154 }
2155 
/*@C
  PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT`

  Logically Collective

  Input Parameters:
+ pc        - the preconditioner context
. splitname - name of this split, if `NULL` the number of the split is used
- is        - the index set that defines the elements in this split

  Level: intermediate

  Notes:
  Use `PCFieldSplitSetFields()`, for splits defined by strided types.

  This function is called once per split (it creates a new split each time).  Solve options
  for this split will be available under the prefix -fieldsplit_SPLITNAME_.

  The `PC` takes its own reference to `is`, so the caller may destroy its reference afterwards.

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`
@*/
PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* splitname may be NULL; the split number is then used as its name */
  if (splitname) PetscAssertPointer(splitname, 2);
  PetscValidHeaderSpecific(is, IS_CLASSID, 3);
  /* No-op when pc is not a PCFIELDSPLIT (PetscTryMethod) */
  PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2185 
2186 /*@C
2187   PCFieldSplitGetIS - Retrieves the elements for a split as an `IS`
2188 
2189   Logically Collective
2190 
2191   Input Parameters:
2192 + pc        - the preconditioner context
2193 - splitname - name of this split
2194 
2195   Output Parameter:
2196 . is - the index set that defines the elements in this split, or `NULL` if the split is not found
2197 
2198   Level: intermediate
2199 
2200 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`
2201 @*/
2202 PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is)
2203 {
2204   PetscFunctionBegin;
2205   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2206   PetscAssertPointer(splitname, 2);
2207   PetscAssertPointer(is, 3);
2208   {
2209     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2210     PC_FieldSplitLink ilink = jac->head;
2211     PetscBool         found;
2212 
2213     *is = NULL;
2214     while (ilink) {
2215       PetscCall(PetscStrcmp(ilink->splitname, splitname, &found));
2216       if (found) {
2217         *is = ilink->is;
2218         break;
2219       }
2220       ilink = ilink->next;
2221     }
2222   }
2223   PetscFunctionReturn(PETSC_SUCCESS);
2224 }
2225 
2226 /*@C
2227   PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS`
2228 
2229   Logically Collective
2230 
2231   Input Parameters:
2232 + pc    - the preconditioner context
2233 - index - index of this split
2234 
2235   Output Parameter:
2236 . is - the index set that defines the elements in this split
2237 
2238   Level: intermediate
2239 
2240 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`
2241 @*/
2242 PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is)
2243 {
2244   PetscFunctionBegin;
2245   PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index);
2246   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2247   PetscAssertPointer(is, 3);
2248   {
2249     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2250     PC_FieldSplitLink ilink = jac->head;
2251     PetscInt          i     = 0;
2252     PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits);
2253 
2254     while (i < index) {
2255       ilink = ilink->next;
2256       ++i;
2257     }
2258     PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is));
2259   }
2260   PetscFunctionReturn(PETSC_SUCCESS);
2261 }
2262 
2263 /*@
2264   PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the
2265   fieldsplit preconditioner when calling `PCFieldSplitSetIS()`. If not set the matrix block size is used.
2266 
2267   Logically Collective
2268 
2269   Input Parameters:
2270 + pc - the preconditioner context
2271 - bs - the block size
2272 
2273   Level: intermediate
2274 
2275 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
2276 @*/
2277 PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs)
2278 {
2279   PetscFunctionBegin;
2280   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2281   PetscValidLogicalCollectiveInt(pc, bs, 2);
2282   PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs));
2283   PetscFunctionReturn(PETSC_SUCCESS);
2284 }
2285 
/*@C
  PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits

  Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ n      - the number of splits
- subksp - the array of `KSP` contexts

  Level: advanced

  Notes:
  After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
  (not the `KSP`, just the array that contains them).

  You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`.

  If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the
  Schur complement and the `KSP` object used to iterate over the Schur complement.
  To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`.

  If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the
  inner linear system defined by the matrix H in each loop.

  Fortran Notes:
  You must pass in a `KSP` array that is large enough to contain all the `KSP`s.
  You can call `PCFieldSplitGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
  `KSP` array must be.

  Developer Notes:
  There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`

  The Fortran interface should be modernized to return directly the array of values.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()`
@*/
PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2);
  /* PetscUseMethod (not Try): raises an error if pc provides no GetSubKSP implementation */
  PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2333 
/*@C
  PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT`

  Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ n      - the number of splits
- subksp - the array of `KSP` contexts

  Level: advanced

  Notes:
  After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
  (not the `KSP` just the array that contains them).

  You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`.

  If the fieldsplit type is of type `PC_COMPOSITE_SCHUR`, it returns (in order)
+  1  - the `KSP` used for the (1,1) block
.  2  - the `KSP` used for the Schur complement (not the one used for the interior Schur solver)
-  3  - the `KSP` used for the (1,1) block in the upper triangular factor (if different from that of the (1,1) block).

  It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`.

  Fortran Notes:
  You must pass in a `KSP` array that is large enough to contain all the local `KSP`s.
  You can call `PCFieldSplitSchurGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
  `KSP` array must be.

  Developer Notes:
  There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`

  Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged?

  The Fortran interface should be modernized to return directly the array of values.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()`
@*/
PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2);
  /* PetscUseMethod (not Try): raises an error if pc provides no SchurGetSubKSP implementation */
  PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2383 
2384 /*@
2385   PCFieldSplitSetSchurPre -  Indicates from what operator the preconditioner is constructucted for the Schur complement.
2386   The default is the A11 matrix.
2387 
2388   Collective
2389 
2390   Input Parameters:
2391 + pc    - the preconditioner context
2392 . ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default),
2393               `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`,
2394               `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL`
2395 - pre   - matrix to use for preconditioning, or `NULL`
2396 
2397   Options Database Keys:
2398 + -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is a11. See notes for meaning of various arguments
2399 - -fieldsplit_1_pc_type <pctype>                               - the preconditioner algorithm that is used to construct the preconditioner from the operator
2400 
2401   Level: intermediate
2402 
2403   Notes:
2404   If ptype is
2405 +     a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner
2406   matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix
2407 .     self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix:
2408   The only preconditioner that currently works with this symbolic representation matrix object is the `PCLSC`
2409   preconditioner
2410 .     user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument
2411   to this function).
2412 .     selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation Sp = A11 - A10 inv(diag(A00)) A01
2413   This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be
2414   lumped before extracting the diagonal using the additional option -fieldsplit_1_mat_schur_complement_ainv_type lump
2415 -     full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation
2416   computed internally by `PCFIELDSPLIT` (this is expensive)
2417   useful mostly as a test that the Schur complement approach can work for your problem
2418 
2419   When solving a saddle point problem, where the A11 block is identically zero, using a11 as the ptype only makes sense
2420   with the additional option -fieldsplit_1_pc_type none. Usually for saddle point problems one would use a ptype of self and
2421   -fieldsplit_1_pc_type lsc which uses the least squares commutator to compute a preconditioner for the Schur complement.
2422 
2423 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`,
2424           `MatSchurComplementSetAinvType()`, `PCLSC`,
2425 
2426 @*/
2427 PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2428 {
2429   PetscFunctionBegin;
2430   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2431   PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre));
2432   PetscFunctionReturn(PETSC_SUCCESS);
2433 }
2434 
/* Deprecated alias retained for backward compatibility; use PCFieldSplitSetSchurPre() instead */
PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  return PCFieldSplitSetSchurPre(pc, ptype, pre);
} /* Deprecated name */
2439 
2440 /*@
2441   PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be
2442   preconditioned.  See `PCFieldSplitSetSchurPre()` for details.
2443 
2444   Logically Collective
2445 
2446   Input Parameter:
2447 . pc - the preconditioner context
2448 
2449   Output Parameters:
2450 + ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_PRE_USER`
2451 - pre   - matrix to use for preconditioning (with `PC_FIELDSPLIT_PRE_USER`), or NULL
2452 
2453   Level: intermediate
2454 
2455 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC`,
2456 
2457 @*/
2458 PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
2459 {
2460   PetscFunctionBegin;
2461   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2462   PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre));
2463   PetscFunctionReturn(PETSC_SUCCESS);
2464 }
2465 
2466 /*@
2467   PCFieldSplitSchurGetS -  extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately
2468 
2469   Not Collective
2470 
2471   Input Parameter:
2472 . pc - the preconditioner context
2473 
2474   Output Parameter:
2475 . S - the Schur complement matrix
2476 
2477   Level: advanced
2478 
2479   Note:
2480   This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`.
2481 
2482 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`,
2483           `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()`
2484 @*/
2485 PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S)
2486 {
2487   const char    *t;
2488   PetscBool      isfs;
2489   PC_FieldSplit *jac;
2490 
2491   PetscFunctionBegin;
2492   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2493   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2494   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2495   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2496   jac = (PC_FieldSplit *)pc->data;
2497   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2498   if (S) *S = jac->schur;
2499   PetscFunctionReturn(PETSC_SUCCESS);
2500 }
2501 
2502 /*@
2503   PCFieldSplitSchurRestoreS -  returns the `MATSCHURCOMPLEMENT` matrix used by this `PC`
2504 
2505   Not Collective
2506 
2507   Input Parameters:
2508 + pc - the preconditioner context
2509 - S  - the Schur complement matrix
2510 
2511   Level: advanced
2512 
2513 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MatSchurComplement`, `PCFieldSplitSchurGetS()`
2514 @*/
2515 PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S)
2516 {
2517   const char    *t;
2518   PetscBool      isfs;
2519   PC_FieldSplit *jac;
2520 
2521   PetscFunctionBegin;
2522   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2523   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2524   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2525   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2526   jac = (PC_FieldSplit *)pc->data;
2527   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2528   PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten");
2529   PetscFunctionReturn(PETSC_SUCCESS);
2530 }
2531 
2532 static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2533 {
2534   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2535 
2536   PetscFunctionBegin;
2537   jac->schurpre = ptype;
2538   if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) {
2539     PetscCall(MatDestroy(&jac->schur_user));
2540     jac->schur_user = pre;
2541     PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
2542   }
2543   PetscFunctionReturn(PETSC_SUCCESS);
2544 }
2545 
/* Implementation of PCFieldSplitGetSchurPre(): returns the stored Schur preconditioning
   strategy and user matrix; callers must pass non-NULL ptype and pre */
static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  *ptype = jac->schurpre;
  *pre   = jac->schur_user;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2555 
2556 /*@
2557   PCFieldSplitSetSchurFactType -  sets which blocks of the approximate block factorization to retain in the preconditioner
2558 
2559   Collective
2560 
2561   Input Parameters:
2562 + pc    - the preconditioner context
2563 - ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default
2564 
2565   Options Database Key:
2566 . -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is full
2567 
2568   Level: intermediate
2569 
2570   Notes:
2571   The FULL factorization is
2572 
2573 .vb
2574    (A   B)  = (1       0) (A   0) (1  Ainv*B)  = L D U
2575    (C   E)    (C*Ainv  1) (0   S) (0       1)
2576 .vb
2577     where S = E - C*Ainv*B. In practice, the full factorization is applied via block triangular solves with the grouping L*(D*U). UPPER uses D*U, LOWER uses L*D,
2578     and DIAG is the diagonal part with the sign of S flipped (because this makes the preconditioner positive definite for many formulations, thus allowing the use of `KSPMINRES)`.
2579     Sign flipping of S can be turned off with `PCFieldSplitSetSchurScale()`.
2580 
2581     If A and S are solved exactly
2582 .vb
2583       *) FULL factorization is a direct solver.
2584       *) The preconditioned operator with LOWER or UPPER has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations.
2585       *) With DIAG, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations.
2586 .ve
2587 
2588   If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner
2589   application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice.
2590 
2591   For symmetric problems in which A is positive definite and S is negative definite, DIAG can be used with `KSPMINRES`.
2592 
2593   A flexible method like `KSPFGMRES` or `KSPGCR` must be used if the fieldsplit preconditioner is nonlinear (e.g. a few iterations of a Krylov method is used to solve with A or S).
2594 
2595   References:
2596 +   * - Murphy, Golub, and Wathen, A note on preconditioning indefinite linear systems, SIAM J. Sci. Comput., 21 (2000).
2597 -   * - Ipsen, A note on preconditioning nonsymmetric matrices, SIAM J. Sci. Comput., 23 (2001).
2598 
2599 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`
2600 @*/
2601 PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype)
2602 {
2603   PetscFunctionBegin;
2604   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2605   PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype));
2606   PetscFunctionReturn(PETSC_SUCCESS);
2607 }
2608 
/* Implementation of PCFieldSplitSetSchurFactType(): stores the factorization choice in the context */
static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurfactorization = ftype;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2617 
2618 /*@
2619   PCFieldSplitSetSchurScale -  Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`.
2620 
2621   Collective
2622 
2623   Input Parameters:
2624 + pc    - the preconditioner context
2625 - scale - scaling factor for the Schur complement
2626 
2627   Options Database Key:
2628 . -pc_fieldsplit_schur_scale - default is -1.0
2629 
2630   Level: intermediate
2631 
2632 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetSchurFactType()`
2633 @*/
2634 PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale)
2635 {
2636   PetscFunctionBegin;
2637   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2638   PetscValidLogicalCollectiveScalar(pc, scale, 2);
2639   PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale));
2640   PetscFunctionReturn(PETSC_SUCCESS);
2641 }
2642 
/* Implementation of PCFieldSplitSetSchurScale(): stores the Schur scaling factor in the context */
static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurscale = scale;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2651 
/*@C
  PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement

  Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ A00 - the (0,0) block
. A01 - the (0,1) block
. A10 - the (1,0) block
- A11 - the (1,1) block

  Level: advanced

  Note:
  NOTE(review): the blocks are read directly from the `PCFIELDSPLIT` context (jac->pmat, jac->B,
  jac->C), which presumably are only populated after `PCSetUp()` -- confirm before relying on
  this prior to setup.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()`
@*/
PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach.");
  /* Each output is optional; only filled when a non-NULL pointer is supplied */
  if (A00) *A00 = jac->pmat[0];
  if (A01) *A01 = jac->B;
  if (A10) *A10 = jac->C;
  if (A11) *A11 = jac->pmat[1];
  PetscFunctionReturn(PETSC_SUCCESS);
}
2683 
2684 /*@
2685   PCFieldSplitSetGKBTol -  Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner in `PCFIELDSPLIT`
2686 
2687   Collective
2688 
2689   Input Parameters:
2690 + pc        - the preconditioner context
2691 - tolerance - the solver tolerance
2692 
2693   Options Database Key:
2694 . -pc_fieldsplit_gkb_tol - default is 1e-5
2695 
2696   Level: intermediate
2697 
2698   Note:
2699   The generalized GKB algorithm uses a lower bound estimate of the error in energy norm as stopping criterion.
2700   It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than
2701   this estimate, the stopping criterion is satisfactory in practical cases [A13].
2702 
2703   References:
2704   [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013.
2705 
2706 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()`
2707 @*/
2708 PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance)
2709 {
2710   PetscFunctionBegin;
2711   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2712   PetscValidLogicalCollectiveReal(pc, tolerance, 2);
2713   PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance));
2714   PetscFunctionReturn(PETSC_SUCCESS);
2715 }
2716 
/* Implementation of PCFieldSplitSetGKBTol(): stores the GKB tolerance in the context */
static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbtol = tolerance;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2725 
2726 /*@
2727   PCFieldSplitSetGKBMaxit -  Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner in `PCFIELDSPLIT`
2728 
2729   Collective
2730 
2731   Input Parameters:
2732 + pc    - the preconditioner context
2733 - maxit - the maximum number of iterations
2734 
2735   Options Database Key:
2736 . -pc_fieldsplit_gkb_maxit - default is 100
2737 
2738   Level: intermediate
2739 
2740 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()`
2741 @*/
2742 PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit)
2743 {
2744   PetscFunctionBegin;
2745   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2746   PetscValidLogicalCollectiveInt(pc, maxit, 2);
2747   PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit));
2748   PetscFunctionReturn(PETSC_SUCCESS);
2749 }
2750 
/* Implementation of PCFieldSplitSetGKBMaxit(): stores the GKB iteration limit in the context */
static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbmaxit = maxit;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2759 
2760 /*@
2761   PCFieldSplitSetGKBDelay -  Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization in `PCFIELDSPLIT`
2762   preconditioner.
2763 
2764   Collective
2765 
2766   Input Parameters:
2767 + pc    - the preconditioner context
2768 - delay - the delay window in the lower bound estimate
2769 
2770   Options Database Key:
2771 . -pc_fieldsplit_gkb_delay - default is 5
2772 
2773   Level: intermediate
2774 
2775   Note:
2776   The algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. The lower bound of the error ||u-u^k||_H
2777   is expressed as a truncated sum. The error at iteration k can only be measured at iteration (k + delay), and thus the algorithm needs
2778   at least (delay + 1) iterations to stop. For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to
2779 
2780   References:
2781   [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013.
2782 
2783 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2784 @*/
2785 PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay)
2786 {
2787   PetscFunctionBegin;
2788   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2789   PetscValidLogicalCollectiveInt(pc, delay, 2);
2790   PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay));
2791   PetscFunctionReturn(PETSC_SUCCESS);
2792 }
2793 
/* Implementation of PCFieldSplitSetGKBDelay(): stores the error-estimate delay window in the context */
static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbdelay = delay;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2802 
2803 /*@
2804   PCFieldSplitSetGKBNu -  Sets the scalar value nu >= 0 in the transformation H = A00 + nu*A01*A01' of the (1,1) block in the Golub-Kahan bidiagonalization preconditioner
2805   in `PCFIELDSPLIT`
2806 
2807   Collective
2808 
2809   Input Parameters:
2810 + pc - the preconditioner context
2811 - nu - the shift parameter
2812 
2813   Options Database Key:
2814 . -pc_fieldsplit_gkb_nu - default is 1
2815 
2816   Level: intermediate
2817 
2818   Notes:
2819   This shift is in general done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by choosing nu sufficiently big. However,
2820   if nu is chosen too big, the matrix H might be badly conditioned and the solution of the linear system Hx = b in the inner loop becomes difficult. It is therefore
2821   necessary to find a good balance in between the convergence of the inner and outer loop.
2822 
2823   For nu = 0, no shift is done. In this case A00 has to be positive definite. The matrix N in [Ar13] is then chosen as identity.
2824 
2825   References:
2826   [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013.
2827 
2828 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2829 @*/
2830 PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu)
2831 {
2832   PetscFunctionBegin;
2833   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2834   PetscValidLogicalCollectiveReal(pc, nu, 2);
2835   PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu));
2836   PetscFunctionReturn(PETSC_SUCCESS);
2837 }
2838 
/* Implementation of PCFieldSplitSetGKBNu(): stores the GKB shift parameter in the context */
static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbnu = nu;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2847 
2848 static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type)
2849 {
2850   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2851 
2852   PetscFunctionBegin;
2853   jac->type = type;
2854   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
2855   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
2856   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
2857   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
2858   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
2859   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
2860   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
2861   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
2862   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
2863 
2864   if (type == PC_COMPOSITE_SCHUR) {
2865     pc->ops->apply          = PCApply_FieldSplit_Schur;
2866     pc->ops->applytranspose = PCApplyTranspose_FieldSplit_Schur;
2867     pc->ops->view           = PCView_FieldSplit_Schur;
2868 
2869     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur));
2870     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit));
2871     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit));
2872     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit));
2873     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit));
2874   } else if (type == PC_COMPOSITE_GKB) {
2875     pc->ops->apply = PCApply_FieldSplit_GKB;
2876     pc->ops->view  = PCView_FieldSplit_GKB;
2877 
2878     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
2879     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit));
2880     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit));
2881     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit));
2882     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit));
2883   } else {
2884     pc->ops->apply = PCApply_FieldSplit;
2885     pc->ops->view  = PCView_FieldSplit;
2886 
2887     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
2888   }
2889   PetscFunctionReturn(PETSC_SUCCESS);
2890 }
2891 
/* Implementation of PCFieldSplitSetBlockSize(): records bs; the block size may only be set once
   (re-setting to the same value is allowed, changing it is an error) */
static PetscErrorCode PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" PetscInt_FMT, bs);
  PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs);
  jac->bs = bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2902 
2903 static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
2904 {
2905   PC_FieldSplit    *jac           = (PC_FieldSplit *)pc->data;
2906   PC_FieldSplitLink ilink_current = jac->head;
2907   IS                is_owned;
2908 
2909   PetscFunctionBegin;
2910   jac->coordinates_set = PETSC_TRUE; // Internal flag
2911   PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, NULL));
2912 
2913   while (ilink_current) {
2914     // For each IS, embed it to get local coords indces
2915     IS              is_coords;
2916     PetscInt        ndofs_block;
2917     const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block
2918 
2919     // Setting drop to true for safety. It should make no difference.
2920     PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords));
2921     PetscCall(ISGetLocalSize(is_coords, &ndofs_block));
2922     PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration));
2923 
2924     // Allocate coordinates vector and set it directly
2925     PetscCall(PetscMalloc1(ndofs_block * dim, &(ilink_current->coords)));
2926     for (PetscInt dof = 0; dof < ndofs_block; ++dof) {
2927       for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d];
2928     }
2929     ilink_current->dim   = dim;
2930     ilink_current->ndofs = ndofs_block;
2931     PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration));
2932     PetscCall(ISDestroy(&is_coords));
2933     ilink_current = ilink_current->next;
2934   }
2935   PetscCall(ISDestroy(&is_owned));
2936   PetscFunctionReturn(PETSC_SUCCESS);
2937 }
2938 
2939 /*@
2940   PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
2941 
2942   Collective
2943 
2944   Input Parameters:
2945 + pc   - the preconditioner context
2946 - type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
2947 
2948   Options Database Key:
2949 . -pc_fieldsplit_type <type: one of multiplicative, additive, symmetric_multiplicative, special, schur> - Sets fieldsplit preconditioner type
2950 
2951   Level: intermediate
2952 
2953 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
2954           `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
2955 @*/
2956 PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type)
2957 {
2958   PetscFunctionBegin;
2959   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2960   PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type));
2961   PetscFunctionReturn(PETSC_SUCCESS);
2962 }
2963 
2964 /*@
2965   PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
2966 
2967   Not collective
2968 
2969   Input Parameter:
2970 . pc - the preconditioner context
2971 
2972   Output Parameter:
2973 . type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
2974 
2975   Level: intermediate
2976 
2977 .seealso: [](sec_block_matrices), `PC`, `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
2978           `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
2979 @*/
2980 PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type)
2981 {
2982   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2983 
2984   PetscFunctionBegin;
2985   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2986   PetscAssertPointer(type, 2);
2987   *type = jac->type;
2988   PetscFunctionReturn(PETSC_SUCCESS);
2989 }
2990 
2991 /*@
2992   PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
2993 
2994   Logically Collective
2995 
2996   Input Parameters:
2997 + pc  - the preconditioner context
2998 - flg - boolean indicating whether to use field splits defined by the `DM`
2999 
3000   Options Database Key:
3001 . -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM`
3002 
3003   Level: intermediate
3004 
3005 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `PCFieldSplitSetFields()`, `PCFieldsplitSetIS()`
3006 @*/
3007 PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg)
3008 {
3009   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3010   PetscBool      isfs;
3011 
3012   PetscFunctionBegin;
3013   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3014   PetscValidLogicalCollectiveBool(pc, flg, 2);
3015   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3016   if (isfs) jac->dm_splits = flg;
3017   PetscFunctionReturn(PETSC_SUCCESS);
3018 }
3019 
3020 /*@
3021   PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3022 
3023   Logically Collective
3024 
3025   Input Parameter:
3026 . pc - the preconditioner context
3027 
3028   Output Parameter:
3029 . flg - boolean indicating whether to use field splits defined by the `DM`
3030 
3031   Level: intermediate
3032 
3033 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `PCFieldSplitSetFields()`, `PCFieldsplitSetIS()`
3034 @*/
3035 PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg)
3036 {
3037   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3038   PetscBool      isfs;
3039 
3040   PetscFunctionBegin;
3041   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3042   PetscAssertPointer(flg, 2);
3043   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3044   if (isfs) {
3045     if (flg) *flg = jac->dm_splits;
3046   }
3047   PetscFunctionReturn(PETSC_SUCCESS);
3048 }
3049 
3050 /*@
3051   PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3052 
3053   Logically Collective
3054 
3055   Input Parameter:
3056 . pc - the preconditioner context
3057 
3058   Output Parameter:
3059 . flg - boolean indicating whether to detect fields or not
3060 
3061   Level: intermediate
3062 
3063 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()`
3064 @*/
3065 PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg)
3066 {
3067   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3068 
3069   PetscFunctionBegin;
3070   *flg = jac->detect;
3071   PetscFunctionReturn(PETSC_SUCCESS);
3072 }
3073 
3074 /*@
3075   PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3076 
3077   Logically Collective
3078 
3079   Input Parameter:
3080 . pc - the preconditioner context
3081 
3082   Output Parameter:
3083 . flg - boolean indicating whether to detect fields or not
3084 
3085   Options Database Key:
3086 . -pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point
3087 
3088   Level: intermediate
3089 
3090   Note:
3091   Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`).
3092 
3093 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`
3094 @*/
3095 PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg)
3096 {
3097   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3098 
3099   PetscFunctionBegin;
3100   jac->detect = flg;
3101   if (jac->detect) {
3102     PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
3103     PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
3104   }
3105   PetscFunctionReturn(PETSC_SUCCESS);
3106 }
3107 
3108 /*MC
3109    PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual
3110    collections of variables (that may overlap) called splits. See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details.
3111 
3112    Options Database Keys:
3113 +   -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split
3114 .   -pc_fieldsplit_default - automatically add any fields to additional splits that have not
3115                               been supplied explicitly by `-pc_fieldsplit_%d_fields`
3116 .   -pc_fieldsplit_block_size <bs> - size of block that defines fields (i.e. there are bs fields)
3117 .   -pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting
3118 .   -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is a11; see `PCFieldSplitSetSchurPre()`
3119 .   -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - set factorization type when using `-pc_fieldsplit_type schur`; see `PCFieldSplitSetSchurFactType()`
3120 -   -pc_fieldsplit_detect_saddle_point - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver
3121 
3122      Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` .
3123      The options prefix for the inner solver when using the Golub-Kahan bidiagonalization preconditioner is `-fieldsplit_0_`
3124      For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields.
3125 
3126      To set options on the solvers for each block append `-fieldsplit_` to all the `PC`
3127      options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1`
3128 
3129      To set the options on the solvers separate for each block call `PCFieldSplitGetSubKSP()`
3130       and set the options directly on the resulting `KSP` object
3131 
3132     Level: intermediate
3133 
3134    Notes:
3135     Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries and `PCFieldSplitSetIS()`
3136      to define a split by an arbitrary collection of entries.
3137 
3138       If no splits are set the default is used. The splits are defined by entries strided by bs,
3139       beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`,
3140       if this is not called the block size defaults to the blocksize of the second matrix passed
3141       to `KSPSetOperators()`/`PCSetOperators()`.
3142 
3143       For the Schur complement preconditioner if
3144 
3145       ```{math}
3146       J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right]
3147       ```
3148 
3149       the preconditioner using `full` factorization is logically
3150       ```{math}
3151       \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right]
3152       ```
3153      where the action of $\text{inv}(A_{00})$ is applied using the KSP solver with prefix `-fieldsplit_0_`.  $S$ is the Schur complement
3154      ```{math}
3155      S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01}
3156      ```
3157      which is usually dense and not stored explicitly.  The action of $\text{ksp}(S)$ is computed using the KSP solver with prefix `-fieldsplit_splitname_` (where `splitname` was given
3158      in providing the SECOND split or 1 if not given). For `PCFieldSplitGetSubKSP()` when field number is 0,
3159      it returns the KSP associated with `-fieldsplit_0_` while field number 1 gives `-fieldsplit_1_` KSP. By default
3160      $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$.
3161 
3162      The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above,
3163      `diag` gives
3164       ```{math}
3165       \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\  0 & -\text{ksp}(S) \end{array}\right]
3166       ```
3167      Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$  so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip
3168      can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of
3169       ```{math}
3170       \left[\begin{array}{cc} A_{00} & 0 \\  A_{10} & S \end{array}\right]
3171       ```
3172      where the inverses of $A_{00}$ and $S$ are applied using `KSP`s. The `upper` factorization is the inverse of
3173       ```{math}
3174       \left[\begin{array}{cc} A_{00} & A_{01} \\  0 & S \end{array}\right]
3175       ```
3176      where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s.
3177 
3178      If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS`
3179      is used automatically for a second block.
3180 
3181      The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1.
3182      Generally it should be used with the `MATAIJ` format.
3183 
3184      The forms of these preconditioners are closely related if not identical to forms derived as "Distributive Iterations", see,
3185      for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`Wesseling2009`.
3186      One can also use `PCFIELDSPLIT`
3187      inside a smoother resulting in "Distributive Smoothers".
3188 
3189      See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`.
3190 
3191      The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the
3192      residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables.
3193 
3194      The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape
3195      ```{math}
3196      \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right]
3197      ```
3198      with $A_{00}$ positive semi-definite. The implementation follows {cite}`Arioli2013`. Therein, we choose $N := 1/\nu * I$ and the $(1,1)$-block of the matrix is modified to $H = A_{00} + \nu A_{01} A_{01}'$.
3199      A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`.
3200 
3201    Developer Note:
3202    The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their
3203    user API.
3204 
3205      References:
3206      ```{bibliography}
3207      :filter: docname in docnames
3208      ```
3209 
3210 .seealso: [](sec_block_matrices), `PC`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`,
3211           `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`,
3212           `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`,
3213           `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()`
3214 M*/
3215 
3216 PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc)
3217 {
3218   PC_FieldSplit *jac;
3219 
3220   PetscFunctionBegin;
3221   PetscCall(PetscNew(&jac));
3222 
3223   jac->bs                 = -1;
3224   jac->nsplits            = 0;
3225   jac->type               = PC_COMPOSITE_MULTIPLICATIVE;
3226   jac->schurpre           = PC_FIELDSPLIT_SCHUR_PRE_USER; /* Try user preconditioner first, fall back on diagonal */
3227   jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL;
3228   jac->schurscale         = -1.0;
3229   jac->dm_splits          = PETSC_TRUE;
3230   jac->detect             = PETSC_FALSE;
3231   jac->gkbtol             = 1e-5;
3232   jac->gkbdelay           = 5;
3233   jac->gkbnu              = 1;
3234   jac->gkbmaxit           = 100;
3235   jac->gkbmonitor         = PETSC_FALSE;
3236   jac->coordinates_set    = PETSC_FALSE;
3237 
3238   pc->data = (void *)jac;
3239 
3240   pc->ops->apply           = PCApply_FieldSplit;
3241   pc->ops->applytranspose  = PCApplyTranspose_FieldSplit;
3242   pc->ops->setup           = PCSetUp_FieldSplit;
3243   pc->ops->reset           = PCReset_FieldSplit;
3244   pc->ops->destroy         = PCDestroy_FieldSplit;
3245   pc->ops->setfromoptions  = PCSetFromOptions_FieldSplit;
3246   pc->ops->view            = PCView_FieldSplit;
3247   pc->ops->applyrichardson = NULL;
3248 
3249   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit));
3250   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
3251   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit));
3252   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit));
3253   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit));
3254   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit));
3255   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit));
3256   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit));
3257   PetscFunctionReturn(PETSC_SUCCESS);
3258 }
3259