xref: /petsc/src/ksp/pc/impls/fieldsplit/fieldsplit.c (revision 98d129c30f3ee9fdddc40fdbc5a989b7be64f888)
1 #include <petsc/private/pcimpl.h>  /*I "petscpc.h" I*/
2 #include <petsc/private/kspimpl.h> /*  This is needed to provide the appropriate PETSC_EXTERN for KSP_Solve_FS ....*/
3 #include <petscdm.h>
4 
/* String names for the Schur preconditioner-source and factorization enums; the
   trailing entries (enum type name, option prefix, NULL) follow the PETSc
   convention for enum tables consumed by PetscOptionsEnum()-style processing */
const char *const PCFieldSplitSchurPreTypes[]  = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL};
const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL};

/* Logging events for the inner solves performed by the Schur and GKB variants */
PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4;
9 
/* One node in the doubly-linked list of splits (PC_FieldSplit.head points at the first) */
typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
struct _PC_FieldSplitLink {
  KSP               ksp;                 /* solver used for this split's diagonal block */
  Vec               x, y, z;             /* work vectors for this split */
  char             *splitname;           /* name the split was registered under; used to build options prefixes */
  PetscInt          nfields;             /* number of fields composing this split */
  PetscInt         *fields, *fields_col; /* row/column field indices (offsets within the block size) defining this split */
  VecScatter        sctx;                /* presumably scatters between the global vectors and this split's subvectors -- confirm against PCApply */
  IS                is, is_col;          /* row and column index sets selecting this split's entries */
  PC_FieldSplitLink next, previous;      /* list links */
  PetscLogEvent     event;               /* logging event for this split's solve */

  /* Used only when setting coordinates with PCSetCoordinates */
  PetscInt   dim;   /* spatial dimension of the coordinates */
  PetscInt   ndofs; /* number of dofs with coordinates in this split */
  PetscReal *coords; /* coordinate array; assumed ndofs*dim entries -- TODO confirm layout */
};
27 
/* Private data for the PCFIELDSPLIT preconditioner */
typedef struct {
  PCCompositeType type;         /* How the split solves are composed (additive, multiplicative, Schur, GKB, ...) */
  PetscBool       defaultsplit; /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */
  PetscBool       splitdefined; /* Flag is set after the splits have been defined, to prevent more splits from being added */
  PetscInt        bs;           /* Block size for IS and Mat structures */
  PetscInt        nsplits;      /* Number of field divisions defined */
  Vec            *x, *y, w1, w2; /* Per-split work vectors (x, y) and whole-system scratch vectors (w1, w2) */
  Mat            *mat;    /* The diagonal block for each split */
  Mat            *pmat;   /* The preconditioning diagonal block for each split */
  Mat            *Afield; /* The rows of the matrix associated with each split */
  PetscBool       issetup; /* Set once PCSetUp_FieldSplit() has built the per-split data */

  /* Only used when Schur complement preconditioning is used */
  Mat                       B;          /* The (0,1) block */
  Mat                       C;          /* The (1,0) block */
  Mat                       schur;      /* The Schur complement S = A11 - A10 A00^{-1} A01, the KSP here, kspinner, is H_1 in [El08] */
  Mat                       schurp;     /* Assembled approximation to S built by MatSchurComplement to be used as a preconditioning matrix when solving with S */
  Mat                       schur_user; /* User-provided preconditioning matrix for the Schur complement */
  PCFieldSplitSchurPreType  schurpre;   /* Determines which preconditioning matrix is used for the Schur complement */
  PCFieldSplitSchurFactType schurfactorization; /* Which block factorization (DIAG/LOWER/UPPER/FULL) is applied */
  KSP                       kspschur;   /* The solver for S */
  KSP                       kspupper;   /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */
  PetscScalar               schurscale; /* Scaling factor for the Schur complement solution with DIAG factorization */

  /* Only used when Golub-Kahan bidiagonalization preconditioning is used */
  Mat          H;           /* The modified matrix H = A00 + nu*A01*A01'              */
  PetscReal    gkbtol;      /* Stopping tolerance for lower bound estimate            */
  PetscInt     gkbdelay;    /* The delay window for the stopping criterion            */
  PetscReal    gkbnu;       /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */
  PetscInt     gkbmaxit;    /* Maximum number of iterations for outer loop            */
  PetscBool    gkbmonitor;  /* Monitor for gkb iterations and the lower bound error   */
  PetscViewer  gkbviewer;   /* Viewer context for gkbmonitor                          */
  Vec          u, v, d, Hu; /* Work vectors for the GKB algorithm                     */
  PetscScalar *vecz;        /* Contains intermediate values, eg for lower bound       */

  PC_FieldSplitLink head;             /* First entry in the linked list of splits */
  PetscBool         isrestrict;       /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */
  PetscBool         suboptionsset;    /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */
  PetscBool         dm_splits;        /* Whether to use DMCreateFieldDecomposition() whenever possible */
  PetscBool         diag_use_amat;    /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         detect;           /* Whether to form 2-way split by finding zero diagonal entries */
  PetscBool         coordinates_set;  /* Whether PCSetCoordinates has been called */
} PC_FieldSplit;
72 
/*
    Note:
    There is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit instead of
    inside PC_FieldSplitLink; it is just historical. If you want to be able to add new fields after already using the
    PC, you could change this.
*/
79 
80 /* This helper is so that setting a user-provided preconditioning matrix is orthogonal to choosing to use it.  This way the
81 * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices. */
82 static Mat FieldSplitSchurPre(PC_FieldSplit *jac)
83 {
84   switch (jac->schurpre) {
85   case PC_FIELDSPLIT_SCHUR_PRE_SELF:
86     return jac->schur;
87   case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
88     return jac->schurp;
89   case PC_FIELDSPLIT_SCHUR_PRE_A11:
90     return jac->pmat[1];
91   case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */
92   case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */
93   default:
94     return jac->schur_user ? jac->schur_user : jac->pmat[1];
95   }
96 }
97 
98 #include <petscdraw.h>
/* Viewer for PCFIELDSPLIT with additive/multiplicative composition: on an
   ASCII viewer prints the composition type, block size, Amat/Pmat usage flags,
   and each split's fields (or "Defined by IS") followed by its KSP; on a draw
   viewer renders each split's KSP side by side, centered on the current point. */
static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for each split is in the following KSP objects:\n"));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        /* Disable tabbing so the comma-separated field list stays on one line */
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      PetscCall(KSPView(ilink->ksp, viewer));
      ilink = ilink->next;
    }
  }

  if (isdraw) {
    /* NOTE(review): ilink is advanced by the ASCII loop above; this branch
       relies on a viewer never reporting both ASCII and draw types */
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* Center a row of nsplits sub-drawings of width wd around the current x */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
156 
/* Viewer for PCFIELDSPLIT with Schur-complement composition: reports the
   factorization type, the source of the Schur preconditioning matrix, the
   fields of each split, and the KSPs for A00, the (optional distinct) upper
   A00, and S; on a draw viewer renders the same solvers side by side. */
static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer)
{
  PC_FieldSplit             *jac = (PC_FieldSplit *)pc->data;
  PetscBool                  iascii, isdraw;
  PetscInt                   i, j;
  PC_FieldSplitLink          ilink = jac->head;
  MatSchurComplementAinvType atype;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    /* Report where the preconditioning matrix for the Schur complement comes from
       (mirrors the selection logic in FieldSplitSchurPre()) */
    switch (jac->schurpre) {
    case PC_FIELDSPLIT_SCHUR_PRE_SELF:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from S itself\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
      if (jac->schur) {
        PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype));
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's "))));
      }
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_A11:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_FULL:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from the exact Schur complement\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_USER:
      /* USER falls back to A11 when no user matrix was actually supplied */
      if (jac->schur_user) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from user provided matrix\n"));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      }
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre);
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Split info:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        /* Disable tabbing so the comma-separated field list stays on one line */
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      ilink = ilink->next;
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->head) {
      PetscCall(KSPView(jac->head->ksp, viewer));
    } else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    PetscCall(PetscViewerASCIIPopTab(viewer));
    /* Only show the upper-factor A00 solver when it differs from the main A00 solver */
    if (jac->head && jac->kspupper != jac->head->ksp) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->kspschur) {
      PetscCall(KSPView(jac->kspschur, viewer));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    }
    PetscCall(PetscViewerASCIIPopTab(viewer));
    PetscCall(PetscViewerASCIIPopTab(viewer));
  } else if (isdraw && jac->head) {
    PetscDraw draw;
    PetscReal x, y, w, wd, h;
    PetscInt  cnt = 2; /* drawings to place: A00 solver + Schur solver (+1 below for a distinct upper solver) */
    char      str[32];

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (jac->kspupper != jac->head->ksp) cnt++;
    /* Center a row of cnt sub-drawings of width wd around the current x */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (cnt + 1);

    PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    /* Same USER-without-matrix fallback to A11 as in the ASCII branch */
    if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11]));
    } else {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre]));
    }
    PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    x = x - wd * (cnt - 1) / 2.0;

    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->head->ksp, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
    if (jac->kspupper != jac->head->ksp) {
      x += wd;
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(jac->kspupper, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
    }
    x += wd;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->kspschur, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
280 
/* Viewer for PCFIELDSPLIT with Golub-Kahan bidiagonalization composition:
   prints the GKB parameters and the solver for the augmented operator
   H = A00 + nu*A01*A01' (only split 0's KSP is shown in ASCII mode);
   on a draw viewer renders every split's KSP side by side. */
static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));

    PetscCall(PetscViewerASCIIPrintf(viewer, "  Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for H = A00 + nu*A01*A01' matrix:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));

    /* Only the first split (the one whose KSP solves with H) is described here */
    if (ilink->fields) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields "));
      /* Disable tabbing so the comma-separated field list stays on one line */
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      for (j = 0; j < ilink->nfields; j++) {
        if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
        PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
      }
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n"));
    }
    PetscCall(KSPView(ilink->ksp, viewer));

    PetscCall(PetscViewerASCIIPopTab(viewer));
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* Center a row of nsplits sub-drawings of width wd around the current x */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
341 
342 /* Precondition: jac->bs is set to a meaningful value or MATNEST */
343 static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc)
344 {
345   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
346   PetscInt       bs, i, nfields, *ifields, nfields_col, *ifields_col;
347   PetscBool      flg, flg_col, mnest;
348   char           optionname[128], splitname[8], optionname_col[128];
349 
350   PetscFunctionBegin;
351   PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &mnest));
352   if (mnest) {
353     PetscCall(MatNestGetSize(pc->pmat, &bs, NULL));
354   } else {
355     bs = jac->bs;
356   }
357   PetscCall(PetscMalloc2(bs, &ifields, bs, &ifields_col));
358   for (i = 0, flg = PETSC_TRUE;; i++) {
359     PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
360     PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
361     PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i));
362     nfields     = bs;
363     nfields_col = bs;
364     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
365     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col));
366     if (!flg) break;
367     else if (flg && !flg_col) {
368       PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
369       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields));
370     } else {
371       PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
372       PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match");
373       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col));
374     }
375   }
376   if (i > 0) {
377     /* Makes command-line setting of splits take precedence over setting them in code.
378        Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would
379        create new splits, which would probably not be what the user wanted. */
380     jac->splitdefined = PETSC_TRUE;
381   }
382   PetscCall(PetscFree2(ifields, ifields_col));
383   PetscFunctionReturn(PETSC_SUCCESS);
384 }
385 
/* Fill in any splits the user has not defined explicitly.  Sources, in order
   of preference: the DM's field decomposition, zero-diagonal detection
   (-pc_fieldsplit_detect_coupling off, jac->detect on), off-block-diagonal
   coupling detection, runtime -pc_fieldsplit_<i>_fields options, or the
   default one-split-per-field of the block size.  If exactly one split was
   given, its complement becomes the second.  On exit at least two splits
   must exist. */
static PetscErrorCode PCFieldSplitSetDefaults(PC pc)
{
  PC_FieldSplit    *jac                = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink              = jac->head;
  PetscBool         fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE;
  PetscInt          i;

  PetscFunctionBegin;
  /*
   Kinda messy, but at least this now uses DMCreateFieldDecomposition().
   Should probably be rewritten.
   */
  if (!ilink) {
    PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL));
    if (pc->dm && jac->dm_splits && !jac->detect && !coupling) {
      /* Use the DM to define the splits */
      PetscInt  numFields, f, i, j;
      char    **fieldNames;
      IS       *fields;
      DM       *dms;
      DM        subdm[128];
      PetscBool flg;

      PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms));
      /* Allow the user to prescribe the splits */
      for (i = 0, flg = PETSC_TRUE;; i++) {
        PetscInt ifields[128];
        IS       compField;
        char     optionname[128], splitname[8];
        PetscInt nfields = numFields;

        PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
        PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
        if (!flg) break;
        /* NOTE(review): this bounds numFields (the size written into ifields[]),
           but subdm[] is indexed by the option counter i, which is only bounded
           by the number of options the user supplies -- verify i < 128 holds */
        PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields);
        PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i]));
        if (nfields == 1) {
          /* A single-field split keeps the DM's field name */
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField));
        } else {
          /* A multi-field split is named after its index */
          PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
          PetscCall(PCFieldSplitSetIS(pc, splitname, compField));
        }
        PetscCall(ISDestroy(&compField));
        /* The consumed fields are no longer needed individually */
        for (j = 0; j < nfields; ++j) {
          f = ifields[j];
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      }
      if (i == 0) {
        /* No options given: one split per DM field */
        for (f = 0; f < numFields; ++f) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f]));
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      } else {
        /* Options defined i combined splits: replace the per-field DMs with the sub-DMs */
        for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j));
        PetscCall(PetscFree(dms));
        PetscCall(PetscMalloc1(i, &dms));
        for (j = 0; j < i; ++j) dms[j] = subdm[j];
      }
      PetscCall(PetscFree(fieldNames));
      PetscCall(PetscFree(fields));
      if (dms) {
        PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n"));
        /* Attach each split's DM to its KSP, inheriting the KSP's options prefix
           and the outer DM's compute-operators callback */
        for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) {
          const char *prefix;
          PetscCall(PetscObjectGetOptionsPrefix((PetscObject)ilink->ksp, &prefix));
          PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dms[i], prefix));
          PetscCall(KSPSetDM(ilink->ksp, dms[i]));
          PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE));
          {
            PetscErrorCode (*func)(KSP, Mat, Mat, void *);
            void *ctx;

            PetscCall(DMKSPGetComputeOperators(pc->dm, &func, &ctx));
            PetscCall(DMKSPSetComputeOperators(dms[i], func, ctx));
          }
          PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0));
          PetscCall(DMDestroy(&dms[i]));
        }
        PetscCall(PetscFree(dms));
      }
    } else {
      /* No DM-based decomposition: fall back to block-size/detection/options logic */
      if (jac->bs <= 0) {
        if (pc->pmat) {
          PetscCall(MatGetBlockSize(pc->pmat, &jac->bs));
        } else jac->bs = 1;
      }

      if (jac->detect) {
        /* Two-way split: zero-diagonal rows vs the rest */
        IS       zerodiags, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->diag_use_amat) {
          PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags));
        } else {
          PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags));
        }
        PetscCall(ISComplement(zerodiags, nmin, nmax, &rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags));
        PetscCall(ISDestroy(&zerodiags));
        PetscCall(ISDestroy(&rest));
      } else if (coupling) {
        /* Two-way split: entries off the block diagonal vs all rows.
           NOTE(review): this IS shadows the PetscBool 'coupling' from the
           enclosing scope */
        IS       coupling, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->offdiag_use_amat) {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling));
        } else {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling));
        }
        PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest));
        PetscCall(ISSetIdentity(rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", coupling));
        PetscCall(ISDestroy(&coupling));
        PetscCall(ISDestroy(&rest));
      } else {
        PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL));
        if (!fieldsplit_default) {
          /* Allow user to set fields from command line,  if bs was known at the time of PCSetFromOptions_FieldSplit()
           then it is set there. This is not ideal because we should only have options set in XXSetFromOptions(). */
          PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
          if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
        }
        if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) {
          /* Default: one split per field (bs splits, or one per nest block for MATNEST) */
          Mat       M = pc->pmat;
          PetscBool isnest;
          PetscInt  nf;

          PetscCall(PetscInfo(pc, "Using default splitting of fields\n"));
          PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest));
          if (!isnest) {
            M = pc->mat;
            PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest));
          }
          if (!isnest) nf = jac->bs;
          else PetscCall(MatNestGetSize(M, &nf, NULL));
          for (i = 0; i < nf; i++) {
            char splitname[8];

            PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
            PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i));
          }
          jac->defaultsplit = PETSC_TRUE;
        }
      }
    }
  } else if (jac->nsplits == 1) {
    /* Exactly one split supplied: its complement becomes split "1" */
    IS       is2;
    PetscInt nmin, nmax;

    PetscCheck(ilink->is, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()");
    PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
    PetscCall(ISComplement(ilink->is, nmin, nmax, &is2));
    PetscCall(PCFieldSplitSetIS(pc, "1", is2));
    PetscCall(ISDestroy(&is2));
  }

  PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits);
  PetscFunctionReturn(PETSC_SUCCESS);
}
551 
552 static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu)
553 {
554   Mat       BT, T;
555   PetscReal nrmT, nrmB;
556 
557   PetscFunctionBegin;
558   PetscCall(MatHermitianTranspose(C, MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */
559   PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN));
560   PetscCall(MatNorm(T, NORM_1, &nrmT));
561   PetscCall(MatNorm(B, NORM_1, &nrmB));
562   PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/hermitian, GKB is not applicable.");
563 
564   /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */
565   /* setting N := 1/nu*I in [Ar13].                                                 */
566   PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT));
567   PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_DEFAULT, H)); /* H = A01*A01'          */
568   PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN));        /* H = A00 + nu*A01*A01' */
569 
570   PetscCall(MatDestroy(&BT));
571   PetscCall(MatDestroy(&T));
572   PetscFunctionReturn(PETSC_SUCCESS);
573 }
574 
575 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *option[], const char *value[], PetscBool *flg);
576 
577 static PetscErrorCode PCSetUp_FieldSplit(PC pc)
578 {
579   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
580   PC_FieldSplitLink ilink;
581   PetscInt          i, nsplit;
582   PetscBool         sorted, sorted_col, matnest = PETSC_FALSE;
583 
584   PetscFunctionBegin;
585   pc->failedreason = PC_NOERROR;
586   PetscCall(PCFieldSplitSetDefaults(pc));
587   nsplit = jac->nsplits;
588   ilink  = jac->head;
589   if (pc->pmat) PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
590 
591   /* get the matrices for each split */
592   if (!jac->issetup) {
593     PetscInt rstart, rend, nslots, bs;
594 
595     jac->issetup = PETSC_TRUE;
596 
597     /* This is done here instead of in PCFieldSplitSetFields() because may not have matrix at that point */
598     if (jac->defaultsplit || !ilink->is) {
599       if (jac->bs <= 0) jac->bs = nsplit;
600     }
601 
602     /*  MatCreateSubMatrix() for [S]BAIJ matrices can only work if the indices include entire blocks of the matrix */
603     PetscCall(MatGetBlockSize(pc->pmat, &bs));
604     if (bs > 1 && (jac->bs <= bs || jac->bs % bs)) {
605       PetscBool blk;
606 
607       PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &blk, MATBAIJ, MATSBAIJ, MATSEQBAIJ, MATSEQSBAIJ, MATMPIBAIJ, MATMPISBAIJ, NULL));
608       PetscCheck(!blk, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Cannot use MATBAIJ with PCFIELDSPLIT and currently set matrix and PC blocksizes");
609     }
610 
611     if (!matnest) { /* use the matrix blocksize and stride IS to determine the index sets that define the submatrices */
612       bs = jac->bs;
613       PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend));
614       nslots = (rend - rstart) / bs;
615       for (i = 0; i < nsplit; i++) {
616         if (jac->defaultsplit) {
617           PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is));
618           PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
619         } else if (!ilink->is) {
620           if (ilink->nfields > 1) {
621             PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col;
622 
623             PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii));
624             PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj));
625             for (j = 0; j < nslots; j++) {
626               for (k = 0; k < nfields; k++) {
627                 ii[nfields * j + k] = rstart + bs * j + fields[k];
628                 jj[nfields * j + k] = rstart + bs * j + fields_col[k];
629               }
630             }
631             PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is));
632             PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col));
633             PetscCall(ISSetBlockSize(ilink->is, nfields));
634             PetscCall(ISSetBlockSize(ilink->is_col, nfields));
635           } else {
636             PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is));
637             PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col));
638           }
639         }
640         PetscCall(ISSorted(ilink->is, &sorted));
641         if (ilink->is_col) PetscCall(ISSorted(ilink->is_col, &sorted_col));
642         PetscCheck(sorted && sorted_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Fields must be sorted when creating split");
643         ilink = ilink->next;
644       }
645     } else { /* use the IS that define the MATNEST to determine the index sets that define the submatrices */
646       IS      *rowis, *colis, *ises = NULL;
647       PetscInt mis, nis;
648 
649       PetscCall(MatNestGetSize(pc->pmat, &mis, &nis));
650       PetscCall(PetscMalloc2(mis, &rowis, nis, &colis));
651       PetscCall(MatNestGetISs(pc->pmat, rowis, colis));
652       if (!jac->defaultsplit) PetscCall(PetscMalloc1(mis, &ises));
653 
654       for (i = 0; i < nsplit; i++) {
655         if (jac->defaultsplit) {
656           PetscCall(ISDuplicate(rowis[i], &ilink->is));
657           PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
658         } else if (!ilink->is) {
659           if (ilink->nfields > 1) {
660             for (PetscInt j = 0; j < ilink->nfields; j++) ises[j] = rowis[ilink->fields[j]];
661             PetscCall(ISConcatenate(PetscObjectComm((PetscObject)pc), ilink->nfields, ises, &ilink->is));
662           } else {
663             PetscCall(ISDuplicate(rowis[ilink->fields[0]], &ilink->is));
664           }
665           PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
666         }
667         ilink = ilink->next;
668       }
669       PetscCall(PetscFree2(rowis, colis));
670       PetscCall(PetscFree(ises));
671     }
672   }
673 
674   ilink = jac->head;
675   if (!jac->pmat) {
676     Vec xtmp;
677 
678     PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL));
679     PetscCall(PetscMalloc1(nsplit, &jac->pmat));
680     PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y));
681     for (i = 0; i < nsplit; i++) {
682       MatNullSpace sp;
683 
684       /* Check for preconditioning matrix attached to IS */
685       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i]));
686       if (jac->pmat[i]) {
687         PetscCall(PetscObjectReference((PetscObject)jac->pmat[i]));
688         if (jac->type == PC_COMPOSITE_SCHUR) {
689           jac->schur_user = jac->pmat[i];
690 
691           PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
692         }
693       } else {
694         const char *prefix;
695         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i]));
696         PetscCall(MatGetOptionsPrefix(jac->pmat[i], &prefix));
697         if (!prefix) {
698           PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix));
699           PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix));
700         }
701         PetscCall(MatSetFromOptions(jac->pmat[i]));
702         PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view"));
703       }
704       /* create work vectors for each split */
705       PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i]));
706       ilink->x = jac->x[i];
707       ilink->y = jac->y[i];
708       ilink->z = NULL;
709       /* compute scatter contexts needed by multiplicative versions and non-default splits */
710       PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx));
711       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp));
712       if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp));
713       ilink = ilink->next;
714     }
715     PetscCall(VecDestroy(&xtmp));
716   } else {
717     MatReuse      scall;
718     MatNullSpace *nullsp = NULL;
719 
720     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
721       PetscCall(MatGetNullSpaces(nsplit, jac->pmat, &nullsp));
722       for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i]));
723       scall = MAT_INITIAL_MATRIX;
724     } else scall = MAT_REUSE_MATRIX;
725 
726     for (i = 0; i < nsplit; i++) {
727       Mat pmat;
728 
729       /* Check for preconditioning matrix attached to IS */
730       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat));
731       if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i]));
732       ilink = ilink->next;
733     }
734     if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->pmat, &nullsp));
735   }
736   if (jac->diag_use_amat) {
737     ilink = jac->head;
738     if (!jac->mat) {
739       PetscCall(PetscMalloc1(nsplit, &jac->mat));
740       for (i = 0; i < nsplit; i++) {
741         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i]));
742         ilink = ilink->next;
743       }
744     } else {
745       MatReuse      scall;
746       MatNullSpace *nullsp = NULL;
747 
748       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
749         PetscCall(MatGetNullSpaces(nsplit, jac->mat, &nullsp));
750         for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i]));
751         scall = MAT_INITIAL_MATRIX;
752       } else scall = MAT_REUSE_MATRIX;
753 
754       for (i = 0; i < nsplit; i++) {
755         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i]));
756         ilink = ilink->next;
757       }
758       if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->mat, &nullsp));
759     }
760   } else {
761     jac->mat = jac->pmat;
762   }
763 
764   /* Check for null space attached to IS */
765   ilink = jac->head;
766   for (i = 0; i < nsplit; i++) {
767     MatNullSpace sp;
768 
769     PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp));
770     if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp));
771     ilink = ilink->next;
772   }
773 
774   if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) {
775     /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */
776     /* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? */
777     ilink = jac->head;
778     if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) {
779       /* special case need where Afield[0] is not needed and only certain columns of Afield[1] are needed since update is only on those rows of the solution */
780       if (!jac->Afield) {
781         PetscCall(PetscCalloc1(nsplit, &jac->Afield));
782         if (jac->offdiag_use_amat) {
783           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
784         } else {
785           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
786         }
787       } else {
788         MatReuse scall;
789 
790         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
791           PetscCall(MatDestroy(&jac->Afield[1]));
792           scall = MAT_INITIAL_MATRIX;
793         } else scall = MAT_REUSE_MATRIX;
794 
795         if (jac->offdiag_use_amat) {
796           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
797         } else {
798           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
799         }
800       }
801     } else {
802       if (!jac->Afield) {
803         PetscCall(PetscMalloc1(nsplit, &jac->Afield));
804         for (i = 0; i < nsplit; i++) {
805           if (jac->offdiag_use_amat) {
806             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
807           } else {
808             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
809           }
810           ilink = ilink->next;
811         }
812       } else {
813         MatReuse scall;
814         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
815           for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i]));
816           scall = MAT_INITIAL_MATRIX;
817         } else scall = MAT_REUSE_MATRIX;
818 
819         for (i = 0; i < nsplit; i++) {
820           if (jac->offdiag_use_amat) {
821             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i]));
822           } else {
823             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, &jac->Afield[i]));
824           }
825           ilink = ilink->next;
826         }
827       }
828     }
829   }
830 
831   if (jac->type == PC_COMPOSITE_SCHUR) {
832     IS          ccis;
833     PetscBool   isset, isspd;
834     PetscInt    rstart, rend;
835     char        lscname[256];
836     PetscObject LSC_L;
837     PetscBool   set, flg;
838 
839     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields");
840 
841     /* If pc->mat is SPD, don't scale by -1 the Schur complement */
842     if (jac->schurscale == (PetscScalar)-1.0) {
843       PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd));
844       jac->schurscale = (isset && isspd) ? 1.0 : -1.0;
845     }
846 
847     /* When extracting off-diagonal submatrices, we take complements from this range */
848     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
849     PetscCall(PetscObjectTypeCompareAny(jac->offdiag_use_amat ? (PetscObject)pc->mat : (PetscObject)pc->pmat, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
850 
851     if (jac->schur) {
852       KSP      kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper;
853       MatReuse scall;
854 
855       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
856         scall = MAT_INITIAL_MATRIX;
857         PetscCall(MatDestroy(&jac->B));
858         PetscCall(MatDestroy(&jac->C));
859       } else scall = MAT_REUSE_MATRIX;
860 
861       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
862       ilink = jac->head;
863       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
864       if (jac->offdiag_use_amat) {
865         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->B));
866       } else {
867         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->B));
868       }
869       PetscCall(ISDestroy(&ccis));
870       if (!flg) {
871         ilink = ilink->next;
872         PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
873         if (jac->offdiag_use_amat) {
874           PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->C));
875         } else {
876           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->C));
877         }
878         PetscCall(ISDestroy(&ccis));
879       } else {
880         PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &set, &flg));
881         if (set && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
882         else PetscCall(MatCreateTranspose(jac->B, &jac->C));
883       }
884       PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
885       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) {
886         PetscCall(MatDestroy(&jac->schurp));
887         PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
888       }
889       if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0]));
890       if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0]));
891       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
892     } else {
893       const char  *Dprefix;
894       char         schurprefix[256], schurmatprefix[256];
895       char         schurtestoption[256];
896       MatNullSpace sp;
897       KSP          kspt;
898 
899       /* extract the A01 and A10 matrices */
900       ilink = jac->head;
901       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
902       if (jac->offdiag_use_amat) {
903         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
904       } else {
905         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
906       }
907       PetscCall(ISDestroy(&ccis));
908       ilink = ilink->next;
909       if (!flg) {
910         PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
911         if (jac->offdiag_use_amat) {
912           PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
913         } else {
914           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
915         }
916         PetscCall(ISDestroy(&ccis));
917       } else {
918         PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &set, &flg));
919         if (set && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
920         else PetscCall(MatCreateTranspose(jac->B, &jac->C));
921       }
922       /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */
923       PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur));
924       PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT));
925       PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
926       PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
927       PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix));
928       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt));
929       PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix));
930 
931       /* Note: this is not true in general */
932       PetscCall(MatGetNullSpace(jac->mat[1], &sp));
933       if (sp) PetscCall(MatSetNullSpace(jac->schur, sp));
934 
935       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname));
936       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
937       if (flg) {
938         DM  dmInner;
939         KSP kspInner;
940         PC  pcInner;
941 
942         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
943         PetscCall(KSPReset(kspInner));
944         PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0]));
945         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
946         /* Indent this deeper to emphasize the "inner" nature of this solver. */
947         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2));
948         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2));
949         PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix));
950 
951         /* Set DM for new solver */
952         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
953         PetscCall(KSPSetDM(kspInner, dmInner));
954         PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE));
955 
956         /* Defaults to PCKSP as preconditioner */
957         PetscCall(KSPGetPC(kspInner, &pcInner));
958         PetscCall(PCSetType(pcInner, PCKSP));
959         PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp));
960       } else {
961         /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or
962           * PCFieldSplitSetIS_FieldSplit. We don't want KSPPREONLY because it makes the Schur complement inexact,
963           * preventing Schur complement reduction to be an accurate solve. Usually when an iterative solver is used for
964           * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make
965           * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used
966           * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. */
967         PetscCall(KSPSetType(jac->head->ksp, KSPGMRES));
968         PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp));
969       }
970       PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0]));
971       PetscCall(KSPSetFromOptions(jac->head->ksp));
972       PetscCall(MatSetFromOptions(jac->schur));
973 
974       PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg));
975       if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */
976         KSP kspInner;
977         PC  pcInner;
978 
979         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
980         PetscCall(KSPGetPC(kspInner, &pcInner));
981         PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg));
982         if (flg) {
983           KSP ksp;
984 
985           PetscCall(PCKSPGetKSP(pcInner, &ksp));
986           if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE));
987         }
988       }
989       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname));
990       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
991       if (flg) {
992         DM dmInner;
993 
994         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
995         PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper));
996         PetscCall(KSPSetNestLevel(jac->kspupper, pc->kspnestlevel));
997         PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure));
998         PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix));
999         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1));
1000         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1));
1001         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
1002         PetscCall(KSPSetDM(jac->kspupper, dmInner));
1003         PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE));
1004         PetscCall(KSPSetFromOptions(jac->kspupper));
1005         PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0]));
1006         PetscCall(VecDuplicate(jac->head->x, &jac->head->z));
1007       } else {
1008         jac->kspupper = jac->head->ksp;
1009         PetscCall(PetscObjectReference((PetscObject)jac->head->ksp));
1010       }
1011 
1012       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
1013       PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur));
1014       PetscCall(KSPSetNestLevel(jac->kspschur, pc->kspnestlevel));
1015       PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure));
1016       PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1));
1017       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) {
1018         PC pcschur;
1019         PetscCall(KSPGetPC(jac->kspschur, &pcschur));
1020         PetscCall(PCSetType(pcschur, PCNONE));
1021         /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */
1022       } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) {
1023         PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
1024       }
1025       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
1026       PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix));
1027       PetscCall(KSPSetOptionsPrefix(jac->kspschur, Dprefix));
1028       /* propagate DM */
1029       {
1030         DM sdm;
1031         PetscCall(KSPGetDM(jac->head->next->ksp, &sdm));
1032         if (sdm) {
1033           PetscCall(KSPSetDM(jac->kspschur, sdm));
1034           PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE));
1035         }
1036       }
1037       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
1038       /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */
1039       PetscCall(KSPSetFromOptions(jac->kspschur));
1040     }
1041     PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY));
1042     PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY));
1043 
1044     /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */
1045     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname));
1046     PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
1047     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
1048     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", (PetscObject)LSC_L));
1049     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname));
1050     PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
1051     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
1052     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", (PetscObject)LSC_L));
1053   } else if (jac->type == PC_COMPOSITE_GKB) {
1054     IS       ccis;
1055     PetscInt rstart, rend;
1056 
1057     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields");
1058 
1059     ilink = jac->head;
1060 
1061     /* When extracting off-diagonal submatrices, we take complements from this range */
1062     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
1063 
1064     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1065     if (jac->offdiag_use_amat) {
1066       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1067     } else {
1068       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1069     }
1070     PetscCall(ISDestroy(&ccis));
1071     /* Create work vectors for GKB algorithm */
1072     PetscCall(VecDuplicate(ilink->x, &jac->u));
1073     PetscCall(VecDuplicate(ilink->x, &jac->Hu));
1074     PetscCall(VecDuplicate(ilink->x, &jac->w2));
1075     ilink = ilink->next;
1076     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1077     if (jac->offdiag_use_amat) {
1078       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1079     } else {
1080       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1081     }
1082     PetscCall(ISDestroy(&ccis));
1083     /* Create work vectors for GKB algorithm */
1084     PetscCall(VecDuplicate(ilink->x, &jac->v));
1085     PetscCall(VecDuplicate(ilink->x, &jac->d));
1086     PetscCall(VecDuplicate(ilink->x, &jac->w1));
1087     PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu));
1088     PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz));
1089 
1090     ilink = jac->head;
1091     PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H));
1092     if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1093     /* Create gkb_monitor context */
1094     if (jac->gkbmonitor) {
1095       PetscInt tablevel;
1096       PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer));
1097       PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII));
1098       PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel));
1099       PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel));
1100       PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1));
1101     }
1102   } else {
1103     /* set up the individual splits' PCs */
1104     i     = 0;
1105     ilink = jac->head;
1106     while (ilink) {
1107       PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i]));
1108       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
1109       if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1110       i++;
1111       ilink = ilink->next;
1112     }
1113   }
1114 
1115   /* Set coordinates to the sub PC objects whenever these are set */
1116   if (jac->coordinates_set) {
1117     PC pc_coords;
1118     if (jac->type == PC_COMPOSITE_SCHUR) {
1119       // Head is first block.
1120       PetscCall(KSPGetPC(jac->head->ksp, &pc_coords));
1121       PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords));
1122       // Second one is Schur block, but its KSP object is in kspschur.
1123       PetscCall(KSPGetPC(jac->kspschur, &pc_coords));
1124       PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords));
1125     } else if (jac->type == PC_COMPOSITE_GKB) {
1126       PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB Fieldpslit preconditioner\n"));
1127     } else {
1128       ilink = jac->head;
1129       while (ilink) {
1130         PetscCall(KSPGetPC(ilink->ksp, &pc_coords));
1131         PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords));
1132         ilink = ilink->next;
1133       }
1134     }
1135   }
1136 
1137   jac->suboptionsset = PETSC_TRUE;
1138   PetscFunctionReturn(PETSC_SUCCESS);
1139 }
1140 
/* Restrict the global vector xx onto this split, solve with the split's KSP (bracketed by
   the split's log event), and scatter-ADD the subsolution back into the global vector yy.
   The calls are chained with || so evaluation short-circuits at the first nonzero
   PetscErrorCode, which the macro then yields. NOTE: the expansion references a variable
   `pc` from the surrounding scope (used by KSPCheckSolve), so this macro may only be used
   inside PC methods where `pc` is defined. */
#define FieldSplitSplitSolveAdd(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \
                    KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE)))
1145 
1146 static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y)
1147 {
1148   PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
1149   PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
1150   KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;
1151 
1152   PetscFunctionBegin;
1153   switch (jac->schurfactorization) {
1154   case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
1155     /* [A00 0; 0 -S], positive definite, suitable for MINRES */
1156     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1157     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1158     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1159     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1160     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1161     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1162     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1163     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1164     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1165     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1166     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1167     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1168     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1169     PetscCall(VecScale(ilinkD->y, jac->schurscale));
1170     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1171     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1172     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1173     break;
1174   case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
1175     /* [A00 0; A10 S], suitable for left preconditioning */
1176     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1177     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1178     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1179     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1180     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1181     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1182     PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1183     PetscCall(VecScale(ilinkD->x, -1.));
1184     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1185     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1186     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1187     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1188     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1189     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1190     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1191     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1192     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1193     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1194     break;
1195   case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
1196     /* [A00 A01; 0 S], suitable for right preconditioning */
1197     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1198     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1199     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1200     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1201     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1202     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1203     PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1204     PetscCall(VecScale(ilinkA->x, -1.));
1205     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1206     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1207     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1208     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1209     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1210     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1211     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1212     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1213     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1214     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1215     break;
1216   case PC_FIELDSPLIT_SCHUR_FACT_FULL:
1217     /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */
1218     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1219     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1220     PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1221     PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y));
1222     PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y));
1223     PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1224     PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1225     PetscCall(VecScale(ilinkD->x, -1.0));
1226     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1227     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1228 
1229     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1230     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1231     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1232     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1233     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1234 
1235     if (kspUpper == kspA) {
1236       PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y));
1237       PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
1238       PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1239       PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1240       PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1241       PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1242     } else {
1243       PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1244       PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1245       PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1246       PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1247       PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1248       PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z));
1249       PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z));
1250       PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1251       PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
1252     }
1253     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1254     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1255     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1256   }
1257   PetscFunctionReturn(PETSC_SUCCESS);
1258 }
1259 
/*
  PCApplyTranspose_FieldSplit_Schur - Applies the transpose of the Schur-complement
  fieldsplit preconditioner, y = M^{-T} x.

  Compared with the forward apply, taking the transpose swaps the roles of the
  off-diagonal blocks B (= A01) and C (= A10): here the UPPER case multiplies by
  B^T and the LOWER case by C^T, the reverse of the forward routine.  All KSP
  solves use KSPSolveTranspose().

  The VecScatterBegin/VecScatterEnd pairs are deliberately interleaved with the
  solves so that scatter communication overlaps computation; do not reorder them.
*/
static PetscErrorCode PCApplyTranspose_FieldSplit_Schur(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; /* A = 00-block split, D = 11-block (Schur) split */
  KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;

  PetscFunctionBegin;
  switch (jac->schurfactorization) {
  case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
    /* [A00 0; 0 -S], positive definite, suitable for MINRES */
    /* Block diagonal is symmetric in structure: same scatter pattern as the forward apply,
       but with transposed solves on each diagonal block. */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* jac->schurscale carries the -1 (or user-set scale) of the -S block */
    PetscCall(VecScale(ilinkD->y, jac->schurscale));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* Transpose of [A00 A01; 0 S]: solve the A00 block first, then propagate -B^T to the Schur block */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* Transpose of [A00 0; A10 S]: solve the Schur block first, then propagate -C^T to the A00 block */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
    PetscCall(VecScale(ilinkA->x, -1.));
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* Transpose of the full factorization: apply U^T, then the (block-diagonal)^T, then L^T */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspUpper, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.0));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

    /* NOTE(review): kspLower is initialized to kspA above, so this branch is always taken;
       the else branch appears unreachable and is kept for symmetry with PCApply_FieldSplit_Schur
       (which distinguishes kspUpper from kspA) — confirm whether a separate lower KSP was intended. */
    if (kspLower == kspA) {
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->y));
      PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    } else {
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
      PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(KSPSolveTranspose(kspLower, ilinkA->x, ilinkA->z));
      PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->z));
      PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
    }
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1370 
/*
  PCApply_FieldSplit - Applies the fieldsplit preconditioner y = M^{-1} x for the
  non-Schur, non-GKB composite types.

  Three paths:
  - PC_COMPOSITE_ADDITIVE: independent (block-Jacobi-like) solves on each split.
    When the splits are the default strided fields of a non-MATNEST matrix, the
    splits are extracted with a single VecStrideGatherAll; otherwise each split
    scatters its piece individually via FieldSplitSplitSolveAdd.
  - PC_COMPOSITE_MULTIPLICATIVE with exactly 2 splits: hand-unrolled Gauss-Seidel
    sweep using jac->Afield[1] for the coupling residual.
  - PC_COMPOSITE_MULTIPLICATIVE / SYMMETRIC_MULTIPLICATIVE (general): forward sweep
    over the linked list, plus a backward sweep for the symmetric variant.
*/
static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          cnt, bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    PetscBool matnest;

    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
    if (jac->defaultsplit && !matnest) {
      /* All splits are interlaced strided fields; gather them all at once */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* General splits: each split scatters, solves, and adds its contribution into y */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) {
    PetscCall(VecSet(y, 0.0));
    /* solve on first block for first block variables */
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));

    /* compute the residual only onto second block variables using first block variables */
    PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x));
    ilink = ilink->next;
    /* ilink->x currently holds A10*y0; form x1 - A10*y0 by negating then adding the scattered rhs */
    PetscCall(VecScale(ilink->x, -1.0));
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));

    /* solve on second block variables */
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
    if (!jac->w1) {
      /* Lazily create work vectors on first application */
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    /* Forward sweep: first split solves directly, later splits solve on the updated residual */
    PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
    cnt = 1;
    while (ilink->next) {
      ilink = ilink->next;
      /* compute the residual only over the part of the vector needed */
      PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x));
      PetscCall(VecScale(ilink->x, -1.0));
      PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
      PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
      PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    }
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* Backward sweep, skipping the last split which was just solved */
      cnt -= 2;
      while (ilink->previous) {
        ilink = ilink->previous;
        /* compute the residual only over the part of the vector needed */
        PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x));
        PetscCall(VecScale(ilink->x, -1.0));
        PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      }
    }
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1471 
/*
  PCApply_FieldSplit_GKB - Applies the fieldsplit preconditioner using the
  Golub-Kahan bidiagonalization (GKB) iteration on the 2x2 block system
  (the code's comments reference [Ar13] for the algorithm).

  The iteration runs until either the lower-bound error estimate 'lowbnd'
  (computed from a delay window of the last jac->gkbdelay values of z) drops
  below jac->gkbtol, or jac->gkbmaxit iterations are reached.  jac->gkbnu > 0
  selects the augmented-Lagrangian regularization H = A + nu*B*B'.
*/
static PetscErrorCode PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; /* A = primal split, D = dual (constraint) split */
  KSP               ksp = ilinkA->ksp;
  Vec               u, v, Hu, d, work1, work2;
  PetscScalar       alpha, z, nrmz2, *vecz;
  PetscReal         lowbnd, nu, beta;
  PetscInt          j, iterGKB;

  PetscFunctionBegin;
  /* Extract both sub-vectors of the rhs; begin both scatters before ending either */
  PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));

  /* Work vectors and the delay window were allocated during setup */
  u     = jac->u;
  v     = jac->v;
  Hu    = jac->Hu;
  d     = jac->d;
  work1 = jac->w1;
  work2 = jac->w2;
  vecz  = jac->vecz;

  /* Change RHS to comply with matrix regularization H = A + nu*B*B' */
  /* Add q = q + nu*B*b */
  if (jac->gkbnu) {
    nu = jac->gkbnu;
    PetscCall(VecScale(ilinkD->x, jac->gkbnu));
    PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */
  } else {
    /* Situation when no augmented Lagrangian is used. Then we set inner  */
    /* matrix N = I in [Ar13], and thus nu = 1.                           */
    nu = 1;
  }

  /* Transform rhs from [q,tilde{b}] to [0,b] */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y));
  PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1));
  PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x        */

  /* First step of algorithm */
  PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c)*/
  KSPCheckDot(ksp, beta);
  beta = PetscSqrtReal(nu) * beta;
  PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c      */
  PetscCall(MatMult(jac->B, v, work2));          /* u = H^{-1}*B*v      */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(KSPSolve(ksp, work2, u));
  PetscCall(KSPCheckSolve(ksp, pc, u));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u      */
  PetscCall(VecDot(Hu, u, &alpha));
  KSPCheckDot(ksp, alpha);
  PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
  alpha = PetscSqrtReal(PetscAbsScalar(alpha));
  PetscCall(VecScale(u, 1.0 / alpha));
  PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* v = nu/beta *c      */

  z       = beta / alpha;
  vecz[1] = z;

  /* Computation of first iterate x(1) and p(1) */
  PetscCall(VecAXPY(ilinkA->y, z, u));
  PetscCall(VecCopy(d, ilinkD->y));
  PetscCall(VecScale(ilinkD->y, -z));

  iterGKB = 1;
  /* Start above the tolerance so the loop is entered */
  lowbnd  = 2 * jac->gkbtol;
  if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));

  while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) {
    iterGKB += 1;
    PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */
    PetscCall(VecAXPBY(v, nu, -alpha, work1));
    PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = sqrt(nu)*v'*v      */
    beta = beta / PetscSqrtReal(nu);
    PetscCall(VecScale(v, 1.0 / beta));
    PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */
    PetscCall(MatMult(jac->H, u, Hu));
    PetscCall(VecAXPY(work2, -beta, Hu));
    PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(KSPSolve(ksp, work2, u));
    PetscCall(KSPCheckSolve(ksp, pc, u));
    PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u            */
    PetscCall(VecDot(Hu, u, &alpha));
    KSPCheckDot(ksp, alpha);
    PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
    alpha = PetscSqrtReal(PetscAbsScalar(alpha));
    PetscCall(VecScale(u, 1.0 / alpha));

    z       = -beta / alpha * z; /* z <- beta/alpha*z     */
    vecz[0] = z;

    /* Computation of new iterate x(i+1) and p(i+1) */
    PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */
    PetscCall(VecAXPY(ilinkA->y, z, u));                   /* r = r + z*u          */
    PetscCall(VecAXPY(ilinkD->y, -z, d));                  /* p = p - z*d          */
    PetscCall(MatMult(jac->H, ilinkA->y, Hu));             /* ||u||_H = u'*H*u     */
    PetscCall(VecDot(Hu, ilinkA->y, &nrmz2));

    /* Compute Lower Bound estimate */
    if (iterGKB > jac->gkbdelay) {
      lowbnd = 0.0;
      for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]);
      lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2));
    }

    /* Shift the delay window one slot: vecz[0] (newest) moves toward vecz[gkbdelay-1] */
    for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2];
    if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));
  }

  /* Scatter both solution pieces back into the global vector y */
  PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1594 
/* Scatters xx into split ilink, applies the transpose of the split's KSP, and accumulates
   (ADD_VALUES) the result back into yy.  Note the swapped roles of ilink->x and ilink->y
   relative to the forward FieldSplitSplitSolveAdd.  As a macro it evaluates its arguments
   multiple times and relies on a variable named 'pc' in the calling scope for KSPCheckSolve(). */
#define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \
                    KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE)))
1599 
/*
  PCApplyTranspose_FieldSplit - Applies the transpose of the fieldsplit
  preconditioner, y = M^{-T} x, for the non-Schur composite types.

  For the additive type the structure matches PCApply_FieldSplit with transposed
  split solves.  For multiplicative types the transpose reverses the sweep order:
  the plain multiplicative transpose starts at the LAST split and walks backward,
  while the symmetric-multiplicative transpose does forward then backward sweeps
  (its transpose has the same sweep structure).  Residuals use MatMultTranspose
  on the full pc->mat rather than the per-split Afield blocks.
*/
static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    PetscBool matnest;

    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
    if (jac->defaultsplit && !matnest) {
      /* Default strided splits: gather all fields at once, solve each transposed, scatter back */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else {
    if (!jac->w1) {
      /* Lazily create work vectors for residual computation */
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* Symmetric sweep is self-transpose in structure: forward then backward */
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->next) {
        ilink = ilink->next;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); /* w2 = x - A^T y (current residual) */
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    } else {
      /* Transpose of a forward multiplicative sweep is a backward sweep */
      while (ilink->next) { /* get to last entry in linked list */
        ilink = ilink->next;
      }
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1667 
1668 static PetscErrorCode PCReset_FieldSplit(PC pc)
1669 {
1670   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1671   PC_FieldSplitLink ilink = jac->head, next;
1672 
1673   PetscFunctionBegin;
1674   while (ilink) {
1675     PetscCall(KSPDestroy(&ilink->ksp));
1676     PetscCall(VecDestroy(&ilink->x));
1677     PetscCall(VecDestroy(&ilink->y));
1678     PetscCall(VecDestroy(&ilink->z));
1679     PetscCall(VecScatterDestroy(&ilink->sctx));
1680     PetscCall(ISDestroy(&ilink->is));
1681     PetscCall(ISDestroy(&ilink->is_col));
1682     PetscCall(PetscFree(ilink->splitname));
1683     PetscCall(PetscFree(ilink->fields));
1684     PetscCall(PetscFree(ilink->fields_col));
1685     next = ilink->next;
1686     PetscCall(PetscFree(ilink));
1687     ilink = next;
1688   }
1689   jac->head = NULL;
1690   PetscCall(PetscFree2(jac->x, jac->y));
1691   if (jac->mat && jac->mat != jac->pmat) {
1692     PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat));
1693   } else if (jac->mat) {
1694     jac->mat = NULL;
1695   }
1696   if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat));
1697   if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield));
1698   jac->nsplits = 0;
1699   PetscCall(VecDestroy(&jac->w1));
1700   PetscCall(VecDestroy(&jac->w2));
1701   PetscCall(MatDestroy(&jac->schur));
1702   PetscCall(MatDestroy(&jac->schurp));
1703   PetscCall(MatDestroy(&jac->schur_user));
1704   PetscCall(KSPDestroy(&jac->kspschur));
1705   PetscCall(KSPDestroy(&jac->kspupper));
1706   PetscCall(MatDestroy(&jac->B));
1707   PetscCall(MatDestroy(&jac->C));
1708   PetscCall(MatDestroy(&jac->H));
1709   PetscCall(VecDestroy(&jac->u));
1710   PetscCall(VecDestroy(&jac->v));
1711   PetscCall(VecDestroy(&jac->Hu));
1712   PetscCall(VecDestroy(&jac->d));
1713   PetscCall(PetscFree(jac->vecz));
1714   PetscCall(PetscViewerDestroy(&jac->gkbviewer));
1715   jac->isrestrict = PETSC_FALSE;
1716   PetscFunctionReturn(PETSC_SUCCESS);
1717 }
1718 
1719 static PetscErrorCode PCDestroy_FieldSplit(PC pc)
1720 {
1721   PetscFunctionBegin;
1722   PetscCall(PCReset_FieldSplit(pc));
1723   PetscCall(PetscFree(pc->data));
1724   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
1725   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL));
1726   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL));
1727   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL));
1728   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL));
1729   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL));
1730   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL));
1731   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1732 
1733   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
1734   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
1735   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
1736   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
1737   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1738   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
1739   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
1740   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
1741   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
1742   PetscFunctionReturn(PETSC_SUCCESS);
1743 }
1744 
/* Process options-database settings for a PCFIELDSPLIT preconditioner.
   Reads the split layout, composition type, and (depending on the type) the
   Schur- or GKB-specific options; once the sub-solvers exist and have been
   used, it also propagates options to them. */
static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PetscInt        bs;
  PetscBool       flg;
  PC_FieldSplit  *jac = (PC_FieldSplit *)pc->data;
  PCCompositeType ctype;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options");
  PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL));
  PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg));
  if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs));
  /* default the Amat-vs-Pmat extraction flags from the outer PC's -pc_use_amat setting
     before reading the fieldsplit-specific overrides */
  jac->diag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL));
  jac->offdiag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL));
  PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL));
  PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */
  PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg));
  if (flg) PetscCall(PCFieldSplitSetType(pc, ctype));
  /* Only setup fields once */
  if ((jac->bs > 0) && (jac->nsplits == 0)) {
    /* only allow user to set fields from command line.
       otherwise user can set them in PCFieldSplitSetDefaults() */
    PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
    if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
  }
  if (jac->type == PC_COMPOSITE_SCHUR) {
    /* -pc_fieldsplit_schur_factorization_type is the deprecated spelling of -pc_fieldsplit_schur_fact_type;
       it is read first so the new option (read next) wins if both are given */
    PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg));
    if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n"));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL));
    PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL));
  } else if (jac->type == PC_COMPOSITE_GKB) {
    PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitGKBTol", jac->gkbtol, &jac->gkbtol, NULL));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL));
    PetscCall(PetscOptionsBoundedReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitGKBNu", jac->gkbnu, &jac->gkbnu, NULL, 0.0));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", "Maximum allowed number of iterations", "PCFieldSplitGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL));
    PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL));
  }
  /*
    In the initial call to this routine the sub-solver data structures do not exist so we cannot call KSPSetFromOptions() on them yet.
    But after the initial setup of ALL the layers of sub-solvers is completed we do want to call KSPSetFromOptions() on the sub-solvers every time it
    is called on the outer solver in case changes were made in the options database

    But even after PCSetUp_FieldSplit() is called all the options inside the inner levels of sub-solvers may still not have been set thus we only call the KSPSetFromOptions()
    if we know that the entire stack of sub-solvers below this have been complete instantiated, we check this by seeing if any solver iterations are complete.
    Without this extra check test p2p1fetidp_olof_full and others fail with incorrect matrix types.

    There could be a negative side effect of calling the KSPSetFromOptions() below.

    If one captured the PetscObjectState of the options database one could skip these calls if the database has not changed from the previous call
  */
  if (jac->issetup) {
    PC_FieldSplitLink ilink = jac->head;
    if (jac->type == PC_COMPOSITE_SCHUR) {
      if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper));
      if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur));
    }
    while (ilink) {
      if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp));
      ilink = ilink->next;
    }
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
1812 
1813 static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
1814 {
1815   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
1816   PC_FieldSplitLink ilink, next = jac->head;
1817   char              prefix[128];
1818   PetscInt          i;
1819 
1820   PetscFunctionBegin;
1821   if (jac->splitdefined) {
1822     PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
1823     PetscFunctionReturn(PETSC_SUCCESS);
1824   }
1825   for (i = 0; i < n; i++) { PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]); }
1826   PetscCall(PetscNew(&ilink));
1827   if (splitname) {
1828     PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
1829   } else {
1830     PetscCall(PetscMalloc1(3, &ilink->splitname));
1831     PetscCall(PetscSNPrintf(ilink->splitname, 2, "%" PetscInt_FMT, jac->nsplits));
1832   }
1833   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split great than 4 gets logged in the 4th split */
1834   PetscCall(PetscMalloc1(n, &ilink->fields));
1835   PetscCall(PetscArraycpy(ilink->fields, fields, n));
1836   PetscCall(PetscMalloc1(n, &ilink->fields_col));
1837   PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n));
1838 
1839   ilink->nfields = n;
1840   ilink->next    = NULL;
1841   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
1842   PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
1843   PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
1844   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
1845   PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
1846 
1847   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
1848   PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
1849 
1850   if (!next) {
1851     jac->head       = ilink;
1852     ilink->previous = NULL;
1853   } else {
1854     while (next->next) next = next->next;
1855     next->next      = ilink;
1856     ilink->previous = next;
1857   }
1858   jac->nsplits++;
1859   PetscFunctionReturn(PETSC_SUCCESS);
1860 }
1861 
1862 static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1863 {
1864   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1865 
1866   PetscFunctionBegin;
1867   *subksp = NULL;
1868   if (n) *n = 0;
1869   if (jac->type == PC_COMPOSITE_SCHUR) {
1870     PetscInt nn;
1871 
1872     PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()");
1873     PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits);
1874     nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 1 : 0);
1875     PetscCall(PetscMalloc1(nn, subksp));
1876     (*subksp)[0] = jac->head->ksp;
1877     (*subksp)[1] = jac->kspschur;
1878     if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper;
1879     if (n) *n = nn;
1880   }
1881   PetscFunctionReturn(PETSC_SUCCESS);
1882 }
1883 
1884 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp)
1885 {
1886   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1887 
1888   PetscFunctionBegin;
1889   PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()");
1890   PetscCall(PetscMalloc1(jac->nsplits, subksp));
1891   PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp));
1892 
1893   (*subksp)[1] = jac->kspschur;
1894   if (n) *n = jac->nsplits;
1895   PetscFunctionReturn(PETSC_SUCCESS);
1896 }
1897 
1898 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1899 {
1900   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1901   PetscInt          cnt   = 0;
1902   PC_FieldSplitLink ilink = jac->head;
1903 
1904   PetscFunctionBegin;
1905   PetscCall(PetscMalloc1(jac->nsplits, subksp));
1906   while (ilink) {
1907     (*subksp)[cnt++] = ilink->ksp;
1908     ilink            = ilink->next;
1909   }
1910   PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits);
1911   if (n) *n = jac->nsplits;
1912   PetscFunctionReturn(PETSC_SUCCESS);
1913 }
1914 
/*@C
  PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`.

  Input Parameters:
+ pc  - the preconditioner context
- isy - the index set that defines the indices to which the fieldsplit is to be restricted

  Level: advanced

  Developer Notes:
  It seems the resulting `IS`s will not cover the entire space, so
  how can they define a convergent preconditioner? Needs explaining.

.seealso: [](sec_block_matrices), `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(isy, IS_CLASSID, 2);
  /* PetscTryMethod(): silently a no-op when pc is not a PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1938 
/* Implementation of PCFieldSplitRestrictIS(): replace each split's IS by its
   intersection with isy, renumbered into the global numbering induced by isy,
   and recurse into nested PCFIELDSPLIT sub-preconditioners. */
static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;
  PetscInt          localsize, size, sizez, i;
  const PetscInt   *ind, *indz;
  PetscInt         *indc, *indcz;
  PetscBool         flg;

  PetscFunctionBegin;
  /* size = global offset of this rank's portion of isy (exclusive prefix sum of local sizes) */
  PetscCall(ISGetLocalSize(isy, &localsize));
  PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy)));
  size -= localsize;
  while (ilink) {
    IS isrl, isr;
    PC subpc;
    /* isrl = positions (local to isy) of this split's entries that appear in isy */
    PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl));
    PetscCall(ISGetLocalSize(isrl, &localsize));
    PetscCall(PetscMalloc1(localsize, &indc));
    PetscCall(ISGetIndices(isrl, &ind));
    PetscCall(PetscArraycpy(indc, ind, localsize));
    PetscCall(ISRestoreIndices(isrl, &ind));
    PetscCall(ISDestroy(&isrl));
    /* shift the local positions by this rank's offset to obtain global indices */
    for (i = 0; i < localsize; i++) *(indc + i) += size;
    /* isr takes ownership of indc (PETSC_OWN_POINTER) */
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr));
    /* store isr as both the row and the column IS of the split: take one extra
       reference per stored copy, then drop the creation reference below */
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is));
    ilink->is = isr;
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is_col));
    ilink->is_col = isr;
    PetscCall(ISDestroy(&isr));
    PetscCall(KSPGetPC(ilink->ksp, &subpc));
    PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg));
    if (flg) {
      /* nested fieldsplit: build the restriction of isy into this split's numbering and recurse */
      IS       iszl, isz;
      MPI_Comm comm;
      PetscCall(ISGetLocalSize(ilink->is, &localsize));
      comm = PetscObjectComm((PetscObject)ilink->is);
      PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl));
      /* sizez = offset of this rank within the (already restricted) split IS */
      PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm));
      sizez -= localsize;
      PetscCall(ISGetLocalSize(iszl, &localsize));
      PetscCall(PetscMalloc1(localsize, &indcz));
      PetscCall(ISGetIndices(iszl, &indz));
      PetscCall(PetscArraycpy(indcz, indz, localsize));
      PetscCall(ISRestoreIndices(iszl, &indz));
      PetscCall(ISDestroy(&iszl));
      for (i = 0; i < localsize; i++) *(indcz + i) += sizez;
      PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz));
      PetscCall(PCFieldSplitRestrictIS(subpc, isz));
      PetscCall(ISDestroy(&isz));
    }
    next  = ilink->next;
    ilink = next;
  }
  jac->isrestrict = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1998 
1999 static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is)
2000 {
2001   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
2002   PC_FieldSplitLink ilink, next = jac->head;
2003   char              prefix[128];
2004 
2005   PetscFunctionBegin;
2006   if (jac->splitdefined) {
2007     PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
2008     PetscFunctionReturn(PETSC_SUCCESS);
2009   }
2010   PetscCall(PetscNew(&ilink));
2011   if (splitname) {
2012     PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
2013   } else {
2014     PetscCall(PetscMalloc1(8, &ilink->splitname));
2015     PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits));
2016   }
2017   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split great than 4 gets logged in the 4th split */
2018   PetscCall(PetscObjectReference((PetscObject)is));
2019   PetscCall(ISDestroy(&ilink->is));
2020   ilink->is = is;
2021   PetscCall(PetscObjectReference((PetscObject)is));
2022   PetscCall(ISDestroy(&ilink->is_col));
2023   ilink->is_col = is;
2024   ilink->next   = NULL;
2025   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
2026   PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
2027   PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
2028   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
2029   PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
2030 
2031   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
2032   PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
2033 
2034   if (!next) {
2035     jac->head       = ilink;
2036     ilink->previous = NULL;
2037   } else {
2038     while (next->next) next = next->next;
2039     next->next      = ilink;
2040     ilink->previous = next;
2041   }
2042   jac->nsplits++;
2043   PetscFunctionReturn(PETSC_SUCCESS);
2044 }
2045 
2046 /*@C
2047   PCFieldSplitSetFields - Sets the fields that define one particular split in `PCFIELDSPLIT`
2048 
2049   Logically Collective
2050 
2051   Input Parameters:
2052 + pc         - the preconditioner context
2053 . splitname  - name of this split, if `NULL` the number of the split is used
2054 . n          - the number of fields in this split
2055 . fields     - the fields in this split
2056 - fields_col - generally the same as `fields`, if it does not match `fields` then the submatrix that is solved for this set of fields comes from an off-diagonal block
2057                of the matrix and `fields_col` provides the column indices for that block
2058 
2059   Options Database Key:
2060 . -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split
2061 
2062   Level: intermediate
2063 
2064   Notes:
2065   Use `PCFieldSplitSetIS()` to set a  general set of indices as a split.
2066 
2067   If the matrix used to construct the preconditioner is `MATNEST` then field i refers to the `is_row[i]` `IS` passed to `MatCreateNest()`.
2068 
2069   If the matrix used to construct the preconditioner is not `MATNEST` then
2070   `PCFieldSplitSetFields()` is for defining fields as strided blocks (based on the block size provided to the matrix with `MatSetBlocksize()` or
2071   to the `PC` with `PCFieldSplitSetBlockSize()`). For example, if the block
2072   size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean
2073   0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x1x3x5x7.. x12x45x78x....
2074   where the numbered entries indicate what is in the split.
2075 
2076   This function is called once per split (it creates a new split each time).  Solve options
2077   for this split will be available under the prefix `-fieldsplit_SPLITNAME_`.
2078 
2079   `PCFieldSplitSetIS()` does not support having a `fields_col` different from `fields`
2080 
2081   Developer Notes:
2082   This routine does not actually create the `IS` representing the split, that is delayed
2083   until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be
2084   available when this routine is called.
2085 
2086 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()`,
2087           `MatSetBlocksize()`, `MatCreateNest()`
2088 @*/
2089 PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
2090 {
2091   PetscFunctionBegin;
2092   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2093   PetscAssertPointer(splitname, 2);
2094   PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname);
2095   PetscAssertPointer(fields, 4);
2096   PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col));
2097   PetscFunctionReturn(PETSC_SUCCESS);
2098 }
2099 
2100 /*@
2101   PCFieldSplitSetDiagUseAmat - set flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2102   the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2103 
2104   Logically Collective
2105 
2106   Input Parameters:
2107 + pc  - the preconditioner object
2108 - flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2109 
2110   Options Database Key:
2111 . -pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks
2112 
2113   Level: intermediate
2114 
2115 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT`
2116 @*/
2117 PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg)
2118 {
2119   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2120   PetscBool      isfs;
2121 
2122   PetscFunctionBegin;
2123   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2124   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2125   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2126   jac->diag_use_amat = flg;
2127   PetscFunctionReturn(PETSC_SUCCESS);
2128 }
2129 
2130 /*@
2131   PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2132   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2133 
2134   Logically Collective
2135 
2136   Input Parameter:
2137 . pc - the preconditioner object
2138 
2139   Output Parameter:
2140 . flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2141 
2142   Level: intermediate
2143 
2144 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT`
2145 @*/
2146 PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg)
2147 {
2148   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2149   PetscBool      isfs;
2150 
2151   PetscFunctionBegin;
2152   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2153   PetscAssertPointer(flg, 2);
2154   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2155   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2156   *flg = jac->diag_use_amat;
2157   PetscFunctionReturn(PETSC_SUCCESS);
2158 }
2159 
2160 /*@
2161   PCFieldSplitSetOffDiagUseAmat - set flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2162   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2163 
2164   Logically Collective
2165 
2166   Input Parameters:
2167 + pc  - the preconditioner object
2168 - flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2169 
2170   Options Database Key:
2171 . -pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks
2172 
2173   Level: intermediate
2174 
2175 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT`
2176 @*/
2177 PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg)
2178 {
2179   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2180   PetscBool      isfs;
2181 
2182   PetscFunctionBegin;
2183   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2184   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2185   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2186   jac->offdiag_use_amat = flg;
2187   PetscFunctionReturn(PETSC_SUCCESS);
2188 }
2189 
2190 /*@
2191   PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2192   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2193 
2194   Logically Collective
2195 
2196   Input Parameter:
2197 . pc - the preconditioner object
2198 
2199   Output Parameter:
2200 . flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2201 
2202   Level: intermediate
2203 
2204 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT`
2205 @*/
2206 PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg)
2207 {
2208   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2209   PetscBool      isfs;
2210 
2211   PetscFunctionBegin;
2212   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2213   PetscAssertPointer(flg, 2);
2214   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2215   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2216   *flg = jac->offdiag_use_amat;
2217   PetscFunctionReturn(PETSC_SUCCESS);
2218 }
2219 
/*@C
  PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT`

  Logically Collective

  Input Parameters:
+ pc        - the preconditioner context
. splitname - name of this split, if `NULL` the number of the split is used
- is        - the index set that defines the elements in this split

  Level: intermediate

  Notes:
  Use `PCFieldSplitSetFields()`, for splits defined by strided `IS` based on the matrix block size or the `is_rows[]` passed into `MATNEST`

  This function is called once per split (it creates a new split each time).  Solve options
  for this split will be available under the prefix -fieldsplit_SPLITNAME_.

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetFields()`
@*/
PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* splitname is optional; only validate it when provided */
  if (splitname) PetscAssertPointer(splitname, 2);
  PetscValidHeaderSpecific(is, IS_CLASSID, 3);
  /* PetscTryMethod(): silently a no-op when pc is not a PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2249 
2250 /*@C
2251   PCFieldSplitGetIS - Retrieves the elements for a split as an `IS`
2252 
2253   Logically Collective
2254 
2255   Input Parameters:
2256 + pc        - the preconditioner context
2257 - splitname - name of this split
2258 
2259   Output Parameter:
2260 . is - the index set that defines the elements in this split, or `NULL` if the split is not found
2261 
2262   Level: intermediate
2263 
2264 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`, `PCFieldSplitGetISByIndex()`
2265 @*/
2266 PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is)
2267 {
2268   PetscFunctionBegin;
2269   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2270   PetscAssertPointer(splitname, 2);
2271   PetscAssertPointer(is, 3);
2272   {
2273     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2274     PC_FieldSplitLink ilink = jac->head;
2275     PetscBool         found;
2276 
2277     *is = NULL;
2278     while (ilink) {
2279       PetscCall(PetscStrcmp(ilink->splitname, splitname, &found));
2280       if (found) {
2281         *is = ilink->is;
2282         break;
2283       }
2284       ilink = ilink->next;
2285     }
2286   }
2287   PetscFunctionReturn(PETSC_SUCCESS);
2288 }
2289 
2290 /*@C
2291   PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS`
2292 
2293   Logically Collective
2294 
2295   Input Parameters:
2296 + pc    - the preconditioner context
2297 - index - index of this split
2298 
2299   Output Parameter:
2300 . is - the index set that defines the elements in this split
2301 
2302   Level: intermediate
2303 
2304 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`,
2305 
2306 @*/
2307 PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is)
2308 {
2309   PetscFunctionBegin;
2310   PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index);
2311   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2312   PetscAssertPointer(is, 3);
2313   {
2314     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2315     PC_FieldSplitLink ilink = jac->head;
2316     PetscInt          i     = 0;
2317     PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits);
2318 
2319     while (i < index) {
2320       ilink = ilink->next;
2321       ++i;
2322     }
2323     PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is));
2324   }
2325   PetscFunctionReturn(PETSC_SUCCESS);
2326 }
2327 
/*@
  PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the
  fieldsplit preconditioner when calling `PCFieldSplitSetFields()`. If not set the matrix block size is used.

  Logically Collective

  Input Parameters:
+ pc - the preconditioner context
- bs - the block size

  Level: intermediate

  Note:
  If the matrix is a `MATNEST` then the `is_rows[]` passed to `MatCreateNest()` determines the fields.

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, bs, 2);
  /* PetscTryMethod(): silently a no-op when pc is not a PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2353 
/*@C
  PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits

  Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ n      - the number of splits
- subksp - the array of `KSP` contexts

  Level: advanced

  Notes:
  After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
  (not the `KSP`, just the array that contains them).

  You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`.

  If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the
  Schur complement and the `KSP` object used to iterate over the Schur complement.
  To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`.

  If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the
  inner linear system defined by the matrix H in each loop.

  Fortran Notes:
  You must pass in a `KSP` array that is large enough to contain all the `KSP`s.
  You can call `PCFieldSplitGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
  `KSP` array must be.

  Developer Notes:
  There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`

  The Fortran interface should be modernized to return directly the array of values.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()`
@*/
PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2);
  /* PetscUseMethod(): unlike PetscTryMethod(), raises an error if pc does not implement this method */
  PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2401 
2402 /*@C
2403   PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT`
2404 
2405   Collective
2406 
2407   Input Parameter:
2408 . pc - the preconditioner context
2409 
2410   Output Parameters:
2411 + n      - the number of splits
2412 - subksp - the array of `KSP` contexts
2413 
2414   Level: advanced
2415 
2416   Notes:
2417   After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2418   (not the `KSP` just the array that contains them).
2419 
2420   You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`.
2421 
2422   If the fieldsplit type is of type `PC_COMPOSITE_SCHUR`, it returns (in order)
2423 +  1  - the `KSP` used for the (1,1) block
2424 .  2  - the `KSP` used for the Schur complement (not the one used for the interior Schur solver)
2425 -  3  - the `KSP` used for the (1,1) block in the upper triangular factor (if different from that of the (1,1) block).
2426 
2427   It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`.
2428 
2429   Fortran Notes:
2430   You must pass in a `KSP` array that is large enough to contain all the local `KSP`s.
2431   You can call `PCFieldSplitSchurGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
2432   `KSP` array must be.
2433 
2434   Developer Notes:
2435   There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2436 
2437   Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged?
2438 
2439   The Fortran interface should be modernized to return directly the array of values.
2440 
2441 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()`
2442 @*/
2443 PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
2444 {
2445   PetscFunctionBegin;
2446   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2447   if (n) PetscAssertPointer(n, 2);
2448   PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
2449   PetscFunctionReturn(PETSC_SUCCESS);
2450 }
2451 
/*@
  PCFieldSplitSetSchurPre -  Indicates from what operator the preconditioner is constructed for the Schur complement.
  The default is the A11 matrix.

  Collective

  Input Parameters:
+ pc    - the preconditioner context
. ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default),
              `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`,
              `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL`
- pre   - matrix to use for preconditioning, or `NULL`

  Options Database Keys:
+ -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`. See notes for meaning of various arguments
- -fieldsplit_1_pc_type <pctype>                               - the preconditioner algorithm that is used to construct the preconditioner from the operator

  Level: intermediate

  Notes:
  If ptype is
+     a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner
  matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix
.     self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix:
  The only preconditioners that currently work with this symbolic representation matrix object are `PCLSC` and `PCHPDDM`
.     user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument
  to this function).
.     selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation Sp = A11 - A10 inv(diag(A00)) A01
  This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be
  lumped before extracting the diagonal using the additional option `-fieldsplit_1_mat_schur_complement_ainv_type lump`
-     full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation
  computed internally by `PCFIELDSPLIT` (this is expensive)
  useful mostly as a test that the Schur complement approach can work for your problem

  When solving a saddle point problem, where the A11 block is identically zero, using `a11` as the ptype only makes sense
  with the additional option `-fieldsplit_1_pc_type none`. Usually for saddle point problems one would use a ptype of self and
  `-fieldsplit_1_pc_type lsc` which uses the least squares commutator to compute a preconditioner for the Schur complement.

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`,
          `MatSchurComplementSetAinvType()`, `PCLSC`

@*/
PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscTryMethod: silently a no-op if pc is not a PCFIELDSPLIT of type SCHUR */
  PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Deprecated: retained for backward compatibility, use PCFieldSplitSetSchurPre() */
PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  return PCFieldSplitSetSchurPre(pc, ptype, pre);
} /* Deprecated name */

/*@
  PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be
  preconditioned.  See `PCFieldSplitSetSchurPre()` for details.

  Logically Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`
- pre   - matrix to use for preconditioning (with `PC_FIELDSPLIT_SCHUR_PRE_USER`), or `NULL`

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC`

@*/
PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscUseMethod: errors if pc is not a PCFIELDSPLIT of type SCHUR */
  PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSchurGetS -  extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. S - the Schur complement matrix

  Level: advanced

  Note:
  This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`.

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`,
          `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()`
@*/
PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S)
{
  const char    *t;
  PetscBool      isfs;
  PC_FieldSplit *jac;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* check both the PC type and the composite type before touching pc->data as a PC_FieldSplit */
  PetscCall(PetscObjectGetType((PetscObject)pc, &t));
  PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
  jac = (PC_FieldSplit *)pc->data;
  PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
  /* no new reference is taken: the caller must return the matrix with PCFieldSplitSchurRestoreS() */
  if (S) *S = jac->schur;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSchurRestoreS -  returns the `MATSCHURCOMPLEMENT` matrix used by this `PC`

  Not Collective

  Input Parameters:
+ pc - the preconditioner context
- S  - the Schur complement matrix

  Level: advanced

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurGetS()`
@*/
PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S)
{
  const char    *t;
  PetscBool      isfs;
  PC_FieldSplit *jac;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCall(PetscObjectGetType((PetscObject)pc, &t));
  PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
  jac = (PC_FieldSplit *)pc->data;
  PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
  /* sanity check that the caller restores the same matrix obtained from PCFieldSplitSchurGetS() */
  PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten");
  PetscFunctionReturn(PETSC_SUCCESS);
}
2598 
2599 static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2600 {
2601   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2602 
2603   PetscFunctionBegin;
2604   jac->schurpre = ptype;
2605   if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) {
2606     PetscCall(MatDestroy(&jac->schur_user));
2607     jac->schur_user = pre;
2608     PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
2609   }
2610   PetscFunctionReturn(PETSC_SUCCESS);
2611 }
2612 
/* Type-specific implementation of PCFieldSplitGetSchurPre(): reports the stored
   Schur-preconditioning strategy and the user matrix (NULL unless one was set). */
static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  if (ptype) *ptype = jac->schurpre;
  if (pre) *pre = jac->schur_user;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetSchurFactType -  sets which blocks of the approximate block factorization to retain in the preconditioner {cite}`murphy2000note` and {cite}`ipsen2001note`

  Collective

  Input Parameters:
+ pc    - the preconditioner context
- ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default

  Options Database Key:
. -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is `full`

  Level: intermediate

  Notes:
  The FULL factorization is
.vb
  (A   B)  = (1       0) (A   0) (1  Ainv*B)  = L D U
  (C   E)    (C*Ainv  1) (0   S) (0       1)
.ve
  where S = E - C*Ainv*B. In practice, the full factorization is applied via block triangular solves with the grouping $L*(D*U)$. UPPER uses $D*U$, LOWER uses $L*D$,
  and DIAG is the diagonal part with the sign of S flipped (because this makes the preconditioner positive definite for many formulations,
  thus allowing the use of `KSPMINRES`). Sign flipping of S can be turned off with `PCFieldSplitSetSchurScale()`.

  If A and S are solved exactly
.vb
  *) FULL factorization is a direct solver.
  *) The preconditioned operator with LOWER or UPPER has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations.
  *) With DIAG, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations.
.ve

  If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner
  application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice.

  For symmetric problems in which A is positive definite and S is negative definite, DIAG can be used with `KSPMINRES`.

  A flexible method like `KSPFGMRES` or `KSPGCR`, [](sec_flexibleksp), must be used if the fieldsplit preconditioner is nonlinear (e.g. a few iterations of a Krylov method is used to solve with A or S).

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`,
          [](sec_flexibleksp)
@*/
PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscTryMethod: silently a no-op if pc is not a PCFIELDSPLIT of type SCHUR */
  PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Type-specific implementation of PCFieldSplitSetSchurFactType(): simply records the choice */
static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurfactorization = ftype;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetSchurScale -  Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`.

  Collective

  Input Parameters:
+ pc    - the preconditioner context
- scale - scaling factor for the Schur complement

  Options Database Key:
. -pc_fieldsplit_schur_scale <scale> - default is -1.0

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurFactType()`
@*/
PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveScalar(pc, scale, 2);
  /* PetscTryMethod: silently a no-op if pc is not a PCFIELDSPLIT of type SCHUR */
  PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Type-specific implementation of PCFieldSplitSetSchurScale(): simply records the scale */
static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurscale = scale;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2714 
2715 /*@C
2716   PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement
2717 
2718   Collective
2719 
2720   Input Parameter:
2721 . pc - the preconditioner context
2722 
2723   Output Parameters:
2724 + A00 - the (0,0) block
2725 . A01 - the (0,1) block
2726 . A10 - the (1,0) block
2727 - A11 - the (1,1) block
2728 
2729   Level: advanced
2730 
2731 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()`
2732 @*/
2733 PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11)
2734 {
2735   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2736 
2737   PetscFunctionBegin;
2738   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2739   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach.");
2740   if (A00) *A00 = jac->pmat[0];
2741   if (A01) *A01 = jac->B;
2742   if (A10) *A10 = jac->C;
2743   if (A11) *A11 = jac->pmat[1];
2744   PetscFunctionReturn(PETSC_SUCCESS);
2745 }
2746 
/*@
  PCFieldSplitSetGKBTol -  Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc        - the preconditioner context
- tolerance - the solver tolerance

  Options Database Key:
. -pc_fieldsplit_gkb_tol <tolerance> - default is 1e-5

  Level: intermediate

  Note:
  The generalized GKB algorithm {cite}`arioli2013` uses a lower bound estimate of the error in energy norm as stopping criterion.
  It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than
  this estimate, the stopping criterion is satisfactory in practical cases.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()`
@*/
PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, tolerance, 2);
  /* PetscTryMethod: silently a no-op if pc is not a PCFIELDSPLIT of type GKB */
  PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Type-specific implementation of PCFieldSplitSetGKBTol(): simply records the tolerance */
static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbtol = tolerance;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetGKBMaxit -  Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc    - the preconditioner context
- maxit - the maximum number of iterations

  Options Database Key:
. -pc_fieldsplit_gkb_maxit <maxit> - default is 100

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()`
@*/
PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, maxit, 2);
  /* PetscTryMethod: silently a no-op if pc is not a PCFIELDSPLIT of type GKB */
  PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Type-specific implementation of PCFieldSplitSetGKBMaxit(): simply records the iteration cap */
static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbmaxit = maxit;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetGKBDelay -  Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization {cite}`arioli2013` in `PCFIELDSPLIT`
  preconditioner.

  Collective

  Input Parameters:
+ pc    - the preconditioner context
- delay - the delay window in the lower bound estimate

  Options Database Key:
. -pc_fieldsplit_gkb_delay <delay> - default is 5

  Level: intermediate

  Notes:
  The algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. The lower bound of the error $ ||u-u^k||_H $
  is expressed as a truncated sum. The error at iteration k can only be measured at iteration (k + `delay`), and thus the algorithm needs
  at least (`delay` + 1) iterations to stop.

  For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to {cite}`arioli2013`

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
@*/
PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, delay, 2);
  /* PetscTryMethod: silently a no-op if pc is not a PCFIELDSPLIT of type GKB */
  PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Type-specific implementation of PCFieldSplitSetGKBDelay(): simply records the delay window */
static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbdelay = delay;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetGKBNu -  Sets the scalar value nu >= 0 in the transformation H = A00 + nu*A01*A01' of the (1,1) block in the
  Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc - the preconditioner context
- nu - the shift parameter

  Options Database Key:
. -pc_fieldsplit_gkb_nu <nu> - default is 1

  Level: intermediate

  Notes:
  This shift is in general done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by choosing `nu` sufficiently large. However,
  if `nu` is chosen too large, the matrix H might be badly conditioned and the solution of the linear system $Hx = b$ in the inner loop becomes difficult. It is therefore
  necessary to find a good balance in between the convergence of the inner and outer loop.

  For `nu` = 0, no shift is done. In this case A00 has to be positive definite. The matrix N in {cite}`arioli2013` is then chosen as identity.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
@*/
PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, nu, 2);
  /* PetscTryMethod: silently a no-op if pc is not a PCFIELDSPLIT of type GKB */
  PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Type-specific implementation of PCFieldSplitSetGKBNu(): simply records the shift parameter */
static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbnu = nu;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2903 
/* Type-specific implementation of PCFieldSplitSetType(): switches the composite type and
   rebinds the apply/view function pointers and the composed query/set methods accordingly. */
static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->type = type;
  /* First clear every type-specific composed method; only those valid for the new
     composite type are re-composed below */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));

  if (type == PC_COMPOSITE_SCHUR) {
    /* Schur complement factorization: dedicated apply/view plus the Schur setters */
    pc->ops->apply          = PCApply_FieldSplit_Schur;
    pc->ops->applytranspose = PCApplyTranspose_FieldSplit_Schur;
    pc->ops->view           = PCView_FieldSplit_Schur;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit));
  } else if (type == PC_COMPOSITE_GKB) {
    /* Golub-Kahan bidiagonalization: dedicated apply/view plus the GKB setters */
    pc->ops->apply = PCApply_FieldSplit_GKB;
    pc->ops->view  = PCView_FieldSplit_GKB;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit));
  } else {
    /* additive / (symmetric) multiplicative / special: generic apply/view */
    pc->ops->apply = PCApply_FieldSplit;
    pc->ops->view  = PCView_FieldSplit;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Type-specific implementation of PCFieldSplitSetBlockSize(): validates and records the
   block size; the block size may be set only once */
static PetscErrorCode PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" PetscInt_FMT, bs);
  PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs);
  jac->bs = bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2958 
2959 static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
2960 {
2961   PC_FieldSplit    *jac           = (PC_FieldSplit *)pc->data;
2962   PC_FieldSplitLink ilink_current = jac->head;
2963   IS                is_owned;
2964 
2965   PetscFunctionBegin;
2966   jac->coordinates_set = PETSC_TRUE; // Internal flag
2967   PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, NULL));
2968 
2969   while (ilink_current) {
2970     // For each IS, embed it to get local coords indces
2971     IS              is_coords;
2972     PetscInt        ndofs_block;
2973     const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block
2974 
2975     // Setting drop to true for safety. It should make no difference.
2976     PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords));
2977     PetscCall(ISGetLocalSize(is_coords, &ndofs_block));
2978     PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration));
2979 
2980     // Allocate coordinates vector and set it directly
2981     PetscCall(PetscMalloc1(ndofs_block * dim, &ilink_current->coords));
2982     for (PetscInt dof = 0; dof < ndofs_block; ++dof) {
2983       for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d];
2984     }
2985     ilink_current->dim   = dim;
2986     ilink_current->ndofs = ndofs_block;
2987     PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration));
2988     PetscCall(ISDestroy(&is_coords));
2989     ilink_current = ilink_current->next;
2990   }
2991   PetscCall(ISDestroy(&is_owned));
2992   PetscFunctionReturn(PETSC_SUCCESS);
2993 }
2994 
/*@
  PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc   - the preconditioner context
- type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`

  Options Database Key:
. -pc_fieldsplit_type <one of multiplicative, additive, symmetric_multiplicative, special, schur> - Sets fieldsplit preconditioner type

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
          `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
@*/
PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscTryMethod: silently a no-op if pc is not a PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT`

  Not collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
          `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
@*/
PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(type, 2);
  /* NOTE(review): pc->data is read without confirming pc is a PCFIELDSPLIT;
     presumably callers guarantee this — confirm against usage */
  *type = jac->type;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.

  Logically Collective

  Input Parameters:
+ pc  - the preconditioner context
- flg - boolean indicating whether to use field splits defined by the `DM`

  Options Database Key:
. -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM`

  Level: intermediate

  Developer Note:
  The name should be `PCFieldSplitSetUseDMSplits()`, similar change to options database

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldsplitSetIS()`
@*/
PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flg, 2);
  /* silently ignored when pc is not a PCFIELDSPLIT */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  if (isfs) jac->dm_splits = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3078 
3079 /*@
3080   PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3081 
3082   Logically Collective
3083 
3084   Input Parameter:
3085 . pc - the preconditioner context
3086 
3087   Output Parameter:
3088 . flg - boolean indicating whether to use field splits defined by the `DM`
3089 
3090   Level: intermediate
3091 
3092   Developer Note:
3093   The name should be `PCFieldSplitGetUseDMSplits()`
3094 
3095 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldsplitSetIS()`
3096 @*/
3097 PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg)
3098 {
3099   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3100   PetscBool      isfs;
3101 
3102   PetscFunctionBegin;
3103   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3104   PetscAssertPointer(flg, 2);
3105   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3106   if (isfs) {
3107     if (flg) *flg = jac->dm_splits;
3108   }
3109   PetscFunctionReturn(PETSC_SUCCESS);
3110 }
3111 
3112 /*@
3113   PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3114 
3115   Logically Collective
3116 
3117   Input Parameter:
3118 . pc - the preconditioner context
3119 
3120   Output Parameter:
3121 . flg - boolean indicating whether to detect fields or not
3122 
3123   Level: intermediate
3124 
3125 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()`
3126 @*/
3127 PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg)
3128 {
3129   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3130 
3131   PetscFunctionBegin;
3132   *flg = jac->detect;
3133   PetscFunctionReturn(PETSC_SUCCESS);
3134 }
3135 
3136 /*@
3137   PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3138 
3139   Logically Collective
3140 
3141   Input Parameter:
3142 . pc - the preconditioner context
3143 
3144   Output Parameter:
3145 . flg - boolean indicating whether to detect fields or not
3146 
3147   Options Database Key:
3148 . -pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point
3149 
3150   Level: intermediate
3151 
3152   Note:
3153   Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`).
3154 
3155 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`
3156 @*/
3157 PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg)
3158 {
3159   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3160 
3161   PetscFunctionBegin;
3162   jac->detect = flg;
3163   if (jac->detect) {
3164     PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
3165     PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
3166   }
3167   PetscFunctionReturn(PETSC_SUCCESS);
3168 }
3169 
3170 /*MC
3171   PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual
3172   collections of variables (that may overlap) called splits. See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details.
3173 
3174   Options Database Keys:
3175 +   -pc_fieldsplit_%d_fields <a,b,..>                                                - indicates the fields to be used in the `%d`'th split
3176 .   -pc_fieldsplit_default                                                           - automatically add any fields to additional splits that have not
3177                                                                                        been supplied explicitly by `-pc_fieldsplit_%d_fields`
3178 .   -pc_fieldsplit_block_size <bs>                                                   - size of block that defines fields (i.e. there are bs fields)
3179                                                                                        when the matrix is not of `MatType` `MATNEST`
3180 .   -pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting
3181 .   -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full>                     - default is `a11`; see `PCFieldSplitSetSchurPre()`
3182 .   -pc_fieldsplit_schur_fact_type <diag,lower,upper,full>                           - set factorization type when using `-pc_fieldsplit_type schur`;
3183                                                                                        see `PCFieldSplitSetSchurFactType()`
3184 .   -pc_fieldsplit_dm_splits <true,false> (default is true)                          - Whether to use `DMCreateFieldDecomposition()` for splits
3185 -   -pc_fieldsplit_detect_saddle_point                                               - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver
3186 
3187   Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` .
  The options prefix for the inner solver when using the Golub-Kahan bidiagonalization preconditioner is `-fieldsplit_0_`
3189   For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields.
3190 
3191   To set options on the solvers for each block append `-fieldsplit_` to all the `PC`
3192   options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1`
3193 
3194   To set the options on the solvers separate for each block call `PCFieldSplitGetSubKSP()`
3195   and set the options directly on the resulting `KSP` object
3196 
3197   Level: intermediate
3198 
3199   Notes:
3200   Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries or with a `MATNEST` and `PCFieldSplitSetIS()`
3201   to define a split by an arbitrary collection of entries.
3202 
3203   If no splits are set, the default is used. If a `DM` is associated with the `PC` and it supports
3204   `DMCreateFieldDecomposition()`, then that is used for the default. Otherwise if the matrix is not `MATNEST`, the splits are defined by entries strided by bs,
3205   beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`,
3206   if this is not called the block size defaults to the blocksize of the second matrix passed
3207   to `KSPSetOperators()`/`PCSetOperators()`.
3208 
3209   For the Schur complement preconditioner if
3210   ```{math}
3211     J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right]
3212   ```
3213 
3214   the preconditioner using `full` factorization is logically
3215   ```{math}
3216     \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) A_{01} \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right]
  ```
3218   where the action of $\text{inv}(A_{00})$ is applied using the KSP solver with prefix `-fieldsplit_0_`.  $S$ is the Schur complement
3219   ```{math}
3220      S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01}
3221   ```
3222   which is usually dense and not stored explicitly.  The action of $\text{ksp}(S)$ is computed using the KSP solver with prefix `-fieldsplit_splitname_` (where `splitname` was given
3223   in providing the SECOND split or 1 if not given). For `PCFieldSplitGetSubKSP()` when field number is 0,
3224   it returns the `KSP` associated with `-fieldsplit_0_` while field number 1 gives `-fieldsplit_1_` KSP. By default
3225   $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$.
3226 
3227   The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above,
3228   `diag` gives
3229   ```{math}
3230     \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\  0 & -\text{ksp}(S) \end{array}\right]
3231   ```
3232   Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$  so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip
3233   can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of
3234   ```{math}
3235     \left[\begin{array}{cc} A_{00} & 0 \\  A_{10} & S \end{array}\right]
3236   ```
  where the inverses of $A_{00}$ and $S$ are applied using `KSP`s. The `upper` factorization is the inverse of
3238   ```{math}
3239     \left[\begin{array}{cc} A_{00} & A_{01} \\  0 & S \end{array}\right]
3240   ```
3241   where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s.
3242 
3243   If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS`
3244   is used automatically for a second submatrix.
3245 
3246   The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1.
3247   Generally it should be used with the `MATAIJ` or `MATNEST` `MatType`
3248 
3249   The forms of these preconditioners are closely related, if not identical, to forms derived as "Distributive Iterations", see,
3250   for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`wesseling2009`.
3251   One can also use `PCFIELDSPLIT` inside a smoother resulting in "Distributive Smoothers".
3252 
3253   See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`.
3254 
3255   The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the
3256   residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables.
3257 
3258   The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape
3259   ```{math}
3260     \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right]
3261   ```
  with $A_{00}$ positive semi-definite. The implementation follows {cite}`arioli2013`. Therein, we choose $N := 1/\nu * I$ and the $(1,1)$-block of the matrix is modified to $H = A_{00} + \nu A_{01} A_{01}'$.
3263   A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`.
3264 
3265   Developer Note:
3266   The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their
3267   user API.
3268 
3269 .seealso: [](sec_block_matrices), `PC`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`,
3270           `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`,
3271           `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`,
3272           `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()`
3273 M*/
3274 
3275 PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc)
3276 {
3277   PC_FieldSplit *jac;
3278 
3279   PetscFunctionBegin;
3280   PetscCall(PetscNew(&jac));
3281 
3282   jac->bs                 = -1;
3283   jac->nsplits            = 0;
3284   jac->type               = PC_COMPOSITE_MULTIPLICATIVE;
3285   jac->schurpre           = PC_FIELDSPLIT_SCHUR_PRE_USER; /* Try user preconditioner first, fall back on diagonal */
3286   jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL;
3287   jac->schurscale         = -1.0;
3288   jac->dm_splits          = PETSC_TRUE;
3289   jac->detect             = PETSC_FALSE;
3290   jac->gkbtol             = 1e-5;
3291   jac->gkbdelay           = 5;
3292   jac->gkbnu              = 1;
3293   jac->gkbmaxit           = 100;
3294   jac->gkbmonitor         = PETSC_FALSE;
3295   jac->coordinates_set    = PETSC_FALSE;
3296 
3297   pc->data = (void *)jac;
3298 
3299   pc->ops->apply           = PCApply_FieldSplit;
3300   pc->ops->applytranspose  = PCApplyTranspose_FieldSplit;
3301   pc->ops->setup           = PCSetUp_FieldSplit;
3302   pc->ops->reset           = PCReset_FieldSplit;
3303   pc->ops->destroy         = PCDestroy_FieldSplit;
3304   pc->ops->setfromoptions  = PCSetFromOptions_FieldSplit;
3305   pc->ops->view            = PCView_FieldSplit;
3306   pc->ops->applyrichardson = NULL;
3307 
3308   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit));
3309   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
3310   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit));
3311   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit));
3312   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit));
3313   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit));
3314   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit));
3315   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit));
3316   PetscFunctionReturn(PETSC_SUCCESS);
3317 }
3318