xref: /petsc/src/ksp/pc/impls/fieldsplit/fieldsplit.c (revision a1fd7ae3ce1a484b2c7fddb18ad0962aba6eb1a3)
1 #include <petsc/private/pcimpl.h>  /*I "petscpc.h" I*/
2 #include <petsc/private/kspimpl.h> /*  This is needed to provide the appropriate PETSC_EXTERN for KSP_Solve_FS ....*/
3 #include <petscdm.h>
4 
/* String tables for the Schur-complement enums; the trailing entries follow the
   PETSc enum-string convention (enum type name, option prefix, NULL terminator) */
const char *const PCFieldSplitSchurPreTypes[]  = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL};
const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL};

/* Log events for the individual sub-solves performed when applying the factored Schur preconditioner */
PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4;
9 
/* One split: a node in the doubly-linked list of field splits held by PC_FieldSplit */
typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
struct _PC_FieldSplitLink {
  KSP               ksp;                 /* Solver for this split's block */
  Vec               x, y, z;             /* Work vectors in this split's subspace */
  char             *splitname;           /* Name used for options prefixes and in viewers */
  PetscInt          nfields;             /* Number of entries in fields[]/fields_col[] */
  PetscInt         *fields, *fields_col; /* Row/column field indices (used when the split is defined by fields rather than by an IS) */
  VecScatter        sctx;                /* Scatter context for this split's vectors */
  IS                is, is_col;          /* Row/column index sets defining the split */
  PC_FieldSplitLink next, previous;      /* Neighbors in the list of splits */
  PetscLogEvent     event;               /* Per-split logging event */

  /* Used only when setting coordinates with PCSetCoordinates */
  PetscInt   dim;
  PetscInt   ndofs;
  PetscReal *coords;
};
27 
/* Private data for PCFIELDSPLIT */
typedef struct {
  PCCompositeType type;         /* How the split solves are combined (additive, multiplicative, Schur, ...) */
  PetscBool       defaultsplit; /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */
  PetscBool       splitdefined; /* Flag is set after the splits have been defined, to prevent more splits from being added */
  PetscInt        bs;           /* Block size for IS and Mat structures */
  PetscInt        nsplits;      /* Number of field divisions defined */
  Vec            *x, *y, w1, w2; /* Per-split work vectors (x, y) and extra work vectors (w1, w2) */
  Mat            *mat;    /* The diagonal block for each split */
  Mat            *pmat;   /* The preconditioning diagonal block for each split */
  Mat            *Afield; /* The rows of the matrix associated with each split */
  PetscBool       issetup; /* Set once PCSetUp_FieldSplit() has created the index sets and submatrices */

  /* Only used when Schur complement preconditioning is used */
  Mat                       B;          /* The (0,1) block */
  Mat                       C;          /* The (1,0) block */
  Mat                       schur;      /* The Schur complement S = A11 - A10 A00^{-1} A01, the KSP here, kspinner, is H_1 in [El08] */
  Mat                       schurp;     /* Assembled approximation to S built by MatSchurComplement to be used as a preconditioning matrix when solving with S */
  Mat                       schur_user; /* User-provided preconditioning matrix for the Schur complement */
  PCFieldSplitSchurPreType  schurpre;   /* Determines which preconditioning matrix is used for the Schur complement */
  PCFieldSplitSchurFactType schurfactorization;
  KSP                       kspschur;   /* The solver for S */
  KSP                       kspupper;   /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */
  PetscScalar               schurscale; /* Scaling factor for the Schur complement solution with DIAG factorization */

  /* Only used when Golub-Kahan bidiagonalization preconditioning is used */
  Mat          H;           /* The modified matrix H = A00 + nu*A01*A01'              */
  PetscReal    gkbtol;      /* Stopping tolerance for lower bound estimate            */
  PetscInt     gkbdelay;    /* The delay window for the stopping criterion            */
  PetscReal    gkbnu;       /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */
  PetscInt     gkbmaxit;    /* Maximum number of iterations for outer loop            */
  PetscBool    gkbmonitor;  /* Monitor for gkb iterations and the lower bound error   */
  PetscViewer  gkbviewer;   /* Viewer context for gkbmonitor                          */
  Vec          u, v, d, Hu; /* Work vectors for the GKB algorithm                     */
  PetscScalar *vecz;        /* Contains intermediate values, eg for lower bound       */

  PC_FieldSplitLink head;               /* First split in the linked list of splits */
  PetscBool         isrestrict;       /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */
  PetscBool         suboptionsset;    /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */
  PetscBool         dm_splits;        /* Whether to use DMCreateFieldDecomposition() whenever possible */
  PetscBool         diag_use_amat;    /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         detect;           /* Whether to form 2-way split by finding zero diagonal entries */
  PetscBool         coordinates_set;  /* Whether PCSetCoordinates has been called */
} PC_FieldSplit;
72 
/*
    Note:
    There is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit rather than
    inside PC_FieldSplitLink; this is purely historical. If you want to be able to add new fields after the
    PC has already been used, you could change this.
*/
79 
80 /* This helper is so that setting a user-provided preconditioning matrix is orthogonal to choosing to use it.  This way the
81 * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices. */
82 static Mat FieldSplitSchurPre(PC_FieldSplit *jac)
83 {
84   switch (jac->schurpre) {
85   case PC_FIELDSPLIT_SCHUR_PRE_SELF:
86     return jac->schur;
87   case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
88     return jac->schurp;
89   case PC_FIELDSPLIT_SCHUR_PRE_A11:
90     return jac->pmat[1];
91   case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */
92   case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */
93   default:
94     return jac->schur_user ? jac->schur_user : jac->pmat[1];
95   }
96 }
97 
#include <petscdraw.h>
/* Viewer for PCFIELDSPLIT with additive/multiplicative composition: prints the
   composition type, how each split is defined (fields or IS), and each split's KSP.
   Supports ASCII and draw viewers. */
static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    /* blocksize is only meaningful (and printed) once it has been determined */
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for each split is in the following KSP objects:\n"));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        /* Split defined by a list of field indices: print them comma-separated.
           Tabbing is disabled so the indices stay on one line */
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      PetscCall(KSPView(ilink->ksp, viewer));
      ilink = ilink->next;
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    /* Lay the per-split KSP drawings out side by side, centered on the current point */
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
156 
/* Viewer for PCFIELDSPLIT when a Schur complement factorization is used: reports
   the factorization type, which matrix is used to precondition the Schur
   complement, the split definitions, and the A00/upper/Schur KSP solvers.
   Supports ASCII and draw viewers. */
static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer)
{
  PC_FieldSplit             *jac = (PC_FieldSplit *)pc->data;
  PetscBool                  iascii, isdraw;
  PetscInt                   i, j;
  PC_FieldSplitLink          ilink = jac->head;
  MatSchurComplementAinvType atype;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    switch (jac->schurpre) {
    case PC_FIELDSPLIT_SCHUR_PRE_SELF:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from S itself\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
      /* The A00 approximation used by the assembled Sp is only known once jac->schur exists */
      if (jac->schur) {
        PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype));
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's "))));
      }
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_A11:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_FULL:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from the exact Schur complement\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_USER:
      /* USER falls back to A11 when no user matrix has been provided; report what is actually used */
      if (jac->schur_user) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from user provided matrix\n"));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      }
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre);
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Split info:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      ilink = ilink->next;
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->head) {
      PetscCall(KSPView(jac->head->ksp, viewer));
    } else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    PetscCall(PetscViewerASCIIPopTab(viewer));
    /* A distinct upper solver only exists for factorizations that solve with A00 twice */
    if (jac->head && jac->kspupper != jac->head->ksp) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->kspschur) {
      PetscCall(KSPView(jac->kspschur, viewer));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    }
    PetscCall(PetscViewerASCIIPopTab(viewer));
    PetscCall(PetscViewerASCIIPopTab(viewer));
  } else if (isdraw && jac->head) {
    PetscDraw draw;
    PetscReal x, y, w, wd, h;
    PetscInt  cnt = 2; /* number of KSP boxes to draw: A00 and Schur, plus upper if distinct */
    char      str[32];

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (jac->kspupper != jac->head->ksp) cnt++;
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (cnt + 1);

    /* Two header boxes (factorization type, Schur preconditioner source), then the KSPs below */
    PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) {
      /* USER without a user matrix actually uses A11; report that */
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11]));
    } else {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre]));
    }
    PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    x = x - wd * (cnt - 1) / 2.0;

    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->head->ksp, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
    if (jac->kspupper != jac->head->ksp) {
      x += wd;
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(jac->kspupper, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
    }
    x += wd;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->kspschur, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
280 
/* Viewer for PCFIELDSPLIT when Golub-Kahan bidiagonalization is used: prints the
   GKB parameters (tolerance, delay, max iterations) and the solver for the
   augmented operator H. Supports ASCII and draw viewers. */
static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));

    PetscCall(PetscViewerASCIIPrintf(viewer, "  Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for H = A00 + nu*A01*A01' matrix:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));

    /* Only split 0 (the H solve) is reported here; GKB handles the second split internally */
    if (ilink->fields) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields "));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      for (j = 0; j < ilink->nfields; j++) {
        if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
        PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
      }
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n"));
    }
    PetscCall(KSPView(ilink->ksp, viewer));

    PetscCall(PetscViewerASCIIPopTab(viewer));
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    /* Lay the per-split KSP drawings out side by side, centered on the current point */
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
341 
342 /* Precondition: jac->bs is set to a meaningful value or MATNEST */
343 static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc)
344 {
345   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
346   PetscInt       bs, i, nfields, *ifields, nfields_col, *ifields_col;
347   PetscBool      flg, flg_col, mnest;
348   char           optionname[128], splitname[8], optionname_col[128];
349 
350   PetscFunctionBegin;
351   PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &mnest));
352   if (mnest) {
353     PetscCall(MatNestGetSize(pc->pmat, &bs, NULL));
354   } else {
355     bs = jac->bs;
356   }
357   PetscCall(PetscMalloc2(bs, &ifields, bs, &ifields_col));
358   for (i = 0, flg = PETSC_TRUE;; i++) {
359     PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
360     PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
361     PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i));
362     nfields     = bs;
363     nfields_col = bs;
364     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
365     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col));
366     if (!flg) break;
367     else if (flg && !flg_col) {
368       PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
369       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields));
370     } else {
371       PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
372       PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match");
373       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col));
374     }
375   }
376   if (i > 0) {
377     /* Makes command-line setting of splits take precedence over setting them in code.
378        Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would
379        create new splits, which would probably not be what the user wanted. */
380     jac->splitdefined = PETSC_TRUE;
381   }
382   PetscCall(PetscFree2(ifields, ifields_col));
383   PetscFunctionReturn(PETSC_SUCCESS);
384 }
385 
/* Ensure at least two splits are defined before PCSetUp_FieldSplit() proceeds.
   Sources of splits, in order of preference when none were set programmatically:
   the DM's field decomposition, zero-diagonal detection (-pc_fieldsplit_detect_coupling
   or jac->detect), runtime -pc_fieldsplit_<n>_fields options, or a default
   one-field-per-split decomposition. If exactly one split was set, its complement
   becomes the second split. */
static PetscErrorCode PCFieldSplitSetDefaults(PC pc)
{
  PC_FieldSplit    *jac                = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink              = jac->head;
  PetscBool         fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE;
  PetscInt          i;

  PetscFunctionBegin;
  /*
   Kinda messy, but at least this now uses DMCreateFieldDecomposition().
   Should probably be rewritten.
   */
  if (!ilink) {
    /* No splits defined yet */
    PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL));
    if (pc->dm && jac->dm_splits && !jac->detect && !coupling) {
      /* Use the DM's field decomposition to define the splits */
      PetscInt  numFields, f, i, j;
      char    **fieldNames;
      IS       *fields;
      DM       *dms;
      DM        subdm[128]; /* fixed capacity; guarded by the numFields <= 128 check below */
      PetscBool flg;

      PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms));
      /* Allow the user to prescribe the splits */
      for (i = 0, flg = PETSC_TRUE;; i++) {
        PetscInt ifields[128];
        IS       compField;
        char     optionname[128], splitname[8];
        PetscInt nfields = numFields;

        PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
        PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
        if (!flg) break;
        PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields);
        PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i]));
        if (nfields == 1) {
          /* A single-field split keeps the DM's field name */
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField));
        } else {
          PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
          PetscCall(PCFieldSplitSetIS(pc, splitname, compField));
        }
        PetscCall(ISDestroy(&compField));
        /* Release the decomposition entries consumed by this split */
        for (j = 0; j < nfields; ++j) {
          f = ifields[j];
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      }
      if (i == 0) {
        /* No options-prescribed splits: one split per DM field */
        for (f = 0; f < numFields; ++f) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f]));
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      } else {
        /* Replace the per-field DMs with the sub-DMs created for the prescribed splits */
        for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j));
        PetscCall(PetscFree(dms));
        PetscCall(PetscMalloc1(i, &dms));
        for (j = 0; j < i; ++j) dms[j] = subdm[j];
      }
      PetscCall(PetscFree(fieldNames));
      PetscCall(PetscFree(fields));
      if (dms) {
        /* Attach a DM to each split's KSP (inactive: used for PC setup, not for operators) */
        PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n"));
        for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) {
          const char *prefix;
          PetscCall(PetscObjectGetOptionsPrefix((PetscObject)ilink->ksp, &prefix));
          PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dms[i], prefix));
          PetscCall(KSPSetDM(ilink->ksp, dms[i]));
          PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE));
          PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0));
          PetscCall(DMDestroy(&dms[i]));
        }
        PetscCall(PetscFree(dms));
      }
    } else {
      /* No DM decomposition available (or detection requested) */
      if (jac->bs <= 0) {
        if (pc->pmat) {
          PetscCall(MatGetBlockSize(pc->pmat, &jac->bs));
        } else jac->bs = 1;
      }

      if (jac->detect) {
        /* 2-way split: zero-diagonal rows vs. the rest (typical saddle-point detection) */
        IS       zerodiags, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->diag_use_amat) {
          PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags));
        } else {
          PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags));
        }
        PetscCall(ISComplement(zerodiags, nmin, nmax, &rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags));
        PetscCall(ISDestroy(&zerodiags));
        PetscCall(ISDestroy(&rest));
      } else if (coupling) {
        /* 2-way split: rows with off-block-diagonal entries vs. everything (split 0 is the identity) */
        IS       coupling, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->offdiag_use_amat) {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling));
        } else {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling));
        }
        PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest));
        PetscCall(ISSetIdentity(rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", coupling));
        PetscCall(ISDestroy(&coupling));
        PetscCall(ISDestroy(&rest));
      } else {
        PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL));
        if (!fieldsplit_default) {
          /* Allow user to set fields from command line,  if bs was known at the time of PCSetFromOptions_FieldSplit()
           then it is set there. This is not ideal because we should only have options set in XXSetFromOptions(). */
          PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
          if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
        }
        if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) {
          /* Default decomposition: one split per field (block-size stripe or nest block) */
          Mat       M = pc->pmat;
          PetscBool isnest;
          PetscInt  nf;

          PetscCall(PetscInfo(pc, "Using default splitting of fields\n"));
          PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest));
          if (!isnest) {
            M = pc->mat;
            PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest));
          }
          if (!isnest) nf = jac->bs;
          else PetscCall(MatNestGetSize(M, &nf, NULL));
          for (i = 0; i < nf; i++) {
            char splitname[8];

            PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
            PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i));
          }
          jac->defaultsplit = PETSC_TRUE;
        }
      }
    }
  } else if (jac->nsplits == 1) {
    /* Exactly one split was given: its complement becomes split "1" */
    IS       is2;
    PetscInt nmin, nmax;

    PetscCheck(ilink->is, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()");
    PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
    PetscCall(ISComplement(ilink->is, nmin, nmax, &is2));
    PetscCall(PCFieldSplitSetIS(pc, "1", is2));
    PetscCall(ISDestroy(&is2));
  }

  PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits);
  PetscFunctionReturn(PETSC_SUCCESS);
}
544 
545 static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu)
546 {
547   Mat       BT, T;
548   PetscReal nrmT, nrmB;
549 
550   PetscFunctionBegin;
551   PetscCall(MatHermitianTranspose(C, MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */
552   PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN));
553   PetscCall(MatNorm(T, NORM_1, &nrmT));
554   PetscCall(MatNorm(B, NORM_1, &nrmB));
555   PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/hermitian, GKB is not applicable.");
556 
557   /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */
558   /* setting N := 1/nu*I in [Ar13].                                                 */
559   PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT));
560   PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_DEFAULT, H)); /* H = A01*A01'          */
561   PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN));        /* H = A00 + nu*A01*A01' */
562 
563   PetscCall(MatDestroy(&BT));
564   PetscCall(MatDestroy(&T));
565   PetscFunctionReturn(PETSC_SUCCESS);
566 }
567 
568 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *option[], const char *value[], PetscBool *flg);
569 
570 static PetscErrorCode PCSetUp_FieldSplit(PC pc)
571 {
572   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
573   PC_FieldSplitLink ilink;
574   PetscInt          i, nsplit;
575   PetscBool         sorted, sorted_col, matnest = PETSC_FALSE;
576 
577   PetscFunctionBegin;
578   pc->failedreason = PC_NOERROR;
579   PetscCall(PCFieldSplitSetDefaults(pc));
580   nsplit = jac->nsplits;
581   ilink  = jac->head;
582   if (pc->pmat) PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
583 
584   /* get the matrices for each split */
585   if (!jac->issetup) {
586     PetscInt rstart, rend, nslots, bs;
587 
588     jac->issetup = PETSC_TRUE;
589 
590     /* This is done here instead of in PCFieldSplitSetFields() because may not have matrix at that point */
591     if (jac->defaultsplit || !ilink->is) {
592       if (jac->bs <= 0) jac->bs = nsplit;
593     }
594 
595     /*  MatCreateSubMatrix() for [S]BAIJ matrices can only work if the indices include entire blocks of the matrix */
596     PetscCall(MatGetBlockSize(pc->pmat, &bs));
597     if (bs > 1 && (jac->bs <= bs || jac->bs % bs)) {
598       PetscBool blk;
599 
600       PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &blk, MATBAIJ, MATSBAIJ, MATSEQBAIJ, MATSEQSBAIJ, MATMPIBAIJ, MATMPISBAIJ, NULL));
601       PetscCheck(!blk, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Cannot use MATBAIJ with PCFIELDSPLIT and currently set matrix and PC blocksizes");
602     }
603 
604     if (!matnest) { /* use the matrix blocksize and stride IS to determine the index sets that define the submatrices */
605       bs = jac->bs;
606       PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend));
607       nslots = (rend - rstart) / bs;
608       for (i = 0; i < nsplit; i++) {
609         if (jac->defaultsplit) {
610           PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is));
611           PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
612         } else if (!ilink->is) {
613           if (ilink->nfields > 1) {
614             PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col;
615 
616             PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii));
617             PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj));
618             for (j = 0; j < nslots; j++) {
619               for (k = 0; k < nfields; k++) {
620                 ii[nfields * j + k] = rstart + bs * j + fields[k];
621                 jj[nfields * j + k] = rstart + bs * j + fields_col[k];
622               }
623             }
624             PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is));
625             PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col));
626             PetscCall(ISSetBlockSize(ilink->is, nfields));
627             PetscCall(ISSetBlockSize(ilink->is_col, nfields));
628           } else {
629             PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is));
630             PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col));
631           }
632         }
633         PetscCall(ISSorted(ilink->is, &sorted));
634         if (ilink->is_col) PetscCall(ISSorted(ilink->is_col, &sorted_col));
635         PetscCheck(sorted && sorted_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Fields must be sorted when creating split");
636         ilink = ilink->next;
637       }
638     } else { /* use the IS that define the MATNEST to determine the index sets that define the submatrices */
639       IS      *rowis, *colis, *ises = NULL;
640       PetscInt mis, nis;
641 
642       PetscCall(MatNestGetSize(pc->pmat, &mis, &nis));
643       PetscCall(PetscMalloc2(mis, &rowis, nis, &colis));
644       PetscCall(MatNestGetISs(pc->pmat, rowis, colis));
645       if (!jac->defaultsplit) PetscCall(PetscMalloc1(mis, &ises));
646 
647       for (i = 0; i < nsplit; i++) {
648         if (jac->defaultsplit) {
649           PetscCall(ISDuplicate(rowis[i], &ilink->is));
650           PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
651         } else if (!ilink->is) {
652           if (ilink->nfields > 1) {
653             for (PetscInt j = 0; j < ilink->nfields; j++) ises[j] = rowis[ilink->fields[j]];
654             PetscCall(ISConcatenate(PetscObjectComm((PetscObject)pc), ilink->nfields, ises, &ilink->is));
655           } else {
656             PetscCall(ISDuplicate(rowis[ilink->fields[0]], &ilink->is));
657           }
658           PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
659         }
660         ilink = ilink->next;
661       }
662       PetscCall(PetscFree2(rowis, colis));
663       PetscCall(PetscFree(ises));
664     }
665   }
666 
667   ilink = jac->head;
668   if (!jac->pmat) {
669     Vec xtmp;
670 
671     PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL));
672     PetscCall(PetscMalloc1(nsplit, &jac->pmat));
673     PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y));
674     for (i = 0; i < nsplit; i++) {
675       MatNullSpace sp;
676 
677       /* Check for preconditioning matrix attached to IS */
678       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i]));
679       if (jac->pmat[i]) {
680         PetscCall(PetscObjectReference((PetscObject)jac->pmat[i]));
681         if (jac->type == PC_COMPOSITE_SCHUR) {
682           jac->schur_user = jac->pmat[i];
683 
684           PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
685         }
686       } else {
687         const char *prefix;
688         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i]));
689         PetscCall(MatGetOptionsPrefix(jac->pmat[i], &prefix));
690         if (!prefix) {
691           PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix));
692           PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix));
693         }
694         PetscCall(MatSetFromOptions(jac->pmat[i]));
695         PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view"));
696       }
697       /* create work vectors for each split */
698       PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i]));
699       ilink->x = jac->x[i];
700       ilink->y = jac->y[i];
701       ilink->z = NULL;
702       /* compute scatter contexts needed by multiplicative versions and non-default splits */
703       PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx));
704       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp));
705       if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp));
706       ilink = ilink->next;
707     }
708     PetscCall(VecDestroy(&xtmp));
709   } else {
710     MatReuse      scall;
711     MatNullSpace *nullsp = NULL;
712 
713     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
714       PetscCall(MatGetNullSpaces(nsplit, jac->pmat, &nullsp));
715       for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i]));
716       scall = MAT_INITIAL_MATRIX;
717     } else scall = MAT_REUSE_MATRIX;
718 
719     for (i = 0; i < nsplit; i++) {
720       Mat pmat;
721 
722       /* Check for preconditioning matrix attached to IS */
723       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat));
724       if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i]));
725       ilink = ilink->next;
726     }
727     if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->pmat, &nullsp));
728   }
729   if (jac->diag_use_amat) {
730     ilink = jac->head;
731     if (!jac->mat) {
732       PetscCall(PetscMalloc1(nsplit, &jac->mat));
733       for (i = 0; i < nsplit; i++) {
734         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i]));
735         ilink = ilink->next;
736       }
737     } else {
738       MatReuse      scall;
739       MatNullSpace *nullsp = NULL;
740 
741       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
742         PetscCall(MatGetNullSpaces(nsplit, jac->mat, &nullsp));
743         for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i]));
744         scall = MAT_INITIAL_MATRIX;
745       } else scall = MAT_REUSE_MATRIX;
746 
747       for (i = 0; i < nsplit; i++) {
748         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i]));
749         ilink = ilink->next;
750       }
751       if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->mat, &nullsp));
752     }
753   } else {
754     jac->mat = jac->pmat;
755   }
756 
757   /* Check for null space attached to IS */
758   ilink = jac->head;
759   for (i = 0; i < nsplit; i++) {
760     MatNullSpace sp;
761 
762     PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp));
763     if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp));
764     ilink = ilink->next;
765   }
766 
767   if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) {
768     /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */
769     /* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? */
770     ilink = jac->head;
771     if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) {
772       /* special case need where Afield[0] is not needed and only certain columns of Afield[1] are needed since update is only on those rows of the solution */
773       if (!jac->Afield) {
774         PetscCall(PetscCalloc1(nsplit, &jac->Afield));
775         if (jac->offdiag_use_amat) {
776           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
777         } else {
778           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
779         }
780       } else {
781         MatReuse scall;
782 
783         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
784           PetscCall(MatDestroy(&jac->Afield[1]));
785           scall = MAT_INITIAL_MATRIX;
786         } else scall = MAT_REUSE_MATRIX;
787 
788         if (jac->offdiag_use_amat) {
789           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
790         } else {
791           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
792         }
793       }
794     } else {
795       if (!jac->Afield) {
796         PetscCall(PetscMalloc1(nsplit, &jac->Afield));
797         for (i = 0; i < nsplit; i++) {
798           if (jac->offdiag_use_amat) {
799             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
800           } else {
801             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
802           }
803           ilink = ilink->next;
804         }
805       } else {
806         MatReuse scall;
807         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
808           for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i]));
809           scall = MAT_INITIAL_MATRIX;
810         } else scall = MAT_REUSE_MATRIX;
811 
812         for (i = 0; i < nsplit; i++) {
813           if (jac->offdiag_use_amat) {
814             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i]));
815           } else {
816             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, &jac->Afield[i]));
817           }
818           ilink = ilink->next;
819         }
820       }
821     }
822   }
823 
824   if (jac->type == PC_COMPOSITE_SCHUR) {
825     IS          ccis;
826     PetscBool   isset, isspd;
827     PetscInt    rstart, rend;
828     char        lscname[256];
829     PetscObject LSC_L;
830     PetscBool   set, flg;
831 
832     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields");
833 
834     /* If pc->mat is SPD, don't scale by -1 the Schur complement */
835     if (jac->schurscale == (PetscScalar)-1.0) {
836       PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd));
837       jac->schurscale = (isset && isspd) ? 1.0 : -1.0;
838     }
839 
840     /* When extracting off-diagonal submatrices, we take complements from this range */
841     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
842     PetscCall(PetscObjectTypeCompareAny(jac->offdiag_use_amat ? (PetscObject)pc->mat : (PetscObject)pc->pmat, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
843 
844     if (jac->schur) {
845       KSP      kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper;
846       MatReuse scall;
847 
848       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
849         scall = MAT_INITIAL_MATRIX;
850         PetscCall(MatDestroy(&jac->B));
851         PetscCall(MatDestroy(&jac->C));
852       } else scall = MAT_REUSE_MATRIX;
853 
854       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
855       ilink = jac->head;
856       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
857       if (jac->offdiag_use_amat) {
858         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->B));
859       } else {
860         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->B));
861       }
862       PetscCall(ISDestroy(&ccis));
863       if (!flg) {
864         ilink = ilink->next;
865         PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
866         if (jac->offdiag_use_amat) {
867           PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->C));
868         } else {
869           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->C));
870         }
871         PetscCall(ISDestroy(&ccis));
872       } else {
873         PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &set, &flg));
874         if (set && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
875         else PetscCall(MatCreateTranspose(jac->B, &jac->C));
876       }
877       PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
878       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) {
879         PetscCall(MatDestroy(&jac->schurp));
880         PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
881       } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) {
882         PetscCall(MatDestroy(&jac->schur_user));
883         if (jac->kspupper == jac->head->ksp) {
884           Mat AinvB;
885 
886           PetscCall(PetscObjectQuery((PetscObject)jac->schur, "AinvB", (PetscObject *)&AinvB));
887           PetscCall(MatDestroy(&AinvB));
888           PetscCall(MatCreate(PetscObjectComm((PetscObject)jac->schur), &AinvB));
889           PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", (PetscObject)AinvB));
890         }
891         PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
892       }
893       if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0]));
894       if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0]));
895       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
896     } else {
897       const char  *Dprefix;
898       char         schurprefix[256], schurmatprefix[256];
899       char         schurtestoption[256];
900       MatNullSpace sp;
901       KSP          kspt;
902 
903       /* extract the A01 and A10 matrices */
904       ilink = jac->head;
905       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
906       if (jac->offdiag_use_amat) {
907         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
908       } else {
909         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
910       }
911       PetscCall(ISDestroy(&ccis));
912       ilink = ilink->next;
913       if (!flg) {
914         PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
915         if (jac->offdiag_use_amat) {
916           PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
917         } else {
918           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
919         }
920         PetscCall(ISDestroy(&ccis));
921       } else {
922         PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &set, &flg));
923         if (set && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
924         else PetscCall(MatCreateTranspose(jac->B, &jac->C));
925       }
926       /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */
927       PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur));
928       PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT));
929       PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
930       PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
931       PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix));
932       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt));
933       PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix));
934 
935       /* Note: this is not true in general */
936       PetscCall(MatGetNullSpace(jac->mat[1], &sp));
937       if (sp) PetscCall(MatSetNullSpace(jac->schur, sp));
938 
939       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname));
940       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
941       if (flg) {
942         DM  dmInner;
943         KSP kspInner;
944         PC  pcInner;
945 
946         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
947         PetscCall(KSPReset(kspInner));
948         PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0]));
949         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
950         /* Indent this deeper to emphasize the "inner" nature of this solver. */
951         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2));
952         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2));
953         PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix));
954 
955         /* Set DM for new solver */
956         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
957         PetscCall(KSPSetDM(kspInner, dmInner));
958         PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE));
959 
960         /* Defaults to PCKSP as preconditioner */
961         PetscCall(KSPGetPC(kspInner, &pcInner));
962         PetscCall(PCSetType(pcInner, PCKSP));
963         PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp));
964       } else {
965         /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or
966           * PCFieldSplitSetIS_FieldSplit. We don't want KSPPREONLY because it makes the Schur complement inexact,
967           * preventing Schur complement reduction to be an accurate solve. Usually when an iterative solver is used for
968           * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make
969           * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used
970           * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. */
971         PetscCall(KSPSetType(jac->head->ksp, KSPGMRES));
972         PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp));
973       }
974       PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0]));
975       PetscCall(KSPSetFromOptions(jac->head->ksp));
976       PetscCall(MatSetFromOptions(jac->schur));
977 
978       PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg));
979       if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */
980         KSP kspInner;
981         PC  pcInner;
982 
983         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
984         PetscCall(KSPGetPC(kspInner, &pcInner));
985         PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg));
986         if (flg) {
987           KSP ksp;
988 
989           PetscCall(PCKSPGetKSP(pcInner, &ksp));
990           if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE));
991         }
992       }
993       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname));
994       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
995       if (flg) {
996         DM dmInner;
997 
998         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
999         PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper));
1000         PetscCall(KSPSetNestLevel(jac->kspupper, pc->kspnestlevel));
1001         PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure));
1002         PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix));
1003         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1));
1004         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1));
1005         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
1006         PetscCall(KSPSetDM(jac->kspupper, dmInner));
1007         PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE));
1008         PetscCall(KSPSetFromOptions(jac->kspupper));
1009         PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0]));
1010         PetscCall(VecDuplicate(jac->head->x, &jac->head->z));
1011       } else {
1012         jac->kspupper = jac->head->ksp;
1013         PetscCall(PetscObjectReference((PetscObject)jac->head->ksp));
1014       }
1015 
1016       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
1017       PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur));
1018       PetscCall(KSPSetNestLevel(jac->kspschur, pc->kspnestlevel));
1019       PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure));
1020       PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1));
1021       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) {
1022         PC pcschur;
1023         PetscCall(KSPGetPC(jac->kspschur, &pcschur));
1024         PetscCall(PCSetType(pcschur, PCNONE));
1025         /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */
1026       } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) {
1027         if (jac->schurfactorization == PC_FIELDSPLIT_SCHUR_FACT_FULL && jac->kspupper == jac->head->ksp) {
1028           Mat AinvB;
1029 
1030           PetscCall(MatCreate(PetscObjectComm((PetscObject)jac->schur), &AinvB));
1031           PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", (PetscObject)AinvB));
1032         }
1033         PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
1034       }
1035       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
1036       PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix));
1037       PetscCall(KSPSetOptionsPrefix(jac->kspschur, Dprefix));
1038       /* propagate DM */
1039       {
1040         DM sdm;
1041         PetscCall(KSPGetDM(jac->head->next->ksp, &sdm));
1042         if (sdm) {
1043           PetscCall(KSPSetDM(jac->kspschur, sdm));
1044           PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE));
1045         }
1046       }
1047       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
1048       /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */
1049       PetscCall(KSPSetFromOptions(jac->kspschur));
1050     }
1051     PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY));
1052     PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY));
1053 
1054     /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */
1055     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname));
1056     PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
1057     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
1058     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", (PetscObject)LSC_L));
1059     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname));
1060     PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
1061     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
1062     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", (PetscObject)LSC_L));
1063   } else if (jac->type == PC_COMPOSITE_GKB) {
1064     IS       ccis;
1065     PetscInt rstart, rend;
1066 
1067     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields");
1068 
1069     ilink = jac->head;
1070 
1071     /* When extracting off-diagonal submatrices, we take complements from this range */
1072     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
1073 
1074     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1075     if (jac->offdiag_use_amat) {
1076       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1077     } else {
1078       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1079     }
1080     PetscCall(ISDestroy(&ccis));
1081     /* Create work vectors for GKB algorithm */
1082     PetscCall(VecDuplicate(ilink->x, &jac->u));
1083     PetscCall(VecDuplicate(ilink->x, &jac->Hu));
1084     PetscCall(VecDuplicate(ilink->x, &jac->w2));
1085     ilink = ilink->next;
1086     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1087     if (jac->offdiag_use_amat) {
1088       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1089     } else {
1090       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1091     }
1092     PetscCall(ISDestroy(&ccis));
1093     /* Create work vectors for GKB algorithm */
1094     PetscCall(VecDuplicate(ilink->x, &jac->v));
1095     PetscCall(VecDuplicate(ilink->x, &jac->d));
1096     PetscCall(VecDuplicate(ilink->x, &jac->w1));
1097     PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu));
1098     PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz));
1099 
1100     ilink = jac->head;
1101     PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H));
1102     if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1103     /* Create gkb_monitor context */
1104     if (jac->gkbmonitor) {
1105       PetscInt tablevel;
1106       PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer));
1107       PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII));
1108       PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel));
1109       PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel));
1110       PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1));
1111     }
1112   } else {
1113     /* set up the individual splits' PCs */
1114     i     = 0;
1115     ilink = jac->head;
1116     while (ilink) {
1117       PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i]));
1118       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
1119       if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1120       i++;
1121       ilink = ilink->next;
1122     }
1123   }
1124 
1125   /* Set coordinates to the sub PC objects whenever these are set */
1126   if (jac->coordinates_set) {
1127     PC pc_coords;
1128     if (jac->type == PC_COMPOSITE_SCHUR) {
1129       // Head is first block.
1130       PetscCall(KSPGetPC(jac->head->ksp, &pc_coords));
1131       PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords));
1132       // Second one is Schur block, but its KSP object is in kspschur.
1133       PetscCall(KSPGetPC(jac->kspschur, &pc_coords));
1134       PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords));
1135     } else if (jac->type == PC_COMPOSITE_GKB) {
1136       PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB Fieldpslit preconditioner\n"));
1137     } else {
1138       ilink = jac->head;
1139       while (ilink) {
1140         PetscCall(KSPGetPC(ilink->ksp, &pc_coords));
1141         PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords));
1142         ilink = ilink->next;
1143       }
1144     }
1145   }
1146 
1147   jac->suboptionsset = PETSC_TRUE;
1148   PetscFunctionReturn(PETSC_SUCCESS);
1149 }
1150 
/* FieldSplitSplitSolveAdd - scatter the global vector xx into the split's work vector,
   solve with the split's KSP (wrapped in the split's log event), and scatter-ADD the
   result back into the global vector yy.

   The expansion chains the calls with ||: each PetscErrorCode is nonzero on failure, so
   the first failing call short-circuits the rest and its error code becomes the value of
   the whole expression, which is cast back to PetscErrorCode for use inside PetscCall().

   NOTE: the expansion references a variable `pc` from the calling scope (passed to
   KSPCheckSolve), so this macro can only be used inside functions where `pc` is defined. */
#define FieldSplitSplitSolveAdd(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \
                    KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE)))
1155 
1156 static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y)
1157 {
1158   PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
1159   PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
1160   KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;
1161   Mat               AinvB = NULL;
1162   PetscInt          N     = -1;
1163 
1164   PetscFunctionBegin;
1165   switch (jac->schurfactorization) {
1166   case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
1167     /* [A00 0; 0 -S], positive definite, suitable for MINRES */
1168     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1169     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1170     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1171     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1172     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1173     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1174     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1175     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1176     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1177     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1178     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1179     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1180     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1181     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1182     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1183     PetscCall(VecScale(ilinkD->y, jac->schurscale));
1184     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1185     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1186     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1187     break;
1188   case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
1189     /* [A00 0; A10 S], suitable for left preconditioning */
1190     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1191     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1192     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1193     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1194     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1195     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1196     PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1197     PetscCall(VecScale(ilinkD->x, -1.));
1198     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1199     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1200     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1201     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1202     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1203     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1204     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1205     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1206     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1207     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1208     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1209     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1210     break;
1211   case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
1212     /* [A00 A01; 0 S], suitable for right preconditioning */
1213     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1214     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1215     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1216     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1217     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1218     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1219     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1220     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1221     PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1222     PetscCall(VecScale(ilinkA->x, -1.));
1223     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1224     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1225     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1226     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1227     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1228     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1229     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1230     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1231     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1232     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1233     break;
1234   case PC_FIELDSPLIT_SCHUR_FACT_FULL:
1235     /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */
1236     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1237     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1238     PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1239     if (kspUpper == kspA) {
1240       PetscCall(PetscObjectQuery((PetscObject)jac->schur, "AinvB", (PetscObject *)&AinvB));
1241       if (AinvB) {
1242         PetscCall(MatGetSize(AinvB, NULL, &N));
1243         if (N == -1) { // first time PCApply_FieldSplit_Schur() is called
1244           Mat                A;
1245           PetscInt           m, M, N;
1246           PetscScalar       *v, *write;
1247           const PetscScalar *read;
1248 
1249           PetscCall(MatGetSize(jac->B, &M, &N));
1250           PetscCall(MatGetLocalSize(jac->B, &m, NULL));
1251           PetscCall(VecGetArrayRead(ilinkA->x, &read));
1252           PetscCall(PetscMalloc1(m * (N + 1), &v));
1253           PetscCall(PetscArraycpy(v + m * N, read, m)); // copy the input Vec in the last column of the composed Mat
1254           PetscCall(VecRestoreArrayRead(ilinkA->x, &read));
1255           PetscCall(MatCreateDense(PetscObjectComm((PetscObject)jac->schur), m, PETSC_DECIDE, M, N + 1, v, &A)); // number of columns of the Schur complement plus one
1256           PetscCall(MatHeaderReplace(AinvB, &A));
1257           PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
1258           PetscCall(KSPSetOperators(jac->kspschur, jac->schur, jac->schur_user));
1259           PetscCall(VecGetArrayWrite(ilinkA->y, &write));
1260           PetscCall(PetscArraycpy(write, v + m * N, m)); // retrieve the solution as the last column of the composed Mat
1261           PetscCall(VecRestoreArrayWrite(ilinkA->y, &write));
1262         }
1263       }
1264     }
1265     if (!AinvB || N != -1) PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y));
1266     PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y));
1267     PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1268     PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1269     PetscCall(VecScale(ilinkD->x, -1.0));
1270     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1271     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1272 
1273     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1274     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1275     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1276     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1277     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1278     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1279     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1280 
1281     if (kspUpper == kspA) {
1282       if (!AinvB) {
1283         PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y));
1284         PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
1285         PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1286         PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1287         PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1288         PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1289       } else PetscCall(MatMultAdd(AinvB, ilinkD->y, ilinkA->y, ilinkA->y));
1290     } else {
1291       PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1292       PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1293       PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1294       PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1295       PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1296       PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z));
1297       PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z));
1298       PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1299       PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
1300     }
1301     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1302     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1303     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1304   }
1305   PetscFunctionReturn(PETSC_SUCCESS);
1306 }
1307 
/*
  PCApplyTranspose_FieldSplit_Schur - Applies the transpose of the Schur-complement
  block-factorization preconditioner: y = P^T x.

  The transpose of each factorization swaps the roles of the triangular factors:
  the transpose of the block-upper factor [A00 A01; 0 S] is block lower triangular,
  so that case solves with A00^T first; conversely the transpose of the LOWER
  factorization applies the Schur solve first. All inner applications use
  KSPSolveTranspose()/MatMultTranspose() of the operators used by the forward apply.

  ilinkA is the link for the 0 (A00) block, ilinkD the link for the Schur (1) block;
  each link carries the scatter between the global vectors x,y and the split work
  vectors. Scatter begin/end pairs are deliberately interleaved with solves so that
  communication overlaps computation — do not reorder.
*/
static PetscErrorCode PCApplyTranspose_FieldSplit_Schur(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
  KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;

  PetscFunctionBegin;
  switch (jac->schurfactorization) {
  case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
    /* [A00 0; 0 -S], positive definite, suitable for MINRES */
    /* The two blocks are independent: both forward scatters are started up front,
       and the A-block reverse scatter is started before the Schur solve begins. */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* NOTE(review): the tab level is pushed on kspA, not jac->kspschur — matches the
       forward apply; presumably intentional for viewer indentation, verify upstream */
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* account for the -S sign (and any user scaling) on the Schur block */
    PetscCall(VecScale(ilinkD->y, jac->schurscale));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* transpose of [A00 A01; 0 S] is block lower triangular: A00^T solve first,
       then the Schur block right-hand side is corrected with B^T */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* transpose of [A00 0; A10 S] is block upper triangular: Schur solve first,
       then the A block right-hand side is corrected with C^T */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
    PetscCall(VecScale(ilinkA->x, -1.));
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* transpose of the full LDU factorization: apply U^T (via kspUpper), then D^T
       (A00^T and S^T solves), then L^T (via kspLower) */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspUpper, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.0));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

    if (kspLower == kspA) {
      /* lower solve shares kspA: fold the C^T correction into the rhs and solve once */
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->y));
      PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    } else {
      /* separate lower KSP: solve with kspA, then subtract kspLower^{-T} C^T contribution */
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
      PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(KSPSolveTranspose(kspLower, ilinkA->x, ilinkA->z));
      PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->z));
      PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
    }
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1426 
/*
  PCApply_FieldSplit - Applies the fieldsplit preconditioner y = P x for the
  non-Schur, non-GKB composition types.

  Three paths:
    - PC_COMPOSITE_ADDITIVE: block Jacobi over the splits. When the splits are the
      default strided fields of a non-MATNEST matrix, VecStrideGatherAll/ScatterAll
      move all splits at once; otherwise each split is solved and accumulated via
      FieldSplitSplitSolveAdd().
    - PC_COMPOSITE_MULTIPLICATIVE with exactly 2 splits: specialized Gauss-Seidel
      sweep that forms the second block's residual with the off-diagonal row
      jac->Afield[1] only.
    - PC_COMPOSITE_MULTIPLICATIVE / SYMMETRIC_MULTIPLICATIVE (general): forward
      sweep over the linked list of splits (and a backward sweep for the symmetric
      variant), using jac->Afield[cnt] to restrict the residual to each split.

  The VecScatter begin/end pairs are interleaved with solves to overlap
  communication with computation; statement order is significant.
*/
static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          cnt, bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    PetscBool matnest;

    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
    if (jac->defaultsplit && !matnest) {
      /* default strided splits: sanity-check the vector block sizes first */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      /* gather every strided field of x into the per-split work vectors jac->x at once */
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* general splits: accumulate each split's solve into y via its scatter */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) {
    PetscCall(VecSet(y, 0.0));
    /* solve on first block for first block variables */
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));

    /* compute the residual only onto second block variables using first block variables */
    PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x));
    ilink = ilink->next;
    PetscCall(VecScale(ilink->x, -1.0));
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));

    /* solve on second block variables */
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
    /* lazily created full-size work vectors (freed in PCReset_FieldSplit) */
    if (!jac->w1) {
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    /* forward Gauss-Seidel sweep over the splits */
    PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
    cnt = 1;
    while (ilink->next) {
      ilink = ilink->next;
      /* compute the residual only over the part of the vector needed */
      PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x));
      PetscCall(VecScale(ilink->x, -1.0));
      PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
      PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
      PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    }
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* backward sweep, skipping the last split which was just solved */
      cnt -= 2;
      while (ilink->previous) {
        ilink = ilink->previous;
        /* compute the residual only over the part of the vector needed */
        PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x));
        PetscCall(VecScale(ilink->x, -1.0));
        PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      }
    }
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1527 
/*
  PCApply_FieldSplit_GKB - Applies the Golub-Kahan bidiagonalization (GKB)
  preconditioner for a 2x2 block system, following the algorithm referenced in
  the existing comments as [Ar13].

  The iteration solves the (optionally augmented-Lagrangian regularized) system
  with operator H = A + nu*B*B'. Each step performs one solve with H (via the
  first split's KSP), builds the bidiagonalization vectors u, v, and updates the
  two solution pieces ilinkA->y and ilinkD->y. Convergence is monitored through a
  lower-bound estimate computed from the last jac->gkbdelay values of z, stopping
  when it drops below jac->gkbtol or after jac->gkbmaxit iterations.

  Work vectors (u, v, Hu, d, w1, w2) and the delay buffer vecz live in jac and
  must have been set up before this is called.
*/
static PetscErrorCode PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
  KSP               ksp = ilinkA->ksp;
  Vec               u, v, Hu, d, work1, work2;
  PetscScalar       alpha, z, nrmz2, *vecz;
  PetscReal         lowbnd, nu, beta;
  PetscInt          j, iterGKB;

  PetscFunctionBegin;
  /* split the global rhs x into the two block work vectors */
  PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));

  u     = jac->u;
  v     = jac->v;
  Hu    = jac->Hu;
  d     = jac->d;
  work1 = jac->w1;
  work2 = jac->w2;
  vecz  = jac->vecz;

  /* Change RHS to comply with matrix regularization H = A + nu*B*B' */
  /* Add q = q + nu*B*b */
  if (jac->gkbnu) {
    nu = jac->gkbnu;
    PetscCall(VecScale(ilinkD->x, jac->gkbnu));
    PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */
  } else {
    /* Situation when no augmented Lagrangian is used. Then we set inner  */
    /* matrix N = I in [Ar13], and thus nu = 1.                           */
    nu = 1;
  }

  /* Transform rhs from [q,tilde{b}] to [0,b] */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y));
  PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1));
  PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x        */

  /* First step of algorithm */
  PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c)*/
  KSPCheckDot(ksp, beta);
  beta = PetscSqrtReal(nu) * beta;
  PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c      */
  PetscCall(MatMult(jac->B, v, work2));          /* u = H^{-1}*B*v      */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(KSPSolve(ksp, work2, u));
  PetscCall(KSPCheckSolve(ksp, pc, u));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u      */
  PetscCall(VecDot(Hu, u, &alpha));
  KSPCheckDot(ksp, alpha);
  PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
  alpha = PetscSqrtReal(PetscAbsScalar(alpha));
  PetscCall(VecScale(u, 1.0 / alpha));
  PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* v = nu/beta *c      */

  z       = beta / alpha;
  vecz[1] = z;

  /* Computation of first iterate x(1) and p(1) */
  PetscCall(VecAXPY(ilinkA->y, z, u));
  PetscCall(VecCopy(d, ilinkD->y));
  PetscCall(VecScale(ilinkD->y, -z));

  iterGKB = 1;
  /* start above the tolerance so the loop is entered */
  lowbnd  = 2 * jac->gkbtol;
  if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));

  while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) {
    iterGKB += 1;
    PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */
    PetscCall(VecAXPBY(v, nu, -alpha, work1));
    PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = sqrt(nu)*v'*v      */
    beta = beta / PetscSqrtReal(nu);
    PetscCall(VecScale(v, 1.0 / beta));
    PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */
    PetscCall(MatMult(jac->H, u, Hu));
    PetscCall(VecAXPY(work2, -beta, Hu));
    PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(KSPSolve(ksp, work2, u));
    PetscCall(KSPCheckSolve(ksp, pc, u));
    PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u            */
    PetscCall(VecDot(Hu, u, &alpha));
    KSPCheckDot(ksp, alpha);
    PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
    alpha = PetscSqrtReal(PetscAbsScalar(alpha));
    PetscCall(VecScale(u, 1.0 / alpha));

    z       = -beta / alpha * z; /* z <- beta/alpha*z     */
    vecz[0] = z;

    /* Computation of new iterate x(i+1) and p(i+1) */
    PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */
    PetscCall(VecAXPY(ilinkA->y, z, u));                   /* r = r + z*u          */
    PetscCall(VecAXPY(ilinkD->y, -z, d));                  /* p = p - z*d          */
    PetscCall(MatMult(jac->H, ilinkA->y, Hu));             /* ||u||_H = u'*H*u     */
    PetscCall(VecDot(Hu, ilinkA->y, &nrmz2));

    /* Compute Lower Bound estimate */
    if (iterGKB > jac->gkbdelay) {
      lowbnd = 0.0;
      for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]);
      lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2));
    }

    /* shift the delay buffer: vecz[0] (the newest z) moves toward the back */
    for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2];
    if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));
  }

  /* assemble the global solution y from the two block solutions */
  PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1650 
/* Transpose analogue of FieldSplitSplitSolveAdd(): scatter xx into the split's
   work vector, run KSPSolveTranspose() on the split's KSP, and scatter-add the
   result back into yy. Note the work-vector roles are swapped relative to the
   forward macro: ilink->y is the solve input, ilink->x the output. Written as a
   single ||-chained expression so it can be wrapped in PetscCall(); it relies on
   a variable `pc` being in scope at the expansion site for KSPCheckSolve(). */
#define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \
                    KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE)))
1655 
/*
  PCApplyTranspose_FieldSplit - Applies the transpose of the fieldsplit
  preconditioner, y = P^T x, for the non-Schur composition types.

  For PC_COMPOSITE_ADDITIVE the transpose of a block-diagonal application is
  again block diagonal, so the structure mirrors PCApply_FieldSplit with
  KSPSolveTranspose() replacing KSPSolve(). For the multiplicative types, the
  transpose of a forward sweep is a backward sweep: the non-symmetric case walks
  to the tail of the split list and sweeps backward; the symmetric case performs
  its forward-then-backward sweeps (self-adjoint structure, same traversal as the
  forward apply). Residuals are formed with MatMultTranspose(pc->mat, ...) on the
  full vector using the lazily created work vectors jac->w1/w2.
*/
static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    PetscBool matnest;

    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
    if (jac->defaultsplit && !matnest) {
      /* default strided splits: check block sizes, then gather/solve/scatter all fields */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* general splits: accumulate each split's transpose solve into y */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else {
    /* multiplicative variants need full-size residual work vectors */
    if (!jac->w1) {
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* symmetric sweep: forward then backward, residual recomputed before each solve */
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->next) {
        ilink = ilink->next;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    } else {
      /* transpose of the forward multiplicative sweep: start at the last split */
      while (ilink->next) { /* get to last entry in linked list */
        ilink = ilink->next;
      }
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1723 
static PetscErrorCode PCReset_FieldSplit(PC pc)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;
  Mat               AinvB;

  PetscFunctionBegin;
  /* Destroy every split in the linked list: its solver, work vectors, scatter, index sets, and name/field arrays */
  while (ilink) {
    PetscCall(KSPDestroy(&ilink->ksp));
    PetscCall(VecDestroy(&ilink->x));
    PetscCall(VecDestroy(&ilink->y));
    PetscCall(VecDestroy(&ilink->z));
    PetscCall(VecScatterDestroy(&ilink->sctx));
    PetscCall(ISDestroy(&ilink->is));
    PetscCall(ISDestroy(&ilink->is_col));
    PetscCall(PetscFree(ilink->splitname));
    PetscCall(PetscFree(ilink->fields));
    PetscCall(PetscFree(ilink->fields_col));
    next = ilink->next; /* save the successor before freeing the node that stores it */
    PetscCall(PetscFree(ilink));
    ilink = next;
  }
  jac->head = NULL;
  PetscCall(PetscFree2(jac->x, jac->y));
  /* jac->mat and jac->pmat may refer to the same array of matrices; make sure it is destroyed only once */
  if (jac->mat && jac->mat != jac->pmat) {
    PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat));
  } else if (jac->mat) {
    jac->mat = NULL; /* shared with jac->pmat, which is destroyed below */
  }
  if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat));
  if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield));
  jac->nsplits = 0;
  PetscCall(VecDestroy(&jac->w1));
  PetscCall(VecDestroy(&jac->w2));
  /* Drop the matrix composed on the Schur complement under the name "AinvB"
     (presumably A00^{-1} A01 cached by the Schur setup -- TODO confirm against its producer) */
  if (jac->schur) {
    PetscCall(PetscObjectQuery((PetscObject)jac->schur, "AinvB", (PetscObject *)&AinvB));
    PetscCall(MatDestroy(&AinvB));
    PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", NULL));
  }
  /* Schur-complement and GKB work objects; destroy routines are no-ops on NULL handles */
  PetscCall(MatDestroy(&jac->schur));
  PetscCall(MatDestroy(&jac->schurp));
  PetscCall(MatDestroy(&jac->schur_user));
  PetscCall(KSPDestroy(&jac->kspschur));
  PetscCall(KSPDestroy(&jac->kspupper));
  PetscCall(MatDestroy(&jac->B));
  PetscCall(MatDestroy(&jac->C));
  PetscCall(MatDestroy(&jac->H));
  PetscCall(VecDestroy(&jac->u));
  PetscCall(VecDestroy(&jac->v));
  PetscCall(VecDestroy(&jac->Hu));
  PetscCall(VecDestroy(&jac->d));
  PetscCall(PetscFree(jac->vecz));
  PetscCall(PetscViewerDestroy(&jac->gkbviewer));
  jac->isrestrict = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1780 
1781 static PetscErrorCode PCDestroy_FieldSplit(PC pc)
1782 {
1783   PetscFunctionBegin;
1784   PetscCall(PCReset_FieldSplit(pc));
1785   PetscCall(PetscFree(pc->data));
1786   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
1787   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL));
1788   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL));
1789   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL));
1790   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL));
1791   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL));
1792   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL));
1793   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1794 
1795   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
1796   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
1797   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
1798   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
1799   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1800   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
1801   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
1802   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
1803   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
1804   PetscFunctionReturn(PETSC_SUCCESS);
1805 }
1806 
/* Process the options database entries for a PCFIELDSPLIT preconditioner */
static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PetscInt        bs;
  PetscBool       flg;
  PC_FieldSplit  *jac = (PC_FieldSplit *)pc->data;
  PCCompositeType ctype;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options");
  PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL));
  PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg));
  if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs));
  /* The per-block Amat/Pmat choices default to the outer PC's useAmat setting */
  jac->diag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL));
  jac->offdiag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL));
  PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL));
  PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */
  PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg));
  if (flg) PetscCall(PCFieldSplitSetType(pc, ctype));
  /* Only setup fields once */
  if ((jac->bs > 0) && (jac->nsplits == 0)) {
    /* only allow user to set fields from command line.
       otherwise user can set them in PCFieldSplitSetDefaults() */
    PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
    if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
  }
  /* Options specific to the chosen composite type */
  if (jac->type == PC_COMPOSITE_SCHUR) {
    /* The old option name is still honored but reported as deprecated */
    PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg));
    if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n"));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL));
    PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL));
  } else if (jac->type == PC_COMPOSITE_GKB) {
    PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitSetGKBTol", jac->gkbtol, &jac->gkbtol, NULL));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitSetGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL));
    PetscCall(PetscOptionsBoundedReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitSetGKBNu", jac->gkbnu, &jac->gkbnu, NULL, 0.0));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", "Maximum allowed number of iterations", "PCFieldSplitSetGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL));
    PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL));
  }
  /*
    In the initial call to this routine the sub-solver data structures do not exist so we cannot call KSPSetFromOptions() on them yet.
    But after the initial setup of ALL the layers of sub-solvers is completed we do want to call KSPSetFromOptions() on the sub-solvers every time it
    is called on the outer solver in case changes were made in the options database

    But even after PCSetUp_FieldSplit() is called all the options inside the inner levels of sub-solvers may still not have been set thus we only call the KSPSetFromOptions()
    if we know that the entire stack of sub-solvers below this have been complete instantiated, we check this by seeing if any solver iterations are complete.
    Without this extra check test p2p1fetidp_olof_full and others fail with incorrect matrix types.

    There could be a negative side effect of calling the KSPSetFromOptions() below.

    If one captured the PetscObjectState of the options database one could skip these calls if the database has not changed from the previous call
  */
  if (jac->issetup) {
    PC_FieldSplitLink ilink = jac->head;
    if (jac->type == PC_COMPOSITE_SCHUR) {
      /* totalits > 0 signals the sub-solver stack below is fully instantiated (see comment above) */
      if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper));
      if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur));
    }
    while (ilink) {
      if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp));
      ilink = ilink->next;
    }
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
1874 
1875 static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
1876 {
1877   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
1878   PC_FieldSplitLink ilink, next = jac->head;
1879   char              prefix[128];
1880   PetscInt          i;
1881 
1882   PetscFunctionBegin;
1883   if (jac->splitdefined) {
1884     PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
1885     PetscFunctionReturn(PETSC_SUCCESS);
1886   }
1887   for (i = 0; i < n; i++) { PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]); }
1888   PetscCall(PetscNew(&ilink));
1889   if (splitname) {
1890     PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
1891   } else {
1892     PetscCall(PetscMalloc1(3, &ilink->splitname));
1893     PetscCall(PetscSNPrintf(ilink->splitname, 2, "%" PetscInt_FMT, jac->nsplits));
1894   }
1895   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split great than 4 gets logged in the 4th split */
1896   PetscCall(PetscMalloc1(n, &ilink->fields));
1897   PetscCall(PetscArraycpy(ilink->fields, fields, n));
1898   PetscCall(PetscMalloc1(n, &ilink->fields_col));
1899   PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n));
1900 
1901   ilink->nfields = n;
1902   ilink->next    = NULL;
1903   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
1904   PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
1905   PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
1906   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
1907   PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
1908 
1909   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
1910   PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
1911 
1912   if (!next) {
1913     jac->head       = ilink;
1914     ilink->previous = NULL;
1915   } else {
1916     while (next->next) next = next->next;
1917     next->next      = ilink;
1918     ilink->previous = next;
1919   }
1920   jac->nsplits++;
1921   PetscFunctionReturn(PETSC_SUCCESS);
1922 }
1923 
1924 static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1925 {
1926   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1927 
1928   PetscFunctionBegin;
1929   *subksp = NULL;
1930   if (n) *n = 0;
1931   if (jac->type == PC_COMPOSITE_SCHUR) {
1932     PetscInt nn;
1933 
1934     PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()");
1935     PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits);
1936     nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 1 : 0);
1937     PetscCall(PetscMalloc1(nn, subksp));
1938     (*subksp)[0] = jac->head->ksp;
1939     (*subksp)[1] = jac->kspschur;
1940     if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper;
1941     if (n) *n = nn;
1942   }
1943   PetscFunctionReturn(PETSC_SUCCESS);
1944 }
1945 
1946 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp)
1947 {
1948   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1949 
1950   PetscFunctionBegin;
1951   PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()");
1952   PetscCall(PetscMalloc1(jac->nsplits, subksp));
1953   PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp));
1954 
1955   (*subksp)[1] = jac->kspschur;
1956   if (n) *n = jac->nsplits;
1957   PetscFunctionReturn(PETSC_SUCCESS);
1958 }
1959 
1960 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1961 {
1962   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1963   PetscInt          cnt   = 0;
1964   PC_FieldSplitLink ilink = jac->head;
1965 
1966   PetscFunctionBegin;
1967   PetscCall(PetscMalloc1(jac->nsplits, subksp));
1968   while (ilink) {
1969     (*subksp)[cnt++] = ilink->ksp;
1970     ilink            = ilink->next;
1971   }
1972   PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits);
1973   if (n) *n = jac->nsplits;
1974   PetscFunctionReturn(PETSC_SUCCESS);
1975 }
1976 
1977 /*@
1978   PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`.
1979 
1980   Input Parameters:
1981 + pc  - the preconditioner context
1982 - isy - the index set that defines the indices to which the fieldsplit is to be restricted
1983 
1984   Level: advanced
1985 
1986   Developer Notes:
1987   It seems the resulting `IS`s will not cover the entire space, so
1988   how can they define a convergent preconditioner? Needs explaining.
1989 
1990 .seealso: [](sec_block_matrices), `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
1991 @*/
PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(isy, IS_CLASSID, 2);
  /* Dispatch to the type-specific implementation; a no-op when the PC is not PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2000 
/* Restrict every split's IS to the indices contained in isy, renumbering them into the
   contiguous numbering induced by isy; recurses into nested PCFIELDSPLIT sub-PCs */
static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;
  PetscInt          localsize, size, sizez, i;
  const PetscInt   *ind, *indz;
  PetscInt         *indc, *indcz;
  PetscBool         flg;

  PetscFunctionBegin;
  /* Exclusive prefix sum: size becomes the global offset of this rank's portion of isy */
  PetscCall(ISGetLocalSize(isy, &localsize));
  PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy)));
  size -= localsize;
  while (ilink) {
    IS isrl, isr;
    PC subpc;
    /* isrl holds the positions of this split's entries within isy; entries of ilink->is not
       present in isy are dropped (PETSC_TRUE) -- NOTE(review): relies on ISEmbed() drop semantics */
    PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl));
    PetscCall(ISGetLocalSize(isrl, &localsize));
    PetscCall(PetscMalloc1(localsize, &indc));
    PetscCall(ISGetIndices(isrl, &ind));
    PetscCall(PetscArraycpy(indc, ind, localsize));
    PetscCall(ISRestoreIndices(isrl, &ind));
    PetscCall(ISDestroy(&isrl));
    /* Shift the local positions by the rank offset to obtain global indices in isy's numbering */
    for (i = 0; i < localsize; i++) *(indc + i) += size;
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr));
    /* Install isr as both the row and column IS of the split; the extra references balance
       the two slots, and the final ISDestroy() drops the creation reference */
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is));
    ilink->is = isr;
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is_col));
    ilink->is_col = isr;
    PetscCall(ISDestroy(&isr));
    /* If the split's own preconditioner is again PCFIELDSPLIT, restrict its splits recursively */
    PetscCall(KSPGetPC(ilink->ksp, &subpc));
    PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg));
    if (flg) {
      IS       iszl, isz;
      MPI_Comm comm;
      PetscCall(ISGetLocalSize(ilink->is, &localsize));
      comm = PetscObjectComm((PetscObject)ilink->is);
      /* Embed in the opposite direction: positions of isy's entries within the (restricted) split */
      PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl));
      PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm));
      sizez -= localsize;
      PetscCall(ISGetLocalSize(iszl, &localsize));
      PetscCall(PetscMalloc1(localsize, &indcz));
      PetscCall(ISGetIndices(iszl, &indz));
      PetscCall(PetscArraycpy(indcz, indz, localsize));
      PetscCall(ISRestoreIndices(iszl, &indz));
      PetscCall(ISDestroy(&iszl));
      for (i = 0; i < localsize; i++) *(indcz + i) += sizez;
      PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz));
      PetscCall(PCFieldSplitRestrictIS(subpc, isz));
      PetscCall(ISDestroy(&isz));
    }
    next  = ilink->next;
    ilink = next;
  }
  jac->isrestrict = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2060 
2061 static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is)
2062 {
2063   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
2064   PC_FieldSplitLink ilink, next = jac->head;
2065   char              prefix[128];
2066 
2067   PetscFunctionBegin;
2068   if (jac->splitdefined) {
2069     PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
2070     PetscFunctionReturn(PETSC_SUCCESS);
2071   }
2072   PetscCall(PetscNew(&ilink));
2073   if (splitname) {
2074     PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
2075   } else {
2076     PetscCall(PetscMalloc1(8, &ilink->splitname));
2077     PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits));
2078   }
2079   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split great than 4 gets logged in the 4th split */
2080   PetscCall(PetscObjectReference((PetscObject)is));
2081   PetscCall(ISDestroy(&ilink->is));
2082   ilink->is = is;
2083   PetscCall(PetscObjectReference((PetscObject)is));
2084   PetscCall(ISDestroy(&ilink->is_col));
2085   ilink->is_col = is;
2086   ilink->next   = NULL;
2087   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
2088   PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
2089   PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
2090   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
2091   PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
2092 
2093   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
2094   PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
2095 
2096   if (!next) {
2097     jac->head       = ilink;
2098     ilink->previous = NULL;
2099   } else {
2100     while (next->next) next = next->next;
2101     next->next      = ilink;
2102     ilink->previous = next;
2103   }
2104   jac->nsplits++;
2105   PetscFunctionReturn(PETSC_SUCCESS);
2106 }
2107 
2108 /*@
2109   PCFieldSplitSetFields - Sets the fields that define one particular split in `PCFIELDSPLIT`
2110 
2111   Logically Collective
2112 
2113   Input Parameters:
2114 + pc         - the preconditioner context
2115 . splitname  - name of this split, if `NULL` the number of the split is used
2116 . n          - the number of fields in this split
2117 . fields     - the fields in this split
2118 - fields_col - generally the same as `fields`, if it does not match `fields` then the submatrix that is solved for this set of fields comes from an off-diagonal block
2119                of the matrix and `fields_col` provides the column indices for that block
2120 
2121   Options Database Key:
2122 . -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split
2123 
2124   Level: intermediate
2125 
2126   Notes:
  Use `PCFieldSplitSetIS()` to set a general set of indices as a split.
2128 
2129   If the matrix used to construct the preconditioner is `MATNEST` then field i refers to the `is_row[i]` `IS` passed to `MatCreateNest()`.
2130 
2131   If the matrix used to construct the preconditioner is not `MATNEST` then
  `PCFieldSplitSetFields()` is for defining fields as strided blocks (based on the block size provided to the matrix with `MatSetBlockSize()` or
2133   to the `PC` with `PCFieldSplitSetBlockSize()`). For example, if the block
2134   size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean
2135   0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x1x3x5x7.. x12x45x78x....
2136   where the numbered entries indicate what is in the split.
2137 
2138   This function is called once per split (it creates a new split each time).  Solve options
2139   for this split will be available under the prefix `-fieldsplit_SPLITNAME_`.
2140 
2141   `PCFieldSplitSetIS()` does not support having a `fields_col` different from `fields`
2142 
2143   Developer Notes:
2144   This routine does not actually create the `IS` representing the split, that is delayed
2145   until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be
2146   available when this routine is called.
2147 
2148 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()`,
          `MatSetBlockSize()`, `MatCreateNest()`
2150 @*/
PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt fields[], const PetscInt fields_col[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(splitname, 2);
  PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname);
  PetscAssertPointer(fields, 4);
  /* Dispatch to the type-specific implementation; a no-op when the PC is not PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2161 
2162 /*@
2163   PCFieldSplitSetDiagUseAmat - set flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2164   the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2165 
2166   Logically Collective
2167 
2168   Input Parameters:
2169 + pc  - the preconditioner object
2170 - flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2171 
2172   Options Database Key:
2173 . -pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks
2174 
2175   Level: intermediate
2176 
2177 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT`
2178 @*/
2179 PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg)
2180 {
2181   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2182   PetscBool      isfs;
2183 
2184   PetscFunctionBegin;
2185   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2186   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2187   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2188   jac->diag_use_amat = flg;
2189   PetscFunctionReturn(PETSC_SUCCESS);
2190 }
2191 
2192 /*@
2193   PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2194   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2195 
2196   Logically Collective
2197 
2198   Input Parameter:
2199 . pc - the preconditioner object
2200 
2201   Output Parameter:
2202 . flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2203 
2204   Level: intermediate
2205 
2206 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT`
2207 @*/
2208 PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg)
2209 {
2210   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2211   PetscBool      isfs;
2212 
2213   PetscFunctionBegin;
2214   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2215   PetscAssertPointer(flg, 2);
2216   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2217   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2218   *flg = jac->diag_use_amat;
2219   PetscFunctionReturn(PETSC_SUCCESS);
2220 }
2221 
2222 /*@
2223   PCFieldSplitSetOffDiagUseAmat - set flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2224   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2225 
2226   Logically Collective
2227 
2228   Input Parameters:
2229 + pc  - the preconditioner object
2230 - flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2231 
2232   Options Database Key:
2233 . -pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks
2234 
2235   Level: intermediate
2236 
2237 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT`
2238 @*/
2239 PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg)
2240 {
2241   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2242   PetscBool      isfs;
2243 
2244   PetscFunctionBegin;
2245   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2246   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2247   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2248   jac->offdiag_use_amat = flg;
2249   PetscFunctionReturn(PETSC_SUCCESS);
2250 }
2251 
2252 /*@
2253   PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2254   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2255 
2256   Logically Collective
2257 
2258   Input Parameter:
2259 . pc - the preconditioner object
2260 
2261   Output Parameter:
2262 . flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2263 
2264   Level: intermediate
2265 
2266 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT`
2267 @*/
2268 PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg)
2269 {
2270   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2271   PetscBool      isfs;
2272 
2273   PetscFunctionBegin;
2274   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2275   PetscAssertPointer(flg, 2);
2276   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2277   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2278   *flg = jac->offdiag_use_amat;
2279   PetscFunctionReturn(PETSC_SUCCESS);
2280 }
2281 
2282 /*@
2283   PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT`
2284 
2285   Logically Collective
2286 
2287   Input Parameters:
2288 + pc        - the preconditioner context
2289 . splitname - name of this split, if `NULL` the number of the split is used
2290 - is        - the index set that defines the elements in this split
2291 
2292   Level: intermediate
2293 
2294   Notes:
2295   Use `PCFieldSplitSetFields()`, for splits defined by strided `IS` based on the matrix block size or the `is_rows[]` passed into `MATNEST`
2296 
2297   This function is called once per split (it creates a new split each time).  Solve options
2298   for this split will be available under the prefix -fieldsplit_SPLITNAME_.
2299 
2300 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetFields()`
2301 @*/
PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* splitname may be NULL, in which case the implementation names the split by its number */
  if (splitname) PetscAssertPointer(splitname, 2);
  PetscValidHeaderSpecific(is, IS_CLASSID, 3);
  /* Dispatch to the type-specific implementation; a no-op when the PC is not PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2311 
2312 /*@
2313   PCFieldSplitGetIS - Retrieves the elements for a split as an `IS`
2314 
2315   Logically Collective
2316 
2317   Input Parameters:
2318 + pc        - the preconditioner context
2319 - splitname - name of this split
2320 
2321   Output Parameter:
2322 . is - the index set that defines the elements in this split, or `NULL` if the split is not found
2323 
2324   Level: intermediate
2325 
2326 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`, `PCFieldSplitGetISByIndex()`
2327 @*/
2328 PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is)
2329 {
2330   PetscFunctionBegin;
2331   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2332   PetscAssertPointer(splitname, 2);
2333   PetscAssertPointer(is, 3);
2334   {
2335     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2336     PC_FieldSplitLink ilink = jac->head;
2337     PetscBool         found;
2338 
2339     *is = NULL;
2340     while (ilink) {
2341       PetscCall(PetscStrcmp(ilink->splitname, splitname, &found));
2342       if (found) {
2343         *is = ilink->is;
2344         break;
2345       }
2346       ilink = ilink->next;
2347     }
2348   }
2349   PetscFunctionReturn(PETSC_SUCCESS);
2350 }
2351 
2352 /*@
2353   PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS`
2354 
2355   Logically Collective
2356 
2357   Input Parameters:
2358 + pc    - the preconditioner context
2359 - index - index of this split
2360 
2361   Output Parameter:
2362 . is - the index set that defines the elements in this split
2363 
2364   Level: intermediate
2365 
2366 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`,
2367 
2368 @*/
2369 PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is)
2370 {
2371   PetscFunctionBegin;
2372   PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index);
2373   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2374   PetscAssertPointer(is, 3);
2375   {
2376     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2377     PC_FieldSplitLink ilink = jac->head;
2378     PetscInt          i     = 0;
2379     PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits);
2380 
2381     while (i < index) {
2382       ilink = ilink->next;
2383       ++i;
2384     }
2385     PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is));
2386   }
2387   PetscFunctionReturn(PETSC_SUCCESS);
2388 }
2389 
2390 /*@
2391   PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the
2392   fieldsplit preconditioner when calling `PCFieldSplitSetFields()`. If not set the matrix block size is used.
2393 
2394   Logically Collective
2395 
2396   Input Parameters:
2397 + pc - the preconditioner context
2398 - bs - the block size
2399 
2400   Level: intermediate
2401 
2402   Note:
2403   If the matrix is a `MATNEST` then the `is_rows[]` passed to `MatCreateNest()` determines the fields.
2404 
2405 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
2406 @*/
2407 PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs)
2408 {
2409   PetscFunctionBegin;
2410   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2411   PetscValidLogicalCollectiveInt(pc, bs, 2);
2412   PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs));
2413   PetscFunctionReturn(PETSC_SUCCESS);
2414 }
2415 
/*@C
  PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits

  Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ n      - the number of splits
- subksp - the array of `KSP` contexts

  Level: advanced

  Notes:
  After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
  (not the `KSP`, just the array that contains them).

  You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`.

  If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the
  Schur complement and the `KSP` object used to iterate over the Schur complement.
  To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`.

  If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the
  inner linear system defined by the matrix H in each loop.

  Fortran Notes:
  You must pass in a `KSP` array that is large enough to contain all the `KSP`s.
  You can call `PCFieldSplitGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
  `KSP` array must be.

  Developer Notes:
  There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`

  The Fortran interface could be modernized to return directly the array of values.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()`
@*/
PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2); /* n is optional; only validate it when provided */
  /* dispatch to the type-specific implementation composed on this PC */
  PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2463 
/*@C
  PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT`

  Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ n      - the number of splits
- subksp - the array of `KSP` contexts

  Level: advanced

  Notes:
  After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
  (not the `KSP` just the array that contains them).

  You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`.

  If the fieldsplit type is of type `PC_COMPOSITE_SCHUR`, it returns (in order)
+  1  - the `KSP` used for the (1,1) block
.  2  - the `KSP` used for the Schur complement (not the one used for the interior Schur solver)
-  3  - the `KSP` used for the (1,1) block in the upper triangular factor (if different from that of the (1,1) block).

  It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`.

  Fortran Notes:
  You must pass in a `KSP` array that is large enough to contain all the local `KSP`s.
  You can call `PCFieldSplitSchurGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
  `KSP` array must be.

  Developer Notes:
  There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`

  Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged?

  The Fortran interface should be modernized to return directly the array of values.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()`
@*/
PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2); /* n is optional; only validate it when provided */
  /* dispatch to the type-specific implementation composed on this PC */
  PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2513 
2514 /*@
2515   PCFieldSplitSetSchurPre -  Indicates from what operator the preconditioner is constructed for the Schur complement.
2516   The default is the A11 matrix.
2517 
2518   Collective
2519 
2520   Input Parameters:
2521 + pc    - the preconditioner context
2522 . ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default),
2523               `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`,
2524               `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL`
2525 - pre   - matrix to use for preconditioning, or `NULL`
2526 
2527   Options Database Keys:
2528 + -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`. See notes for meaning of various arguments
2529 - -fieldsplit_1_pc_type <pctype>                               - the preconditioner algorithm that is used to construct the preconditioner from the operator
2530 
2531   Level: intermediate
2532 
2533   Notes:
2534   If ptype is
2535 +     a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner
2536   matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix
2537 .     self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix:
2538   The only preconditioners that currently work with this symbolic representation matrix object are `PCLSC` and `PCHPDDM`
2539 .     user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument
2540   to this function).
2541 .     selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation $ Sp = A11 - A10 inv(diag(A00)) A01 $
2542   This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be
2543   lumped before extracting the diagonal using the additional option `-fieldsplit_1_mat_schur_complement_ainv_type lump`
2544 -     full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation
2545   computed internally by `PCFIELDSPLIT` (this is expensive)
2546   useful mostly as a test that the Schur complement approach can work for your problem
2547 
2548   When solving a saddle point problem, where the A11 block is identically zero, using `a11` as the ptype only makes sense
2549   with the additional option `-fieldsplit_1_pc_type none`. Usually for saddle point problems one would use a `ptype` of `self` and
2550   `-fieldsplit_1_pc_type lsc` which uses the least squares commutator to compute a preconditioner for the Schur complement.
2551 
2552   Developer Note:
2553   The name of this function and the option `-pc_fieldsplit_schur_precondition` are inconsistent; precondition should be used everywhere.
2554 
2555 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`,
2556           `MatSchurComplementSetAinvType()`, `PCLSC`, `PCFieldSplitSetSchurFactType()`
2557 @*/
2558 PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2559 {
2560   PetscFunctionBegin;
2561   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2562   PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre));
2563   PetscFunctionReturn(PETSC_SUCCESS);
2564 }
2565 
2566 PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2567 {
2568   return PCFieldSplitSetSchurPre(pc, ptype, pre);
2569 } /* Deprecated name */
2570 
2571 /*@
2572   PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be
2573   preconditioned.  See `PCFieldSplitSetSchurPre()` for details.
2574 
2575   Logically Collective
2576 
2577   Input Parameter:
2578 . pc - the preconditioner context
2579 
2580   Output Parameters:
2581 + ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`
2582 - pre   - matrix to use for preconditioning (with `PC_FIELDSPLIT_SCHUR_PRE_USER`), or `NULL`
2583 
2584   Level: intermediate
2585 
2586 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC`
2587 @*/
2588 PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
2589 {
2590   PetscFunctionBegin;
2591   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2592   PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre));
2593   PetscFunctionReturn(PETSC_SUCCESS);
2594 }
2595 
2596 /*@
2597   PCFieldSplitSchurGetS -  extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately
2598 
2599   Not Collective
2600 
2601   Input Parameter:
2602 . pc - the preconditioner context
2603 
2604   Output Parameter:
2605 . S - the Schur complement matrix
2606 
2607   Level: advanced
2608 
2609   Note:
2610   This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`.
2611 
2612 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`,
2613           `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()`
2614 @*/
2615 PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S)
2616 {
2617   const char    *t;
2618   PetscBool      isfs;
2619   PC_FieldSplit *jac;
2620 
2621   PetscFunctionBegin;
2622   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2623   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2624   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2625   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2626   jac = (PC_FieldSplit *)pc->data;
2627   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2628   if (S) *S = jac->schur;
2629   PetscFunctionReturn(PETSC_SUCCESS);
2630 }
2631 
2632 /*@
2633   PCFieldSplitSchurRestoreS -  returns the `MATSCHURCOMPLEMENT` matrix used by this `PC`
2634 
2635   Not Collective
2636 
2637   Input Parameters:
2638 + pc - the preconditioner context
2639 - S  - the Schur complement matrix
2640 
2641   Level: advanced
2642 
2643 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MatSchurComplement`, `PCFieldSplitSchurGetS()`
2644 @*/
2645 PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S)
2646 {
2647   const char    *t;
2648   PetscBool      isfs;
2649   PC_FieldSplit *jac;
2650 
2651   PetscFunctionBegin;
2652   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2653   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2654   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2655   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2656   jac = (PC_FieldSplit *)pc->data;
2657   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2658   PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten");
2659   PetscFunctionReturn(PETSC_SUCCESS);
2660 }
2661 
2662 static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2663 {
2664   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2665 
2666   PetscFunctionBegin;
2667   jac->schurpre = ptype;
2668   if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) {
2669     PetscCall(MatDestroy(&jac->schur_user));
2670     jac->schur_user = pre;
2671     PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
2672   }
2673   PetscFunctionReturn(PETSC_SUCCESS);
2674 }
2675 
/* Implementation of PCFieldSplitGetSchurPre() for PCFIELDSPLIT: reports the stored
   Schur preconditioning choice and user matrix */
static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* Either output may be NULL if the caller does not need it */
  if (ptype) *ptype = jac->schurpre;
  if (pre) *pre = jac->schur_user;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2685 
2686 /*@
2687   PCFieldSplitSetSchurFactType -  sets which blocks of the approximate block factorization to retain in the preconditioner {cite}`murphy2000note` and {cite}`ipsen2001note`
2688 
2689   Collective
2690 
2691   Input Parameters:
2692 + pc    - the preconditioner context
2693 - ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default
2694 
2695   Options Database Key:
2696 . -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is `full`
2697 
2698   Level: intermediate
2699 
2700   Notes:
2701   The FULL factorization is
2702 
2703   ```{math}
2704   \left(\begin{array}{cc} A & B \\
2705   C & E \\
2706   \end{array}\right) =
2707   \left(\begin{array}{cc} 1 & 0 \\
2708   C*A^{-1} & I \\
2709   \end{array}\right)
2710   \left(\begin{array}{cc} A & 0 \\
2711   0 & S \\
2712   \end{array}\right)
2713   \left(\begin{array}{cc} I & A^{-1}B \\
2714   0 & I \\
2715   \end{array}\right) = L D U.
2716   ```
2717 
2718   where $ S = E - C*A^{-1}*B $. In practice, the full factorization is applied via block triangular solves with the grouping $L*(D*U)$. UPPER uses $D*U$, LOWER uses $L*D$,
2719   and DIAG is the diagonal part with the sign of $ S $ flipped (because this makes the preconditioner positive definite for many formulations,
2720   thus allowing the use of `KSPMINRES)`. Sign flipping of $ S $ can be turned off with `PCFieldSplitSetSchurScale()`.
2721 
2722   If $A$ and $S$ are solved exactly
2723 +  1 - FULL factorization is a direct solver.
2724 .  2 - The preconditioned operator with LOWER or UPPER has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations.
2725 -  3 -  With DIAG, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations.
2726 
2727   If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner
2728   application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice.
2729 
2730   For symmetric problems in which $A$ is positive definite and $S$ is negative definite, DIAG can be used with `KSPMINRES`.
2731 
2732   A flexible method like `KSPFGMRES` or `KSPGCR`, [](sec_flexibleksp), must be used if the fieldsplit preconditioner is nonlinear (e.g. a few iterations of a Krylov method is used to solve with A or S).
2733 
2734 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`,
2735           [](sec_flexibleksp), `PCFieldSplitSetSchurPre()`
2736 @*/
2737 PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype)
2738 {
2739   PetscFunctionBegin;
2740   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2741   PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype));
2742   PetscFunctionReturn(PETSC_SUCCESS);
2743 }
2744 
/* Implementation of PCFieldSplitSetSchurFactType() for PCFIELDSPLIT: stores the
   requested factorization type in the PCFIELDSPLIT data structure */
static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurfactorization = ftype;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2753 
2754 /*@
2755   PCFieldSplitSetSchurScale -  Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`.
2756 
2757   Collective
2758 
2759   Input Parameters:
2760 + pc    - the preconditioner context
2761 - scale - scaling factor for the Schur complement
2762 
2763   Options Database Key:
2764 . -pc_fieldsplit_schur_scale <scale> - default is -1.0
2765 
2766   Level: intermediate
2767 
2768 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurFactType()`
2769 @*/
2770 PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale)
2771 {
2772   PetscFunctionBegin;
2773   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2774   PetscValidLogicalCollectiveScalar(pc, scale, 2);
2775   PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale));
2776   PetscFunctionReturn(PETSC_SUCCESS);
2777 }
2778 
/* Implementation of PCFieldSplitSetSchurScale() for PCFIELDSPLIT: stores the
   Schur complement scaling factor in the PCFIELDSPLIT data structure */
static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurscale = scale;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2787 
2788 /*@C
2789   PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement
2790 
2791   Collective
2792 
2793   Input Parameter:
2794 . pc - the preconditioner context
2795 
2796   Output Parameters:
2797 + A00 - the (0,0) block
2798 . A01 - the (0,1) block
2799 . A10 - the (1,0) block
2800 - A11 - the (1,1) block
2801 
2802   Level: advanced
2803 
2804   Note:
2805   Use `NULL` for any unneeded output arguments
2806 
2807 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()`
2808 @*/
2809 PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11)
2810 {
2811   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2812 
2813   PetscFunctionBegin;
2814   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2815   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach.");
2816   if (A00) *A00 = jac->pmat[0];
2817   if (A01) *A01 = jac->B;
2818   if (A10) *A10 = jac->C;
2819   if (A11) *A11 = jac->pmat[1];
2820   PetscFunctionReturn(PETSC_SUCCESS);
2821 }
2822 
2823 /*@
2824   PCFieldSplitSetGKBTol -  Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2825 
2826   Collective
2827 
2828   Input Parameters:
2829 + pc        - the preconditioner context
2830 - tolerance - the solver tolerance
2831 
2832   Options Database Key:
2833 . -pc_fieldsplit_gkb_tol <tolerance> - default is 1e-5
2834 
2835   Level: intermediate
2836 
2837   Note:
2838   The generalized GKB algorithm {cite}`arioli2013` uses a lower bound estimate of the error in energy norm as stopping criterion.
2839   It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than
2840   this estimate, the stopping criterion is satisfactory in practical cases.
2841 
2842 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()`
2843 @*/
2844 PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance)
2845 {
2846   PetscFunctionBegin;
2847   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2848   PetscValidLogicalCollectiveReal(pc, tolerance, 2);
2849   PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance));
2850   PetscFunctionReturn(PETSC_SUCCESS);
2851 }
2852 
/* Implementation of PCFieldSplitSetGKBTol() for PCFIELDSPLIT: stores the GKB
   solver tolerance in the PCFIELDSPLIT data structure */
static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbtol = tolerance;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2861 
2862 /*@
2863   PCFieldSplitSetGKBMaxit -  Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2864 
2865   Collective
2866 
2867   Input Parameters:
2868 + pc    - the preconditioner context
2869 - maxit - the maximum number of iterations
2870 
2871   Options Database Key:
2872 . -pc_fieldsplit_gkb_maxit <maxit> - default is 100
2873 
2874   Level: intermediate
2875 
2876 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()`
2877 @*/
2878 PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit)
2879 {
2880   PetscFunctionBegin;
2881   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2882   PetscValidLogicalCollectiveInt(pc, maxit, 2);
2883   PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit));
2884   PetscFunctionReturn(PETSC_SUCCESS);
2885 }
2886 
/* Implementation of PCFieldSplitSetGKBMaxit() for PCFIELDSPLIT: stores the GKB
   iteration limit in the PCFIELDSPLIT data structure */
static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbmaxit = maxit;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2895 
2896 /*@
2897   PCFieldSplitSetGKBDelay -  Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization {cite}`arioli2013` in `PCFIELDSPLIT`
2898   preconditioner.
2899 
2900   Collective
2901 
2902   Input Parameters:
2903 + pc    - the preconditioner context
2904 - delay - the delay window in the lower bound estimate
2905 
2906   Options Database Key:
2907 . -pc_fieldsplit_gkb_delay <delay> - default is 5
2908 
2909   Level: intermediate
2910 
2911   Notes:
2912   The algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. The lower bound of the error $ ||u-u^k||_H $
2913   is expressed as a truncated sum. The error at iteration k can only be measured at iteration (k + `delay`), and thus the algorithm needs
2914   at least (`delay` + 1) iterations to stop.
2915 
2916   For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to {cite}`arioli2013`
2917 
2918 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2919 @*/
2920 PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay)
2921 {
2922   PetscFunctionBegin;
2923   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2924   PetscValidLogicalCollectiveInt(pc, delay, 2);
2925   PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay));
2926   PetscFunctionReturn(PETSC_SUCCESS);
2927 }
2928 
/* Implementation of PCFieldSplitSetGKBDelay() for PCFIELDSPLIT: stores the GKB
   error-estimate delay window in the PCFIELDSPLIT data structure */
static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbdelay = delay;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2937 
2938 /*@
2939   PCFieldSplitSetGKBNu -  Sets the scalar value nu >= 0 in the transformation H = A00 + nu*A01*A01' of the (1,1) block in the
2940   Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2941 
2942   Collective
2943 
2944   Input Parameters:
2945 + pc - the preconditioner context
2946 - nu - the shift parameter
2947 
2948   Options Database Key:
2949 . -pc_fieldsplit_gkb_nu <nu> - default is 1
2950 
2951   Level: intermediate
2952 
2953   Notes:
2954   This shift is in general done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by choosing `nu` sufficiently large. However,
2955   if `nu` is chosen too large, the matrix H might be badly conditioned and the solution of the linear system $Hx = b$ in the inner loop becomes difficult. It is therefore
2956   necessary to find a good balance in between the convergence of the inner and outer loop.
2957 
2958   For `nu` = 0, no shift is done. In this case A00 has to be positive definite. The matrix N in {cite}`arioli2013` is then chosen as identity.
2959 
2960 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2961 @*/
2962 PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu)
2963 {
2964   PetscFunctionBegin;
2965   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2966   PetscValidLogicalCollectiveReal(pc, nu, 2);
2967   PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu));
2968   PetscFunctionReturn(PETSC_SUCCESS);
2969 }
2970 
/* Implementation of PCFieldSplitSetGKBNu() for PCFIELDSPLIT: stores the GKB
   shift parameter nu in the PCFIELDSPLIT data structure */
static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbnu = nu;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2979 
/* Implementation of PCFieldSplitSetType() for PCFIELDSPLIT: records the composite type,
   clears all type-specific composed methods, then installs the apply/view callbacks and
   the subset of composed methods appropriate for the new type */
static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->type = type;
  /* Reset every type-specific composed method to NULL first, so that methods belonging
     to a previously selected type do not linger after the type changes */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));

  if (type == PC_COMPOSITE_SCHUR) {
    /* Schur complement: dedicated apply/view routines plus the Schur-only options */
    pc->ops->apply          = PCApply_FieldSplit_Schur;
    pc->ops->applytranspose = PCApplyTranspose_FieldSplit_Schur;
    pc->ops->view           = PCView_FieldSplit_Schur;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit));
  } else if (type == PC_COMPOSITE_GKB) {
    /* Golub-Kahan bidiagonalization: dedicated apply/view routines plus the GKB-only options */
    pc->ops->apply = PCApply_FieldSplit_GKB;
    pc->ops->view  = PCView_FieldSplit_GKB;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit));
  } else {
    /* Additive/multiplicative variants share the generic apply/view routines */
    pc->ops->apply = PCApply_FieldSplit;
    pc->ops->view  = PCView_FieldSplit;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
3023 
/* Implementation of PCFieldSplitSetBlockSize() for PCFIELDSPLIT: validates and stores
   the block size; the block size may be set once and cannot be changed afterwards */
static PetscErrorCode PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" PetscInt_FMT, bs);
  /* jac->bs <= 0 means "not yet set"; resetting to the same value is allowed */
  PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs);
  jac->bs = bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3034 
3035 static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
3036 {
3037   PC_FieldSplit    *jac           = (PC_FieldSplit *)pc->data;
3038   PC_FieldSplitLink ilink_current = jac->head;
3039   IS                is_owned;
3040 
3041   PetscFunctionBegin;
3042   jac->coordinates_set = PETSC_TRUE; // Internal flag
3043   PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, NULL));
3044 
3045   while (ilink_current) {
3046     // For each IS, embed it to get local coords indces
3047     IS              is_coords;
3048     PetscInt        ndofs_block;
3049     const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block
3050 
3051     // Setting drop to true for safety. It should make no difference.
3052     PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords));
3053     PetscCall(ISGetLocalSize(is_coords, &ndofs_block));
3054     PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration));
3055 
3056     // Allocate coordinates vector and set it directly
3057     PetscCall(PetscMalloc1(ndofs_block * dim, &ilink_current->coords));
3058     for (PetscInt dof = 0; dof < ndofs_block; ++dof) {
3059       for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d];
3060     }
3061     ilink_current->dim   = dim;
3062     ilink_current->ndofs = ndofs_block;
3063     PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration));
3064     PetscCall(ISDestroy(&is_coords));
3065     ilink_current = ilink_current->next;
3066   }
3067   PetscCall(ISDestroy(&is_owned));
3068   PetscFunctionReturn(PETSC_SUCCESS);
3069 }
3070 
3071 /*@
3072   PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
3073 
3074   Collective
3075 
3076   Input Parameters:
3077 + pc   - the preconditioner context
3078 - type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`,
3079          `PC_COMPOSITE_GKB`
3080 
3081   Options Database Key:
. -pc_fieldsplit_type <one of multiplicative, additive, symmetric_multiplicative, special, schur, gkb> - Sets fieldsplit preconditioner type
3083 
3084   Level: intermediate
3085 
3086 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
3087           `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`, `PCFieldSplitSetSchurFactType()`
3088 @*/
PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* dispatch to the implementation composed as "PCFieldSplitSetType_C" in
     PCCreate_FieldSplit(); a no-op for PC types that do not provide it */
  PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3096 
3097 /*@
3098   PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
3099 
3100   Not collective
3101 
3102   Input Parameter:
3103 . pc - the preconditioner context
3104 
3105   Output Parameter:
3106 . type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
3107 
3108   Level: intermediate
3109 
3110 .seealso: [](sec_block_matrices), `PC`, `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
3111           `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
3112 @*/
PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(type, 2);
  /* NOTE(review): assumes pc is a PCFIELDSPLIT so pc->data is a PC_FieldSplit;
     unlike PCFieldSplitGetDMSplits() there is no PetscObjectTypeCompare() guard here */
  *type = jac->type;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3123 
3124 /*@
3125   PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3126 
3127   Logically Collective
3128 
3129   Input Parameters:
3130 + pc  - the preconditioner context
3131 - flg - boolean indicating whether to use field splits defined by the `DM`
3132 
3133   Options Database Key:
3134 . -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM`
3135 
3136   Level: intermediate
3137 
3138   Developer Note:
3139   The name should be `PCFieldSplitSetUseDMSplits()`, similar change to options database
3140 
.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
3142 @*/
PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flg, 2);
  /* silently ignore the request if pc is not a PCFIELDSPLIT */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  if (isfs) jac->dm_splits = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3155 
3156 /*@
3157   PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3158 
3159   Logically Collective
3160 
3161   Input Parameter:
3162 . pc - the preconditioner context
3163 
3164   Output Parameter:
3165 . flg - boolean indicating whether to use field splits defined by the `DM`
3166 
3167   Level: intermediate
3168 
3169   Developer Note:
3170   The name should be `PCFieldSplitGetUseDMSplits()`
3171 
.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
3173 @*/
3174 PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg)
3175 {
3176   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3177   PetscBool      isfs;
3178 
3179   PetscFunctionBegin;
3180   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3181   PetscAssertPointer(flg, 2);
3182   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3183   if (isfs) {
3184     if (flg) *flg = jac->dm_splits;
3185   }
3186   PetscFunctionReturn(PETSC_SUCCESS);
3187 }
3188 
3189 /*@
3190   PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3191 
3192   Logically Collective
3193 
3194   Input Parameter:
3195 . pc - the preconditioner context
3196 
3197   Output Parameter:
3198 . flg - boolean indicating whether to detect fields or not
3199 
3200   Level: intermediate
3201 
3202 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()`
3203 @*/
3204 PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg)
3205 {
3206   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3207 
3208   PetscFunctionBegin;
3209   *flg = jac->detect;
3210   PetscFunctionReturn(PETSC_SUCCESS);
3211 }
3212 
3213 /*@
3214   PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3215 
3216   Logically Collective
3217 
  Input Parameters:
+ pc  - the preconditioner context
- flg - boolean indicating whether to detect fields or not
3223 
3224   Options Database Key:
3225 . -pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point
3226 
3227   Level: intermediate
3228 
3229   Note:
3230   Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`).
3231 
3232 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`
3233 @*/
3234 PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg)
3235 {
3236   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3237 
3238   PetscFunctionBegin;
3239   jac->detect = flg;
3240   if (jac->detect) {
3241     PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
3242     PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
3243   }
3244   PetscFunctionReturn(PETSC_SUCCESS);
3245 }
3246 
3247 /*MC
3248   PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual
3249   collections of variables (that may overlap) called splits. See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details.
3250 
3251   Options Database Keys:
3252 +   -pc_fieldsplit_%d_fields <a,b,..>                                                - indicates the fields to be used in the `%d`'th split
3253 .   -pc_fieldsplit_default                                                           - automatically add any fields to additional splits that have not
3254                                                                                        been supplied explicitly by `-pc_fieldsplit_%d_fields`
3255 .   -pc_fieldsplit_block_size <bs>                                                   - size of block that defines fields (i.e. there are bs fields)
3256                                                                                        when the matrix is not of `MatType` `MATNEST`
3257 .   -pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting
3258 .   -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full>                     - default is `a11`; see `PCFieldSplitSetSchurPre()`
3259 .   -pc_fieldsplit_schur_fact_type <diag,lower,upper,full>                           - set factorization type when using `-pc_fieldsplit_type schur`;
3260                                                                                        see `PCFieldSplitSetSchurFactType()`
3261 .   -pc_fieldsplit_dm_splits <true,false> (default is true)                          - Whether to use `DMCreateFieldDecomposition()` for splits
3262 -   -pc_fieldsplit_detect_saddle_point                                               - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver
3263 
3264   Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` .
  The options prefix for the inner solver when using the Golub-Kahan bidiagonalization preconditioner is `-fieldsplit_0_`
3266   For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields.
3267 
3268   To set options on the solvers for each block append `-fieldsplit_` to all the `PC`
3269   options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1`
3270 
3271   To set the options on the solvers separate for each block call `PCFieldSplitGetSubKSP()`
3272   and set the options directly on the resulting `KSP` object
3273 
3274   Level: intermediate
3275 
3276   Notes:
3277   Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries or with a `MATNEST` and `PCFieldSplitSetIS()`
3278   to define a split by an arbitrary collection of entries.
3279 
3280   If no splits are set, the default is used. If a `DM` is associated with the `PC` and it supports
3281   `DMCreateFieldDecomposition()`, then that is used for the default. Otherwise if the matrix is not `MATNEST`, the splits are defined by entries strided by bs,
3282   beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`,
3283   if this is not called the block size defaults to the blocksize of the second matrix passed
3284   to `KSPSetOperators()`/`PCSetOperators()`.
3285 
3286   For the Schur complement preconditioner if
3287   ```{math}
3288     J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right]
3289   ```
3290 
3291   the preconditioner using `full` factorization is logically
3292   ```{math}
3293     \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) A_{01} \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right]
3294       ```
3295   where the action of $\text{inv}(A_{00})$ is applied using the KSP solver with prefix `-fieldsplit_0_`.  $S$ is the Schur complement
3296   ```{math}
3297      S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01}
3298   ```
3299   which is usually dense and not stored explicitly.  The action of $\text{ksp}(S)$ is computed using the KSP solver with prefix `-fieldsplit_splitname_` (where `splitname` was given
3300   in providing the SECOND split or 1 if not given). For `PCFieldSplitGetSubKSP()` when field number is 0,
3301   it returns the `KSP` associated with `-fieldsplit_0_` while field number 1 gives `-fieldsplit_1_` KSP. By default
3302   $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$.
3303 
3304   The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above,
3305   `diag` gives
3306   ```{math}
3307     \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\  0 & -\text{ksp}(S) \end{array}\right]
3308   ```
3309   Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$  so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip
3310   can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of
3311   ```{math}
3312     \left[\begin{array}{cc} A_{00} & 0 \\  A_{10} & S \end{array}\right]
3313   ```
3314   where the inverses of A_{00} and S are applied using KSPs. The upper factorization is the inverse of
3315   ```{math}
3316     \left[\begin{array}{cc} A_{00} & A_{01} \\  0 & S \end{array}\right]
3317   ```
3318   where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s.
3319 
3320   If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS`
3321   is used automatically for a second submatrix.
3322 
3323   The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1.
3324   Generally it should be used with the `MATAIJ` or `MATNEST` `MatType`
3325 
3326   The forms of these preconditioners are closely related, if not identical, to forms derived as "Distributive Iterations", see,
3327   for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`wesseling2009`.
3328   One can also use `PCFIELDSPLIT` inside a smoother resulting in "Distributive Smoothers".
3329 
3330   See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`.
3331 
3332   The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the
3333   residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables.
3334 
3335   The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape
3336   ```{math}
3337     \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right]
3338   ```
  with $A_{00}$ positive semi-definite. The implementation follows {cite}`arioli2013`. Therein, we choose $N := 1/\nu * I$ and the $(1,1)$-block of the matrix is modified to $H = A_{00} + \nu A_{01} A_{01}'$.
3340   A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`.
3341 
3342   Developer Note:
3343   The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their
3344   user API.
3345 
3346 .seealso: [](sec_block_matrices), `PC`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`,
3347           `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`,
3348           `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`,
3349           `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()`
3350 M*/
3351 
3352 PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc)
3353 {
3354   PC_FieldSplit *jac;
3355 
3356   PetscFunctionBegin;
3357   PetscCall(PetscNew(&jac));
3358 
3359   jac->bs                 = -1;
3360   jac->nsplits            = 0;
3361   jac->type               = PC_COMPOSITE_MULTIPLICATIVE;
3362   jac->schurpre           = PC_FIELDSPLIT_SCHUR_PRE_USER; /* Try user preconditioner first, fall back on diagonal */
3363   jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL;
3364   jac->schurscale         = -1.0;
3365   jac->dm_splits          = PETSC_TRUE;
3366   jac->detect             = PETSC_FALSE;
3367   jac->gkbtol             = 1e-5;
3368   jac->gkbdelay           = 5;
3369   jac->gkbnu              = 1;
3370   jac->gkbmaxit           = 100;
3371   jac->gkbmonitor         = PETSC_FALSE;
3372   jac->coordinates_set    = PETSC_FALSE;
3373 
3374   pc->data = (void *)jac;
3375 
3376   pc->ops->apply           = PCApply_FieldSplit;
3377   pc->ops->applytranspose  = PCApplyTranspose_FieldSplit;
3378   pc->ops->setup           = PCSetUp_FieldSplit;
3379   pc->ops->reset           = PCReset_FieldSplit;
3380   pc->ops->destroy         = PCDestroy_FieldSplit;
3381   pc->ops->setfromoptions  = PCSetFromOptions_FieldSplit;
3382   pc->ops->view            = PCView_FieldSplit;
3383   pc->ops->applyrichardson = NULL;
3384 
3385   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit));
3386   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
3387   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit));
3388   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit));
3389   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit));
3390   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit));
3391   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit));
3392   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit));
3393   PetscFunctionReturn(PETSC_SUCCESS);
3394 }
3395