xref: /petsc/src/ksp/pc/impls/fieldsplit/fieldsplit.c (revision a102dd6935e515e8d3756a15a69ef4a5e49e4f43) !
1 #include <petsc/private/pcimpl.h>  /*I "petscpc.h" I*/
2 #include <petsc/private/kspimpl.h> /*  This is needed to provide the appropriate PETSC_EXTERN for KSP_Solve_FS ....*/
3 #include <petscdm.h>
4 
5 const char *const PCFieldSplitSchurPreTypes[]  = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL};
6 const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL};
7 
8 PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4;
9 
/* One entry in the linked list of splits; each split carries its own KSP plus
   the index sets and work objects needed to move between the full system and
   the split's sub-system. */
typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
struct _PC_FieldSplitLink {
  KSP               ksp;                 /* solver for this split's block */
  Vec               x, y, z;            /* work vectors for this split */
  char             *splitname;          /* user-visible name of the split */
  PetscInt          nfields;            /* number of fields when the split is defined by field numbers */
  PetscInt         *fields, *fields_col; /* row and column field indices (when defined by fields, not by an IS) */
  VecScatter        sctx;               /* scatter between the full vector and this split's vector */
  IS                is, is_col;         /* row and column index sets defining the split */
  PC_FieldSplitLink next, previous;     /* doubly-linked list of splits */
  PetscLogEvent     event;              /* logging event for this split's solve */

  /* Used only when setting coordinates with PCSetCoordinates */
  PetscInt   dim;
  PetscInt   ndofs;
  PetscReal *coords;
};
27 
/* Private data of PCFIELDSPLIT: the list of splits plus the extra state used
   by the Schur-complement and Golub-Kahan bidiagonalization (GKB) variants. */
typedef struct {
  PCCompositeType type;         /* How the split solves are combined (additive, multiplicative, Schur, GKB, ...) */
  PetscBool       defaultsplit; /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */
  PetscBool       splitdefined; /* Flag is set after the splits have been defined, to prevent more splits from being added */
  PetscInt        bs;           /* Block size for IS and Mat structures */
  PetscInt        nsplits;      /* Number of field divisions defined */
  Vec            *x, *y, w1, w2; /* Per-split work vectors (x, y) and shared scratch vectors (w1, w2) */
  Mat            *mat;    /* The diagonal block for each split */
  Mat            *pmat;   /* The preconditioning diagonal block for each split */
  Mat            *Afield; /* The rows of the matrix associated with each split */
  PetscBool       issetup; /* Set once the per-split matrices/index sets have been built */

  /* Only used when Schur complement preconditioning is used */
  Mat                       B;          /* The (0,1) block */
  Mat                       C;          /* The (1,0) block */
  Mat                       schur;      /* The Schur complement S = A11 - A10 A00^{-1} A01, the KSP here, kspinner, is H_1 in [El08] */
  Mat                       schurp;     /* Assembled approximation to S built by MatSchurComplement to be used as a preconditioning matrix when solving with S */
  Mat                       schur_user; /* User-provided preconditioning matrix for the Schur complement */
  PCFieldSplitSchurPreType  schurpre;   /* Determines which preconditioning matrix is used for the Schur complement */
  PCFieldSplitSchurFactType schurfactorization; /* Which block factorization (DIAG/LOWER/UPPER/FULL) is applied */
  KSP                       kspschur;   /* The solver for S */
  KSP                       kspupper;   /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */
  PetscScalar               schurscale; /* Scaling factor for the Schur complement solution with DIAG factorization */

  /* Only used when Golub-Kahan bidiagonalization preconditioning is used */
  Mat          H;           /* The modified matrix H = A00 + nu*A01*A01'              */
  PetscReal    gkbtol;      /* Stopping tolerance for lower bound estimate            */
  PetscInt     gkbdelay;    /* The delay window for the stopping criterion            */
  PetscReal    gkbnu;       /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */
  PetscInt     gkbmaxit;    /* Maximum number of iterations for outer loop            */
  PetscBool    gkbmonitor;  /* Monitor for gkb iterations and the lower bound error   */
  PetscViewer  gkbviewer;   /* Viewer context for gkbmonitor                          */
  Vec          u, v, d, Hu; /* Work vectors for the GKB algorithm                     */
  PetscScalar *vecz;        /* Contains intermediate values, eg for lower bound       */

  PC_FieldSplitLink head;   /* First entry in the linked list of splits */
  PetscBool         isrestrict;       /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */
  PetscBool         suboptionsset;    /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */
  PetscBool         dm_splits;        /* Whether to use DMCreateFieldDecomposition() whenever possible */
  PetscBool         diag_use_amat;    /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         detect;           /* Whether to form 2-way split by finding zero diagonal entries */
  PetscBool         coordinates_set;  /* Whether PCSetCoordinates has been called */
} PC_FieldSplit;
72 
73 /*
74     Note:
75     there is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit instead of
76    inside PC_FieldSplitLink, just historical. If you want to be able to add new fields after already using the
77    PC you could change this.
78 */
79 
80 /* This helper is so that setting a user-provided preconditioning matrix is orthogonal to choosing to use it.  This way the
81 * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices. */
82 static Mat FieldSplitSchurPre(PC_FieldSplit *jac)
83 {
84   switch (jac->schurpre) {
85   case PC_FIELDSPLIT_SCHUR_PRE_SELF:
86     return jac->schur;
87   case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
88     return jac->schurp;
89   case PC_FIELDSPLIT_SCHUR_PRE_A11:
90     return jac->pmat[1];
91   case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */
92   case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */
93   default:
94     return jac->schur_user ? jac->schur_user : jac->pmat[1];
95   }
96 }
97 
98 #include <petscdraw.h>
/* Viewer for PCFIELDSPLIT with non-Schur, non-GKB composition: prints (ASCII)
   or draws (DRAW) the composition type, block size, and each split's KSP. */
static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for each split is in the following KSP objects:\n"));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        /* split defined by a list of field numbers: print them comma-separated on one line */
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); /* suppress tabs so the list stays on one line */
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        /* split defined directly by an index set */
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      PetscCall(KSPView(ilink->ksp, viewer));
      ilink = ilink->next;
    }
  }

  if (isdraw) {
    /* NOTE(review): this branch reuses ilink assuming it still points at jac->head,
       i.e. that a viewer never matches both ASCII and DRAW above -- confirm */
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* center the row of per-split drawings on the current point */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
156 
/* Viewer for PCFIELDSPLIT with Schur-complement composition: reports the
   factorization type, where the Schur preconditioning matrix comes from, the
   split definitions, and the KSPs for A00, the upper factor, and S. */
static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer)
{
  PC_FieldSplit             *jac = (PC_FieldSplit *)pc->data;
  PetscBool                  iascii, isdraw;
  PetscInt                   i, j;
  PC_FieldSplitLink          ilink = jac->head;
  MatSchurComplementAinvType atype;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    /* describe which matrix preconditions the Schur complement solve (see FieldSplitSchurPre()) */
    switch (jac->schurpre) {
    case PC_FIELDSPLIT_SCHUR_PRE_SELF:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from S itself\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
      /* report which approximate inverse of A00 the assembled Sp was built with */
      if (jac->schur) {
        PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype));
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's "))));
      }
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_A11:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_FULL:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from the exact Schur complement\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_USER:
      /* USER falls back to A11 when no matrix was actually supplied */
      if (jac->schur_user) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from user provided matrix\n"));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      }
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre);
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Split info:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        /* split defined by a list of field numbers: print them comma-separated */
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      ilink = ilink->next;
    }
    /* the three (possibly two) solvers: A00, optional separate upper-factor A00, and S */
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->head) {
      PetscCall(KSPView(jac->head->ksp, viewer));
    } else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    PetscCall(PetscViewerASCIIPopTab(viewer));
    if (jac->head && jac->kspupper != jac->head->ksp) {
      /* only shown when the upper-factor solve uses a different KSP than A00 */
      PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->kspschur) {
      PetscCall(KSPView(jac->kspschur, viewer));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    }
    PetscCall(PetscViewerASCIIPopTab(viewer));
    PetscCall(PetscViewerASCIIPopTab(viewer));
  } else if (isdraw && jac->head) {
    PetscDraw draw;
    PetscReal x, y, w, wd, h;
    PetscInt  cnt = 2; /* number of KSP boxes to draw: A00 and S, plus kspupper if distinct */
    char      str[32];

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (jac->kspupper != jac->head->ksp) cnt++;
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (cnt + 1);

    /* title boxes: factorization type and source of the Schur preconditioner */
    PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) {
      /* USER without a user matrix behaves as A11; label it accordingly */
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11]));
    } else {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre]));
    }
    PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    x = x - wd * (cnt - 1) / 2.0; /* center the row of solver drawings */

    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->head->ksp, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
    if (jac->kspupper != jac->head->ksp) {
      x += wd;
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(jac->kspupper, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
    }
    x += wd;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->kspschur, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
280 
/* Viewer for PCFIELDSPLIT with Golub-Kahan bidiagonalization (GKB)
   composition: prints the GKB parameters and the solver for the augmented
   matrix H = A00 + nu*A01*A01'. */
static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));

    /* GKB-specific parameters (tolerance, delay window, iteration cap) */
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for H = A00 + nu*A01*A01' matrix:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));

    /* only the first split's solver is shown: it is the one applied to H */
    if (ilink->fields) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields "));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); /* keep the field list on one line */
      for (j = 0; j < ilink->nfields; j++) {
        if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
        PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
      }
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n"));
    }
    PetscCall(KSPView(ilink->ksp, viewer));

    PetscCall(PetscViewerASCIIPopTab(viewer));
  }

  if (isdraw) {
    /* NOTE(review): reuses ilink assuming the ASCII branch did not run (a viewer
       matches one type), so ilink still points at jac->head -- confirm */
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* center the row of per-split drawings on the current point */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
341 
342 /* Precondition: jac->bs is set to a meaningful value or MATNEST */
343 static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc)
344 {
345   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
346   PetscInt       bs, i, nfields, *ifields, nfields_col, *ifields_col;
347   PetscBool      flg, flg_col, mnest;
348   char           optionname[128], splitname[8], optionname_col[128];
349 
350   PetscFunctionBegin;
351   PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &mnest));
352   if (mnest) {
353     PetscCall(MatNestGetSize(pc->pmat, &bs, NULL));
354   } else {
355     bs = jac->bs;
356   }
357   PetscCall(PetscMalloc2(bs, &ifields, bs, &ifields_col));
358   for (i = 0, flg = PETSC_TRUE;; i++) {
359     PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
360     PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
361     PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i));
362     nfields     = bs;
363     nfields_col = bs;
364     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
365     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col));
366     if (!flg) break;
367     else if (flg && !flg_col) {
368       PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
369       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields));
370     } else {
371       PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
372       PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match");
373       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col));
374     }
375   }
376   if (i > 0) {
377     /* Makes command-line setting of splits take precedence over setting them in code.
378        Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would
379        create new splits, which would probably not be what the user wanted. */
380     jac->splitdefined = PETSC_TRUE;
381   }
382   PetscCall(PetscFree2(ifields, ifields_col));
383   PetscFunctionReturn(PETSC_SUCCESS);
384 }
385 
/* Fills in any splits the user did not define explicitly.

   If no splits exist yet: use the DM's field decomposition when available;
   otherwise detect a zero-diagonal 2-way split (jac->detect), an off-diagonal
   coupling split (-pc_fieldsplit_detect_coupling), splits from the options
   database, or fall back to one split per block-size component (or per
   MATNEST block).  If exactly one split exists, its complement becomes the
   second split.  Errors unless at least two splits result. */
static PetscErrorCode PCFieldSplitSetDefaults(PC pc)
{
  PC_FieldSplit    *jac                = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink              = jac->head;
  PetscBool         fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE;
  PetscInt          i;

  PetscFunctionBegin;
  /*
   Kinda messy, but at least this now uses DMCreateFieldDecomposition().
   Should probably be rewritten.
   */
  if (!ilink) {
    PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL));
    if (pc->dm && jac->dm_splits && !jac->detect && !coupling) {
      /* derive the splits from the DM's field decomposition */
      PetscInt  numFields, f, i, j;
      char    **fieldNames;
      IS       *fields;
      DM       *dms;
      DM        subdm[128];
      PetscBool flg;

      PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms));
      /* Allow the user to prescribe the splits */
      for (i = 0, flg = PETSC_TRUE;; i++) {
        PetscInt ifields[128];
        IS       compField;
        char     optionname[128], splitname[8];
        PetscInt nfields = numFields;

        PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
        PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
        if (!flg) break;
        /* NOTE(review): this bound is checked only after PetscOptionsGetIntArray()
           may already have written up to numFields entries into ifields[128];
           subdm[] is also indexed by the unbounded split counter i -- confirm */
        PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields);
        PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i]));
        if (nfields == 1) {
          /* single-field split keeps the DM's field name */
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField));
        } else {
          PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
          PetscCall(PCFieldSplitSetIS(pc, splitname, compField));
        }
        PetscCall(ISDestroy(&compField));
        /* the fields absorbed into this split are no longer needed individually */
        for (j = 0; j < nfields; ++j) {
          f = ifields[j];
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      }
      if (i == 0) {
        /* no command-line prescription: one split per DM field */
        for (f = 0; f < numFields; ++f) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f]));
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      } else {
        /* replace the per-field DMs with the sub-DMs built for the prescribed splits */
        for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j));
        PetscCall(PetscFree(dms));
        PetscCall(PetscMalloc1(i, &dms));
        for (j = 0; j < i; ++j) dms[j] = subdm[j];
      }
      PetscCall(PetscFree(fieldNames));
      PetscCall(PetscFree(fields));
      if (dms) {
        PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n"));
        /* hand each sub-DM to its split's KSP (inactive: the KSP does not build operators from it) */
        for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) {
          const char *prefix;
          PetscCall(PetscObjectGetOptionsPrefix((PetscObject)ilink->ksp, &prefix));
          PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dms[i], prefix));
          PetscCall(KSPSetDM(ilink->ksp, dms[i]));
          PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE));
          PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0));
          PetscCall(DMDestroy(&dms[i]));
        }
        PetscCall(PetscFree(dms));
      }
    } else {
      if (jac->bs <= 0) {
        if (pc->pmat) {
          PetscCall(MatGetBlockSize(pc->pmat, &jac->bs));
        } else jac->bs = 1;
      }

      if (jac->detect) {
        /* 2-way split: rows with zero diagonal entries vs the rest */
        IS       zerodiags, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->diag_use_amat) {
          PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags));
        } else {
          PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags));
        }
        PetscCall(ISComplement(zerodiags, nmin, nmax, &rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags));
        PetscCall(ISDestroy(&zerodiags));
        PetscCall(ISDestroy(&rest));
      } else if (coupling) {
        /* 2-way split: all rows vs rows with entries outside the block diagonal */
        IS       coupling, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->offdiag_use_amat) {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling));
        } else {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling));
        }
        PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest));
        PetscCall(ISSetIdentity(rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", coupling));
        PetscCall(ISDestroy(&coupling));
        PetscCall(ISDestroy(&rest));
      } else {
        PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL));
        if (!fieldsplit_default) {
          /* Allow user to set fields from command line,  if bs was known at the time of PCSetFromOptions_FieldSplit()
           then it is set there. This is not ideal because we should only have options set in XXSetFromOptions(). */
          PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
          if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
        }
        if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) {
          /* default splitting: one split per block-size component, or per MATNEST block */
          Mat       M = pc->pmat;
          PetscBool isnest;
          PetscInt  nf;

          PetscCall(PetscInfo(pc, "Using default splitting of fields\n"));
          PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest));
          if (!isnest) {
            /* pmat is not a nest; fall back to checking mat */
            M = pc->mat;
            PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest));
          }
          if (!isnest) nf = jac->bs;
          else PetscCall(MatNestGetSize(M, &nf, NULL));
          for (i = 0; i < nf; i++) {
            char splitname[8];

            PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
            PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i));
          }
          jac->defaultsplit = PETSC_TRUE;
        }
      }
    }
  } else if (jac->nsplits == 1) {
    /* exactly one split was given: its complement becomes split "1" */
    IS       is2;
    PetscInt nmin, nmax;

    PetscCheck(ilink->is, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()");
    PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
    PetscCall(ISComplement(ilink->is, nmin, nmax, &is2));
    PetscCall(PCFieldSplitSetIS(pc, "1", is2));
    PetscCall(ISDestroy(&is2));
  }

  PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits);
  PetscFunctionReturn(PETSC_SUCCESS);
}
544 
545 static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu)
546 {
547   Mat       BT, T;
548   PetscReal nrmT, nrmB;
549 
550   PetscFunctionBegin;
551   PetscCall(MatHermitianTranspose(C, MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */
552   PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN));
553   PetscCall(MatNorm(T, NORM_1, &nrmT));
554   PetscCall(MatNorm(B, NORM_1, &nrmB));
555   PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/hermitian, GKB is not applicable.");
556 
557   /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */
558   /* setting N := 1/nu*I in [Ar13].                                                 */
559   PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT));
560   PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_DEFAULT, H)); /* H = A01*A01'          */
561   PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN));        /* H = A00 + nu*A01*A01' */
562 
563   PetscCall(MatDestroy(&BT));
564   PetscCall(MatDestroy(&T));
565   PetscFunctionReturn(PETSC_SUCCESS);
566 }
567 
568 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *option[], const char *value[], PetscBool *flg);
569 
570 static PetscErrorCode PCSetUp_FieldSplit(PC pc)
571 {
572   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
573   PC_FieldSplitLink ilink;
574   PetscInt          i, nsplit;
575   PetscBool         sorted, sorted_col, matnest = PETSC_FALSE;
576 
577   PetscFunctionBegin;
578   pc->failedreason = PC_NOERROR;
579   PetscCall(PCFieldSplitSetDefaults(pc));
580   nsplit = jac->nsplits;
581   ilink  = jac->head;
582   if (pc->pmat) PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
583 
584   /* get the matrices for each split */
585   if (!jac->issetup) {
586     PetscInt rstart, rend, nslots, bs;
587 
588     jac->issetup = PETSC_TRUE;
589 
590     /* This is done here instead of in PCFieldSplitSetFields() because may not have matrix at that point */
591     if (jac->defaultsplit || !ilink->is) {
592       if (jac->bs <= 0) jac->bs = nsplit;
593     }
594 
595     /*  MatCreateSubMatrix() for [S]BAIJ matrices can only work if the indices include entire blocks of the matrix */
596     PetscCall(MatGetBlockSize(pc->pmat, &bs));
597     if (bs > 1 && (jac->bs <= bs || jac->bs % bs)) {
598       PetscBool blk;
599 
600       PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &blk, MATBAIJ, MATSBAIJ, MATSEQBAIJ, MATSEQSBAIJ, MATMPIBAIJ, MATMPISBAIJ, NULL));
601       PetscCheck(!blk, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Cannot use MATBAIJ with PCFIELDSPLIT and currently set matrix and PC blocksizes");
602     }
603 
604     if (!matnest) { /* use the matrix blocksize and stride IS to determine the index sets that define the submatrices */
605       bs = jac->bs;
606       PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend));
607       nslots = (rend - rstart) / bs;
608       for (i = 0; i < nsplit; i++) {
609         if (jac->defaultsplit) {
610           PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is));
611           PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
612         } else if (!ilink->is) {
613           if (ilink->nfields > 1) {
614             PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col;
615 
616             PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii));
617             PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj));
618             for (j = 0; j < nslots; j++) {
619               for (k = 0; k < nfields; k++) {
620                 ii[nfields * j + k] = rstart + bs * j + fields[k];
621                 jj[nfields * j + k] = rstart + bs * j + fields_col[k];
622               }
623             }
624             PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is));
625             PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col));
626             PetscCall(ISSetBlockSize(ilink->is, nfields));
627             PetscCall(ISSetBlockSize(ilink->is_col, nfields));
628           } else {
629             PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is));
630             PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col));
631           }
632         }
633         PetscCall(ISSorted(ilink->is, &sorted));
634         if (ilink->is_col) PetscCall(ISSorted(ilink->is_col, &sorted_col));
635         PetscCheck(sorted && sorted_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Fields must be sorted when creating split");
636         ilink = ilink->next;
637       }
638     } else { /* use the IS that define the MATNEST to determine the index sets that define the submatrices */
639       IS      *rowis, *colis, *ises = NULL;
640       PetscInt mis, nis;
641 
642       PetscCall(MatNestGetSize(pc->pmat, &mis, &nis));
643       PetscCall(PetscMalloc2(mis, &rowis, nis, &colis));
644       PetscCall(MatNestGetISs(pc->pmat, rowis, colis));
645       if (!jac->defaultsplit) PetscCall(PetscMalloc1(mis, &ises));
646 
647       for (i = 0; i < nsplit; i++) {
648         if (jac->defaultsplit) {
649           PetscCall(ISDuplicate(rowis[i], &ilink->is));
650           PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
651         } else if (!ilink->is) {
652           if (ilink->nfields > 1) {
653             for (PetscInt j = 0; j < ilink->nfields; j++) ises[j] = rowis[ilink->fields[j]];
654             PetscCall(ISConcatenate(PetscObjectComm((PetscObject)pc), ilink->nfields, ises, &ilink->is));
655           } else {
656             PetscCall(ISDuplicate(rowis[ilink->fields[0]], &ilink->is));
657           }
658           PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
659         }
660         ilink = ilink->next;
661       }
662       PetscCall(PetscFree2(rowis, colis));
663       PetscCall(PetscFree(ises));
664     }
665   }
666 
667   ilink = jac->head;
668   if (!jac->pmat) {
669     Vec xtmp;
670 
671     PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL));
672     PetscCall(PetscMalloc1(nsplit, &jac->pmat));
673     PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y));
674     for (i = 0; i < nsplit; i++) {
675       MatNullSpace sp;
676 
677       /* Check for preconditioning matrix attached to IS */
678       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i]));
679       if (jac->pmat[i]) {
680         PetscCall(PetscObjectReference((PetscObject)jac->pmat[i]));
681         if (jac->type == PC_COMPOSITE_SCHUR) {
682           jac->schur_user = jac->pmat[i];
683 
684           PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
685         }
686       } else {
687         const char *prefix;
688         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i]));
689         PetscCall(MatGetOptionsPrefix(jac->pmat[i], &prefix));
690         if (!prefix) {
691           PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix));
692           PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix));
693         }
694         PetscCall(MatSetFromOptions(jac->pmat[i]));
695         PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view"));
696       }
697       /* create work vectors for each split */
698       PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i]));
699       ilink->x = jac->x[i];
700       ilink->y = jac->y[i];
701       ilink->z = NULL;
702       /* compute scatter contexts needed by multiplicative versions and non-default splits */
703       PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx));
704       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp));
705       if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp));
706       ilink = ilink->next;
707     }
708     PetscCall(VecDestroy(&xtmp));
709   } else {
710     MatReuse      scall;
711     MatNullSpace *nullsp = NULL;
712 
713     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
714       PetscCall(MatGetNullSpaces(nsplit, jac->pmat, &nullsp));
715       for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i]));
716       scall = MAT_INITIAL_MATRIX;
717     } else scall = MAT_REUSE_MATRIX;
718 
719     for (i = 0; i < nsplit; i++) {
720       Mat pmat;
721 
722       /* Check for preconditioning matrix attached to IS */
723       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat));
724       if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i]));
725       ilink = ilink->next;
726     }
727     if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->pmat, &nullsp));
728   }
729   if (jac->diag_use_amat) {
730     ilink = jac->head;
731     if (!jac->mat) {
732       PetscCall(PetscMalloc1(nsplit, &jac->mat));
733       for (i = 0; i < nsplit; i++) {
734         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i]));
735         ilink = ilink->next;
736       }
737     } else {
738       MatReuse      scall;
739       MatNullSpace *nullsp = NULL;
740 
741       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
742         PetscCall(MatGetNullSpaces(nsplit, jac->mat, &nullsp));
743         for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i]));
744         scall = MAT_INITIAL_MATRIX;
745       } else scall = MAT_REUSE_MATRIX;
746 
747       for (i = 0; i < nsplit; i++) {
748         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i]));
749         ilink = ilink->next;
750       }
751       if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->mat, &nullsp));
752     }
753   } else {
754     jac->mat = jac->pmat;
755   }
756 
757   /* Check for null space attached to IS */
758   ilink = jac->head;
759   for (i = 0; i < nsplit; i++) {
760     MatNullSpace sp;
761 
762     PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp));
763     if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp));
764     ilink = ilink->next;
765   }
766 
767   if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) {
768     /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */
769     /* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? */
770     ilink = jac->head;
771     if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) {
772       /* special case need where Afield[0] is not needed and only certain columns of Afield[1] are needed since update is only on those rows of the solution */
773       if (!jac->Afield) {
774         PetscCall(PetscCalloc1(nsplit, &jac->Afield));
775         if (jac->offdiag_use_amat) {
776           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
777         } else {
778           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
779         }
780       } else {
781         MatReuse scall;
782 
783         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
784           PetscCall(MatDestroy(&jac->Afield[1]));
785           scall = MAT_INITIAL_MATRIX;
786         } else scall = MAT_REUSE_MATRIX;
787 
788         if (jac->offdiag_use_amat) {
789           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
790         } else {
791           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
792         }
793       }
794     } else {
795       if (!jac->Afield) {
796         PetscCall(PetscMalloc1(nsplit, &jac->Afield));
797         for (i = 0; i < nsplit; i++) {
798           if (jac->offdiag_use_amat) {
799             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
800           } else {
801             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
802           }
803           ilink = ilink->next;
804         }
805       } else {
806         MatReuse scall;
807         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
808           for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i]));
809           scall = MAT_INITIAL_MATRIX;
810         } else scall = MAT_REUSE_MATRIX;
811 
812         for (i = 0; i < nsplit; i++) {
813           if (jac->offdiag_use_amat) {
814             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i]));
815           } else {
816             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, &jac->Afield[i]));
817           }
818           ilink = ilink->next;
819         }
820       }
821     }
822   }
823 
824   if (jac->type == PC_COMPOSITE_SCHUR) {
825     IS          ccis;
826     PetscBool   isset, isspd;
827     PetscInt    rstart, rend;
828     char        lscname[256];
829     PetscObject LSC_L;
830     PetscBool   set, flg;
831 
832     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields");
833 
834     /* If pc->mat is SPD, don't scale by -1 the Schur complement */
835     if (jac->schurscale == (PetscScalar)-1.0) {
836       PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd));
837       jac->schurscale = (isset && isspd) ? 1.0 : -1.0;
838     }
839 
840     /* When extracting off-diagonal submatrices, we take complements from this range */
841     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
842     PetscCall(PetscObjectTypeCompareAny(jac->offdiag_use_amat ? (PetscObject)pc->mat : (PetscObject)pc->pmat, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
843 
844     if (jac->schur) {
845       KSP      kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper;
846       MatReuse scall;
847 
848       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
849         scall = MAT_INITIAL_MATRIX;
850         PetscCall(MatDestroy(&jac->B));
851         PetscCall(MatDestroy(&jac->C));
852       } else scall = MAT_REUSE_MATRIX;
853 
854       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
855       ilink = jac->head;
856       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
857       if (jac->offdiag_use_amat) {
858         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->B));
859       } else {
860         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->B));
861       }
862       PetscCall(ISDestroy(&ccis));
863       if (!flg) {
864         ilink = ilink->next;
865         PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
866         if (jac->offdiag_use_amat) {
867           PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->C));
868         } else {
869           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->C));
870         }
871         PetscCall(ISDestroy(&ccis));
872       } else {
873         PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &set, &flg));
874         if (set && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
875         else PetscCall(MatCreateTranspose(jac->B, &jac->C));
876       }
877       PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
878       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) {
879         PetscCall(MatDestroy(&jac->schurp));
880         PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
881       }
882       if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0]));
883       if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0]));
884       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
885     } else {
886       const char  *Dprefix;
887       char         schurprefix[256], schurmatprefix[256];
888       char         schurtestoption[256];
889       MatNullSpace sp;
890       KSP          kspt;
891 
892       /* extract the A01 and A10 matrices */
893       ilink = jac->head;
894       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
895       if (jac->offdiag_use_amat) {
896         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
897       } else {
898         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
899       }
900       PetscCall(ISDestroy(&ccis));
901       ilink = ilink->next;
902       if (!flg) {
903         PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
904         if (jac->offdiag_use_amat) {
905           PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
906         } else {
907           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
908         }
909         PetscCall(ISDestroy(&ccis));
910       } else {
911         PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &set, &flg));
912         if (set && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
913         else PetscCall(MatCreateTranspose(jac->B, &jac->C));
914       }
915       /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */
916       PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur));
917       PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT));
918       PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
919       PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
920       PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix));
921       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt));
922       PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix));
923 
924       /* Note: this is not true in general */
925       PetscCall(MatGetNullSpace(jac->mat[1], &sp));
926       if (sp) PetscCall(MatSetNullSpace(jac->schur, sp));
927 
928       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname));
929       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
930       if (flg) {
931         DM  dmInner;
932         KSP kspInner;
933         PC  pcInner;
934 
935         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
936         PetscCall(KSPReset(kspInner));
937         PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0]));
938         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
939         /* Indent this deeper to emphasize the "inner" nature of this solver. */
940         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2));
941         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2));
942         PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix));
943 
944         /* Set DM for new solver */
945         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
946         PetscCall(KSPSetDM(kspInner, dmInner));
947         PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE));
948 
949         /* Defaults to PCKSP as preconditioner */
950         PetscCall(KSPGetPC(kspInner, &pcInner));
951         PetscCall(PCSetType(pcInner, PCKSP));
952         PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp));
953       } else {
954         /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or
955           * PCFieldSplitSetIS_FieldSplit. We don't want KSPPREONLY because it makes the Schur complement inexact,
956           * preventing Schur complement reduction to be an accurate solve. Usually when an iterative solver is used for
957           * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make
958           * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used
959           * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. */
960         PetscCall(KSPSetType(jac->head->ksp, KSPGMRES));
961         PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp));
962       }
963       PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0]));
964       PetscCall(KSPSetFromOptions(jac->head->ksp));
965       PetscCall(MatSetFromOptions(jac->schur));
966 
967       PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg));
968       if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */
969         KSP kspInner;
970         PC  pcInner;
971 
972         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
973         PetscCall(KSPGetPC(kspInner, &pcInner));
974         PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg));
975         if (flg) {
976           KSP ksp;
977 
978           PetscCall(PCKSPGetKSP(pcInner, &ksp));
979           if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE));
980         }
981       }
982       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname));
983       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
984       if (flg) {
985         DM dmInner;
986 
987         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
988         PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper));
989         PetscCall(KSPSetNestLevel(jac->kspupper, pc->kspnestlevel));
990         PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure));
991         PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix));
992         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1));
993         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1));
994         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
995         PetscCall(KSPSetDM(jac->kspupper, dmInner));
996         PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE));
997         PetscCall(KSPSetFromOptions(jac->kspupper));
998         PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0]));
999         PetscCall(VecDuplicate(jac->head->x, &jac->head->z));
1000       } else {
1001         jac->kspupper = jac->head->ksp;
1002         PetscCall(PetscObjectReference((PetscObject)jac->head->ksp));
1003       }
1004 
1005       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
1006       PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur));
1007       PetscCall(KSPSetNestLevel(jac->kspschur, pc->kspnestlevel));
1008       PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure));
1009       PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1));
1010       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) {
1011         PC pcschur;
1012         PetscCall(KSPGetPC(jac->kspschur, &pcschur));
1013         PetscCall(PCSetType(pcschur, PCNONE));
1014         /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */
1015       } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) {
1016         PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
1017       }
1018       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
1019       PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix));
1020       PetscCall(KSPSetOptionsPrefix(jac->kspschur, Dprefix));
1021       /* propagate DM */
1022       {
1023         DM sdm;
1024         PetscCall(KSPGetDM(jac->head->next->ksp, &sdm));
1025         if (sdm) {
1026           PetscCall(KSPSetDM(jac->kspschur, sdm));
1027           PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE));
1028         }
1029       }
1030       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
1031       /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */
1032       PetscCall(KSPSetFromOptions(jac->kspschur));
1033     }
1034     PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY));
1035     PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY));
1036 
1037     /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */
1038     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname));
1039     PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
1040     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
1041     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", (PetscObject)LSC_L));
1042     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname));
1043     PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
1044     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
1045     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", (PetscObject)LSC_L));
1046   } else if (jac->type == PC_COMPOSITE_GKB) {
1047     IS       ccis;
1048     PetscInt rstart, rend;
1049 
1050     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields");
1051 
1052     ilink = jac->head;
1053 
1054     /* When extracting off-diagonal submatrices, we take complements from this range */
1055     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
1056 
1057     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1058     if (jac->offdiag_use_amat) {
1059       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1060     } else {
1061       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1062     }
1063     PetscCall(ISDestroy(&ccis));
1064     /* Create work vectors for GKB algorithm */
1065     PetscCall(VecDuplicate(ilink->x, &jac->u));
1066     PetscCall(VecDuplicate(ilink->x, &jac->Hu));
1067     PetscCall(VecDuplicate(ilink->x, &jac->w2));
1068     ilink = ilink->next;
1069     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1070     if (jac->offdiag_use_amat) {
1071       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1072     } else {
1073       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1074     }
1075     PetscCall(ISDestroy(&ccis));
1076     /* Create work vectors for GKB algorithm */
1077     PetscCall(VecDuplicate(ilink->x, &jac->v));
1078     PetscCall(VecDuplicate(ilink->x, &jac->d));
1079     PetscCall(VecDuplicate(ilink->x, &jac->w1));
1080     PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu));
1081     PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz));
1082 
1083     ilink = jac->head;
1084     PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H));
1085     if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1086     /* Create gkb_monitor context */
1087     if (jac->gkbmonitor) {
1088       PetscInt tablevel;
1089       PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer));
1090       PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII));
1091       PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel));
1092       PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel));
1093       PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1));
1094     }
1095   } else {
1096     /* set up the individual splits' PCs */
1097     i     = 0;
1098     ilink = jac->head;
1099     while (ilink) {
1100       PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i]));
1101       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
1102       if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1103       i++;
1104       ilink = ilink->next;
1105     }
1106   }
1107 
1108   /* Set coordinates to the sub PC objects whenever these are set */
1109   if (jac->coordinates_set) {
1110     PC pc_coords;
1111     if (jac->type == PC_COMPOSITE_SCHUR) {
1112       // Head is first block.
1113       PetscCall(KSPGetPC(jac->head->ksp, &pc_coords));
1114       PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords));
1115       // Second one is Schur block, but its KSP object is in kspschur.
1116       PetscCall(KSPGetPC(jac->kspschur, &pc_coords));
1117       PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords));
1118     } else if (jac->type == PC_COMPOSITE_GKB) {
1119       PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB Fieldpslit preconditioner\n"));
1120     } else {
1121       ilink = jac->head;
1122       while (ilink) {
1123         PetscCall(KSPGetPC(ilink->ksp, &pc_coords));
1124         PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords));
1125         ilink = ilink->next;
1126       }
1127     }
1128   }
1129 
1130   jac->suboptionsset = PETSC_TRUE;
1131   PetscFunctionReturn(PETSC_SUCCESS);
1132 }
1133 
/* FieldSplitSplitSolveAdd - scatter the full-space vector xx into the split's work vector
   ilink->x, solve with the split's KSP into ilink->y (bracketed by the split's log event
   and followed by KSPCheckSolve), then scatter-ADD the result back into the full-space
   vector yy. The calls are chained with || so evaluation stops at the first call that
   returns a nonzero PetscErrorCode, and the whole expression is nonzero on failure.
   NOTE(review): relies on a variable named `pc` being in scope at the expansion site
   (used by KSPCheckSolve). */
#define FieldSplitSplitSolveAdd(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \
                    KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE)))
1138 
1139 static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y)
1140 {
1141   PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
1142   PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
1143   KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;
1144 
1145   PetscFunctionBegin;
1146   switch (jac->schurfactorization) {
1147   case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
1148     /* [A00 0; 0 -S], positive definite, suitable for MINRES */
1149     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1150     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1151     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1152     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1153     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1154     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1155     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1156     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1157     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1158     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1159     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1160     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1161     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1162     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1163     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1164     PetscCall(VecScale(ilinkD->y, jac->schurscale));
1165     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1166     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1167     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1168     break;
1169   case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
1170     /* [A00 0; A10 S], suitable for left preconditioning */
1171     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1172     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1173     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1174     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1175     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1176     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1177     PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1178     PetscCall(VecScale(ilinkD->x, -1.));
1179     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1180     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1181     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1182     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1183     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1184     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1185     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1186     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1187     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1188     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1189     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1190     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1191     break;
1192   case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
1193     /* [A00 A01; 0 S], suitable for right preconditioning */
1194     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1195     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1196     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1197     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1198     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1199     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1200     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1201     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1202     PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1203     PetscCall(VecScale(ilinkA->x, -1.));
1204     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1205     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1206     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1207     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1208     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1209     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1210     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1211     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1212     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1213     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1214     break;
1215   case PC_FIELDSPLIT_SCHUR_FACT_FULL:
1216     /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */
1217     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1218     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1219     PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1220     PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y));
1221     PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y));
1222     PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1223     PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1224     PetscCall(VecScale(ilinkD->x, -1.0));
1225     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1226     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1227 
1228     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1229     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1230     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1231     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1232     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1233     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1234     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1235 
1236     if (kspUpper == kspA) {
1237       PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y));
1238       PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
1239       PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1240       PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1241       PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1242       PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1243     } else {
1244       PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1245       PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1246       PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1247       PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1248       PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1249       PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z));
1250       PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z));
1251       PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1252       PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
1253     }
1254     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1255     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1256     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1257   }
1258   PetscFunctionReturn(PETSC_SUCCESS);
1259 }
1260 
/* Applies the transpose of the Schur-complement block-factorization preconditioner,
   y = (M^T)^{-1} x, mirroring PCApply_FieldSplit_Schur above.  Since transposition
   swaps the lower and upper triangular factors, the LOWER and UPPER cases are the
   mirror images of the forward apply, and the off-diagonal blocks jac->B (A01) and
   jac->C (A10) are applied with MatMultTranspose().  Throughout, VecScatterBegin/End
   pairs are deliberately split around independent work to overlap communication with
   computation — do not reorder. */
static PetscErrorCode PCApplyTranspose_FieldSplit_Schur(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;                     /* A: 00-block split, D: Schur-complement split */
  KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper; /* kspLower is kspA and is never reassigned in this function */

  PetscFunctionBegin;
  switch (jac->schurfactorization) {
  case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
    /* [A00 0; 0 -S], positive definite, suitable for MINRES; the two diagonal
       solves are independent, so the A-block scatters bracket the S solve */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* tab level is bumped on kspA rather than jac->kspschur, matching the forward apply */
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScale(ilinkD->y, jac->schurscale)); /* user-settable scaling of the Schur block (PCFieldSplitSetSchurScale) */
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* transpose of [A00 A01; 0 S] is lower triangular: solve with A00^T first,
       then form x_D - A01^T y_A (jac->B is A01) and solve with S^T */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* transpose of [A00 0; A10 S] is upper triangular: solve with S^T first,
       then form x_A - A10^T y_D (jac->C is A10) and solve with A00^T */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
    PetscCall(VecScale(ilinkA->x, -1.));
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* transpose of the full factorization L D U: apply U^{-T} (kspUpper), then the
       transposed block-diagonal solves, then L^{-T} */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspUpper, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.0));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

    if (kspLower == kspA) { /* always true here (kspLower is initialized to kspA above); ilinkA->y doubles as work space */
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->y));
      PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    } else { /* NOTE(review): unreachable with the current kspLower = kspA initialization; kept parallel to the forward apply */
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
      PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(KSPSolveTranspose(kspLower, ilinkA->x, ilinkA->z));
      PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->z));
      PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
    }
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1379 
/* Applies the field-split preconditioner y = P^{-1} x for the additive,
   multiplicative, and symmetric-multiplicative composite types (the Schur and
   GKB variants have their own apply routines above/below). */
static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head; /* linked list of splits, walked in definition order */
  PetscInt          cnt, bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    PetscBool matnest;

    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
    if (jac->defaultsplit && !matnest) {
      /* All splits are interlaced strided fields of equal layout: use the cheaper
         all-at-once stride gather/scatter instead of per-split VecScatters */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* General case: each split gathers its piece, solves, and scatter-adds back */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) {
    /* Two-split multiplicative specialization: avoids the w1/w2 full-length work
       vectors by computing the second-block residual directly with jac->Afield[1] */
    PetscCall(VecSet(y, 0.0));
    /* solve on first block for first block variables */
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));

    /* compute the residual only onto second block variables using first block variables */
    PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x));
    ilink = ilink->next;
    PetscCall(VecScale(ilink->x, -1.0));
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));

    /* solve on second block variables */
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
    /* Gauss-Seidel-like sweep over the splits; the symmetric variant then sweeps
       back in reverse order */
    if (!jac->w1) { /* full-length work vectors, created lazily on first apply */
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
    cnt = 1; /* indexes jac->Afield, the rows of the matrix for each subsequent split */
    while (ilink->next) {
      ilink = ilink->next;
      /* compute the residual only over the part of the vector needed */
      PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x));
      PetscCall(VecScale(ilink->x, -1.0));
      PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
      PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
      PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    }
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      cnt -= 2; /* step back past the last split, which was just solved */
      while (ilink->previous) {
        ilink = ilink->previous;
        /* compute the residual only over the part of the vector needed */
        PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x));
        PetscCall(VecScale(ilink->x, -1.0));
        PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      }
    }
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1480 
/* Applies the Golub-Kahan bidiagonalization (GKB) preconditioner (see the [Ar13]
   reference cited in the body).  The first split's KSP solves with the regularized
   (1,1) block H = A + nu*B*B'; the bidiagonalization iteration updates both split
   solutions and stops when a delayed lower-bound estimate of the error falls below
   jac->gkbtol, or after jac->gkbmaxit iterations. */
static PetscErrorCode PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; /* A: primal (H) split, D: constraint split */
  KSP               ksp = ilinkA->ksp;                         /* inner solver for H */
  Vec               u, v, Hu, d, work1, work2;
  PetscScalar       alpha, z, nrmz2, *vecz;                    /* vecz: window of recent z values for the stopping bound */
  PetscReal         lowbnd, nu, beta;
  PetscInt          j, iterGKB;

  PetscFunctionBegin;
  /* Gather both splits' pieces of the right-hand side */
  PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));

  u     = jac->u;
  v     = jac->v;
  Hu    = jac->Hu;
  d     = jac->d;
  work1 = jac->w1;
  work2 = jac->w2;
  vecz  = jac->vecz;

  /* Change RHS to comply with matrix regularization H = A + nu*B*B' */
  /* Add q = q + nu*B*b */
  if (jac->gkbnu) {
    nu = jac->gkbnu;
    PetscCall(VecScale(ilinkD->x, jac->gkbnu));
    PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */
  } else {
    /* Situation when no augmented Lagrangian is used. Then we set inner  */
    /* matrix N = I in [Ar13], and thus nu = 1.                           */
    nu = 1;
  }

  /* Transform rhs from [q,tilde{b}] to [0,b] */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y));
  PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1));
  PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x        */

  /* First step of algorithm */
  PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c)*/
  KSPCheckDot(ksp, beta);
  beta = PetscSqrtReal(nu) * beta;
  PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c      */
  PetscCall(MatMult(jac->B, v, work2));          /* u = H^{-1}*B*v      */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(KSPSolve(ksp, work2, u));
  PetscCall(KSPCheckSolve(ksp, pc, u));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u      */
  PetscCall(VecDot(Hu, u, &alpha));
  KSPCheckDot(ksp, alpha);
  PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
  alpha = PetscSqrtReal(PetscAbsScalar(alpha)); /* normalize u in the H-inner product */
  PetscCall(VecScale(u, 1.0 / alpha));
  PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* v = nu/beta *c      */

  z       = beta / alpha;
  vecz[1] = z;

  /* Computation of first iterate x(1) and p(1) */
  PetscCall(VecAXPY(ilinkA->y, z, u));
  PetscCall(VecCopy(d, ilinkD->y));
  PetscCall(VecScale(ilinkD->y, -z));

  iterGKB = 1;
  lowbnd  = 2 * jac->gkbtol; /* force at least one pass through the loop below */
  if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));

  while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) {
    iterGKB += 1;
    PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */
    PetscCall(VecAXPBY(v, nu, -alpha, work1));
    PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = sqrt(nu)*v'*v      */
    beta = beta / PetscSqrtReal(nu);
    PetscCall(VecScale(v, 1.0 / beta));
    PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */
    PetscCall(MatMult(jac->H, u, Hu));
    PetscCall(VecAXPY(work2, -beta, Hu));
    PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(KSPSolve(ksp, work2, u));
    PetscCall(KSPCheckSolve(ksp, pc, u));
    PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u            */
    PetscCall(VecDot(Hu, u, &alpha));
    KSPCheckDot(ksp, alpha);
    PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
    alpha = PetscSqrtReal(PetscAbsScalar(alpha));
    PetscCall(VecScale(u, 1.0 / alpha));

    z       = -beta / alpha * z; /* z <- beta/alpha*z     */
    vecz[0] = z;

    /* Computation of new iterate x(i+1) and p(i+1) */
    PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */
    PetscCall(VecAXPY(ilinkA->y, z, u));                   /* r = r + z*u          */
    PetscCall(VecAXPY(ilinkD->y, -z, d));                  /* p = p - z*d          */
    PetscCall(MatMult(jac->H, ilinkA->y, Hu));             /* ||u||_H = u'*H*u     */
    PetscCall(VecDot(Hu, ilinkA->y, &nrmz2));

    /* Compute Lower Bound estimate: relative size of the last gkbdelay z values
       against the H-norm of the current iterate */
    if (iterGKB > jac->gkbdelay) {
      lowbnd = 0.0;
      for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]);
      lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2));
    }

    /* Shift the z-history window by one; vecz[0] is overwritten next iteration */
    for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2];
    if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));
  }

  /* Scatter both split solutions back into the global result */
  PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1603 
/* Gathers the split's piece of xx, runs the split's KSP transpose solve, and
   scatter-ADDs the result into yy (transpose analogue of FieldSplitSplitSolveAdd).
   Note ilink->y holds the gathered input and ilink->x the solve output, the reverse
   of the forward macro.  Expands to a ||-chain, so the first failing call's nonzero
   PetscErrorCode becomes the macro's value and the remaining calls are skipped.
   Requires a variable `pc` to be in scope at the expansion site (for KSPCheckSolve). */
#define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \
                    KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE)))
1608 
/* Applies the transpose of the field-split preconditioner, y = (P^T)^{-1} x.
   The additive case is its own transpose modulo the per-split transpose solves;
   the multiplicative cases visit the splits in reverse order, computing full
   residuals with MatMultTranspose(pc->mat, ...) using the w1/w2 work vectors
   (unlike the forward apply, which uses the restricted jac->Afield rows). */
static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    PetscBool matnest;

    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
    if (jac->defaultsplit && !matnest) {
      /* Interlaced strided fields of equal layout: all-at-once stride gather/scatter */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else {
    if (!jac->w1) { /* full-length residual work vectors, created lazily */
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* symmetric sweep is its own transpose: forward pass then backward pass */
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->next) {
        ilink = ilink->next;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); /* w2 = x - A^T y (current residual) */
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    } else {
      /* transpose of the multiplicative sweep: apply the splits in reverse order */
      while (ilink->next) { /* get to last entry in linked list */
        ilink = ilink->next;
      }
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1676 
1677 static PetscErrorCode PCReset_FieldSplit(PC pc)
1678 {
1679   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1680   PC_FieldSplitLink ilink = jac->head, next;
1681 
1682   PetscFunctionBegin;
1683   while (ilink) {
1684     PetscCall(KSPDestroy(&ilink->ksp));
1685     PetscCall(VecDestroy(&ilink->x));
1686     PetscCall(VecDestroy(&ilink->y));
1687     PetscCall(VecDestroy(&ilink->z));
1688     PetscCall(VecScatterDestroy(&ilink->sctx));
1689     PetscCall(ISDestroy(&ilink->is));
1690     PetscCall(ISDestroy(&ilink->is_col));
1691     PetscCall(PetscFree(ilink->splitname));
1692     PetscCall(PetscFree(ilink->fields));
1693     PetscCall(PetscFree(ilink->fields_col));
1694     next = ilink->next;
1695     PetscCall(PetscFree(ilink));
1696     ilink = next;
1697   }
1698   jac->head = NULL;
1699   PetscCall(PetscFree2(jac->x, jac->y));
1700   if (jac->mat && jac->mat != jac->pmat) {
1701     PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat));
1702   } else if (jac->mat) {
1703     jac->mat = NULL;
1704   }
1705   if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat));
1706   if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield));
1707   jac->nsplits = 0;
1708   PetscCall(VecDestroy(&jac->w1));
1709   PetscCall(VecDestroy(&jac->w2));
1710   PetscCall(MatDestroy(&jac->schur));
1711   PetscCall(MatDestroy(&jac->schurp));
1712   PetscCall(MatDestroy(&jac->schur_user));
1713   PetscCall(KSPDestroy(&jac->kspschur));
1714   PetscCall(KSPDestroy(&jac->kspupper));
1715   PetscCall(MatDestroy(&jac->B));
1716   PetscCall(MatDestroy(&jac->C));
1717   PetscCall(MatDestroy(&jac->H));
1718   PetscCall(VecDestroy(&jac->u));
1719   PetscCall(VecDestroy(&jac->v));
1720   PetscCall(VecDestroy(&jac->Hu));
1721   PetscCall(VecDestroy(&jac->d));
1722   PetscCall(PetscFree(jac->vecz));
1723   PetscCall(PetscViewerDestroy(&jac->gkbviewer));
1724   jac->isrestrict = PETSC_FALSE;
1725   PetscFunctionReturn(PETSC_SUCCESS);
1726 }
1727 
1728 static PetscErrorCode PCDestroy_FieldSplit(PC pc)
1729 {
1730   PetscFunctionBegin;
1731   PetscCall(PCReset_FieldSplit(pc));
1732   PetscCall(PetscFree(pc->data));
1733   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
1734   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL));
1735   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL));
1736   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL));
1737   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL));
1738   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL));
1739   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL));
1740   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1741 
1742   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
1743   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
1744   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
1745   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
1746   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1747   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
1748   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
1749   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
1750   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
1751   PetscFunctionReturn(PETSC_SUCCESS);
1752 }
1753 
/* Process the options database for a PCFIELDSPLIT: block size, composition type,
   per-type (Schur/GKB) parameters, and optionally the splits themselves. */
static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PetscInt        bs;
  PetscBool       flg;
  PC_FieldSplit  *jac = (PC_FieldSplit *)pc->data;
  PCCompositeType ctype;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options");
  PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL));
  PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg));
  if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs));
  /* default the Amat/Pmat extraction flags from the PC-level -pc_use_amat setting, then let the specific options override */
  jac->diag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL));
  jac->offdiag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL));
  PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL));
  PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */
  PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg));
  if (flg) PetscCall(PCFieldSplitSetType(pc, ctype));
  /* Only setup fields once */
  if ((jac->bs > 0) && (jac->nsplits == 0)) {
    /* only allow user to set fields from command line.
       otherwise user can set them in PCFieldSplitSetDefaults() */
    PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
    if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
  }
  if (jac->type == PC_COMPOSITE_SCHUR) {
    /* the deprecated option name is read first so the current name, processed next, takes precedence */
    PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg));
    if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n"));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL));
    PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL));
  } else if (jac->type == PC_COMPOSITE_GKB) {
    PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitSetGKBTol", jac->gkbtol, &jac->gkbtol, NULL));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitSetGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL));
    PetscCall(PetscOptionsBoundedReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitSetGKBNu", jac->gkbnu, &jac->gkbnu, NULL, 0.0));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", "Maximum allowed number of iterations", "PCFieldSplitSetGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL));
    PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL));
  }
  /*
    In the initial call to this routine the sub-solver data structures do not exist so we cannot call KSPSetFromOptions() on them yet.
    But after the initial setup of ALL the layers of sub-solvers is completed we do want to call KSPSetFromOptions() on the sub-solvers every time it
    is called on the outer solver in case changes were made in the options database

    But even after PCSetUp_FieldSplit() is called all the options inside the inner levels of sub-solvers may still not have been set thus we only call the KSPSetFromOptions()
    if we know that the entire stack of sub-solvers below this have been completely instantiated, we check this by seeing if any solver iterations are complete.
    Without this extra check test p2p1fetidp_olof_full and others fail with incorrect matrix types.

    There could be a negative side effect of calling the KSPSetFromOptions() below.

    If one captured the PetscObjectState of the options database one could skip these calls if the database has not changed from the previous call
  */
  if (jac->issetup) {
    PC_FieldSplitLink ilink = jac->head;
    if (jac->type == PC_COMPOSITE_SCHUR) {
      if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper));
      if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur));
    }
    while (ilink) {
      if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp));
      ilink = ilink->next;
    }
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
1821 
1822 static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
1823 {
1824   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
1825   PC_FieldSplitLink ilink, next = jac->head;
1826   char              prefix[128];
1827   PetscInt          i;
1828 
1829   PetscFunctionBegin;
1830   if (jac->splitdefined) {
1831     PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
1832     PetscFunctionReturn(PETSC_SUCCESS);
1833   }
1834   for (i = 0; i < n; i++) { PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]); }
1835   PetscCall(PetscNew(&ilink));
1836   if (splitname) {
1837     PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
1838   } else {
1839     PetscCall(PetscMalloc1(3, &ilink->splitname));
1840     PetscCall(PetscSNPrintf(ilink->splitname, 2, "%" PetscInt_FMT, jac->nsplits));
1841   }
1842   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split great than 4 gets logged in the 4th split */
1843   PetscCall(PetscMalloc1(n, &ilink->fields));
1844   PetscCall(PetscArraycpy(ilink->fields, fields, n));
1845   PetscCall(PetscMalloc1(n, &ilink->fields_col));
1846   PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n));
1847 
1848   ilink->nfields = n;
1849   ilink->next    = NULL;
1850   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
1851   PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
1852   PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
1853   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
1854   PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
1855 
1856   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
1857   PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
1858 
1859   if (!next) {
1860     jac->head       = ilink;
1861     ilink->previous = NULL;
1862   } else {
1863     while (next->next) next = next->next;
1864     next->next      = ilink;
1865     ilink->previous = next;
1866   }
1867   jac->nsplits++;
1868   PetscFunctionReturn(PETSC_SUCCESS);
1869 }
1870 
1871 static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1872 {
1873   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1874 
1875   PetscFunctionBegin;
1876   *subksp = NULL;
1877   if (n) *n = 0;
1878   if (jac->type == PC_COMPOSITE_SCHUR) {
1879     PetscInt nn;
1880 
1881     PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()");
1882     PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits);
1883     nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 1 : 0);
1884     PetscCall(PetscMalloc1(nn, subksp));
1885     (*subksp)[0] = jac->head->ksp;
1886     (*subksp)[1] = jac->kspschur;
1887     if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper;
1888     if (n) *n = nn;
1889   }
1890   PetscFunctionReturn(PETSC_SUCCESS);
1891 }
1892 
1893 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp)
1894 {
1895   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1896 
1897   PetscFunctionBegin;
1898   PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()");
1899   PetscCall(PetscMalloc1(jac->nsplits, subksp));
1900   PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp));
1901 
1902   (*subksp)[1] = jac->kspschur;
1903   if (n) *n = jac->nsplits;
1904   PetscFunctionReturn(PETSC_SUCCESS);
1905 }
1906 
1907 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1908 {
1909   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1910   PetscInt          cnt   = 0;
1911   PC_FieldSplitLink ilink = jac->head;
1912 
1913   PetscFunctionBegin;
1914   PetscCall(PetscMalloc1(jac->nsplits, subksp));
1915   while (ilink) {
1916     (*subksp)[cnt++] = ilink->ksp;
1917     ilink            = ilink->next;
1918   }
1919   PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits);
1920   if (n) *n = jac->nsplits;
1921   PetscFunctionReturn(PETSC_SUCCESS);
1922 }
1923 
1924 /*@
1925   PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`.
1926 
1927   Input Parameters:
1928 + pc  - the preconditioner context
1929 - isy - the index set that defines the indices to which the fieldsplit is to be restricted
1930 
1931   Level: advanced
1932 
1933   Developer Notes:
1934   It seems the resulting `IS`s will not cover the entire space, so
1935   how can they define a convergent preconditioner? Needs explaining.
1936 
1937 .seealso: [](sec_block_matrices), `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
1938 @*/
PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(isy, IS_CLASSID, 2);
  /* dispatch to the implementation composed on the PC; a no-op for non-fieldsplit PCs */
  PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1947 
/* Replace each split's IS by its restriction to isy, renumbered into isy's global
   numbering, recursing into nested PCFIELDSPLIT sub-PCs. */
static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;
  PetscInt          localsize, size, sizez, i;
  const PetscInt   *ind, *indz;
  PetscInt         *indc, *indcz;
  PetscBool         flg;

  PetscFunctionBegin;
  /* offset of this rank's first entry of isy: exclusive prefix sum of the local sizes */
  PetscCall(ISGetLocalSize(isy, &localsize));
  PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy)));
  size -= localsize;
  while (ilink) {
    IS isrl, isr;
    PC subpc;
    /* isrl holds the positions (local to isy) of this split's entries inside isy */
    PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl));
    PetscCall(ISGetLocalSize(isrl, &localsize));
    PetscCall(PetscMalloc1(localsize, &indc));
    PetscCall(ISGetIndices(isrl, &ind));
    PetscCall(PetscArraycpy(indc, ind, localsize));
    PetscCall(ISRestoreIndices(isrl, &ind));
    PetscCall(ISDestroy(&isrl));
    /* shift the local positions by the rank offset to make them global */
    for (i = 0; i < localsize; i++) *(indc + i) += size;
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr));
    /* isr replaces both the row and column IS of the split; two extra references
       balance the two ISDestroy()s that released the old ones, the final ISDestroy()
       drops the creation reference */
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is));
    ilink->is = isr;
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is_col));
    ilink->is_col = isr;
    PetscCall(ISDestroy(&isr));
    PetscCall(KSPGetPC(ilink->ksp, &subpc));
    PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg));
    if (flg) {
      /* the sub-PC is itself a fieldsplit: restrict it to the embedding of isy in this split */
      IS       iszl, isz;
      MPI_Comm comm;
      PetscCall(ISGetLocalSize(ilink->is, &localsize));
      comm = PetscObjectComm((PetscObject)ilink->is);
      PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl));
      PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm));
      sizez -= localsize;
      PetscCall(ISGetLocalSize(iszl, &localsize));
      PetscCall(PetscMalloc1(localsize, &indcz));
      PetscCall(ISGetIndices(iszl, &indz));
      PetscCall(PetscArraycpy(indcz, indz, localsize));
      PetscCall(ISRestoreIndices(iszl, &indz));
      PetscCall(ISDestroy(&iszl));
      for (i = 0; i < localsize; i++) *(indcz + i) += sizez;
      PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz));
      PetscCall(PCFieldSplitRestrictIS(subpc, isz));
      PetscCall(ISDestroy(&isz));
    }
    next  = ilink->next;
    ilink = next;
  }
  jac->isrestrict = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2007 
2008 static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is)
2009 {
2010   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
2011   PC_FieldSplitLink ilink, next = jac->head;
2012   char              prefix[128];
2013 
2014   PetscFunctionBegin;
2015   if (jac->splitdefined) {
2016     PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
2017     PetscFunctionReturn(PETSC_SUCCESS);
2018   }
2019   PetscCall(PetscNew(&ilink));
2020   if (splitname) {
2021     PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
2022   } else {
2023     PetscCall(PetscMalloc1(8, &ilink->splitname));
2024     PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits));
2025   }
2026   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split great than 4 gets logged in the 4th split */
2027   PetscCall(PetscObjectReference((PetscObject)is));
2028   PetscCall(ISDestroy(&ilink->is));
2029   ilink->is = is;
2030   PetscCall(PetscObjectReference((PetscObject)is));
2031   PetscCall(ISDestroy(&ilink->is_col));
2032   ilink->is_col = is;
2033   ilink->next   = NULL;
2034   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
2035   PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
2036   PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
2037   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
2038   PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
2039 
2040   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
2041   PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
2042 
2043   if (!next) {
2044     jac->head       = ilink;
2045     ilink->previous = NULL;
2046   } else {
2047     while (next->next) next = next->next;
2048     next->next      = ilink;
2049     ilink->previous = next;
2050   }
2051   jac->nsplits++;
2052   PetscFunctionReturn(PETSC_SUCCESS);
2053 }
2054 
2055 /*@
2056   PCFieldSplitSetFields - Sets the fields that define one particular split in `PCFIELDSPLIT`
2057 
2058   Logically Collective
2059 
2060   Input Parameters:
2061 + pc         - the preconditioner context
2062 . splitname  - name of this split, if `NULL` the number of the split is used
2063 . n          - the number of fields in this split
2064 . fields     - the fields in this split
2065 - fields_col - generally the same as `fields`, if it does not match `fields` then the submatrix that is solved for this set of fields comes from an off-diagonal block
2066                of the matrix and `fields_col` provides the column indices for that block
2067 
2068   Options Database Key:
2069 . -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split
2070 
2071   Level: intermediate
2072 
2073   Notes:
2074   Use `PCFieldSplitSetIS()` to set a  general set of indices as a split.
2075 
2076   If the matrix used to construct the preconditioner is `MATNEST` then field i refers to the `is_row[i]` `IS` passed to `MatCreateNest()`.
2077 
2078   If the matrix used to construct the preconditioner is not `MATNEST` then
  `PCFieldSplitSetFields()` is for defining fields as strided blocks (based on the block size provided to the matrix with `MatSetBlockSize()` or
  to the `PC` with `PCFieldSplitSetBlockSize()`). For example, if the block
2081   size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean
2082   0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x1x3x5x7.. x12x45x78x....
2083   where the numbered entries indicate what is in the split.
2084 
2085   This function is called once per split (it creates a new split each time).  Solve options
2086   for this split will be available under the prefix `-fieldsplit_SPLITNAME_`.
2087 
2088   `PCFieldSplitSetIS()` does not support having a `fields_col` different from `fields`
2089 
2090   Developer Notes:
2091   This routine does not actually create the `IS` representing the split, that is delayed
2092   until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be
2093   available when this routine is called.
2094 
.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()`,
          `MatSetBlockSize()`, `MatCreateNest()`
2097 @*/
2098 PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt fields[], const PetscInt fields_col[])
2099 {
2100   PetscFunctionBegin;
2101   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2102   PetscAssertPointer(splitname, 2);
2103   PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname);
2104   PetscAssertPointer(fields, 4);
2105   PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col));
2106   PetscFunctionReturn(PETSC_SUCCESS);
2107 }
2108 
2109 /*@
2110   PCFieldSplitSetDiagUseAmat - set flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2111   the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2112 
2113   Logically Collective
2114 
2115   Input Parameters:
2116 + pc  - the preconditioner object
2117 - flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2118 
2119   Options Database Key:
2120 . -pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks
2121 
2122   Level: intermediate
2123 
2124 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT`
2125 @*/
2126 PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg)
2127 {
2128   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2129   PetscBool      isfs;
2130 
2131   PetscFunctionBegin;
2132   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2133   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2134   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2135   jac->diag_use_amat = flg;
2136   PetscFunctionReturn(PETSC_SUCCESS);
2137 }
2138 
2139 /*@
2140   PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2141   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2142 
2143   Logically Collective
2144 
2145   Input Parameter:
2146 . pc - the preconditioner object
2147 
2148   Output Parameter:
2149 . flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2150 
2151   Level: intermediate
2152 
2153 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT`
2154 @*/
2155 PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg)
2156 {
2157   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2158   PetscBool      isfs;
2159 
2160   PetscFunctionBegin;
2161   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2162   PetscAssertPointer(flg, 2);
2163   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2164   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2165   *flg = jac->diag_use_amat;
2166   PetscFunctionReturn(PETSC_SUCCESS);
2167 }
2168 
2169 /*@
2170   PCFieldSplitSetOffDiagUseAmat - set flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2171   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2172 
2173   Logically Collective
2174 
2175   Input Parameters:
2176 + pc  - the preconditioner object
2177 - flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2178 
2179   Options Database Key:
2180 . -pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks
2181 
2182   Level: intermediate
2183 
2184 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT`
2185 @*/
2186 PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg)
2187 {
2188   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2189   PetscBool      isfs;
2190 
2191   PetscFunctionBegin;
2192   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2193   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2194   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2195   jac->offdiag_use_amat = flg;
2196   PetscFunctionReturn(PETSC_SUCCESS);
2197 }
2198 
2199 /*@
2200   PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2201   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2202 
2203   Logically Collective
2204 
2205   Input Parameter:
2206 . pc - the preconditioner object
2207 
2208   Output Parameter:
2209 . flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2210 
2211   Level: intermediate
2212 
2213 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT`
2214 @*/
2215 PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg)
2216 {
2217   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2218   PetscBool      isfs;
2219 
2220   PetscFunctionBegin;
2221   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2222   PetscAssertPointer(flg, 2);
2223   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2224   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2225   *flg = jac->offdiag_use_amat;
2226   PetscFunctionReturn(PETSC_SUCCESS);
2227 }
2228 
2229 /*@
2230   PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT`
2231 
2232   Logically Collective
2233 
2234   Input Parameters:
2235 + pc        - the preconditioner context
2236 . splitname - name of this split, if `NULL` the number of the split is used
2237 - is        - the index set that defines the elements in this split
2238 
2239   Level: intermediate
2240 
2241   Notes:
2242   Use `PCFieldSplitSetFields()`, for splits defined by strided `IS` based on the matrix block size or the `is_rows[]` passed into `MATNEST`
2243 
2244   This function is called once per split (it creates a new split each time).  Solve options
2245   for this split will be available under the prefix -fieldsplit_SPLITNAME_.
2246 
2247 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetFields()`
2248 @*/
PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* splitname is optional: when NULL the split number is used as the name */
  if (splitname) PetscAssertPointer(splitname, 2);
  PetscValidHeaderSpecific(is, IS_CLASSID, 3);
  /* dispatch to the implementation composed on the PC; a no-op for non-fieldsplit PCs */
  PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2258 
2259 /*@
2260   PCFieldSplitGetIS - Retrieves the elements for a split as an `IS`
2261 
2262   Logically Collective
2263 
2264   Input Parameters:
2265 + pc        - the preconditioner context
2266 - splitname - name of this split
2267 
2268   Output Parameter:
2269 . is - the index set that defines the elements in this split, or `NULL` if the split is not found
2270 
2271   Level: intermediate
2272 
2273 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`, `PCFieldSplitGetISByIndex()`
2274 @*/
2275 PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is)
2276 {
2277   PetscFunctionBegin;
2278   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2279   PetscAssertPointer(splitname, 2);
2280   PetscAssertPointer(is, 3);
2281   {
2282     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2283     PC_FieldSplitLink ilink = jac->head;
2284     PetscBool         found;
2285 
2286     *is = NULL;
2287     while (ilink) {
2288       PetscCall(PetscStrcmp(ilink->splitname, splitname, &found));
2289       if (found) {
2290         *is = ilink->is;
2291         break;
2292       }
2293       ilink = ilink->next;
2294     }
2295   }
2296   PetscFunctionReturn(PETSC_SUCCESS);
2297 }
2298 
2299 /*@
2300   PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS`
2301 
2302   Logically Collective
2303 
2304   Input Parameters:
2305 + pc    - the preconditioner context
2306 - index - index of this split
2307 
2308   Output Parameter:
2309 . is - the index set that defines the elements in this split
2310 
2311   Level: intermediate
2312 
2313 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`,
2314 
2315 @*/
2316 PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is)
2317 {
2318   PetscFunctionBegin;
2319   PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index);
2320   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2321   PetscAssertPointer(is, 3);
2322   {
2323     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2324     PC_FieldSplitLink ilink = jac->head;
2325     PetscInt          i     = 0;
2326     PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits);
2327 
2328     while (i < index) {
2329       ilink = ilink->next;
2330       ++i;
2331     }
2332     PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is));
2333   }
2334   PetscFunctionReturn(PETSC_SUCCESS);
2335 }
2336 
2337 /*@
2338   PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the
2339   fieldsplit preconditioner when calling `PCFieldSplitSetFields()`. If not set the matrix block size is used.
2340 
2341   Logically Collective
2342 
2343   Input Parameters:
2344 + pc - the preconditioner context
2345 - bs - the block size
2346 
2347   Level: intermediate
2348 
2349   Note:
2350   If the matrix is a `MATNEST` then the `is_rows[]` passed to `MatCreateNest()` determines the fields.
2351 
2352 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
2353 @*/
PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, bs, 2);
  /* dispatch to the implementation composed on the PC; a no-op for non-fieldsplit PCs */
  PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2362 
2363 /*@C
2364   PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits
2365 
2366   Collective
2367 
2368   Input Parameter:
2369 . pc - the preconditioner context
2370 
2371   Output Parameters:
2372 + n      - the number of splits
2373 - subksp - the array of `KSP` contexts
2374 
2375   Level: advanced
2376 
2377   Notes:
2378   After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2379   (not the `KSP`, just the array that contains them).
2380 
2381   You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`.
2382 
2383   If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the
2384   Schur complement and the `KSP` object used to iterate over the Schur complement.
2385   To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`.
2386 
2387   If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the
2388   inner linear system defined by the matrix H in each loop.
2389 
2390   Fortran Notes:
2391   You must pass in a `KSP` array that is large enough to contain all the `KSP`s.
2392   You can call `PCFieldSplitGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
2393   `KSP` array must be.
2394 
2395   Developer Notes:
2396   There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2397 
2398   The Fortran interface could be modernized to return directly the array of values.
2399 
2400 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()`
2401 @*/
PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2); /* n is optional; validated only when provided */
  /* PetscUseMethod() (not Try) errors if the method is not composed, i.e. if pc is not a PCFIELDSPLIT */
  PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2410 
2411 /*@C
2412   PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT`
2413 
2414   Collective
2415 
2416   Input Parameter:
2417 . pc - the preconditioner context
2418 
2419   Output Parameters:
2420 + n      - the number of splits
2421 - subksp - the array of `KSP` contexts
2422 
2423   Level: advanced
2424 
2425   Notes:
2426   After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2427   (not the `KSP` just the array that contains them).
2428 
2429   You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`.
2430 
2431   If the fieldsplit type is of type `PC_COMPOSITE_SCHUR`, it returns (in order)
2432 +  1  - the `KSP` used for the (1,1) block
2433 .  2  - the `KSP` used for the Schur complement (not the one used for the interior Schur solver)
2434 -  3  - the `KSP` used for the (1,1) block in the upper triangular factor (if different from that of the (1,1) block).
2435 
2436   It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`.
2437 
2438   Fortran Notes:
2439   You must pass in a `KSP` array that is large enough to contain all the local `KSP`s.
2440   You can call `PCFieldSplitSchurGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
2441   `KSP` array must be.
2442 
2443   Developer Notes:
2444   There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2445 
2446   Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged?
2447 
2448   The Fortran interface should be modernized to return directly the array of values.
2449 
2450 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()`
2451 @*/
PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2); /* n is optional; validated only when provided */
  /* PetscUseMethod() errors if the method is not composed; it is composed only for PC_COMPOSITE_SCHUR */
  PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2460 
2461 /*@
2462   PCFieldSplitSetSchurPre -  Indicates from what operator the preconditioner is constructed for the Schur complement.
2463   The default is the A11 matrix.
2464 
2465   Collective
2466 
2467   Input Parameters:
2468 + pc    - the preconditioner context
2469 . ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default),
2470               `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`,
2471               `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL`
2472 - pre   - matrix to use for preconditioning, or `NULL`
2473 
2474   Options Database Keys:
2475 + -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`. See notes for meaning of various arguments
2476 - -fieldsplit_1_pc_type <pctype>                               - the preconditioner algorithm that is used to construct the preconditioner from the operator
2477 
2478   Level: intermediate
2479 
2480   Notes:
2481   If ptype is
2482 +     a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner
2483   matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix
2484 .     self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix:
2485   The only preconditioners that currently work with this symbolic representation matrix object are `PCLSC` and `PCHPDDM`
2486 .     user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument
2487   to this function).
2488 .     selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation $ Sp = A11 - A10 inv(diag(A00)) A01 $
2489   This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be
2490   lumped before extracting the diagonal using the additional option `-fieldsplit_1_mat_schur_complement_ainv_type lump`
2491 -     full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation
2492   computed internally by `PCFIELDSPLIT` (this is expensive)
2493   useful mostly as a test that the Schur complement approach can work for your problem
2494 
2495   When solving a saddle point problem, where the A11 block is identically zero, using `a11` as the ptype only makes sense
2496   with the additional option `-fieldsplit_1_pc_type none`. Usually for saddle point problems one would use a `ptype` of `self` and
2497   `-fieldsplit_1_pc_type lsc` which uses the least squares commutator to compute a preconditioner for the Schur complement.
2498 
2499   Developer Note:
2500   The name of this function and the option `-pc_fieldsplit_schur_precondition` are inconsistent; precondition should be used everywhere.
2501 
2502 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`,
2503           `MatSchurComplementSetAinvType()`, `PCLSC`, `PCFieldSplitSetSchurFactType()`
2504 @*/
PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Silently ignored unless pc is a PCFIELDSPLIT with the Schur methods composed (see PCFieldSplitSetType_FieldSplit()) */
  PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2512 
/* Deprecated alias for PCFieldSplitSetSchurPre(); kept so existing user code continues to compile and link */
PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  return PCFieldSplitSetSchurPre(pc, ptype, pre);
} /* Deprecated name */
2517 
2518 /*@
2519   PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be
2520   preconditioned.  See `PCFieldSplitSetSchurPre()` for details.
2521 
2522   Logically Collective
2523 
2524   Input Parameter:
2525 . pc - the preconditioner context
2526 
2527   Output Parameters:
2528 + ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`
2529 - pre   - matrix to use for preconditioning (with `PC_FIELDSPLIT_SCHUR_PRE_USER`), or `NULL`
2530 
2531   Level: intermediate
2532 
2533 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC`
2534 @*/
PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscUseMethod() errors if pc is not a PCFIELDSPLIT with the Schur methods composed */
  PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2542 
2543 /*@
2544   PCFieldSplitSchurGetS -  extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately
2545 
2546   Not Collective
2547 
2548   Input Parameter:
2549 . pc - the preconditioner context
2550 
2551   Output Parameter:
2552 . S - the Schur complement matrix
2553 
2554   Level: advanced
2555 
2556   Note:
2557   This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`.
2558 
2559 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`,
2560           `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()`
2561 @*/
PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S)
{
  const char    *t;
  PetscBool      isfs;
  PC_FieldSplit *jac;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Fetch the type name explicitly (rather than PetscObjectTypeCompare()) so the error message can report it */
  PetscCall(PetscObjectGetType((PetscObject)pc, &t));
  PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
  jac = (PC_FieldSplit *)pc->data; /* cast is safe only after the type check above */
  PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
  if (S) *S = jac->schur; /* borrowed reference; return it with PCFieldSplitSchurRestoreS(), never MatDestroy() */
  PetscFunctionReturn(PETSC_SUCCESS);
}
2578 
2579 /*@
2580   PCFieldSplitSchurRestoreS -  returns the `MATSCHURCOMPLEMENT` matrix used by this `PC`
2581 
2582   Not Collective
2583 
2584   Input Parameters:
2585 + pc - the preconditioner context
2586 - S  - the Schur complement matrix
2587 
2588   Level: advanced
2589 
2590 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurGetS()`
2591 @*/
PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S)
{
  const char    *t;
  PetscBool      isfs;
  PC_FieldSplit *jac;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Fetch the type name explicitly so the error message can report it */
  PetscCall(PetscObjectGetType((PetscObject)pc, &t));
  PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
  jac = (PC_FieldSplit *)pc->data; /* cast is safe only after the type check above */
  PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
  /* No ownership changes hands here; this only verifies the caller hands back the same matrix it got from PCFieldSplitSchurGetS() */
  PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten");
  PetscFunctionReturn(PETSC_SUCCESS);
}
2608 
static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurpre = ptype;
  /* The user-supplied matrix is retained only for PC_FIELDSPLIT_SCHUR_PRE_USER; for other types pre is ignored */
  if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) {
    PetscCall(MatDestroy(&jac->schur_user)); /* drop our reference to any previously supplied matrix */
    jac->schur_user = pre;
    PetscCall(PetscObjectReference((PetscObject)jac->schur_user)); /* take our own reference; the caller keeps theirs */
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2622 
static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* Both outputs are optional; *pre is a borrowed reference and may be NULL if no user matrix was set */
  if (ptype) *ptype = jac->schurpre;
  if (pre) *pre = jac->schur_user;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2632 
2633 /*@
2634   PCFieldSplitSetSchurFactType -  sets which blocks of the approximate block factorization to retain in the preconditioner {cite}`murphy2000note` and {cite}`ipsen2001note`
2635 
2636   Collective
2637 
2638   Input Parameters:
2639 + pc    - the preconditioner context
2640 - ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default
2641 
2642   Options Database Key:
2643 . -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is `full`
2644 
2645   Level: intermediate
2646 
2647   Notes:
2648   The FULL factorization is
2649 
2650   ```{math}
2651   \left(\begin{array}{cc} A & B \\
2652   C & E \\
2653   \end{array}\right) =
2654   \left(\begin{array}{cc} 1 & 0 \\
2655   C*A^{-1} & I \\
2656   \end{array}\right)
2657   \left(\begin{array}{cc} A & 0 \\
2658   0 & S \\
2659   \end{array}\right)
2660   \left(\begin{array}{cc} I & A^{-1}B \\
2661   0 & I \\
2662   \end{array}\right) = L D U.
2663   ```
2664 
2665   where $ S = E - C*A^{-1}*B $. In practice, the full factorization is applied via block triangular solves with the grouping $L*(D*U)$. UPPER uses $D*U$, LOWER uses $L*D$,
2666   and DIAG is the diagonal part with the sign of $ S $ flipped (because this makes the preconditioner positive definite for many formulations,
2667   thus allowing the use of `KSPMINRES`). Sign flipping of $ S $ can be turned off with `PCFieldSplitSetSchurScale()`.
2668 
2669   If $A$ and $S$ are solved exactly
2670 +  1 - FULL factorization is a direct solver.
2671 .  2 - The preconditioned operator with LOWER or UPPER has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations.
2672 -  3 -  With DIAG, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations.
2673 
2674   If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner
2675   application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice.
2676 
2677   For symmetric problems in which $A$ is positive definite and $S$ is negative definite, DIAG can be used with `KSPMINRES`.
2678 
2679   A flexible method like `KSPFGMRES` or `KSPGCR`, [](sec_flexibleksp), must be used if the fieldsplit preconditioner is nonlinear (e.g. a few iterations of a Krylov method is used to solve with A or S).
2680 
2681 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`,
2682           [](sec_flexibleksp), `PCFieldSplitSetSchurPre()`
2683 @*/
PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Silently ignored unless pc is a PCFIELDSPLIT with the Schur methods composed */
  PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2691 
static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* Simply records the choice; it is consumed when the preconditioner is applied */
  jac->schurfactorization = ftype;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2700 
2701 /*@
2702   PCFieldSplitSetSchurScale -  Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`.
2703 
2704   Collective
2705 
2706   Input Parameters:
2707 + pc    - the preconditioner context
2708 - scale - scaling factor for the Schur complement
2709 
2710   Options Database Key:
2711 . -pc_fieldsplit_schur_scale <scale> - default is -1.0
2712 
2713   Level: intermediate
2714 
2715 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurFactType()`
2716 @*/
PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveScalar(pc, scale, 2); /* scale must be the same on all MPI processes of the PC */
  /* Silently ignored unless pc is a PCFIELDSPLIT with the Schur methods composed */
  PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2725 
static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* Simply records the scaling applied to S in the DIAG factorization (see PCFieldSplitSetSchurFactType()) */
  jac->schurscale = scale;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2734 
2735 /*@C
2736   PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement
2737 
2738   Collective
2739 
2740   Input Parameter:
2741 . pc - the preconditioner context
2742 
2743   Output Parameters:
2744 + A00 - the (0,0) block
2745 . A01 - the (0,1) block
2746 . A10 - the (1,0) block
2747 - A11 - the (1,1) block
2748 
2749   Level: advanced
2750 
2751   Note:
2752   Use `NULL` for any unneeded output arguments
2753 
2754 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()`
2755 @*/
2756 PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11)
2757 {
2758   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2759 
2760   PetscFunctionBegin;
2761   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2762   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach.");
2763   if (A00) *A00 = jac->pmat[0];
2764   if (A01) *A01 = jac->B;
2765   if (A10) *A10 = jac->C;
2766   if (A11) *A11 = jac->pmat[1];
2767   PetscFunctionReturn(PETSC_SUCCESS);
2768 }
2769 
2770 /*@
2771   PCFieldSplitSetGKBTol -  Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2772 
2773   Collective
2774 
2775   Input Parameters:
2776 + pc        - the preconditioner context
2777 - tolerance - the solver tolerance
2778 
2779   Options Database Key:
2780 . -pc_fieldsplit_gkb_tol <tolerance> - default is 1e-5
2781 
2782   Level: intermediate
2783 
2784   Note:
2785   The generalized GKB algorithm {cite}`arioli2013` uses a lower bound estimate of the error in energy norm as stopping criterion.
2786   It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than
2787   this estimate, the stopping criterion is satisfactory in practical cases.
2788 
2789 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()`
2790 @*/
PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, tolerance, 2); /* tolerance must be the same on all MPI processes of the PC */
  /* Silently ignored unless pc is a PCFIELDSPLIT of type PC_COMPOSITE_GKB (method composed in PCFieldSplitSetType_FieldSplit()) */
  PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2799 
static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* Simply records the GKB stopping tolerance */
  jac->gkbtol = tolerance;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2808 
2809 /*@
2810   PCFieldSplitSetGKBMaxit -  Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2811 
2812   Collective
2813 
2814   Input Parameters:
2815 + pc    - the preconditioner context
2816 - maxit - the maximum number of iterations
2817 
2818   Options Database Key:
2819 . -pc_fieldsplit_gkb_maxit <maxit> - default is 100
2820 
2821   Level: intermediate
2822 
2823 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()`
2824 @*/
PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, maxit, 2); /* maxit must be the same on all MPI processes of the PC */
  /* Silently ignored unless pc is a PCFIELDSPLIT of type PC_COMPOSITE_GKB */
  PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2833 
static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* Simply records the GKB iteration limit */
  jac->gkbmaxit = maxit;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2842 
2843 /*@
2844   PCFieldSplitSetGKBDelay -  Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization {cite}`arioli2013` in `PCFIELDSPLIT`
2845   preconditioner.
2846 
2847   Collective
2848 
2849   Input Parameters:
2850 + pc    - the preconditioner context
2851 - delay - the delay window in the lower bound estimate
2852 
2853   Options Database Key:
2854 . -pc_fieldsplit_gkb_delay <delay> - default is 5
2855 
2856   Level: intermediate
2857 
2858   Notes:
2859   The algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. The lower bound of the error $ ||u-u^k||_H $
2860   is expressed as a truncated sum. The error at iteration k can only be measured at iteration (k + `delay`), and thus the algorithm needs
2861   at least (`delay` + 1) iterations to stop.
2862 
2863   For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to {cite}`arioli2013`
2864 
2865 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2866 @*/
PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, delay, 2); /* delay must be the same on all MPI processes of the PC */
  /* Silently ignored unless pc is a PCFIELDSPLIT of type PC_COMPOSITE_GKB */
  PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2875 
static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* Simply records the delay window of the GKB lower-bound error estimate */
  jac->gkbdelay = delay;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2884 
2885 /*@
2886   PCFieldSplitSetGKBNu -  Sets the scalar value nu >= 0 in the transformation H = A00 + nu*A01*A01' of the (1,1) block in the
2887   Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2888 
2889   Collective
2890 
2891   Input Parameters:
2892 + pc - the preconditioner context
2893 - nu - the shift parameter
2894 
2895   Options Database Key:
2896 . -pc_fieldsplit_gkb_nu <nu> - default is 1
2897 
2898   Level: intermediate
2899 
2900   Notes:
2901   This shift is in general done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by choosing `nu` sufficiently large. However,
2902   if `nu` is chosen too large, the matrix H might be badly conditioned and the solution of the linear system $Hx = b$ in the inner loop becomes difficult. It is therefore
2903   necessary to find a good balance in between the convergence of the inner and outer loop.
2904 
2905   For `nu` = 0, no shift is done. In this case A00 has to be positive definite. The matrix N in {cite}`arioli2013` is then chosen as identity.
2906 
2907 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2908 @*/
PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, nu, 2); /* nu must be the same on all MPI processes of the PC */
  /* Silently ignored unless pc is a PCFIELDSPLIT of type PC_COMPOSITE_GKB */
  PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2917 
static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* Simply records the GKB shift parameter used in H = A00 + nu*A01*A01' */
  jac->gkbnu = nu;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2926 
static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->type = type;
  /* First clear every type-specific composed method; the appropriate subset is re-composed below,
     so switching types at runtime never leaves a stale method behind */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));

  /* NOTE(review): pc->ops->applytranspose is assigned only in the SCHUR branch and is not reset in the
     other branches; switching from SCHUR to another type may leave the Schur applytranspose installed — verify */
  if (type == PC_COMPOSITE_SCHUR) {
    pc->ops->apply          = PCApply_FieldSplit_Schur;
    pc->ops->applytranspose = PCApplyTranspose_FieldSplit_Schur;
    pc->ops->view           = PCView_FieldSplit_Schur;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit));
  } else if (type == PC_COMPOSITE_GKB) {
    pc->ops->apply = PCApply_FieldSplit_GKB;
    pc->ops->view  = PCView_FieldSplit_GKB;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit));
  } else {
    /* All remaining composite types (additive, multiplicative, ...) share the generic apply/view */
    pc->ops->apply = PCApply_FieldSplit;
    pc->ops->view  = PCView_FieldSplit;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2970 
static PetscErrorCode PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" PetscInt_FMT, bs);
  /* The block size is write-once: re-setting it to the same value is allowed, changing it is an error */
  PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs);
  jac->bs = bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2981 
2982 static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
2983 {
2984   PC_FieldSplit    *jac           = (PC_FieldSplit *)pc->data;
2985   PC_FieldSplitLink ilink_current = jac->head;
2986   IS                is_owned;
2987 
2988   PetscFunctionBegin;
2989   jac->coordinates_set = PETSC_TRUE; // Internal flag
2990   PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, NULL));
2991 
2992   while (ilink_current) {
2993     // For each IS, embed it to get local coords indces
2994     IS              is_coords;
2995     PetscInt        ndofs_block;
2996     const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block
2997 
2998     // Setting drop to true for safety. It should make no difference.
2999     PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords));
3000     PetscCall(ISGetLocalSize(is_coords, &ndofs_block));
3001     PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration));
3002 
3003     // Allocate coordinates vector and set it directly
3004     PetscCall(PetscMalloc1(ndofs_block * dim, &ilink_current->coords));
3005     for (PetscInt dof = 0; dof < ndofs_block; ++dof) {
3006       for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d];
3007     }
3008     ilink_current->dim   = dim;
3009     ilink_current->ndofs = ndofs_block;
3010     PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration));
3011     PetscCall(ISDestroy(&is_coords));
3012     ilink_current = ilink_current->next;
3013   }
3014   PetscCall(ISDestroy(&is_owned));
3015   PetscFunctionReturn(PETSC_SUCCESS);
3016 }
3017 
3018 /*@
3019   PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
3020 
3021   Collective
3022 
3023   Input Parameters:
3024 + pc   - the preconditioner context
3025 - type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`,
3026          `PC_COMPOSITE_GKB`
3027 
3028   Options Database Key:
3029 . -pc_fieldsplit_type <one of multiplicative, additive, symmetric_multiplicative, special, schur> - Sets fieldsplit preconditioner type
3030 
3031   Level: intermediate
3032 
3033 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
3034           `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`, `PCFieldSplitSetSchurFactType()`
3035 @*/
PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* No-op when pc is not a PCFIELDSPLIT; dispatches to PCFieldSplitSetType_FieldSplit() otherwise */
  PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3043 
3044 /*@
3045   PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
3046 
3047   Not Collective
3048 
3049   Input Parameter:
3050 . pc - the preconditioner context
3051 
3052   Output Parameter:
3053 . type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
3054 
3055   Level: intermediate
3056 
3057 .seealso: [](sec_block_matrices), `PC`, `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
3058           `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
3059 @*/
PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(type, 2);
  /* NOTE(review): pc->data is cast without verifying pc is a PCFIELDSPLIT (unlike PCFieldSplitSchurGetS());
     the caller is responsible for the PC type — confirm this is intended */
  *type = jac->type;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3070 
3071 /*@
3072   PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3073 
3074   Logically Collective
3075 
3076   Input Parameters:
3077 + pc  - the preconditioner context
3078 - flg - boolean indicating whether to use field splits defined by the `DM`
3079 
3080   Options Database Key:
3081 . -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM`
3082 
3083   Level: intermediate
3084 
3085   Developer Note:
3086   The name should be `PCFieldSplitSetUseDMSplits()`, similar change to options database
3087 
3088 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldsplitSetIS()`
3089 @*/
3090 PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg)
3091 {
3092   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3093   PetscBool      isfs;
3094 
3095   PetscFunctionBegin;
3096   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3097   PetscValidLogicalCollectiveBool(pc, flg, 2);
3098   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3099   if (isfs) jac->dm_splits = flg;
3100   PetscFunctionReturn(PETSC_SUCCESS);
3101 }
3102 
3103 /*@
3104   PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3105 
3106   Logically Collective
3107 
3108   Input Parameter:
3109 . pc - the preconditioner context
3110 
3111   Output Parameter:
3112 . flg - boolean indicating whether to use field splits defined by the `DM`
3113 
3114   Level: intermediate
3115 
3116   Developer Note:
3117   The name should be `PCFieldSplitGetUseDMSplits()`
3118 
3119 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldsplitSetIS()`
3120 @*/
3121 PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg)
3122 {
3123   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3124   PetscBool      isfs;
3125 
3126   PetscFunctionBegin;
3127   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3128   PetscAssertPointer(flg, 2);
3129   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3130   if (isfs) {
3131     if (flg) *flg = jac->dm_splits;
3132   }
3133   PetscFunctionReturn(PETSC_SUCCESS);
3134 }
3135 
3136 /*@
3137   PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3138 
3139   Logically Collective
3140 
3141   Input Parameter:
3142 . pc - the preconditioner context
3143 
3144   Output Parameter:
3145 . flg - boolean indicating whether to detect fields or not
3146 
3147   Level: intermediate
3148 
3149 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()`
3150 @*/
3151 PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg)
3152 {
3153   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3154 
3155   PetscFunctionBegin;
3156   *flg = jac->detect;
3157   PetscFunctionReturn(PETSC_SUCCESS);
3158 }
3159 
3160 /*@
3161   PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3162 
3163   Logically Collective
3164 
3165   Input Parameter:
3166 . pc - the preconditioner context
3167 
3168   Output Parameter:
3169 . flg - boolean indicating whether to detect fields or not
3170 
3171   Options Database Key:
3172 . -pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point
3173 
3174   Level: intermediate
3175 
3176   Note:
3177   Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`).
3178 
3179 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`
3180 @*/
3181 PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg)
3182 {
3183   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3184 
3185   PetscFunctionBegin;
3186   jac->detect = flg;
3187   if (jac->detect) {
3188     PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
3189     PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
3190   }
3191   PetscFunctionReturn(PETSC_SUCCESS);
3192 }
3193 
3194 /*MC
3195   PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual
3196   collections of variables (that may overlap) called splits. See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details.
3197 
3198   Options Database Keys:
3199 +   -pc_fieldsplit_%d_fields <a,b,..>                                                - indicates the fields to be used in the `%d`'th split
3200 .   -pc_fieldsplit_default                                                           - automatically add any fields to additional splits that have not
3201                                                                                        been supplied explicitly by `-pc_fieldsplit_%d_fields`
3202 .   -pc_fieldsplit_block_size <bs>                                                   - size of block that defines fields (i.e. there are bs fields)
3203                                                                                        when the matrix is not of `MatType` `MATNEST`
3204 .   -pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting
3205 .   -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full>                     - default is `a11`; see `PCFieldSplitSetSchurPre()`
3206 .   -pc_fieldsplit_schur_fact_type <diag,lower,upper,full>                           - set factorization type when using `-pc_fieldsplit_type schur`;
3207                                                                                        see `PCFieldSplitSetSchurFactType()`
3208 .   -pc_fieldsplit_dm_splits <true,false> (default is true)                          - Whether to use `DMCreateFieldDecomposition()` for splits
3209 -   -pc_fieldsplit_detect_saddle_point                                               - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver
3210 
3211   Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` .
  The options prefix for the inner solver when using the Golub-Kahan bidiagonalization preconditioner is `-fieldsplit_0_`
3213   For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields.
3214 
3215   To set options on the solvers for each block append `-fieldsplit_` to all the `PC`
3216   options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1`
3217 
3218   To set the options on the solvers separate for each block call `PCFieldSplitGetSubKSP()`
3219   and set the options directly on the resulting `KSP` object
3220 
3221   Level: intermediate
3222 
3223   Notes:
3224   Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries or with a `MATNEST` and `PCFieldSplitSetIS()`
3225   to define a split by an arbitrary collection of entries.
3226 
3227   If no splits are set, the default is used. If a `DM` is associated with the `PC` and it supports
3228   `DMCreateFieldDecomposition()`, then that is used for the default. Otherwise if the matrix is not `MATNEST`, the splits are defined by entries strided by bs,
3229   beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`,
3230   if this is not called the block size defaults to the blocksize of the second matrix passed
3231   to `KSPSetOperators()`/`PCSetOperators()`.
3232 
3233   For the Schur complement preconditioner if
3234   ```{math}
3235     J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right]
3236   ```
3237 
3238   the preconditioner using `full` factorization is logically
3239   ```{math}
3240     \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) A_{01} \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right]
3241       ```
3242   where the action of $\text{inv}(A_{00})$ is applied using the KSP solver with prefix `-fieldsplit_0_`.  $S$ is the Schur complement
3243   ```{math}
3244      S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01}
3245   ```
3246   which is usually dense and not stored explicitly.  The action of $\text{ksp}(S)$ is computed using the KSP solver with prefix `-fieldsplit_splitname_` (where `splitname` was given
3247   in providing the SECOND split or 1 if not given). For `PCFieldSplitGetSubKSP()` when field number is 0,
3248   it returns the `KSP` associated with `-fieldsplit_0_` while field number 1 gives `-fieldsplit_1_` KSP. By default
3249   $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$.
3250 
3251   The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above,
3252   `diag` gives
3253   ```{math}
3254     \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\  0 & -\text{ksp}(S) \end{array}\right]
3255   ```
3256   Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$  so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip
3257   can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of
3258   ```{math}
3259     \left[\begin{array}{cc} A_{00} & 0 \\  A_{10} & S \end{array}\right]
3260   ```
3261   where the inverses of A_{00} and S are applied using KSPs. The upper factorization is the inverse of
3262   ```{math}
3263     \left[\begin{array}{cc} A_{00} & A_{01} \\  0 & S \end{array}\right]
3264   ```
3265   where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s.
3266 
3267   If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS`
3268   is used automatically for a second submatrix.
3269 
3270   The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1.
3271   Generally it should be used with the `MATAIJ` or `MATNEST` `MatType`
3272 
3273   The forms of these preconditioners are closely related, if not identical, to forms derived as "Distributive Iterations", see,
3274   for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`wesseling2009`.
3275   One can also use `PCFIELDSPLIT` inside a smoother resulting in "Distributive Smoothers".
3276 
3277   See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`.
3278 
3279   The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the
3280   residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables.
3281 
3282   The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape
3283   ```{math}
3284     \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right]
3285   ```
  with $A_{00}$ positive semi-definite. The implementation follows {cite}`arioli2013`. Therein, we choose $N := 1/\nu \, I$ and the $(1,1)$-block of the matrix is modified to $H = A_{00} + \nu A_{01} A_{01}'$.
3287   A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`.
3288 
3289   Developer Note:
3290   The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their
3291   user API.
3292 
3293 .seealso: [](sec_block_matrices), `PC`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`,
3294           `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`,
3295           `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`,
3296           `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()`
3297 M*/
3298 
3299 PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc)
3300 {
3301   PC_FieldSplit *jac;
3302 
3303   PetscFunctionBegin;
3304   PetscCall(PetscNew(&jac));
3305 
3306   jac->bs                 = -1;
3307   jac->nsplits            = 0;
3308   jac->type               = PC_COMPOSITE_MULTIPLICATIVE;
3309   jac->schurpre           = PC_FIELDSPLIT_SCHUR_PRE_USER; /* Try user preconditioner first, fall back on diagonal */
3310   jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL;
3311   jac->schurscale         = -1.0;
3312   jac->dm_splits          = PETSC_TRUE;
3313   jac->detect             = PETSC_FALSE;
3314   jac->gkbtol             = 1e-5;
3315   jac->gkbdelay           = 5;
3316   jac->gkbnu              = 1;
3317   jac->gkbmaxit           = 100;
3318   jac->gkbmonitor         = PETSC_FALSE;
3319   jac->coordinates_set    = PETSC_FALSE;
3320 
3321   pc->data = (void *)jac;
3322 
3323   pc->ops->apply           = PCApply_FieldSplit;
3324   pc->ops->applytranspose  = PCApplyTranspose_FieldSplit;
3325   pc->ops->setup           = PCSetUp_FieldSplit;
3326   pc->ops->reset           = PCReset_FieldSplit;
3327   pc->ops->destroy         = PCDestroy_FieldSplit;
3328   pc->ops->setfromoptions  = PCSetFromOptions_FieldSplit;
3329   pc->ops->view            = PCView_FieldSplit;
3330   pc->ops->applyrichardson = NULL;
3331 
3332   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit));
3333   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
3334   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit));
3335   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit));
3336   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit));
3337   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit));
3338   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit));
3339   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit));
3340   PetscFunctionReturn(PETSC_SUCCESS);
3341 }
3342