xref: /petsc/src/ksp/pc/impls/fieldsplit/fieldsplit.c (revision cac3574093404e1e5401e84b5847fc709f0d30b9)
1 #include <petsc/private/pcimpl.h>  /*I "petscpc.h" I*/
2 #include <petsc/private/kspimpl.h> /*  This is needed to provide the appropriate PETSC_EXTERN for KSP_Solve_FS ....*/
3 #include <petscdm.h>
4 
/* String names for the Schur preconditioner-source and factorization enums; the trailing
   entries (enum type name, option prefix, NULL terminator) follow the PETSc convention for
   enum string tables consumed by the options database / viewers */
const char *const PCFieldSplitSchurPreTypes[]  = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL};
const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL};

/* Logging events for the sub-solves performed by the fieldsplit Schur/GKB algorithms (registered elsewhere) */
PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4;
9 
typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
/* One node in the linked list of splits; each split carries its own solver, index sets, and work vectors */
struct _PC_FieldSplitLink {
  KSP               ksp;                 /* Solver for this split's diagonal block */
  Vec               x, y, z;             /* Work vectors sized to this split */
  char             *splitname;           /* Name of the split, used for options prefixes and viewing */
  PetscInt          nfields;             /* Number of entries in fields[]/fields_col[] */
  PetscInt         *fields, *fields_col; /* Row and column field indices (used when the split is defined by fields, not by an IS) */
  VecScatter        sctx;                /* Scatter between the full vector and this split's subvector */
  IS                is, is_col;          /* Row/column index sets selecting this split from the full problem */
  PC_FieldSplitLink next, previous;      /* Doubly-linked list of splits */
  PetscLogEvent     event;               /* Logging event for this split's solve */

  /* Used only when setting coordinates with PCSetCoordinates */
  PetscInt   dim;
  PetscInt   ndofs;
  PetscReal *coords;
};
27 
/* Private data for PCFIELDSPLIT: the list of splits plus state for the Schur-complement and
   Golub-Kahan bidiagonalization variants */
typedef struct {
  PCCompositeType type;         /* How the split solves are composed (additive, multiplicative, Schur, GKB, ...) */
  PetscBool       defaultsplit; /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */
  PetscBool       splitdefined; /* Flag is set after the splits have been defined, to prevent more splits from being added */
  PetscInt        bs;           /* Block size for IS and Mat structures */
  PetscInt        nsplits;      /* Number of field divisions defined */
  Vec            *x, *y, w1, w2; /* Work vectors: one x/y per split, plus two shared work vectors */
  Mat            *mat;    /* The diagonal block for each split */
  Mat            *pmat;   /* The preconditioning diagonal block for each split */
  Mat            *Afield; /* The rows of the matrix associated with each split */
  PetscBool       issetup; /* Set once PCSetUp_FieldSplit() has built the per-split index sets and matrices */

  /* Only used when Schur complement preconditioning is used */
  Mat                       B;          /* The (0,1) block */
  Mat                       C;          /* The (1,0) block */
  Mat                       schur;      /* The Schur complement S = A11 - A10 A00^{-1} A01, the KSP here, kspinner, is H_1 in [El08] */
  Mat                       schurp;     /* Assembled approximation to S built by MatSchurComplement to be used as a preconditioning matrix when solving with S */
  Mat                       schur_user; /* User-provided preconditioning matrix for the Schur complement */
  PCFieldSplitSchurPreType  schurpre;   /* Determines which preconditioning matrix is used for the Schur complement */
  PCFieldSplitSchurFactType schurfactorization; /* Which block factorization (DIAG/LOWER/UPPER/FULL) is applied */
  KSP                       kspschur;   /* The solver for S */
  KSP                       kspupper;   /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */
  PetscScalar               schurscale; /* Scaling factor for the Schur complement solution with DIAG factorization */

  /* Only used when Golub-Kahan bidiagonalization preconditioning is used */
  Mat          H;           /* The modified matrix H = A00 + nu*A01*A01'              */
  PetscReal    gkbtol;      /* Stopping tolerance for lower bound estimate            */
  PetscInt     gkbdelay;    /* The delay window for the stopping criterion            */
  PetscReal    gkbnu;       /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */
  PetscInt     gkbmaxit;    /* Maximum number of iterations for outer loop            */
  PetscBool    gkbmonitor;  /* Monitor for gkb iterations and the lower bound error   */
  PetscViewer  gkbviewer;   /* Viewer context for gkbmonitor                          */
  Vec          u, v, d, Hu; /* Work vectors for the GKB algorithm                     */
  PetscScalar *vecz;        /* Contains intermediate values, eg for lower bound       */

  PC_FieldSplitLink head;               /* Head of the linked list of splits */
  PetscBool         isrestrict;       /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */
  PetscBool         suboptionsset;    /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */
  PetscBool         dm_splits;        /* Whether to use DMCreateFieldDecomposition() whenever possible */
  PetscBool         diag_use_amat;    /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         detect;           /* Whether to form 2-way split by finding zero diagonal entries */
  PetscBool         coordinates_set;  /* Whether PCSetCoordinates has been called */
} PC_FieldSplit;
72 
73 /*
74     Note:
75     there is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit instead of
76    inside PC_FieldSplitLink, just historical. If you want to be able to add new fields after already using the
77    PC you could change this.
78 */
79 
80 /* This helper is so that setting a user-provided preconditioning matrix is orthogonal to choosing to use it.  This way the
81 * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices. */
82 static Mat FieldSplitSchurPre(PC_FieldSplit *jac)
83 {
84   switch (jac->schurpre) {
85   case PC_FIELDSPLIT_SCHUR_PRE_SELF:
86     return jac->schur;
87   case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
88     return jac->schurp;
89   case PC_FIELDSPLIT_SCHUR_PRE_A11:
90     return jac->pmat[1];
91   case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */
92   case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */
93   default:
94     return jac->schur_user ? jac->schur_user : jac->pmat[1];
95   }
96 }
97 
98 #include <petscdraw.h>
/* Viewer for the plain (additive/multiplicative) fieldsplit PC: for ASCII viewers prints the
   composition type, each split's field list (or "Defined by IS"), and each split's KSP; for
   draw viewers lays the split KSPs out side by side. A viewer has exactly one type, so at
   most one of the two branches runs and ilink still points at the head in the draw branch. */
static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head; /* walk the linked list of splits */

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for each split is in the following KSP objects:\n"));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        /* Split defined by a list of field numbers: print them comma-separated on one line
           (tabs disabled so the continuation prints flush) */
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      PetscCall(KSPView(ilink->ksp, viewer));
      ilink = ilink->next;
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    /* Divide the available horizontal space evenly among the splits, centered on the current point */
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
156 
/* Viewer for the Schur-complement variant of PCFIELDSPLIT: reports the factorization type,
   which matrix is used to precondition S (per jac->schurpre), the split definitions, and the
   A00 / upper-A00 / Schur KSPs. For draw viewers, the factorization and preconditioner labels
   are drawn as boxed strings above the sub-KSP drawings. */
static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer)
{
  PC_FieldSplit             *jac = (PC_FieldSplit *)pc->data;
  PetscBool                  iascii, isdraw;
  PetscInt                   i, j;
  PC_FieldSplitLink          ilink = jac->head;
  MatSchurComplementAinvType atype;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    /* Describe where the preconditioner for the Schur complement comes from */
    switch (jac->schurpre) {
    case PC_FIELDSPLIT_SCHUR_PRE_SELF:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from S itself\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
      /* jac->schur may not exist yet (before setup), hence the guard */
      if (jac->schur) {
        PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype));
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's "))));
      }
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_A11:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_FULL:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from the exact Schur complement\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_USER:
      /* USER falls back to A11 when no user matrix was supplied (matches FieldSplitSchurPre()) */
      if (jac->schur_user) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from user provided matrix\n"));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      }
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre);
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Split info:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      ilink = ilink->next;
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->head) {
      PetscCall(KSPView(jac->head->ksp, viewer));
    } else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    PetscCall(PetscViewerASCIIPopTab(viewer));
    /* A separate upper-triangular-factor solver only exists when it differs from the A00 solver */
    if (jac->head && jac->kspupper != jac->head->ksp) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->kspschur) {
      PetscCall(KSPView(jac->kspschur, viewer));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    }
    PetscCall(PetscViewerASCIIPopTab(viewer));
    PetscCall(PetscViewerASCIIPopTab(viewer));
  } else if (isdraw && jac->head) {
    PetscDraw draw;
    PetscReal x, y, w, wd, h;
    PetscInt  cnt = 2; /* A00 solver + Schur solver; +1 below if a distinct upper solver exists */
    char      str[32];

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (jac->kspupper != jac->head->ksp) cnt++;
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (cnt + 1);

    PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    /* Label which matrix preconditions S; USER without a user matrix displays as A11 */
    if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11]));
    } else {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre]));
    }
    PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    x = x - wd * (cnt - 1) / 2.0;

    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->head->ksp, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
    if (jac->kspupper != jac->head->ksp) {
      x += wd;
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(jac->kspupper, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
    }
    x += wd;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->kspschur, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
280 
/* Viewer for the Golub-Kahan bidiagonalization variant of PCFIELDSPLIT: prints the GKB
   parameters (tolerance, delay, max iterations) and the solver for the augmented matrix
   H = A00 + nu*A01*A01' (the first split's KSP); draw viewers show each split's KSP. */
static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));

    PetscCall(PetscViewerASCIIPrintf(viewer, "  Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for H = A00 + nu*A01*A01' matrix:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));

    /* Only split 0 (the H solve) is reported in ASCII mode */
    if (ilink->fields) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields "));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      for (j = 0; j < ilink->nfields; j++) {
        if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
        PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
      }
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n"));
    }
    PetscCall(KSPView(ilink->ksp, viewer));

    PetscCall(PetscViewerASCIIPopTab(viewer));
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    /* Divide the available horizontal space evenly among the splits, centered on the current point */
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
341 
342 /* Precondition: jac->bs is set to a meaningful value */
/* Reads splits from the options database: scans -pc_fieldsplit_<i>_fields (and the optional
   matching _fields_col) for i = 0, 1, ... until the first index with no option set, creating
   one split per option found. If any splits were created this way, marks them as defined so
   later PCFieldSplitSetFields()/SetIS() calls cannot add more (command line wins over code). */
static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscInt       i, nfields, *ifields, nfields_col, *ifields_col;
  PetscBool      flg, flg_col;
  char           optionname[128], splitname[8], optionname_col[128];

  PetscFunctionBegin;
  /* jac->bs is an upper bound on how many fields one split can list */
  PetscCall(PetscMalloc1(jac->bs, &ifields));
  PetscCall(PetscMalloc1(jac->bs, &ifields_col));
  for (i = 0, flg = PETSC_TRUE;; i++) {
    PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
    PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
    PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i));
    /* nfields/nfields_col are in-out: capacity on input, number of values read on output */
    nfields     = jac->bs;
    nfields_col = jac->bs;
    PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
    PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col));
    if (!flg) break; /* first missing row option terminates the scan */
    else if (flg && !flg_col) {
      /* no column option: use the same fields for rows and columns */
      PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
      PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields));
    } else {
      PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
      PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match");
      PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col));
    }
  }
  if (i > 0) {
    /* Makes command-line setting of splits take precedence over setting them in code.
       Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would
       create new splits, which would probably not be what the user wanted. */
    jac->splitdefined = PETSC_TRUE;
  }
  PetscCall(PetscFree(ifields));
  PetscCall(PetscFree(ifields_col));
  PetscFunctionReturn(PETSC_SUCCESS);
}
381 
/* Ensures at least two splits are defined before setup, trying in order: a DM field
   decomposition (possibly regrouped by -pc_fieldsplit_<i>_fields), zero-diagonal detection
   (-pc_fieldsplit_detect_coupling / jac->detect), runtime options, MATNEST structure, and
   finally one split per block-size field. If exactly one split was given, the second is
   created as its complement. Errors if fewer than two splits result. */
static PetscErrorCode PCFieldSplitSetDefaults(PC pc)
{
  PC_FieldSplit    *jac                = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink              = jac->head;
  PetscBool         fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE;
  PetscInt          i;

  PetscFunctionBegin;
  /*
   Kinda messy, but at least this now uses DMCreateFieldDecomposition().
   Should probably be rewritten.
   */
  if (!ilink) {
    /* No splits defined yet: derive them */
    PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL));
    if (pc->dm && jac->dm_splits && !jac->detect && !coupling) {
      /* Use the DM's field decomposition, optionally regrouped via command-line options */
      PetscInt  numFields, f, i, j;
      char    **fieldNames;
      IS       *fields;
      DM       *dms;
      DM        subdm[128];
      PetscBool flg;

      PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms));
      /* Allow the user to prescribe the splits */
      for (i = 0, flg = PETSC_TRUE;; i++) {
        PetscInt ifields[128];
        IS       compField;
        char     optionname[128], splitname[8];
        PetscInt nfields = numFields;

        /* NOTE(review): subdm[] is fixed at 128 but indexed by the split counter i, which the
           numFields <= 128 check below does not bound — confirm the split count cannot exceed 128 */
        PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
        PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
        if (!flg) break;
        PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields);
        PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i]));
        if (nfields == 1) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField));
        } else {
          PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
          PetscCall(PCFieldSplitSetIS(pc, splitname, compField));
        }
        PetscCall(ISDestroy(&compField));
        /* Release the decomposition entries consumed by this split */
        for (j = 0; j < nfields; ++j) {
          f = ifields[j];
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      }
      if (i == 0) {
        /* No user-prescribed splits: one split per DM field */
        for (f = 0; f < numFields; ++f) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f]));
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      } else {
        /* Replace the per-field DMs with the sub-DMs created for the prescribed splits.
           NOTE(review): fieldNames[f]/fields[f] entries not consumed by any prescribed split
           appear to be left unfreed here — confirm whether that leak is intentional */
        for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j));
        PetscCall(PetscFree(dms));
        PetscCall(PetscMalloc1(i, &dms));
        for (j = 0; j < i; ++j) dms[j] = subdm[j];
      }
      PetscCall(PetscFree(fieldNames));
      PetscCall(PetscFree(fields));
      if (dms) {
        PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n"));
        /* Attach each split's DM to its KSP (inactive, so the KSP doesn't use it to build operators) */
        for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) {
          const char *prefix;
          PetscCall(PetscObjectGetOptionsPrefix((PetscObject)ilink->ksp, &prefix));
          PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dms[i], prefix));
          PetscCall(KSPSetDM(ilink->ksp, dms[i]));
          PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE));
          PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0));
          PetscCall(DMDestroy(&dms[i]));
        }
        PetscCall(PetscFree(dms));
      }
    } else {
      /* No DM (or DM splits disabled): fall back on block size and matrix structure */
      if (jac->bs <= 0) {
        if (pc->pmat) {
          PetscCall(MatGetBlockSize(pc->pmat, &jac->bs));
        } else jac->bs = 1;
      }

      if (jac->detect) {
        /* 2-way split: rows with zero diagonal vs. the rest (e.g. saddle-point constraints) */
        IS       zerodiags, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->diag_use_amat) {
          PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags));
        } else {
          PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags));
        }
        PetscCall(ISComplement(zerodiags, nmin, nmax, &rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags));
        PetscCall(ISDestroy(&zerodiags));
        PetscCall(ISDestroy(&rest));
      } else if (coupling) {
        /* 2-way split: rows with off-block-diagonal entries vs. everything (split 0 is all rows) */
        IS       coupling, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->offdiag_use_amat) {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling));
        } else {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling));
        }
        PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest));
        PetscCall(ISSetIdentity(rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", coupling));
        PetscCall(ISDestroy(&coupling));
        PetscCall(ISDestroy(&rest));
      } else {
        PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL));
        if (!fieldsplit_default) {
          /* Allow user to set fields from command line,  if bs was known at the time of PCSetFromOptions_FieldSplit()
           then it is set there. This is not ideal because we should only have options set in XXSetFromOptions(). */
          PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
          if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
        }
        if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) {
          /* Last resort: derive splits from a MATNEST structure, or one split per field of the block size */
          Mat       M = pc->pmat;
          PetscBool isnest;

          PetscCall(PetscInfo(pc, "Using default splitting of fields\n"));
          PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest));
          if (!isnest) {
            M = pc->mat;
            PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest));
          }
          if (isnest) {
            IS      *fields;
            PetscInt nf;

            PetscCall(MatNestGetSize(M, &nf, NULL));
            PetscCall(PetscMalloc1(nf, &fields));
            PetscCall(MatNestGetISs(M, fields, NULL));
            for (i = 0; i < nf; i++) PetscCall(PCFieldSplitSetIS(pc, NULL, fields[i]));
            PetscCall(PetscFree(fields));
          } else {
            for (i = 0; i < jac->bs; i++) {
              char splitname[8];
              PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
              PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i));
            }
            jac->defaultsplit = PETSC_TRUE;
          }
        }
      }
    }
  } else if (jac->nsplits == 1) {
    /* Exactly one split supplied: build the second as its complement */
    IS       is2;
    PetscInt nmin, nmax;

    PetscCheck(ilink->is, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()");
    PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
    PetscCall(ISComplement(ilink->is, nmin, nmax, &is2));
    PetscCall(PCFieldSplitSetIS(pc, "1", is2));
    PetscCall(ISDestroy(&is2));
  }

  PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits);
  PetscFunctionReturn(PETSC_SUCCESS);
}
547 
548 static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu)
549 {
550   Mat       BT, T;
551   PetscReal nrmT, nrmB;
552 
553   PetscFunctionBegin;
554   PetscCall(MatHermitianTranspose(C, MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */
555   PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN));
556   PetscCall(MatNorm(T, NORM_1, &nrmT));
557   PetscCall(MatNorm(B, NORM_1, &nrmB));
558   PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/hermitian, GKB is not applicable.");
559 
560   /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */
561   /* setting N := 1/nu*I in [Ar13].                                                 */
562   PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT));
563   PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_DEFAULT, H)); /* H = A01*A01'          */
564   PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN));        /* H = A00 + nu*A01*A01' */
565 
566   PetscCall(MatDestroy(&BT));
567   PetscCall(MatDestroy(&T));
568   PetscFunctionReturn(PETSC_SUCCESS);
569 }
570 
571 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *option[], const char *value[], PetscBool *flg);
572 
573 static PetscErrorCode PCSetUp_FieldSplit(PC pc)
574 {
575   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
576   PC_FieldSplitLink ilink;
577   PetscInt          i, nsplit;
578   PetscBool         sorted, sorted_col;
579 
580   PetscFunctionBegin;
581   pc->failedreason = PC_NOERROR;
582   PetscCall(PCFieldSplitSetDefaults(pc));
583   nsplit = jac->nsplits;
584   ilink  = jac->head;
585 
586   /* get the matrices for each split */
587   if (!jac->issetup) {
588     PetscInt rstart, rend, nslots, bs;
589 
590     jac->issetup = PETSC_TRUE;
591 
592     /* This is done here instead of in PCFieldSplitSetFields() because may not have matrix at that point */
593     if (jac->defaultsplit || !ilink->is) {
594       if (jac->bs <= 0) jac->bs = nsplit;
595     }
596 
597     /*  MatCreateSubMatrix() for [S]BAIJ matrices can only work if the indices include entire blocks of the matrix */
598     PetscCall(MatGetBlockSize(pc->pmat, &bs));
599     if (bs > 1 && (jac->bs <= bs || jac->bs % bs)) {
600       PetscBool blk;
601 
602       PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &blk, MATBAIJ, MATSBAIJ, MATSEQBAIJ, MATSEQSBAIJ, MATMPIBAIJ, MATMPISBAIJ, NULL));
603       PetscCheck(!blk, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Cannot use MATBAIJ with PCFIELDSPLIT and currently set matrix and PC blocksizes");
604     }
605 
606     bs = jac->bs;
607     PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend));
608     nslots = (rend - rstart) / bs;
609     for (i = 0; i < nsplit; i++) {
610       if (jac->defaultsplit) {
611         PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is));
612         PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
613       } else if (!ilink->is) {
614         if (ilink->nfields > 1) {
615           PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col;
616           PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii));
617           PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj));
618           for (j = 0; j < nslots; j++) {
619             for (k = 0; k < nfields; k++) {
620               ii[nfields * j + k] = rstart + bs * j + fields[k];
621               jj[nfields * j + k] = rstart + bs * j + fields_col[k];
622             }
623           }
624           PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is));
625           PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col));
626           PetscCall(ISSetBlockSize(ilink->is, nfields));
627           PetscCall(ISSetBlockSize(ilink->is_col, nfields));
628         } else {
629           PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is));
630           PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col));
631         }
632       }
633       PetscCall(ISSorted(ilink->is, &sorted));
634       if (ilink->is_col) PetscCall(ISSorted(ilink->is_col, &sorted_col));
635       PetscCheck(sorted && sorted_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Fields must be sorted when creating split");
636       ilink = ilink->next;
637     }
638   }
639 
640   ilink = jac->head;
641   if (!jac->pmat) {
642     Vec xtmp;
643 
644     PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL));
645     PetscCall(PetscMalloc1(nsplit, &jac->pmat));
646     PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y));
647     for (i = 0; i < nsplit; i++) {
648       MatNullSpace sp;
649 
650       /* Check for preconditioning matrix attached to IS */
651       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i]));
652       if (jac->pmat[i]) {
653         PetscCall(PetscObjectReference((PetscObject)jac->pmat[i]));
654         if (jac->type == PC_COMPOSITE_SCHUR) {
655           jac->schur_user = jac->pmat[i];
656 
657           PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
658         }
659       } else {
660         const char *prefix;
661         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i]));
662         PetscCall(MatGetOptionsPrefix(jac->pmat[i], &prefix));
663         if (!prefix) {
664           PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix));
665           PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix));
666         }
667         PetscCall(MatSetFromOptions(jac->pmat[i]));
668         PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view"));
669       }
670       /* create work vectors for each split */
671       PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i]));
672       ilink->x = jac->x[i];
673       ilink->y = jac->y[i];
674       ilink->z = NULL;
675       /* compute scatter contexts needed by multiplicative versions and non-default splits */
676       PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx));
677       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp));
678       if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp));
679       ilink = ilink->next;
680     }
681     PetscCall(VecDestroy(&xtmp));
682   } else {
683     MatReuse      scall;
684     MatNullSpace *nullsp = NULL;
685 
686     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
687       PetscCall(MatGetNullSpaces(nsplit, jac->pmat, &nullsp));
688       for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i]));
689       scall = MAT_INITIAL_MATRIX;
690     } else scall = MAT_REUSE_MATRIX;
691 
692     for (i = 0; i < nsplit; i++) {
693       Mat pmat;
694 
695       /* Check for preconditioning matrix attached to IS */
696       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat));
697       if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i]));
698       ilink = ilink->next;
699     }
700     if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->pmat, &nullsp));
701   }
702   if (jac->diag_use_amat) {
703     ilink = jac->head;
704     if (!jac->mat) {
705       PetscCall(PetscMalloc1(nsplit, &jac->mat));
706       for (i = 0; i < nsplit; i++) {
707         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i]));
708         ilink = ilink->next;
709       }
710     } else {
711       MatReuse      scall;
712       MatNullSpace *nullsp = NULL;
713 
714       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
715         PetscCall(MatGetNullSpaces(nsplit, jac->mat, &nullsp));
716         for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i]));
717         scall = MAT_INITIAL_MATRIX;
718       } else scall = MAT_REUSE_MATRIX;
719 
720       for (i = 0; i < nsplit; i++) {
721         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i]));
722         ilink = ilink->next;
723       }
724       if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->mat, &nullsp));
725     }
726   } else {
727     jac->mat = jac->pmat;
728   }
729 
730   /* Check for null space attached to IS */
731   ilink = jac->head;
732   for (i = 0; i < nsplit; i++) {
733     MatNullSpace sp;
734 
735     PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp));
736     if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp));
737     ilink = ilink->next;
738   }
739 
740   if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) {
741     /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */
742     /* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? */
743     ilink = jac->head;
744     if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) {
745       /* special case need where Afield[0] is not needed and only certain columns of Afield[1] are needed since update is only on those rows of the solution */
746       if (!jac->Afield) {
747         PetscCall(PetscCalloc1(nsplit, &jac->Afield));
748         if (jac->offdiag_use_amat) {
749           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
750         } else {
751           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
752         }
753       } else {
754         MatReuse scall;
755 
756         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
757           PetscCall(MatDestroy(&jac->Afield[1]));
758           scall = MAT_INITIAL_MATRIX;
759         } else scall = MAT_REUSE_MATRIX;
760 
761         if (jac->offdiag_use_amat) {
762           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
763         } else {
764           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
765         }
766       }
767     } else {
768       if (!jac->Afield) {
769         PetscCall(PetscMalloc1(nsplit, &jac->Afield));
770         for (i = 0; i < nsplit; i++) {
771           if (jac->offdiag_use_amat) {
772             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
773           } else {
774             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
775           }
776           ilink = ilink->next;
777         }
778       } else {
779         MatReuse scall;
780         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
781           for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i]));
782           scall = MAT_INITIAL_MATRIX;
783         } else scall = MAT_REUSE_MATRIX;
784 
785         for (i = 0; i < nsplit; i++) {
786           if (jac->offdiag_use_amat) {
787             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i]));
788           } else {
789             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, &jac->Afield[i]));
790           }
791           ilink = ilink->next;
792         }
793       }
794     }
795   }
796 
797   if (jac->type == PC_COMPOSITE_SCHUR) {
798     IS          ccis;
799     PetscBool   isset, isspd;
800     PetscInt    rstart, rend;
801     char        lscname[256];
802     PetscObject LSC_L;
803     PetscBool   set, flg;
804 
805     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields");
806 
807     /* If pc->mat is SPD, don't scale by -1 the Schur complement */
808     if (jac->schurscale == (PetscScalar)-1.0) {
809       PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd));
810       jac->schurscale = (isset && isspd) ? 1.0 : -1.0;
811     }
812 
813     /* When extracting off-diagonal submatrices, we take complements from this range */
814     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
815     PetscCall(PetscObjectTypeCompareAny(jac->offdiag_use_amat ? (PetscObject)pc->mat : (PetscObject)pc->pmat, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
816 
817     if (jac->schur) {
818       KSP      kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper;
819       MatReuse scall;
820 
821       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
822         scall = MAT_INITIAL_MATRIX;
823         PetscCall(MatDestroy(&jac->B));
824         PetscCall(MatDestroy(&jac->C));
825       } else scall = MAT_REUSE_MATRIX;
826 
827       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
828       ilink = jac->head;
829       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
830       if (jac->offdiag_use_amat) {
831         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->B));
832       } else {
833         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->B));
834       }
835       PetscCall(ISDestroy(&ccis));
836       if (!flg) {
837         ilink = ilink->next;
838         PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
839         if (jac->offdiag_use_amat) {
840           PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->C));
841         } else {
842           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->C));
843         }
844         PetscCall(ISDestroy(&ccis));
845       } else {
846         PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &set, &flg));
847         if (set && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
848         else PetscCall(MatCreateTranspose(jac->B, &jac->C));
849       }
850       PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
851       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) {
852         PetscCall(MatDestroy(&jac->schurp));
853         PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
854       } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) {
855         PetscCall(MatDestroy(&jac->schur_user));
856         PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
857       }
858       if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0]));
859       if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0]));
860       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
861     } else {
862       const char  *Dprefix;
863       char         schurprefix[256], schurmatprefix[256];
864       char         schurtestoption[256];
865       MatNullSpace sp;
866       KSP          kspt;
867 
868       /* extract the A01 and A10 matrices */
869       ilink = jac->head;
870       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
871       if (jac->offdiag_use_amat) {
872         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
873       } else {
874         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
875       }
876       PetscCall(ISDestroy(&ccis));
877       ilink = ilink->next;
878       if (!flg) {
879         PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
880         if (jac->offdiag_use_amat) {
881           PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
882         } else {
883           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
884         }
885         PetscCall(ISDestroy(&ccis));
886       } else {
887         PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &set, &flg));
888         if (set && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
889         else PetscCall(MatCreateTranspose(jac->B, &jac->C));
890       }
891       /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */
892       PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur));
893       PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT));
894       PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
895       PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
896       PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix));
897       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt));
898       PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix));
899 
900       /* Note: this is not true in general */
901       PetscCall(MatGetNullSpace(jac->mat[1], &sp));
902       if (sp) PetscCall(MatSetNullSpace(jac->schur, sp));
903 
904       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname));
905       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
906       if (flg) {
907         DM  dmInner;
908         KSP kspInner;
909         PC  pcInner;
910 
911         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
912         PetscCall(KSPReset(kspInner));
913         PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0]));
914         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
915         /* Indent this deeper to emphasize the "inner" nature of this solver. */
916         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2));
917         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2));
918         PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix));
919 
920         /* Set DM for new solver */
921         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
922         PetscCall(KSPSetDM(kspInner, dmInner));
923         PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE));
924 
925         /* Defaults to PCKSP as preconditioner */
926         PetscCall(KSPGetPC(kspInner, &pcInner));
927         PetscCall(PCSetType(pcInner, PCKSP));
928         PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp));
929       } else {
930         /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or
931           * PCFieldSplitSetIS_FieldSplit. We don't want KSPPREONLY because it makes the Schur complement inexact,
932           * preventing Schur complement reduction to be an accurate solve. Usually when an iterative solver is used for
933           * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make
934           * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used
935           * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. */
936         PetscCall(KSPSetType(jac->head->ksp, KSPGMRES));
937         PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp));
938       }
939       PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0]));
940       PetscCall(KSPSetFromOptions(jac->head->ksp));
941       PetscCall(MatSetFromOptions(jac->schur));
942 
943       PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg));
944       if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */
945         KSP kspInner;
946         PC  pcInner;
947 
948         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
949         PetscCall(KSPGetPC(kspInner, &pcInner));
950         PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg));
951         if (flg) {
952           KSP ksp;
953 
954           PetscCall(PCKSPGetKSP(pcInner, &ksp));
955           if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE));
956         }
957       }
958       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname));
959       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
960       if (flg) {
961         DM dmInner;
962 
963         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
964         PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper));
965         PetscCall(KSPSetNestLevel(jac->kspupper, pc->kspnestlevel));
966         PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure));
967         PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix));
968         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1));
969         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1));
970         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
971         PetscCall(KSPSetDM(jac->kspupper, dmInner));
972         PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE));
973         PetscCall(KSPSetFromOptions(jac->kspupper));
974         PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0]));
975         PetscCall(VecDuplicate(jac->head->x, &jac->head->z));
976       } else {
977         jac->kspupper = jac->head->ksp;
978         PetscCall(PetscObjectReference((PetscObject)jac->head->ksp));
979       }
980 
981       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
982       PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur));
983       PetscCall(KSPSetNestLevel(jac->kspschur, pc->kspnestlevel));
984       PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure));
985       PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1));
986       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) {
987         PC pcschur;
988         PetscCall(KSPGetPC(jac->kspschur, &pcschur));
989         PetscCall(PCSetType(pcschur, PCNONE));
990         /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */
991       } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) {
992         PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
993       }
994       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
995       PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix));
996       PetscCall(KSPSetOptionsPrefix(jac->kspschur, Dprefix));
997       /* propagate DM */
998       {
999         DM sdm;
1000         PetscCall(KSPGetDM(jac->head->next->ksp, &sdm));
1001         if (sdm) {
1002           PetscCall(KSPSetDM(jac->kspschur, sdm));
1003           PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE));
1004         }
1005       }
1006       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
1007       /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */
1008       PetscCall(KSPSetFromOptions(jac->kspschur));
1009     }
1010     PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY));
1011     PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY));
1012 
1013     /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */
1014     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname));
1015     PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
1016     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
1017     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", (PetscObject)LSC_L));
1018     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname));
1019     PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
1020     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
1021     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", (PetscObject)LSC_L));
1022   } else if (jac->type == PC_COMPOSITE_GKB) {
1023     IS       ccis;
1024     PetscInt rstart, rend;
1025 
1026     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields");
1027 
1028     ilink = jac->head;
1029 
1030     /* When extracting off-diagonal submatrices, we take complements from this range */
1031     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
1032 
1033     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1034     if (jac->offdiag_use_amat) {
1035       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1036     } else {
1037       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1038     }
1039     PetscCall(ISDestroy(&ccis));
1040     /* Create work vectors for GKB algorithm */
1041     PetscCall(VecDuplicate(ilink->x, &jac->u));
1042     PetscCall(VecDuplicate(ilink->x, &jac->Hu));
1043     PetscCall(VecDuplicate(ilink->x, &jac->w2));
1044     ilink = ilink->next;
1045     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1046     if (jac->offdiag_use_amat) {
1047       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1048     } else {
1049       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1050     }
1051     PetscCall(ISDestroy(&ccis));
1052     /* Create work vectors for GKB algorithm */
1053     PetscCall(VecDuplicate(ilink->x, &jac->v));
1054     PetscCall(VecDuplicate(ilink->x, &jac->d));
1055     PetscCall(VecDuplicate(ilink->x, &jac->w1));
1056     PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu));
1057     PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz));
1058 
1059     ilink = jac->head;
1060     PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H));
1061     if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1062     /* Create gkb_monitor context */
1063     if (jac->gkbmonitor) {
1064       PetscInt tablevel;
1065       PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer));
1066       PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII));
1067       PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel));
1068       PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel));
1069       PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1));
1070     }
1071   } else {
1072     /* set up the individual splits' PCs */
1073     i     = 0;
1074     ilink = jac->head;
1075     while (ilink) {
1076       PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i]));
1077       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
1078       if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1079       i++;
1080       ilink = ilink->next;
1081     }
1082   }
1083 
1084   /* Set coordinates to the sub PC objects whenever these are set */
1085   if (jac->coordinates_set) {
1086     PC pc_coords;
1087     if (jac->type == PC_COMPOSITE_SCHUR) {
1088       // Head is first block.
1089       PetscCall(KSPGetPC(jac->head->ksp, &pc_coords));
1090       PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords));
1091       // Second one is Schur block, but its KSP object is in kspschur.
1092       PetscCall(KSPGetPC(jac->kspschur, &pc_coords));
1093       PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords));
1094     } else if (jac->type == PC_COMPOSITE_GKB) {
1095       PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB Fieldpslit preconditioner\n"));
1096     } else {
1097       ilink = jac->head;
1098       while (ilink) {
1099         PetscCall(KSPGetPC(ilink->ksp, &pc_coords));
1100         PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords));
1101         ilink = ilink->next;
1102       }
1103     }
1104   }
1105 
1106   jac->suboptionsset = PETSC_TRUE;
1107   PetscFunctionReturn(PETSC_SUCCESS);
1108 }
1109 
/* FieldSplitSplitSolveAdd - scatter the split's portion of the global vector xx
   into ilink->x, solve with the split's KSP (bracketed by event logging and
   followed by KSPCheckSolve), then scatter-ADD the sub-solution ilink->y back
   into the global vector yy.

   Written as one ||-chained expression cast to PetscErrorCode: each PETSc call
   returns 0 (PETSC_SUCCESS) on success, so evaluation short-circuits at the
   first failing call and the macro's value is nonzero exactly when some call
   failed. NOTE(review): `pc` is not a macro parameter — it is captured from
   the caller's scope (required by KSPCheckSolve), so the macro may only be
   used where a variable named `pc` is in scope. */
#define FieldSplitSplitSolveAdd(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \
                    KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE)))
1114 
1115 static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y)
1116 {
1117   PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
1118   PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
1119   KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;
1120 
1121   PetscFunctionBegin;
1122   switch (jac->schurfactorization) {
1123   case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
1124     /* [A00 0; 0 -S], positive definite, suitable for MINRES */
1125     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1126     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1127     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1128     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1129     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1130     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1131     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1132     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1133     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1134     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1135     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1136     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1137     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1138     PetscCall(VecScale(ilinkD->y, jac->schurscale));
1139     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1140     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1141     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1142     break;
1143   case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
1144     /* [A00 0; A10 S], suitable for left preconditioning */
1145     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1146     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1147     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1148     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1149     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1150     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1151     PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1152     PetscCall(VecScale(ilinkD->x, -1.));
1153     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1154     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1155     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1156     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1157     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1158     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1159     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1160     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1161     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1162     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1163     break;
1164   case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
1165     /* [A00 A01; 0 S], suitable for right preconditioning */
1166     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1167     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1168     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1169     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1170     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1171     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1172     PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1173     PetscCall(VecScale(ilinkA->x, -1.));
1174     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1175     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1176     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1177     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1178     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1179     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1180     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1181     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1182     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1183     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1184     break;
1185   case PC_FIELDSPLIT_SCHUR_FACT_FULL:
1186     /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */
1187     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1188     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1189     PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1190     PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y));
1191     PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y));
1192     PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1193     PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1194     PetscCall(VecScale(ilinkD->x, -1.0));
1195     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1196     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1197 
1198     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1199     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1200     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1201     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1202     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1203 
1204     if (kspUpper == kspA) {
1205       PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y));
1206       PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
1207       PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1208       PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1209       PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1210       PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1211     } else {
1212       PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1213       PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1214       PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1215       PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1216       PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1217       PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z));
1218       PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z));
1219       PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1220       PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
1221     }
1222     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1223     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1224     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1225   }
1226   PetscFunctionReturn(PETSC_SUCCESS);
1227 }
1228 
/* PCApplyTranspose_FieldSplit_Schur - Applies the transpose of the Schur-complement
   block-factorization preconditioner: y = P^{-T} x.

   Relative to PCApply_FieldSplit_Schur the roles of the triangular factors swap
   (the transpose of the LOWER factorization applies like an UPPER one and vice versa)
   and the off-diagonal blocks jac->B / jac->C are applied via MatMultTranspose().
   VecScatterBegin/End pairs are deliberately interleaved with the solves so that
   communication can overlap computation; do not reorder them casually. */
static PetscErrorCode PCApplyTranspose_FieldSplit_Schur(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; /* ilinkA: A00 split, ilinkD: Schur-complement split */
  KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;

  PetscFunctionBegin;
  switch (jac->schurfactorization) {
  case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
    /* [A00 0; 0 -S], positive definite, suitable for MINRES */
    /* The two splits are independent here, so both forward scatters are started first */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScale(ilinkD->y, jac->schurscale)); /* account for the scaled -S block (user-settable via PCFieldSplitSetSchurScale) */
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* transpose of the UPPER factorization: solve A00^T first, then the Schur system */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x)); /* form -B^T A00^{-T} x_A as the Schur right-hand side update */
    PetscCall(VecScale(ilinkD->x, -1.));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* transpose of the LOWER factorization: solve the Schur system first, then A00^T */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x)); /* update the A00 right-hand side with -C^T y_D */
    PetscCall(VecScale(ilinkA->x, -1.));
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* transpose of (L D U): apply U^T, then D^T, then L^T */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspUpper, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.0));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

    /* NOTE(review): kspLower is initialized to kspA above and never reassigned, so the
       first branch is always taken here; the else branch mirrors the structure of the
       forward PCApply_FieldSplit_Schur FULL case — confirm whether it is reachable. */
    if (kspLower == kspA) {
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->y));
      PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    } else {
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
      PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(KSPSolveTranspose(kspLower, ilinkA->x, ilinkA->z));
      PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->z));
      PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
    }
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1339 
/* PCApply_FieldSplit - Applies the composed fieldsplit preconditioner y = P^{-1} x
   for the additive (block Jacobi), multiplicative (block Gauss-Seidel), and symmetric
   multiplicative compositions.  The Schur and GKB compositions have their own apply
   routines; any other composition type is rejected below. */
static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head; /* linked list of splits, traversed in definition order */
  PetscInt          cnt, bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    if (jac->defaultsplit) {
      /* default strided splits: all fields share one layout, so a single strided
         gather/scatter moves every split's values at once */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* general splits: each split scatters, solves, and adds its result into y */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) {
    /* special-cased 2x2 block Gauss-Seidel: avoids the full-vector residual
       computation used in the general multiplicative branch below */
    PetscCall(VecSet(y, 0.0));
    /* solve on first block for first block variables */
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));

    /* compute the residual only onto second block variables using first block variables */
    PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x));
    ilink = ilink->next;
    PetscCall(VecScale(ilink->x, -1.0));
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));

    /* solve on second block variables */
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
    if (!jac->w1) {
      /* lazily allocate the work vectors on first application */
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
    cnt = 1; /* index into jac->Afield, advancing with the forward sweep */
    while (ilink->next) {
      ilink = ilink->next;
      /* compute the residual only over the part of the vector needed */
      PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x));
      PetscCall(VecScale(ilink->x, -1.0));
      PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
      PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
      PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    }
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* backward sweep over the splits (skipping the last, which was just solved) */
      cnt -= 2;
      while (ilink->previous) {
        ilink = ilink->previous;
        /* compute the residual only over the part of the vector needed */
        PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x));
        PetscCall(VecScale(ilink->x, -1.0));
        PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      }
    }
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1437 
/* PCApply_FieldSplit_GKB - Applies the Golub-Kahan bidiagonalization (GKB)
   preconditioner for a 2x2 block system (reference [Ar13] per the comments below).
   Iterates until the lower-bound error estimate drops below jac->gkbtol or
   jac->gkbmaxit iterations are reached.  Requires H = A00 (+ nu*B*B' when the
   augmented-Lagrangian parameter gkbnu is nonzero) to be positive definite. */
static PetscErrorCode PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; /* primal (A00) split and dual split */
  KSP               ksp = ilinkA->ksp;                         /* inner solver for H */
  Vec               u, v, Hu, d, work1, work2;
  PetscScalar       alpha, z, nrmz2, *vecz;
  PetscReal         lowbnd, nu, beta;
  PetscInt          j, iterGKB;

  PetscFunctionBegin;
  PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));

  /* work vectors are preallocated on the PC; vecz holds the gkbdelay most recent z values */
  u     = jac->u;
  v     = jac->v;
  Hu    = jac->Hu;
  d     = jac->d;
  work1 = jac->w1;
  work2 = jac->w2;
  vecz  = jac->vecz;

  /* Change RHS to comply with matrix regularization H = A + nu*B*B' */
  /* Add q = q + nu*B*b */
  if (jac->gkbnu) {
    nu = jac->gkbnu;
    PetscCall(VecScale(ilinkD->x, jac->gkbnu));
    PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */
  } else {
    /* Situation when no augmented Lagrangian is used. Then we set inner  */
    /* matrix N = I in [Ar13], and thus nu = 1.                           */
    nu = 1;
  }

  /* Transform rhs from [q,tilde{b}] to [0,b] */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y));
  PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1));
  PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x        */

  /* First step of algorithm */
  PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c)*/
  KSPCheckDot(ksp, beta);
  beta = PetscSqrtReal(nu) * beta;
  PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c      */
  PetscCall(MatMult(jac->B, v, work2));          /* u = H^{-1}*B*v      */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(KSPSolve(ksp, work2, u));
  PetscCall(KSPCheckSolve(ksp, pc, u));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u      */
  PetscCall(VecDot(Hu, u, &alpha));
  KSPCheckDot(ksp, alpha);
  PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
  alpha = PetscSqrtReal(PetscAbsScalar(alpha));
  PetscCall(VecScale(u, 1.0 / alpha));
  PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* d = v/alpha         */

  z       = beta / alpha;
  vecz[1] = z;

  /* Computation of first iterate x(1) and p(1) */
  PetscCall(VecAXPY(ilinkA->y, z, u));
  PetscCall(VecCopy(d, ilinkD->y));
  PetscCall(VecScale(ilinkD->y, -z));

  iterGKB = 1;
  lowbnd  = 2 * jac->gkbtol; /* force at least one pass through the loop below */
  if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));

  while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) {
    iterGKB += 1;
    PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */
    PetscCall(VecAXPBY(v, nu, -alpha, work1));
    PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = ||v||/sqrt(nu)     */
    beta = beta / PetscSqrtReal(nu);
    PetscCall(VecScale(v, 1.0 / beta));
    PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */
    PetscCall(MatMult(jac->H, u, Hu));
    PetscCall(VecAXPY(work2, -beta, Hu));
    PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(KSPSolve(ksp, work2, u));
    PetscCall(KSPCheckSolve(ksp, pc, u));
    PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u            */
    PetscCall(VecDot(Hu, u, &alpha));
    KSPCheckDot(ksp, alpha);
    PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
    alpha = PetscSqrtReal(PetscAbsScalar(alpha));
    PetscCall(VecScale(u, 1.0 / alpha));

    z       = -beta / alpha * z; /* z <- -beta/alpha*z    */
    vecz[0] = z;

    /* Computation of new iterate x(i+1) and p(i+1) */
    PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */
    PetscCall(VecAXPY(ilinkA->y, z, u));                   /* r = r + z*u          */
    PetscCall(VecAXPY(ilinkD->y, -z, d));                  /* p = p - z*d          */
    PetscCall(MatMult(jac->H, ilinkA->y, Hu));             /* ||u||_H = u'*H*u     */
    PetscCall(VecDot(Hu, ilinkA->y, &nrmz2));

    /* Compute Lower Bound estimate */
    if (iterGKB > jac->gkbdelay) {
      lowbnd = 0.0;
      for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]);
      lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2));
    }

    /* shift the history window of z values (most recent at index 0) */
    for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2];
    if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));
  }

  /* scatter the converged split solutions back into the global vector */
  PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1560 
/* Scatter xx into the split, apply the split's transposed solver, and ADD the result
   back into yy.  Written as a macro (not a function) so that the `pc` variable of the
   calling scope is visible to KSPCheckSolve(); the || chaining short-circuits at the
   first nonzero (error) return code. */
#define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \
                    KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE)))
1565 
/* PCApplyTranspose_FieldSplit - Applies the transpose of the composed fieldsplit
   preconditioner, y = P^{-T} x.  For the additive composition the transpose is the
   same sweep with transposed split solves; for the multiplicative compositions the
   splits are visited in the REVERSE of the forward order (and for the symmetric case
   a forward sweep followed by a backward sweep), using the full matrix pc->mat to
   form residuals. */
static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    if (jac->defaultsplit) {
      /* default strided splits: one strided gather/scatter serves all splits */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else {
    if (!jac->w1) {
      /* lazily allocate the residual work vectors on first application */
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* transpose of a symmetric operator has the same sweep pattern: forward then backward */
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->next) {
        ilink = ilink->next;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); /* w2 = x - A^T y (current residual) */
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    } else {
      /* plain multiplicative: the transpose visits the splits in reverse order */
      while (ilink->next) { /* get to last entry in linked list */
        ilink = ilink->next;
      }
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1630 
/* PCReset_FieldSplit - Releases every object the fieldsplit PC owns (split links and
   their KSPs/vectors/scatters, the extracted matrices, the Schur and GKB data) and
   returns the PC to its pre-setup state so it can be set up again.  Also called from
   PCDestroy_FieldSplit(). */
static PetscErrorCode PCReset_FieldSplit(PC pc)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;

  PetscFunctionBegin;
  /* walk the linked list of splits, freeing each link and everything it owns */
  while (ilink) {
    PetscCall(KSPDestroy(&ilink->ksp));
    PetscCall(VecDestroy(&ilink->x));
    PetscCall(VecDestroy(&ilink->y));
    PetscCall(VecDestroy(&ilink->z));
    PetscCall(VecScatterDestroy(&ilink->sctx));
    PetscCall(ISDestroy(&ilink->is));
    PetscCall(ISDestroy(&ilink->is_col));
    PetscCall(PetscFree(ilink->splitname));
    PetscCall(PetscFree(ilink->fields));
    PetscCall(PetscFree(ilink->fields_col));
    next = ilink->next; /* save before freeing the link itself */
    PetscCall(PetscFree(ilink));
    ilink = next;
  }
  jac->head = NULL;
  PetscCall(PetscFree2(jac->x, jac->y));
  /* jac->mat may alias jac->pmat; destroy it only when distinct, otherwise just
     drop the alias and let the pmat destroy below free the matrices */
  if (jac->mat && jac->mat != jac->pmat) {
    PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat));
  } else if (jac->mat) {
    jac->mat = NULL;
  }
  if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat));
  if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield));
  jac->nsplits = 0;
  PetscCall(VecDestroy(&jac->w1));
  PetscCall(VecDestroy(&jac->w2));
  /* Schur-complement related objects */
  PetscCall(MatDestroy(&jac->schur));
  PetscCall(MatDestroy(&jac->schurp));
  PetscCall(MatDestroy(&jac->schur_user));
  PetscCall(KSPDestroy(&jac->kspschur));
  PetscCall(KSPDestroy(&jac->kspupper));
  PetscCall(MatDestroy(&jac->B));
  PetscCall(MatDestroy(&jac->C));
  /* GKB related objects */
  PetscCall(MatDestroy(&jac->H));
  PetscCall(VecDestroy(&jac->u));
  PetscCall(VecDestroy(&jac->v));
  PetscCall(VecDestroy(&jac->Hu));
  PetscCall(VecDestroy(&jac->d));
  PetscCall(PetscFree(jac->vecz));
  PetscCall(PetscViewerDestroy(&jac->gkbviewer));
  jac->isrestrict = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1681 
1682 static PetscErrorCode PCDestroy_FieldSplit(PC pc)
1683 {
1684   PetscFunctionBegin;
1685   PetscCall(PCReset_FieldSplit(pc));
1686   PetscCall(PetscFree(pc->data));
1687   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
1688   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL));
1689   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL));
1690   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL));
1691   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL));
1692   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL));
1693   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL));
1694   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1695 
1696   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
1697   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
1698   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
1699   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
1700   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1701   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
1702   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
1703   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
1704   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
1705   PetscFunctionReturn(PETSC_SUCCESS);
1706 }
1707 
static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PetscInt        bs;
  PetscBool       flg;
  PC_FieldSplit  *jac = (PC_FieldSplit *)pc->data;
  PCCompositeType ctype;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options");
  PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL));
  PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg));
  if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs));
  /* -pc_use_amat on the outer PC provides the default for both Amat extraction flags */
  jac->diag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL));
  jac->offdiag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL));
  PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL));
  PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */
  PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg));
  if (flg) PetscCall(PCFieldSplitSetType(pc, ctype));
  /* Only setup fields once */
  if ((jac->bs > 0) && (jac->nsplits == 0)) {
    /* only allow user to set fields from command line if bs is already known.
       otherwise user can set them in PCFieldSplitSetDefaults() */
    PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
    if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
  }
  if (jac->type == PC_COMPOSITE_SCHUR) {
    /* deprecated spelling is read directly from the database (no PetscOptionsEnum) so it does not show in -help;
       the current -pc_fieldsplit_schur_fact_type below can still override it */
    PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg));
    if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n"));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL));
    PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL));
  } else if (jac->type == PC_COMPOSITE_GKB) {
    PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitSetGKBTol", jac->gkbtol, &jac->gkbtol, NULL));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitSetGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL));
    PetscCall(PetscOptionsBoundedReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitSetGKBNu", jac->gkbnu, &jac->gkbnu, NULL, 0.0));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", "Maximum allowed number of iterations", "PCFieldSplitSetGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL));
    PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL));
  }
  /*
    In the initial call to this routine the sub-solver data structures do not exist so we cannot call KSPSetFromOptions() on them yet.
    But after the initial setup of ALL the layers of sub-solvers is completed we do want to call KSPSetFromOptions() on the sub-solvers every time it
    is called on the outer solver in case changes were made in the options database

    But even after PCSetUp_FieldSplit() is called all the options inside the inner levels of sub-solvers may still not have been set thus we only call the KSPSetFromOptions()
    if we know that the entire stack of sub-solvers below this have been completely instantiated, we check this by seeing if any solver iterations are complete.
    Without this extra check test p2p1fetidp_olof_full and others fail with incorrect matrix types.

    There could be a negative side effect of calling the KSPSetFromOptions() below.

    If one captured the PetscObjectState of the options database one could skip these calls if the database has not changed from the previous call
  */
  if (jac->issetup) {
    PC_FieldSplitLink ilink = jac->head;
    if (jac->type == PC_COMPOSITE_SCHUR) {
      /* totalits > 0 is the "fully instantiated" test described in the comment above */
      if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper));
      if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur));
    }
    while (ilink) {
      if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp));
      ilink = ilink->next;
    }
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
1775 
1776 static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
1777 {
1778   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
1779   PC_FieldSplitLink ilink, next = jac->head;
1780   char              prefix[128];
1781   PetscInt          i;
1782 
1783   PetscFunctionBegin;
1784   if (jac->splitdefined) {
1785     PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
1786     PetscFunctionReturn(PETSC_SUCCESS);
1787   }
1788   for (i = 0; i < n; i++) {
1789     PetscCheck(fields[i] < jac->bs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", fields[i], jac->bs);
1790     PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]);
1791   }
1792   PetscCall(PetscNew(&ilink));
1793   if (splitname) {
1794     PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
1795   } else {
1796     PetscCall(PetscMalloc1(3, &ilink->splitname));
1797     PetscCall(PetscSNPrintf(ilink->splitname, 2, "%" PetscInt_FMT, jac->nsplits));
1798   }
1799   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split great than 4 gets logged in the 4th split */
1800   PetscCall(PetscMalloc1(n, &ilink->fields));
1801   PetscCall(PetscArraycpy(ilink->fields, fields, n));
1802   PetscCall(PetscMalloc1(n, &ilink->fields_col));
1803   PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n));
1804 
1805   ilink->nfields = n;
1806   ilink->next    = NULL;
1807   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
1808   PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
1809   PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
1810   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
1811   PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
1812 
1813   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
1814   PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
1815 
1816   if (!next) {
1817     jac->head       = ilink;
1818     ilink->previous = NULL;
1819   } else {
1820     while (next->next) next = next->next;
1821     next->next      = ilink;
1822     ilink->previous = next;
1823   }
1824   jac->nsplits++;
1825   PetscFunctionReturn(PETSC_SUCCESS);
1826 }
1827 
/*
  Returns the KSPs used by the Schur fieldsplit: the (1,1) split solver, the outer
  Schur complement solver, and (if distinct) the upper-factor (1,1) solver.
  When the PC is not of type PC_COMPOSITE_SCHUR, *subksp is NULL and *n is 0.
  The caller frees the returned array (not the KSPs).
*/
static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  *subksp = NULL;
  if (n) *n = 0;
  if (jac->type == PC_COMPOSITE_SCHUR) {
    PetscInt nn;

    /* jac->schur is only created during setup */
    PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()");
    PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits);
    /* a third entry is returned only when the upper-factor solver differs from the (1,1) split solver */
    nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 1 : 0);
    PetscCall(PetscMalloc1(nn, subksp));
    (*subksp)[0] = jac->head->ksp;
    (*subksp)[1] = jac->kspschur;
    if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper;
    if (n) *n = nn;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1849 
/*
  Schur variant of PCFieldSplitGetSubKSP(): entry 0 is the KSP stored inside the
  Schur complement matrix (filled by MatSchurComplementGetKSP()), entry 1 is the
  solver for the Schur complement system. The caller frees the returned array.
*/
static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()");
  PetscCall(PetscMalloc1(jac->nsplits, subksp));
  /* fills (*subksp)[0] with the KSP used inside the Schur complement */
  PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp));

  (*subksp)[1] = jac->kspschur;
  if (n) *n = jac->nsplits;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1863 
1864 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1865 {
1866   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1867   PetscInt          cnt   = 0;
1868   PC_FieldSplitLink ilink = jac->head;
1869 
1870   PetscFunctionBegin;
1871   PetscCall(PetscMalloc1(jac->nsplits, subksp));
1872   while (ilink) {
1873     (*subksp)[cnt++] = ilink->ksp;
1874     ilink            = ilink->next;
1875   }
1876   PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits);
1877   if (n) *n = jac->nsplits;
1878   PetscFunctionReturn(PETSC_SUCCESS);
1879 }
1880 
1881 /*@C
1882   PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`.
1883 
1884   Input Parameters:
1885 + pc  - the preconditioner context
1886 - isy - the index set that defines the indices to which the fieldsplit is to be restricted
1887 
1888   Level: advanced
1889 
1890   Developer Notes:
1891   It seems the resulting `IS`s will not cover the entire space, so
1892   how can they define a convergent preconditioner? Needs explaining.
1893 
1894 .seealso: [](sec_block_matrices), `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
1895 @*/
PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(isy, IS_CLASSID, 2);
  /* dispatch to the implementation composed on the PC (no-op for non-fieldsplit PCs) */
  PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1904 
/*
  Replaces each split's IS (and is_col) with its restriction to isy, shifting the
  indices so they are numbered within isy's global ordering. If a split's sub-PC is
  itself a PCFIELDSPLIT, the restriction is applied recursively with the roles of
  the split IS and isy exchanged.
*/
static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;
  PetscInt          localsize, size, sizez, i;
  const PetscInt   *ind, *indz;
  PetscInt         *indc, *indcz;
  PetscBool         flg;

  PetscFunctionBegin;
  PetscCall(ISGetLocalSize(isy, &localsize));
  /* inclusive prefix sum minus the local size = this rank's starting offset within isy */
  PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy)));
  size -= localsize;
  while (ilink) {
    IS isrl, isr;
    PC subpc;
    /* positions (local to this rank) of the split's indices within isy; entries not in isy are dropped */
    PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl));
    PetscCall(ISGetLocalSize(isrl, &localsize));
    PetscCall(PetscMalloc1(localsize, &indc));
    PetscCall(ISGetIndices(isrl, &ind));
    PetscCall(PetscArraycpy(indc, ind, localsize));
    PetscCall(ISRestoreIndices(isrl, &ind));
    PetscCall(ISDestroy(&isrl));
    /* shift local positions to global positions within isy */
    for (i = 0; i < localsize; i++) *(indc + i) += size;
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr));
    /* isr replaces both is and is_col: take one reference for each slot, then drop the creation reference */
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is));
    ilink->is = isr;
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is_col));
    ilink->is_col = isr;
    PetscCall(ISDestroy(&isr));
    PetscCall(KSPGetPC(ilink->ksp, &subpc));
    PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg));
    if (flg) {
      IS       iszl, isz;
      MPI_Comm comm;
      PetscCall(ISGetLocalSize(ilink->is, &localsize));
      comm = PetscObjectComm((PetscObject)ilink->is);
      /* embed the other way round: positions of isy's indices within this split */
      PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl));
      PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm));
      sizez -= localsize;
      PetscCall(ISGetLocalSize(iszl, &localsize));
      PetscCall(PetscMalloc1(localsize, &indcz));
      PetscCall(ISGetIndices(iszl, &indz));
      PetscCall(PetscArraycpy(indcz, indz, localsize));
      PetscCall(ISRestoreIndices(iszl, &indz));
      PetscCall(ISDestroy(&iszl));
      for (i = 0; i < localsize; i++) *(indcz + i) += sizez;
      PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz));
      /* recurse into the nested fieldsplit */
      PetscCall(PCFieldSplitRestrictIS(subpc, isz));
      PetscCall(ISDestroy(&isz));
    }
    next  = ilink->next;
    ilink = next;
  }
  jac->isrestrict = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1964 
/*
  Appends one split, defined by an explicit IS, to the linked list of splits.
  splitname may be NULL, in which case the split is named by its number.
  The IS is referenced twice (once for is, once for is_col); the created KSP
  defaults to KSPPREONLY with options prefix <pcprefix>fieldsplit_<splitname>_.
*/
static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink, next = jac->head;
  char              prefix[128];

  PetscFunctionBegin;
  if (jac->splitdefined) {
    PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(PetscNew(&ilink));
  if (splitname) {
    PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
  } else {
    /* default name is the split number */
    PetscCall(PetscMalloc1(8, &ilink->splitname));
    PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits));
  }
  ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split greater than 4 gets logged in the 4th split */
  /* reference before destroy so the code is safe even if ilink->is already equals is */
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is));
  ilink->is = is;
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is_col));
  ilink->is_col = is;
  ilink->next   = NULL;
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
  PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
  PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
  PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));

  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
  PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));

  /* append to the doubly-linked list of splits */
  if (!next) {
    jac->head       = ilink;
    ilink->previous = NULL;
  } else {
    while (next->next) next = next->next;
    next->next      = ilink;
    ilink->previous = next;
  }
  jac->nsplits++;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2011 
2012 /*@C
2013   PCFieldSplitSetFields - Sets the fields that define one particular split in `PCFIELDSPLIT`
2014 
2015   Logically Collective
2016 
2017   Input Parameters:
2018 + pc         - the preconditioner context
2019 . splitname  - name of this split, if `NULL` the number of the split is used
2020 . n          - the number of fields in this split
2021 . fields     - the fields in this split
2022 - fields_col - generally the same as fields, if it does not match fields then the matrix block that is solved for this set of fields comes from an off-diagonal block
2023                  of the matrix and fields_col provides the column indices for that block
2024 
2025   Level: intermediate
2026 
2027   Notes:
2028   Use `PCFieldSplitSetIS()` to set a  general set of indices as a split.
2029 
2030   `PCFieldSplitSetFields()` is for defining fields as strided blocks. For example, if the block
2031   size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean
2032   0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x1x3x5x7.. x12x45x78x....
2033   where the numbered entries indicate what is in the split.
2034 
2035   This function is called once per split (it creates a new split each time).  Solve options
2036   for this split will be available under the prefix `-fieldsplit_SPLITNAME_`.
2037 
2038   `PCFieldSplitSetIS()` does not support having a fields_col different from fields
2039 
2040   Developer Notes:
2041   This routine does not actually create the `IS` representing the split, that is delayed
2042   until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be
2043   available when this routine is called.
2044 
2045 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()`
2046 @*/
2047 PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
2048 {
2049   PetscFunctionBegin;
2050   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2051   PetscAssertPointer(splitname, 2);
2052   PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname);
2053   PetscAssertPointer(fields, 4);
2054   PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col));
2055   PetscFunctionReturn(PETSC_SUCCESS);
2056 }
2057 
2058 /*@
2059   PCFieldSplitSetDiagUseAmat - set flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2060   the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2061 
2062   Logically Collective
2063 
2064   Input Parameters:
2065 + pc  - the preconditioner object
2066 - flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2067 
2068   Options Database Key:
2069 . -pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks
2070 
2071   Level: intermediate
2072 
2073 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT`
2074 @*/
2075 PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg)
2076 {
2077   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2078   PetscBool      isfs;
2079 
2080   PetscFunctionBegin;
2081   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2082   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2083   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2084   jac->diag_use_amat = flg;
2085   PetscFunctionReturn(PETSC_SUCCESS);
2086 }
2087 
2088 /*@
2089   PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2090   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2091 
2092   Logically Collective
2093 
2094   Input Parameter:
2095 . pc - the preconditioner object
2096 
2097   Output Parameter:
2098 . flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2099 
2100   Level: intermediate
2101 
2102 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT`
2103 @*/
2104 PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg)
2105 {
2106   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2107   PetscBool      isfs;
2108 
2109   PetscFunctionBegin;
2110   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2111   PetscAssertPointer(flg, 2);
2112   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2113   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2114   *flg = jac->diag_use_amat;
2115   PetscFunctionReturn(PETSC_SUCCESS);
2116 }
2117 
2118 /*@
2119   PCFieldSplitSetOffDiagUseAmat - set flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2120   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2121 
2122   Logically Collective
2123 
2124   Input Parameters:
2125 + pc  - the preconditioner object
2126 - flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2127 
2128   Options Database Key:
2129 . -pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks
2130 
2131   Level: intermediate
2132 
2133 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT`
2134 @*/
2135 PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg)
2136 {
2137   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2138   PetscBool      isfs;
2139 
2140   PetscFunctionBegin;
2141   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2142   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2143   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2144   jac->offdiag_use_amat = flg;
2145   PetscFunctionReturn(PETSC_SUCCESS);
2146 }
2147 
2148 /*@
2149   PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2150   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2151 
2152   Logically Collective
2153 
2154   Input Parameter:
2155 . pc - the preconditioner object
2156 
2157   Output Parameter:
2158 . flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2159 
2160   Level: intermediate
2161 
2162 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT`
2163 @*/
2164 PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg)
2165 {
2166   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2167   PetscBool      isfs;
2168 
2169   PetscFunctionBegin;
2170   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2171   PetscAssertPointer(flg, 2);
2172   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2173   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2174   *flg = jac->offdiag_use_amat;
2175   PetscFunctionReturn(PETSC_SUCCESS);
2176 }
2177 
2178 /*@C
2179   PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT`
2180 
2181   Logically Collective
2182 
2183   Input Parameters:
2184 + pc        - the preconditioner context
2185 . splitname - name of this split, if `NULL` the number of the split is used
2186 - is        - the index set that defines the elements in this split
2187 
2188   Level: intermediate
2189 
2190   Notes:
2191   Use `PCFieldSplitSetFields()`, for splits defined by strided types.
2192 
2193   This function is called once per split (it creates a new split each time).  Solve options
2194   for this split will be available under the prefix -fieldsplit_SPLITNAME_.
2195 
2196 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`
2197 @*/
PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* splitname may be NULL, in which case the split number is used as its name */
  if (splitname) PetscAssertPointer(splitname, 2);
  PetscValidHeaderSpecific(is, IS_CLASSID, 3);
  /* dispatch to the implementation composed on the PC (no-op for non-fieldsplit PCs) */
  PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2207 
2208 /*@C
2209   PCFieldSplitGetIS - Retrieves the elements for a split as an `IS`
2210 
2211   Logically Collective
2212 
2213   Input Parameters:
2214 + pc        - the preconditioner context
2215 - splitname - name of this split
2216 
2217   Output Parameter:
2218 . is - the index set that defines the elements in this split, or `NULL` if the split is not found
2219 
2220   Level: intermediate
2221 
2222 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`
2223 @*/
2224 PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is)
2225 {
2226   PetscFunctionBegin;
2227   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2228   PetscAssertPointer(splitname, 2);
2229   PetscAssertPointer(is, 3);
2230   {
2231     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2232     PC_FieldSplitLink ilink = jac->head;
2233     PetscBool         found;
2234 
2235     *is = NULL;
2236     while (ilink) {
2237       PetscCall(PetscStrcmp(ilink->splitname, splitname, &found));
2238       if (found) {
2239         *is = ilink->is;
2240         break;
2241       }
2242       ilink = ilink->next;
2243     }
2244   }
2245   PetscFunctionReturn(PETSC_SUCCESS);
2246 }
2247 
2248 /*@C
2249   PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS`
2250 
2251   Logically Collective
2252 
2253   Input Parameters:
2254 + pc    - the preconditioner context
2255 - index - index of this split
2256 
2257   Output Parameter:
2258 . is - the index set that defines the elements in this split
2259 
2260   Level: intermediate
2261 
2262 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`
2263 @*/
2264 PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is)
2265 {
2266   PetscFunctionBegin;
2267   PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index);
2268   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2269   PetscAssertPointer(is, 3);
2270   {
2271     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2272     PC_FieldSplitLink ilink = jac->head;
2273     PetscInt          i     = 0;
2274     PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits);
2275 
2276     while (i < index) {
2277       ilink = ilink->next;
2278       ++i;
2279     }
2280     PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is));
2281   }
2282   PetscFunctionReturn(PETSC_SUCCESS);
2283 }
2284 
2285 /*@
2286   PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the
2287   fieldsplit preconditioner when calling `PCFieldSplitSetIS()`. If not set the matrix block size is used.
2288 
2289   Logically Collective
2290 
2291   Input Parameters:
2292 + pc - the preconditioner context
2293 - bs - the block size
2294 
2295   Level: intermediate
2296 
2297 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
2298 @*/
PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, bs, 2);
  /* dispatch to the implementation composed on the PC (no-op for non-fieldsplit PCs) */
  PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2307 
2308 /*@C
2309   PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits
2310 
2311   Collective
2312 
2313   Input Parameter:
2314 . pc - the preconditioner context
2315 
2316   Output Parameters:
2317 + n      - the number of splits
2318 - subksp - the array of `KSP` contexts
2319 
2320   Level: advanced
2321 
2322   Notes:
2323   After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2324   (not the `KSP`, just the array that contains them).
2325 
2326   You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`.
2327 
2328   If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the
2329   Schur complement and the `KSP` object used to iterate over the Schur complement.
2330   To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`.
2331 
2332   If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the
2333   inner linear system defined by the matrix H in each loop.
2334 
2335   Fortran Notes:
2336   You must pass in a `KSP` array that is large enough to contain all the `KSP`s.
2337   You can call `PCFieldSplitGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
2338   `KSP` array must be.
2339 
2340   Developer Notes:
2341   There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2342 
2343   The Fortran interface should be modernized to return directly the array of values.
2344 
2345 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()`
2346 @*/
PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2);
  /* PetscUseMethod (not Try): errors if the PC does not provide the composed function */
  PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2355 
2356 /*@C
2357   PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT`
2358 
2359   Collective
2360 
2361   Input Parameter:
2362 . pc - the preconditioner context
2363 
2364   Output Parameters:
2365 + n      - the number of splits
2366 - subksp - the array of `KSP` contexts
2367 
2368   Level: advanced
2369 
2370   Notes:
2371   After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2372   (not the `KSP` just the array that contains them).
2373 
2374   You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`.
2375 
2376   If the fieldsplit type is of type `PC_COMPOSITE_SCHUR`, it returns (in order)
2377 +  1  - the `KSP` used for the (1,1) block
2378 .  2  - the `KSP` used for the Schur complement (not the one used for the interior Schur solver)
2379 -  3  - the `KSP` used for the (1,1) block in the upper triangular factor (if different from that of the (1,1) block).
2380 
2381   It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`.
2382 
2383   Fortran Notes:
2384   You must pass in a `KSP` array that is large enough to contain all the local `KSP`s.
2385   You can call `PCFieldSplitSchurGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
2386   `KSP` array must be.
2387 
2388   Developer Notes:
2389   There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2390 
2391   Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged?
2392 
2393   The Fortran interface should be modernized to return directly the array of values.
2394 
2395 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()`
2396 @*/
PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2);
  /* PetscUseMethod (not Try): errors if the PC does not provide the composed function */
  PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2405 
2406 /*@
2407   PCFieldSplitSetSchurPre -  Indicates from what operator the preconditioner is constructed for the Schur complement.
2408   The default is the A11 matrix.
2409 
2410   Collective
2411 
2412   Input Parameters:
2413 + pc    - the preconditioner context
2414 . ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default),
2415               `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`,
2416               `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL`
2417 - pre   - matrix to use for preconditioning, or `NULL`
2418 
2419   Options Database Keys:
2420 + -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`. See notes for meaning of various arguments
2421 - -fieldsplit_1_pc_type <pctype>                               - the preconditioner algorithm that is used to construct the preconditioner from the operator
2422 
2423   Level: intermediate
2424 
2425   Notes:
2426   If ptype is
2427 +     a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner
2428   matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix
2429 .     self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix:
2430   The only preconditioners that currently work with this symbolic representation matrix object are `PCLSC` and `PCHPDDM`
2431 .     user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument
2432   to this function).
2433 .     selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation $ Sp = A11 - A10 inv(diag(A00)) A01 $
2434   This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be
2435   lumped before extracting the diagonal using the additional option `-fieldsplit_1_mat_schur_complement_ainv_type lump`
2436 -     full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation
2437   computed internally by `PCFIELDSPLIT` (this is expensive)
2438   useful mostly as a test that the Schur complement approach can work for your problem
2439 
2440   When solving a saddle point problem, where the A11 block is identically zero, using `a11` as the ptype only makes sense
2441   with the additional option `-fieldsplit_1_pc_type none`. Usually for saddle point problems one would use a `ptype` of `self` and
2442   `-fieldsplit_1_pc_type lsc` which uses the least squares commutator to compute a preconditioner for the Schur complement.
2443 
2444   Developer Note:
2445   The name of this function and the option `-pc_fieldsplit_schur_precondition` are inconsistent; precondition should be used everywhere.
2446 
2447 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`,
2448           `MatSchurComplementSetAinvType()`, `PCLSC`, `PCFieldSplitSetSchurFactType()`
2449 @*/
PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscTryMethod silently does nothing when the PC is not a Schur PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2457 
/* PCFieldSplitSchurPrecondition - deprecated alias retained for backward compatibility; use PCFieldSplitSetSchurPre() instead */
PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  return PCFieldSplitSetSchurPre(pc, ptype, pre);
} /* Deprecated name */
2462 
2463 /*@
2464   PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be
2465   preconditioned.  See `PCFieldSplitSetSchurPre()` for details.
2466 
2467   Logically Collective
2468 
2469   Input Parameter:
2470 . pc - the preconditioner context
2471 
2472   Output Parameters:
2473 + ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`
2474 - pre   - matrix to use for preconditioning (with `PC_FIELDSPLIT_SCHUR_PRE_USER`), or `NULL`
2475 
2476   Level: intermediate
2477 
2478 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC`
2479 @*/
PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscUseMethod errors if the method is not composed, i.e. the PC is not a Schur PCFIELDSPLIT */
  PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2487 
2488 /*@
2489   PCFieldSplitSchurGetS -  extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately
2490 
2491   Not Collective
2492 
2493   Input Parameter:
2494 . pc - the preconditioner context
2495 
2496   Output Parameter:
2497 . S - the Schur complement matrix
2498 
2499   Level: advanced
2500 
2501   Note:
2502   This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`.
2503 
2504 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`,
2505           `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()`
2506 @*/
2507 PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S)
2508 {
2509   const char    *t;
2510   PetscBool      isfs;
2511   PC_FieldSplit *jac;
2512 
2513   PetscFunctionBegin;
2514   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2515   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2516   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2517   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2518   jac = (PC_FieldSplit *)pc->data;
2519   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2520   if (S) *S = jac->schur;
2521   PetscFunctionReturn(PETSC_SUCCESS);
2522 }
2523 
2524 /*@
2525   PCFieldSplitSchurRestoreS -  returns the `MATSCHURCOMPLEMENT` matrix used by this `PC`
2526 
2527   Not Collective
2528 
2529   Input Parameters:
2530 + pc - the preconditioner context
2531 - S  - the Schur complement matrix
2532 
2533   Level: advanced
2534 
2535 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MatSchurComplement`, `PCFieldSplitSchurGetS()`
2536 @*/
2537 PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S)
2538 {
2539   const char    *t;
2540   PetscBool      isfs;
2541   PC_FieldSplit *jac;
2542 
2543   PetscFunctionBegin;
2544   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2545   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2546   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2547   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2548   jac = (PC_FieldSplit *)pc->data;
2549   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2550   PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten");
2551   PetscFunctionReturn(PETSC_SUCCESS);
2552 }
2553 
2554 static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2555 {
2556   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2557 
2558   PetscFunctionBegin;
2559   jac->schurpre = ptype;
2560   if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) {
2561     PetscCall(MatDestroy(&jac->schur_user));
2562     jac->schur_user = pre;
2563     PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
2564   }
2565   PetscFunctionReturn(PETSC_SUCCESS);
2566 }
2567 
2568 static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
2569 {
2570   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2571 
2572   PetscFunctionBegin;
2573   if (ptype) *ptype = jac->schurpre;
2574   if (pre) *pre = jac->schur_user;
2575   PetscFunctionReturn(PETSC_SUCCESS);
2576 }
2577 
2578 /*@
2579   PCFieldSplitSetSchurFactType -  sets which blocks of the approximate block factorization to retain in the preconditioner {cite}`murphy2000note` and {cite}`ipsen2001note`
2580 
2581   Collective
2582 
2583   Input Parameters:
2584 + pc    - the preconditioner context
2585 - ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default
2586 
2587   Options Database Key:
2588 . -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is `full`
2589 
2590   Level: intermediate
2591 
2592   Notes:
2593   The FULL factorization is
2594 
2595   ```{math}
2596   \left(\begin{array}{cc} A & B \\
2597   C & E \\
2598   \end{array}\right) =
2599   \left(\begin{array}{cc} 1 & 0 \\
2600   C*A^{-1} & I \\
2601   \end{array}\right)
2602   \left(\begin{array}{cc} A & 0 \\
2603   0 & S \\
2604   \end{array}\right)
2605   \left(\begin{array}{cc} I & A^{-1}B \\
2606   0 & I \\
2607   \end{array}\right) = L D U.
2608   ```
2609 
2610   where $ S = E - C*A^{-1}*B $. In practice, the full factorization is applied via block triangular solves with the grouping $L*(D*U)$. UPPER uses $D*U$, LOWER uses $L*D$,
2611   and DIAG is the diagonal part with the sign of $ S $ flipped (because this makes the preconditioner positive definite for many formulations,
  thus allowing the use of `KSPMINRES`). Sign flipping of $ S $ can be turned off with `PCFieldSplitSetSchurScale()`.
2613 
2614   If $A$ and $S$ are solved exactly
2615 +  1 - FULL factorization is a direct solver.
2616 .  2 - The preconditioned operator with LOWER or UPPER has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations.
2617 -  3 -  With DIAG, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations.
2618 
2619   If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner
2620   application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice.
2621 
2622   For symmetric problems in which $A$ is positive definite and $S$ is negative definite, DIAG can be used with `KSPMINRES`.
2623 
2624   A flexible method like `KSPFGMRES` or `KSPGCR`, [](sec_flexibleksp), must be used if the fieldsplit preconditioner is nonlinear (e.g. a few iterations of a Krylov method is used to solve with A or S).
2625 
2626 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`,
2627           [](sec_flexibleksp), `PCFieldSplitSetSchurPre()`
2628 @*/
PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscTryMethod silently does nothing when the PC is not a Schur PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2636 
2637 static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype)
2638 {
2639   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2640 
2641   PetscFunctionBegin;
2642   jac->schurfactorization = ftype;
2643   PetscFunctionReturn(PETSC_SUCCESS);
2644 }
2645 
2646 /*@
2647   PCFieldSplitSetSchurScale -  Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`.
2648 
2649   Collective
2650 
2651   Input Parameters:
2652 + pc    - the preconditioner context
2653 - scale - scaling factor for the Schur complement
2654 
2655   Options Database Key:
2656 . -pc_fieldsplit_schur_scale <scale> - default is -1.0
2657 
2658   Level: intermediate
2659 
2660 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurFactType()`
2661 @*/
PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* scale must be the same on all MPI processes */
  PetscValidLogicalCollectiveScalar(pc, scale, 2);
  /* PetscTryMethod silently does nothing when the PC is not a Schur PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2670 
2671 static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale)
2672 {
2673   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2674 
2675   PetscFunctionBegin;
2676   jac->schurscale = scale;
2677   PetscFunctionReturn(PETSC_SUCCESS);
2678 }
2679 
2680 /*@C
2681   PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement
2682 
2683   Collective
2684 
2685   Input Parameter:
2686 . pc - the preconditioner context
2687 
2688   Output Parameters:
2689 + A00 - the (0,0) block
2690 . A01 - the (0,1) block
2691 . A10 - the (1,0) block
2692 - A11 - the (1,1) block
2693 
2694   Level: advanced
2695 
2696 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()`
2697 @*/
2698 PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11)
2699 {
2700   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2701 
2702   PetscFunctionBegin;
2703   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2704   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach.");
2705   if (A00) *A00 = jac->pmat[0];
2706   if (A01) *A01 = jac->B;
2707   if (A10) *A10 = jac->C;
2708   if (A11) *A11 = jac->pmat[1];
2709   PetscFunctionReturn(PETSC_SUCCESS);
2710 }
2711 
2712 /*@
2713   PCFieldSplitSetGKBTol -  Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2714 
2715   Collective
2716 
2717   Input Parameters:
2718 + pc        - the preconditioner context
2719 - tolerance - the solver tolerance
2720 
2721   Options Database Key:
2722 . -pc_fieldsplit_gkb_tol <tolerance> - default is 1e-5
2723 
2724   Level: intermediate
2725 
2726   Note:
2727   The generalized GKB algorithm {cite}`arioli2013` uses a lower bound estimate of the error in energy norm as stopping criterion.
2728   It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than
2729   this estimate, the stopping criterion is satisfactory in practical cases.
2730 
2731 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()`
2732 @*/
PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* tolerance must be the same on all MPI processes */
  PetscValidLogicalCollectiveReal(pc, tolerance, 2);
  /* PetscTryMethod silently does nothing when the PC is not a GKB PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2741 
2742 static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance)
2743 {
2744   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2745 
2746   PetscFunctionBegin;
2747   jac->gkbtol = tolerance;
2748   PetscFunctionReturn(PETSC_SUCCESS);
2749 }
2750 
2751 /*@
2752   PCFieldSplitSetGKBMaxit -  Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner in `PCFIELDSPLIT`
2753 
2754   Collective
2755 
2756   Input Parameters:
2757 + pc    - the preconditioner context
2758 - maxit - the maximum number of iterations
2759 
2760   Options Database Key:
2761 . -pc_fieldsplit_gkb_maxit <maxit> - default is 100
2762 
2763   Level: intermediate
2764 
2765 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()`
2766 @*/
PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* maxit must be the same on all MPI processes */
  PetscValidLogicalCollectiveInt(pc, maxit, 2);
  /* PetscTryMethod silently does nothing when the PC is not a GKB PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2775 
2776 static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit)
2777 {
2778   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2779 
2780   PetscFunctionBegin;
2781   jac->gkbmaxit = maxit;
2782   PetscFunctionReturn(PETSC_SUCCESS);
2783 }
2784 
2785 /*@
2786   PCFieldSplitSetGKBDelay -  Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization {cite}`arioli2013` in `PCFIELDSPLIT`
2787   preconditioner.
2788 
2789   Collective
2790 
2791   Input Parameters:
2792 + pc    - the preconditioner context
2793 - delay - the delay window in the lower bound estimate
2794 
2795   Options Database Key:
2796 . -pc_fieldsplit_gkb_delay <delay> - default is 5
2797 
2798   Level: intermediate
2799 
2800   Notes:
2801   The algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. The lower bound of the error $ ||u-u^k||_H $
2802   is expressed as a truncated sum. The error at iteration k can only be measured at iteration (k + `delay`), and thus the algorithm needs
2803   at least (`delay` + 1) iterations to stop.
2804 
2805   For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to {cite}`arioli2013`
2806 
2807 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2808 @*/
PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* delay must be the same on all MPI processes */
  PetscValidLogicalCollectiveInt(pc, delay, 2);
  /* PetscTryMethod silently does nothing when the PC is not a GKB PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2817 
2818 static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay)
2819 {
2820   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2821 
2822   PetscFunctionBegin;
2823   jac->gkbdelay = delay;
2824   PetscFunctionReturn(PETSC_SUCCESS);
2825 }
2826 
2827 /*@
  PCFieldSplitSetGKBNu -  Sets the scalar value nu >= 0 in the transformation H = A00 + nu*A01*A01' of the (0,0) block A00 in the
2829   Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2830 
2831   Collective
2832 
2833   Input Parameters:
2834 + pc - the preconditioner context
2835 - nu - the shift parameter
2836 
2837   Options Database Key:
2838 . -pc_fieldsplit_gkb_nu <nu> - default is 1
2839 
2840   Level: intermediate
2841 
2842   Notes:
2843   This shift is in general done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by choosing `nu` sufficiently large. However,
2844   if `nu` is chosen too large, the matrix H might be badly conditioned and the solution of the linear system $Hx = b$ in the inner loop becomes difficult. It is therefore
2845   necessary to find a good balance in between the convergence of the inner and outer loop.
2846 
2847   For `nu` = 0, no shift is done. In this case A00 has to be positive definite. The matrix N in {cite}`arioli2013` is then chosen as identity.
2848 
2849 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2850 @*/
PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* nu must be the same on all MPI processes */
  PetscValidLogicalCollectiveReal(pc, nu, 2);
  /* PetscTryMethod silently does nothing when the PC is not a GKB PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2859 
2860 static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu)
2861 {
2862   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2863 
2864   PetscFunctionBegin;
2865   jac->gkbnu = nu;
2866   PetscFunctionReturn(PETSC_SUCCESS);
2867 }
2868 
/* Switch the composite type: reset all type-specific composed methods, then install the apply/view
   routines and methods appropriate to the new type (SCHUR, GKB, or the default additive/multiplicative family) */
static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->type = type;
  /* Clear every composed method first so implementations from a previously selected type cannot linger */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));

  if (type == PC_COMPOSITE_SCHUR) {
    /* Schur: dedicated apply/view plus the Schur-only configuration methods */
    pc->ops->apply          = PCApply_FieldSplit_Schur;
    pc->ops->applytranspose = PCApplyTranspose_FieldSplit_Schur;
    pc->ops->view           = PCView_FieldSplit_Schur;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit));
  } else if (type == PC_COMPOSITE_GKB) {
    /* GKB: dedicated apply/view plus the GKB-only configuration methods (no applytranspose) */
    pc->ops->apply = PCApply_FieldSplit_GKB;
    pc->ops->view  = PCView_FieldSplit_GKB;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit));
  } else {
    /* Additive / (symmetric) multiplicative / special: generic apply/view */
    pc->ops->apply = PCApply_FieldSplit;
    pc->ops->view  = PCView_FieldSplit;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2912 
/* Record the block size used to define the fields; it may be set only once and must be positive */
static PetscErrorCode PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" PetscInt_FMT, bs);
  /* jac->bs <= 0 means "not yet set"; re-setting to the same value is allowed */
  PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs);
  jac->bs = bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2923 
2924 static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
2925 {
2926   PC_FieldSplit    *jac           = (PC_FieldSplit *)pc->data;
2927   PC_FieldSplitLink ilink_current = jac->head;
2928   IS                is_owned;
2929 
2930   PetscFunctionBegin;
2931   jac->coordinates_set = PETSC_TRUE; // Internal flag
2932   PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, NULL));
2933 
2934   while (ilink_current) {
2935     // For each IS, embed it to get local coords indces
2936     IS              is_coords;
2937     PetscInt        ndofs_block;
2938     const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block
2939 
2940     // Setting drop to true for safety. It should make no difference.
2941     PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords));
2942     PetscCall(ISGetLocalSize(is_coords, &ndofs_block));
2943     PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration));
2944 
2945     // Allocate coordinates vector and set it directly
2946     PetscCall(PetscMalloc1(ndofs_block * dim, &ilink_current->coords));
2947     for (PetscInt dof = 0; dof < ndofs_block; ++dof) {
2948       for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d];
2949     }
2950     ilink_current->dim   = dim;
2951     ilink_current->ndofs = ndofs_block;
2952     PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration));
2953     PetscCall(ISDestroy(&is_coords));
2954     ilink_current = ilink_current->next;
2955   }
2956   PetscCall(ISDestroy(&is_owned));
2957   PetscFunctionReturn(PETSC_SUCCESS);
2958 }
2959 
2960 /*@
2961   PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
2962 
2963   Collective
2964 
2965   Input Parameters:
2966 + pc   - the preconditioner context
2967 - type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`,
2968          `PC_COMPOSITE_GKB`
2969 
2970   Options Database Key:
2971 . -pc_fieldsplit_type <one of multiplicative, additive, symmetric_multiplicative, special, schur> - Sets fieldsplit preconditioner type
2972 
2973   Level: intermediate
2974 
2975 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
2976           `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`, `PCFieldSplitSetSchurFactType()`
2977 @*/
PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscTryMethod silently does nothing when the PC is not a PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2985 
2986 /*@
2987   PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
2988 
2989   Not collective
2990 
2991   Input Parameter:
2992 . pc - the preconditioner context
2993 
2994   Output Parameter:
2995 . type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
2996 
2997   Level: intermediate
2998 
2999 .seealso: [](sec_block_matrices), `PC`, `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
3000           `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
3001 @*/
3002 PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type)
3003 {
3004   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3005 
3006   PetscFunctionBegin;
3007   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3008   PetscAssertPointer(type, 2);
3009   *type = jac->type;
3010   PetscFunctionReturn(PETSC_SUCCESS);
3011 }
3012 
3013 /*@
3014   PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3015 
3016   Logically Collective
3017 
3018   Input Parameters:
3019 + pc  - the preconditioner context
3020 - flg - boolean indicating whether to use field splits defined by the `DM`
3021 
3022   Options Database Key:
3023 . -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM`
3024 
3025   Level: intermediate
3026 
3027   Developer Note:
3028   The name should be `PCFieldSplitSetUseDMSplits()`, similar change to options database
3029 
.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
3031 @*/
3032 PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg)
3033 {
3034   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3035   PetscBool      isfs;
3036 
3037   PetscFunctionBegin;
3038   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3039   PetscValidLogicalCollectiveBool(pc, flg, 2);
3040   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3041   if (isfs) jac->dm_splits = flg;
3042   PetscFunctionReturn(PETSC_SUCCESS);
3043 }
3044 
3045 /*@
3046   PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3047 
3048   Logically Collective
3049 
3050   Input Parameter:
3051 . pc - the preconditioner context
3052 
3053   Output Parameter:
3054 . flg - boolean indicating whether to use field splits defined by the `DM`
3055 
3056   Level: intermediate
3057 
3058   Developer Note:
3059   The name should be `PCFieldSplitGetUseDMSplits()`
3060 
.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
3062 @*/
3063 PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg)
3064 {
3065   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3066   PetscBool      isfs;
3067 
3068   PetscFunctionBegin;
3069   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3070   PetscAssertPointer(flg, 2);
3071   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3072   if (isfs) {
3073     if (flg) *flg = jac->dm_splits;
3074   }
3075   PetscFunctionReturn(PETSC_SUCCESS);
3076 }
3077 
3078 /*@
3079   PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3080 
3081   Logically Collective
3082 
3083   Input Parameter:
3084 . pc - the preconditioner context
3085 
3086   Output Parameter:
3087 . flg - boolean indicating whether to detect fields or not
3088 
3089   Level: intermediate
3090 
3091 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()`
3092 @*/
3093 PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg)
3094 {
3095   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3096 
3097   PetscFunctionBegin;
3098   *flg = jac->detect;
3099   PetscFunctionReturn(PETSC_SUCCESS);
3100 }
3101 
3102 /*@
3103   PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3104 
3105   Logically Collective
3106 
  Input Parameters:
+ pc  - the preconditioner context
- flg - boolean indicating whether to detect fields or not
3112 
3113   Options Database Key:
3114 . -pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point
3115 
3116   Level: intermediate
3117 
3118   Note:
3119   Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`).
3120 
3121 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`
3122 @*/
3123 PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg)
3124 {
3125   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3126 
3127   PetscFunctionBegin;
3128   jac->detect = flg;
3129   if (jac->detect) {
3130     PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
3131     PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
3132   }
3133   PetscFunctionReturn(PETSC_SUCCESS);
3134 }
3135 
3136 /*MC
3137    PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual
3138    collections of variables (that may overlap) called splits. See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details.
3139 
3140    Options Database Keys:
3141 +   -pc_fieldsplit_%d_fields <a,b,..>                                                - indicates the fields to be used in the `%d`'th split
3142 .   -pc_fieldsplit_default                                                           - automatically add any fields to additional splits that have not
3143                                                                                      been supplied explicitly by `-pc_fieldsplit_%d_fields`
3144 .   -pc_fieldsplit_block_size <bs>                                                   - size of block that defines fields (i.e. there are bs fields)
3145 .   -pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting
3146 .   -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full>                     - default is `a11`; see `PCFieldSplitSetSchurPre()`
3147 .   -pc_fieldsplit_schur_fact_type <diag,lower,upper,full>                           - set factorization type when using `-pc_fieldsplit_type schur`;
3148                                                                                      see `PCFieldSplitSetSchurFactType()`
3149 .   -pc_fieldsplit_dm_splits <true,false> (default is true)                          - Whether to use `DMCreateFieldDecomposition()` for splits
3150 -   -pc_fieldsplit_detect_saddle_point                                               - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver
3151 
3152      Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` .
     The options prefix for the inner solver when using the Golub-Kahan bidiagonalization preconditioner is `-fieldsplit_0_`.
3154      For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields.
3155 
3156      To set options on the solvers for each block append `-fieldsplit_` to all the `PC`
3157      options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1`
3158 
3159      To set the options on the solvers separate for each block call `PCFieldSplitGetSubKSP()`
3160       and set the options directly on the resulting `KSP` object
3161 
3162     Level: intermediate
3163 
3164    Notes:
3165     Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries and `PCFieldSplitSetIS()`
3166      to define a split by an arbitrary collection of entries.
3167 
3168       If no splits are set, the default is used. If a `DM` is associated with the `PC` and it supports
3169       `DMCreateFieldDecomposition()`, then that is used for the default. Otherwise, the splits are defined by entries strided by bs,
3170       beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`,
3171       if this is not called the block size defaults to the blocksize of the second matrix passed
3172       to `KSPSetOperators()`/`PCSetOperators()`.
3173 
3174       For the Schur complement preconditioner if
3175 
3176       ```{math}
3177       J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right]
3178       ```
3179 
3180       the preconditioner using `full` factorization is logically
3181       ```{math}
3182       \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) A_{01} \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right]
3183       ```
3184      where the action of $\text{inv}(A_{00})$ is applied using the KSP solver with prefix `-fieldsplit_0_`.  $S$ is the Schur complement
3185      ```{math}
3186      S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01}
3187      ```
3188      which is usually dense and not stored explicitly.  The action of $\text{ksp}(S)$ is computed using the KSP solver with prefix `-fieldsplit_splitname_` (where `splitname` was given
3189      in providing the SECOND split or 1 if not given). For `PCFieldSplitGetSubKSP()` when field number is 0,
3190      it returns the KSP associated with `-fieldsplit_0_` while field number 1 gives `-fieldsplit_1_` KSP. By default
3191      $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$.
3192 
3193      The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above,
3194      `diag` gives
3195       ```{math}
3196       \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\  0 & -\text{ksp}(S) \end{array}\right]
3197       ```
3198      Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$  so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip
3199      can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of
3200       ```{math}
3201       \left[\begin{array}{cc} A_{00} & 0 \\  A_{10} & S \end{array}\right]
3202       ```
     where the inverses of $A_{00}$ and $S$ are applied using `KSP`s. The `upper` factorization is the inverse of
3204       ```{math}
3205       \left[\begin{array}{cc} A_{00} & A_{01} \\  0 & S \end{array}\right]
3206       ```
3207      where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s.
3208 
3209      If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS`
3210      is used automatically for a second block.
3211 
3212      The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1.
3213      Generally it should be used with the `MATAIJ` format.
3214 
3215      The forms of these preconditioners are closely related if not identical to forms derived as "Distributive Iterations", see,
3216      for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`wesseling2009`.
3217      One can also use `PCFIELDSPLIT`
3218      inside a smoother resulting in "Distributive Smoothers".
3219 
3220      See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`.
3221 
3222      The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the
3223      residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables.
3224 
3225      The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape
3226      ```{math}
3227      \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right]
3228      ```
     with $A_{00}$ positive semi-definite. The implementation follows {cite}`arioli2013`. Therein, we choose $N := 1/\nu I$ and the $(1,1)$-block of the matrix is modified to $H = A_{00} + \nu A_{01} A_{01}'$.
3230      A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`.
3231 
3232    Developer Note:
3233    The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their
3234    user API.
3235 
3236 .seealso: [](sec_block_matrices), `PC`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`,
3237           `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`,
3238           `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`,
3239           `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()`
3240 M*/
3241 
PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc)
{
  PC_FieldSplit *jac;

  PetscFunctionBegin;
  /* Allocate the private context; released through pc->ops->destroy (PCDestroy_FieldSplit) */
  PetscCall(PetscNew(&jac));

  jac->bs                 = -1; /* block size unknown until the operator is available */
  jac->nsplits            = 0;
  jac->type               = PC_COMPOSITE_MULTIPLICATIVE;
  jac->schurpre           = PC_FIELDSPLIT_SCHUR_PRE_USER; /* Try user preconditioner first, fall back on diagonal */
  jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL;
  jac->schurscale         = -1.0; /* sign flip of ksp(S) in the diag factorization (see the man page above) */
  jac->dm_splits          = PETSC_TRUE; /* prefer DMCreateFieldDecomposition() when a DM is attached */
  jac->detect             = PETSC_FALSE;
  /* defaults for the Golub-Kahan bidiagonalization (GKB) variant */
  jac->gkbtol             = 1e-5;
  jac->gkbdelay           = 5;
  jac->gkbnu              = 1;
  jac->gkbmaxit           = 100;
  jac->gkbmonitor         = PETSC_FALSE;
  jac->coordinates_set    = PETSC_FALSE;

  pc->data = (void *)jac;

  /* Wire up the PC virtual methods implemented by this type */
  pc->ops->apply           = PCApply_FieldSplit;
  pc->ops->applytranspose  = PCApplyTranspose_FieldSplit;
  pc->ops->setup           = PCSetUp_FieldSplit;
  pc->ops->reset           = PCReset_FieldSplit;
  pc->ops->destroy         = PCDestroy_FieldSplit;
  pc->ops->setfromoptions  = PCSetFromOptions_FieldSplit;
  pc->ops->view            = PCView_FieldSplit;
  pc->ops->applyrichardson = NULL;

  /* Register the type-specific implementations so the public PCFieldSplitXXX()
     entry points (and PCSetCoordinates()) dispatch to this type */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3285