xref: /petsc/src/ksp/pc/impls/fieldsplit/fieldsplit.c (revision f9178db3efe74a707868e3cec6ba6d8a3d666bf9)
1 #include <petsc/private/pcimpl.h>  /*I "petscpc.h" I*/
2 #include <petsc/private/kspimpl.h> /*  This is needed to provide the appropriate PETSC_EXTERN for KSP_Solve_FS ....*/
3 #include <petscdm.h>
4 
/* String names for the Schur-complement preconditioner and factorization enums; the trailing
   entries (type name, option prefix, NULL) follow the layout PETSc enum-string arrays use
   for options processing and viewing */
const char *const PCFieldSplitSchurPreTypes[]  = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL};
const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL};

/* Logging events for the sub-solves performed inside the fieldsplit apply routines */
PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4;
9 
typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
/* One entry in the doubly-linked list of splits; each split owns its sub-solver,
   its index sets, and its private work vectors */
struct _PC_FieldSplitLink {
  KSP               ksp;                 /* solver for this split's block */
  Vec               x, y, z;            /* per-split work vectors */
  char             *splitname;          /* label for this split, used in options/viewing */
  PetscInt          nfields;            /* number of entries in fields[]/fields_col[] */
  PetscInt         *fields, *fields_col; /* row/column field indices within each block when the split is defined by fields (see PCSetUp_FieldSplit) */
  VecScatter        sctx;               /* scatter context; presumably moves data between the full vector and this split — confirm in the apply routines */
  IS                is, is_col;         /* row and column index sets selecting this split from the full matrix */
  PC_FieldSplitLink next, previous;     /* linked-list neighbors; jac->head is the first split */
  PetscLogEvent     event;              /* logging event for this split's solve */

  /* Used only when setting coordinates with PCSetCoordinates */
  PetscInt   dim;
  PetscInt   ndofs;
  PetscReal *coords;
};
27 
/* Private data for PCFIELDSPLIT: the list of splits plus everything needed for the
   Schur-complement and Golub-Kahan bidiagonalization variants */
typedef struct {
  PCCompositeType type;
  PetscBool       defaultsplit; /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */
  PetscBool       splitdefined; /* Flag is set after the splits have been defined, to prevent more splits from being added */
  PetscInt        bs;           /* Block size for IS and Mat structures */
  PetscInt        nsplits;      /* Number of field divisions defined */
  Vec            *x, *y, w1, w2; /* work vectors: per-split arrays (x, y) and whole-system scratch (w1, w2) */
  Mat            *mat;    /* The diagonal block for each split */
  Mat            *pmat;   /* The preconditioning diagonal block for each split */
  Mat            *Afield; /* The rows of the matrix associated with each split */
  PetscBool       issetup; /* set once PCSetUp_FieldSplit has created the per-split index sets etc. */

  /* Only used when Schur complement preconditioning is used */
  Mat                       B;          /* The (0,1) block */
  Mat                       C;          /* The (1,0) block */
  Mat                       schur;      /* The Schur complement S = A11 - A10 A00^{-1} A01, the KSP here, kspinner, is H_1 in [El08] */
  Mat                       schurp;     /* Assembled approximation to S built by MatSchurComplement to be used as a preconditioning matrix when solving with S */
  Mat                       schur_user; /* User-provided preconditioning matrix for the Schur complement */
  PCFieldSplitSchurPreType  schurpre;   /* Determines which preconditioning matrix is used for the Schur complement */
  PCFieldSplitSchurFactType schurfactorization; /* DIAG, LOWER, UPPER, or FULL block factorization (see PCFieldSplitSchurFactTypes) */
  KSP                       kspschur;   /* The solver for S */
  KSP                       kspupper;   /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */
  PetscScalar               schurscale; /* Scaling factor for the Schur complement solution with DIAG factorization */

  /* Only used when Golub-Kahan bidiagonalization preconditioning is used */
  Mat          H;           /* The modified matrix H = A00 + nu*A01*A01'              */
  PetscReal    gkbtol;      /* Stopping tolerance for lower bound estimate            */
  PetscInt     gkbdelay;    /* The delay window for the stopping criterion            */
  PetscReal    gkbnu;       /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */
  PetscInt     gkbmaxit;    /* Maximum number of iterations for outer loop            */
  PetscBool    gkbmonitor;  /* Monitor for gkb iterations and the lower bound error   */
  PetscViewer  gkbviewer;   /* Viewer context for gkbmonitor                          */
  Vec          u, v, d, Hu; /* Work vectors for the GKB algorithm                     */
  PetscScalar *vecz;        /* Contains intermediate values, eg for lower bound       */

  PC_FieldSplitLink head;             /* first split in the linked list */
  PetscBool         isrestrict;       /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */
  PetscBool         suboptionsset;    /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */
  PetscBool         dm_splits;        /* Whether to use DMCreateFieldDecomposition() whenever possible */
  PetscBool         diag_use_amat;    /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         detect;           /* Whether to form 2-way split by finding zero diagonal entries */
  PetscBool         coordinates_set;  /* Whether PCSetCoordinates has been called */
} PC_FieldSplit;
72 
/*
    Note:
    There is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit instead of
    inside PC_FieldSplitLink; the layout is purely historical. If you want to be able to add new fields
    after the PC has already been used, you could change this.
*/
79 
80 /* This helper is so that setting a user-provided preconditioning matrix is orthogonal to choosing to use it.  This way the
81 * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices. */
82 static Mat FieldSplitSchurPre(PC_FieldSplit *jac)
83 {
84   switch (jac->schurpre) {
85   case PC_FIELDSPLIT_SCHUR_PRE_SELF:
86     return jac->schur;
87   case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
88     return jac->schurp;
89   case PC_FIELDSPLIT_SCHUR_PRE_A11:
90     return jac->pmat[1];
91   case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */
92   case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */
93   default:
94     return jac->schur_user ? jac->schur_user : jac->pmat[1];
95   }
96 }
97 
98 #include <petscdraw.h>
/*
  PCView_FieldSplit - Views a (non-Schur, non-GKB) fieldsplit preconditioner.

  For ASCII viewers: prints the composition type, split count, block size, Amat/Pmat usage flags,
  then for each split either its field list or "Defined by IS", followed by its KSP.
  For draw viewers: lays the split KSPs out side by side around the current draw point.
*/
static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for each split is in the following KSP objects:\n"));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        /* tabs are suspended so the comma-separated field list stays on one line */
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      PetscCall(KSPView(ilink->ksp, viewer));
      ilink = ilink->next;
    }
  }

  /* NOTE(review): ilink is not reset to jac->head before this branch; this relies on a viewer
     never being both ASCII and draw (the two type compares above are mutually exclusive) */
  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* center the row of split KSP drawings around the current point */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
156 
/*
  PCView_FieldSplit_Schur - Views a Schur-complement fieldsplit preconditioner.

  For ASCII viewers: prints the factorization type, which matrix preconditions the Schur
  complement (per jac->schurpre), the field list or IS for each split, and the KSPs for
  the A00 block, the optional upper-factor A00, and S. For draw viewers: draws boxed
  labels for the factorization and Schur preconditioner choices, then the KSPs side by side.
*/
static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer)
{
  PC_FieldSplit             *jac = (PC_FieldSplit *)pc->data;
  PetscBool                  iascii, isdraw;
  PetscInt                   i, j;
  PC_FieldSplitLink          ilink = jac->head;
  MatSchurComplementAinvType atype;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    /* describe which matrix is used to precondition the Schur complement */
    switch (jac->schurpre) {
    case PC_FIELDSPLIT_SCHUR_PRE_SELF:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from S itself\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
      /* jac->schur may not exist yet if setup has not run; then nothing is printed */
      if (jac->schur) {
        PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype));
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's "))));
      }
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_A11:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_FULL:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from the exact Schur complement\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_USER:
      /* USER without a user matrix falls back to A11, matching FieldSplitSchurPre() */
      if (jac->schur_user) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from user provided matrix\n"));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      }
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre);
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Split info:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        /* tabs are suspended so the comma-separated field list stays on one line */
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      ilink = ilink->next;
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->head) {
      PetscCall(KSPView(jac->head->ksp, viewer));
    } else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    PetscCall(PetscViewerASCIIPopTab(viewer));
    /* a distinct upper solver exists only when kspupper differs from the A00 solver */
    if (jac->head && jac->kspupper != jac->head->ksp) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor \n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01 \n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->kspschur) {
      PetscCall(KSPView(jac->kspschur, viewer));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    }
    PetscCall(PetscViewerASCIIPopTab(viewer));
    PetscCall(PetscViewerASCIIPopTab(viewer));
  } else if (isdraw && jac->head) {
    PetscDraw draw;
    PetscReal x, y, w, wd, h;
    PetscInt  cnt = 2; /* number of KSPs drawn: A00 and S, plus upper A00 when distinct */
    char      str[32];

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (jac->kspupper != jac->head->ksp) cnt++;
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (cnt + 1);

    PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) {
      /* same A11 fallback as the ASCII branch */
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11]));
    } else {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre]));
    }
    PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    x = x - wd * (cnt - 1) / 2.0;

    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->head->ksp, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
    if (jac->kspupper != jac->head->ksp) {
      x += wd;
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(jac->kspupper, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
    }
    x += wd;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->kspschur, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
280 
/*
  PCView_FieldSplit_GKB - Views a Golub-Kahan bidiagonalization fieldsplit preconditioner.

  For ASCII viewers: prints the composition info, the GKB parameters (tolerance, delay,
  max iterations), and the solver for the augmented matrix H = A00 + nu*A01*A01'
  (only split 0's KSP is shown). For draw viewers: lays out all split KSPs side by side.
*/
static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));

    PetscCall(PetscViewerASCIIPrintf(viewer, "  Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for H = A00 + nu*A01*A01' matrix:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));

    if (ilink->fields) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields "));
      /* tabs are suspended so the comma-separated field list stays on one line */
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      for (j = 0; j < ilink->nfields; j++) {
        if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
        PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
      }
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n"));
    }
    PetscCall(KSPView(ilink->ksp, viewer));

    PetscCall(PetscViewerASCIIPopTab(viewer));
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* center the row of split KSP drawings around the current point */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
341 
342 /* Precondition: jac->bs is set to a meaningful value */
343 static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc)
344 {
345   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
346   PetscInt       i, nfields, *ifields, nfields_col, *ifields_col;
347   PetscBool      flg, flg_col;
348   char           optionname[128], splitname[8], optionname_col[128];
349 
350   PetscFunctionBegin;
351   PetscCall(PetscMalloc1(jac->bs, &ifields));
352   PetscCall(PetscMalloc1(jac->bs, &ifields_col));
353   for (i = 0, flg = PETSC_TRUE;; i++) {
354     PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
355     PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
356     PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i));
357     nfields     = jac->bs;
358     nfields_col = jac->bs;
359     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
360     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col));
361     if (!flg) break;
362     else if (flg && !flg_col) {
363       PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
364       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields));
365     } else {
366       PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
367       PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match");
368       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col));
369     }
370   }
371   if (i > 0) {
372     /* Makes command-line setting of splits take precedence over setting them in code.
373        Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would
374        create new splits, which would probably not be what the user wanted. */
375     jac->splitdefined = PETSC_TRUE;
376   }
377   PetscCall(PetscFree(ifields));
378   PetscCall(PetscFree(ifields_col));
379   PetscFunctionReturn(PETSC_SUCCESS);
380 }
381 
382 static PetscErrorCode PCFieldSplitSetDefaults(PC pc)
383 {
384   PC_FieldSplit    *jac                = (PC_FieldSplit *)pc->data;
385   PC_FieldSplitLink ilink              = jac->head;
386   PetscBool         fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE;
387   PetscInt          i;
388 
389   PetscFunctionBegin;
390   /*
391    Kinda messy, but at least this now uses DMCreateFieldDecomposition().
392    Should probably be rewritten.
393    */
394   if (!ilink) {
395     PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL));
396     if (pc->dm && jac->dm_splits && !jac->detect && !coupling) {
397       PetscInt  numFields, f, i, j;
398       char    **fieldNames;
399       IS       *fields;
400       DM       *dms;
401       DM        subdm[128];
402       PetscBool flg;
403 
404       PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms));
405       /* Allow the user to prescribe the splits */
406       for (i = 0, flg = PETSC_TRUE;; i++) {
407         PetscInt ifields[128];
408         IS       compField;
409         char     optionname[128], splitname[8];
410         PetscInt nfields = numFields;
411 
412         PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
413         PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
414         if (!flg) break;
415         PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields);
416         PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i]));
417         if (nfields == 1) {
418           PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField));
419         } else {
420           PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
421           PetscCall(PCFieldSplitSetIS(pc, splitname, compField));
422         }
423         PetscCall(ISDestroy(&compField));
424         for (j = 0; j < nfields; ++j) {
425           f = ifields[j];
426           PetscCall(PetscFree(fieldNames[f]));
427           PetscCall(ISDestroy(&fields[f]));
428         }
429       }
430       if (i == 0) {
431         for (f = 0; f < numFields; ++f) {
432           PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f]));
433           PetscCall(PetscFree(fieldNames[f]));
434           PetscCall(ISDestroy(&fields[f]));
435         }
436       } else {
437         for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j));
438         PetscCall(PetscFree(dms));
439         PetscCall(PetscMalloc1(i, &dms));
440         for (j = 0; j < i; ++j) dms[j] = subdm[j];
441       }
442       PetscCall(PetscFree(fieldNames));
443       PetscCall(PetscFree(fields));
444       if (dms) {
445         PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n"));
446         for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) {
447           const char *prefix;
448           PetscCall(PetscObjectGetOptionsPrefix((PetscObject)(ilink->ksp), &prefix));
449           PetscCall(PetscObjectSetOptionsPrefix((PetscObject)(dms[i]), prefix));
450           PetscCall(KSPSetDM(ilink->ksp, dms[i]));
451           PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE));
452           {
453             PetscErrorCode (*func)(KSP, Mat, Mat, void *);
454             void *ctx;
455 
456             PetscCall(DMKSPGetComputeOperators(pc->dm, &func, &ctx));
457             PetscCall(DMKSPSetComputeOperators(dms[i], func, ctx));
458           }
459           PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0));
460           PetscCall(DMDestroy(&dms[i]));
461         }
462         PetscCall(PetscFree(dms));
463       }
464     } else {
465       if (jac->bs <= 0) {
466         if (pc->pmat) {
467           PetscCall(MatGetBlockSize(pc->pmat, &jac->bs));
468         } else jac->bs = 1;
469       }
470 
471       if (jac->detect) {
472         IS       zerodiags, rest;
473         PetscInt nmin, nmax;
474 
475         PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
476         if (jac->diag_use_amat) {
477           PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags));
478         } else {
479           PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags));
480         }
481         PetscCall(ISComplement(zerodiags, nmin, nmax, &rest));
482         PetscCall(PCFieldSplitSetIS(pc, "0", rest));
483         PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags));
484         PetscCall(ISDestroy(&zerodiags));
485         PetscCall(ISDestroy(&rest));
486       } else if (coupling) {
487         IS       coupling, rest;
488         PetscInt nmin, nmax;
489 
490         PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
491         if (jac->offdiag_use_amat) {
492           PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling));
493         } else {
494           PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling));
495         }
496         PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest));
497         PetscCall(ISSetIdentity(rest));
498         PetscCall(PCFieldSplitSetIS(pc, "0", rest));
499         PetscCall(PCFieldSplitSetIS(pc, "1", coupling));
500         PetscCall(ISDestroy(&coupling));
501         PetscCall(ISDestroy(&rest));
502       } else {
503         PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL));
504         if (!fieldsplit_default) {
505           /* Allow user to set fields from command line,  if bs was known at the time of PCSetFromOptions_FieldSplit()
506            then it is set there. This is not ideal because we should only have options set in XXSetFromOptions(). */
507           PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
508           if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
509         }
510         if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) {
511           Mat       M = pc->pmat;
512           PetscBool isnest;
513 
514           PetscCall(PetscInfo(pc, "Using default splitting of fields\n"));
515           PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest));
516           if (!isnest) {
517             M = pc->mat;
518             PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest));
519           }
520           if (isnest) {
521             IS      *fields;
522             PetscInt nf;
523 
524             PetscCall(MatNestGetSize(M, &nf, NULL));
525             PetscCall(PetscMalloc1(nf, &fields));
526             PetscCall(MatNestGetISs(M, fields, NULL));
527             for (i = 0; i < nf; i++) PetscCall(PCFieldSplitSetIS(pc, NULL, fields[i]));
528             PetscCall(PetscFree(fields));
529           } else {
530             for (i = 0; i < jac->bs; i++) {
531               char splitname[8];
532               PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
533               PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i));
534             }
535             jac->defaultsplit = PETSC_TRUE;
536           }
537         }
538       }
539     }
540   } else if (jac->nsplits == 1) {
541     IS       is2;
542     PetscInt nmin, nmax;
543 
544     PetscCheck(ilink->is, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()");
545     PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
546     PetscCall(ISComplement(ilink->is, nmin, nmax, &is2));
547     PetscCall(PCFieldSplitSetIS(pc, "1", is2));
548     PetscCall(ISDestroy(&is2));
549   }
550 
551   PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits);
552   PetscFunctionReturn(PETSC_SUCCESS);
553 }
554 
555 static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu)
556 {
557   Mat       BT, T;
558   PetscReal nrmT, nrmB;
559 
560   PetscFunctionBegin;
561   PetscCall(MatHermitianTranspose(C, MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */
562   PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN));
563   PetscCall(MatNorm(T, NORM_1, &nrmT));
564   PetscCall(MatNorm(B, NORM_1, &nrmB));
565   PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/hermitian, GKB is not applicable.");
566 
567   /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */
568   /* setting N := 1/nu*I in [Ar13].                                                 */
569   PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT));
570   PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_DEFAULT, H)); /* H = A01*A01'          */
571   PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN));        /* H = A00 + nu*A01*A01' */
572 
573   PetscCall(MatDestroy(&BT));
574   PetscCall(MatDestroy(&T));
575   PetscFunctionReturn(PETSC_SUCCESS);
576 }
577 
578 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *value[], PetscBool *flg);
579 
580 static PetscErrorCode PCSetUp_FieldSplit(PC pc)
581 {
582   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
583   PC_FieldSplitLink ilink;
584   PetscInt          i, nsplit;
585   PetscBool         sorted, sorted_col;
586 
587   PetscFunctionBegin;
588   pc->failedreason = PC_NOERROR;
589   PetscCall(PCFieldSplitSetDefaults(pc));
590   nsplit = jac->nsplits;
591   ilink  = jac->head;
592 
593   /* get the matrices for each split */
594   if (!jac->issetup) {
595     PetscInt rstart, rend, nslots, bs;
596 
597     jac->issetup = PETSC_TRUE;
598 
599     /* This is done here instead of in PCFieldSplitSetFields() because may not have matrix at that point */
600     if (jac->defaultsplit || !ilink->is) {
601       if (jac->bs <= 0) jac->bs = nsplit;
602     }
603 
604     /*  MatCreateSubMatrix() for [S]BAIJ matrices can only work if the indices include entire blocks of the matrix */
605     PetscCall(MatGetBlockSize(pc->pmat, &bs));
606     if (bs > 1 && (jac->bs <= bs || jac->bs % bs)) {
607       PetscBool blk;
608 
609       PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &blk, MATBAIJ, MATSBAIJ, MATSEQBAIJ, MATSEQSBAIJ, MATMPIBAIJ, MATMPISBAIJ, NULL));
610       PetscCheck(!blk, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Cannot use MATBAIJ with PCFIELDSPLIT and currently set matrix and PC blocksizes");
611     }
612 
613     bs = jac->bs;
614     PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend));
615     nslots = (rend - rstart) / bs;
616     for (i = 0; i < nsplit; i++) {
617       if (jac->defaultsplit) {
618         PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is));
619         PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
620       } else if (!ilink->is) {
621         if (ilink->nfields > 1) {
622           PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col;
623           PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii));
624           PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj));
625           for (j = 0; j < nslots; j++) {
626             for (k = 0; k < nfields; k++) {
627               ii[nfields * j + k] = rstart + bs * j + fields[k];
628               jj[nfields * j + k] = rstart + bs * j + fields_col[k];
629             }
630           }
631           PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is));
632           PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col));
633           PetscCall(ISSetBlockSize(ilink->is, nfields));
634           PetscCall(ISSetBlockSize(ilink->is_col, nfields));
635         } else {
636           PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is));
637           PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col));
638         }
639       }
640       PetscCall(ISSorted(ilink->is, &sorted));
641       if (ilink->is_col) PetscCall(ISSorted(ilink->is_col, &sorted_col));
642       PetscCheck(sorted && sorted_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Fields must be sorted when creating split");
643       ilink = ilink->next;
644     }
645   }
646 
647   ilink = jac->head;
648   if (!jac->pmat) {
649     Vec xtmp;
650 
651     PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL));
652     PetscCall(PetscMalloc1(nsplit, &jac->pmat));
653     PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y));
654     for (i = 0; i < nsplit; i++) {
655       MatNullSpace sp;
656 
657       /* Check for preconditioning matrix attached to IS */
658       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i]));
659       if (jac->pmat[i]) {
660         PetscCall(PetscObjectReference((PetscObject)jac->pmat[i]));
661         if (jac->type == PC_COMPOSITE_SCHUR) {
662           jac->schur_user = jac->pmat[i];
663 
664           PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
665         }
666       } else {
667         const char *prefix;
668         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i]));
669         PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix));
670         PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix));
671         PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view"));
672       }
673       /* create work vectors for each split */
674       PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i]));
675       ilink->x = jac->x[i];
676       ilink->y = jac->y[i];
677       ilink->z = NULL;
678       /* compute scatter contexts needed by multiplicative versions and non-default splits */
679       PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx));
680       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp));
681       if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp));
682       ilink = ilink->next;
683     }
684     PetscCall(VecDestroy(&xtmp));
685   } else {
686     MatReuse scall;
687     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
688       for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i]));
689       scall = MAT_INITIAL_MATRIX;
690     } else scall = MAT_REUSE_MATRIX;
691 
692     for (i = 0; i < nsplit; i++) {
693       Mat pmat;
694 
695       /* Check for preconditioning matrix attached to IS */
696       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat));
697       if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i]));
698       ilink = ilink->next;
699     }
700   }
701   if (jac->diag_use_amat) {
702     ilink = jac->head;
703     if (!jac->mat) {
704       PetscCall(PetscMalloc1(nsplit, &jac->mat));
705       for (i = 0; i < nsplit; i++) {
706         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i]));
707         ilink = ilink->next;
708       }
709     } else {
710       MatReuse scall;
711       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
712         for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i]));
713         scall = MAT_INITIAL_MATRIX;
714       } else scall = MAT_REUSE_MATRIX;
715 
716       for (i = 0; i < nsplit; i++) {
717         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i]));
718         ilink = ilink->next;
719       }
720     }
721   } else {
722     jac->mat = jac->pmat;
723   }
724 
725   /* Check for null space attached to IS */
726   ilink = jac->head;
727   for (i = 0; i < nsplit; i++) {
728     MatNullSpace sp;
729 
730     PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp));
731     if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp));
732     ilink = ilink->next;
733   }
734 
735   if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) {
736     /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */
737     /* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? */
738     ilink = jac->head;
739     if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) {
740       /* special case need where Afield[0] is not needed and only certain columns of Afield[1] are needed since update is only on those rows of the solution */
741       if (!jac->Afield) {
742         PetscCall(PetscCalloc1(nsplit, &jac->Afield));
743         if (jac->offdiag_use_amat) {
744           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
745         } else {
746           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
747         }
748       } else {
749         MatReuse scall;
750 
751         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
752           PetscCall(MatDestroy(&jac->Afield[1]));
753           scall = MAT_INITIAL_MATRIX;
754         } else scall = MAT_REUSE_MATRIX;
755 
756         if (jac->offdiag_use_amat) {
757           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
758         } else {
759           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
760         }
761       }
762     } else {
763       if (!jac->Afield) {
764         PetscCall(PetscMalloc1(nsplit, &jac->Afield));
765         for (i = 0; i < nsplit; i++) {
766           if (jac->offdiag_use_amat) {
767             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
768           } else {
769             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
770           }
771           ilink = ilink->next;
772         }
773       } else {
774         MatReuse scall;
775         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
776           for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i]));
777           scall = MAT_INITIAL_MATRIX;
778         } else scall = MAT_REUSE_MATRIX;
779 
780         for (i = 0; i < nsplit; i++) {
781           if (jac->offdiag_use_amat) {
782             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i]));
783           } else {
784             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, &jac->Afield[i]));
785           }
786           ilink = ilink->next;
787         }
788       }
789     }
790   }
791 
792   if (jac->type == PC_COMPOSITE_SCHUR) {
793     IS          ccis;
794     PetscBool   isset, isspd;
795     PetscInt    rstart, rend;
796     char        lscname[256];
797     PetscObject LSC_L;
798     PetscBool   set, flg;
799 
800     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields");
801 
802     /* If pc->mat is SPD, don't scale by -1 the Schur complement */
803     if (jac->schurscale == (PetscScalar)-1.0) {
804       PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd));
805       jac->schurscale = (isset && isspd) ? 1.0 : -1.0;
806     }
807 
808     /* When extracting off-diagonal submatrices, we take complements from this range */
809     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
810     PetscCall(PetscObjectTypeCompareAny(jac->offdiag_use_amat ? (PetscObject)pc->mat : (PetscObject)pc->pmat, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
811 
812     if (jac->schur) {
813       KSP      kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper;
814       MatReuse scall;
815 
816       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
817         scall = MAT_INITIAL_MATRIX;
818         PetscCall(MatDestroy(&jac->B));
819         PetscCall(MatDestroy(&jac->C));
820       } else scall = MAT_REUSE_MATRIX;
821 
822       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
823       ilink = jac->head;
824       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
825       if (jac->offdiag_use_amat) {
826         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->B));
827       } else {
828         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->B));
829       }
830       PetscCall(ISDestroy(&ccis));
831       if (!flg) {
832         ilink = ilink->next;
833         PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
834         if (jac->offdiag_use_amat) {
835           PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->C));
836         } else {
837           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->C));
838         }
839         PetscCall(ISDestroy(&ccis));
840       } else {
841         PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &set, &flg));
842         if (set && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
843         else PetscCall(MatCreateTranspose(jac->B, &jac->C));
844       }
845       PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
846       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) {
847         PetscCall(MatDestroy(&jac->schurp));
848         PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
849       }
850       if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0]));
851       if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0]));
852       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
853     } else {
854       const char  *Dprefix;
855       char         schurprefix[256], schurmatprefix[256];
856       char         schurtestoption[256];
857       MatNullSpace sp;
858       KSP          kspt;
859 
860       /* extract the A01 and A10 matrices */
861       ilink = jac->head;
862       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
863       if (jac->offdiag_use_amat) {
864         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
865       } else {
866         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
867       }
868       PetscCall(ISDestroy(&ccis));
869       ilink = ilink->next;
870       if (!flg) {
871         PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
872         if (jac->offdiag_use_amat) {
873           PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
874         } else {
875           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
876         }
877         PetscCall(ISDestroy(&ccis));
878       } else {
879         PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &set, &flg));
880         if (set && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
881         else PetscCall(MatCreateTranspose(jac->B, &jac->C));
882       }
883       /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */
884       PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur));
885       PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT));
886       PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
887       PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
888       PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix));
889       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt));
890       PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix));
891 
892       /* Note: this is not true in general */
893       PetscCall(MatGetNullSpace(jac->mat[1], &sp));
894       if (sp) PetscCall(MatSetNullSpace(jac->schur, sp));
895 
896       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname));
897       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, &flg));
898       if (flg) {
899         DM  dmInner;
900         KSP kspInner;
901         PC  pcInner;
902 
903         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
904         PetscCall(KSPReset(kspInner));
905         PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0]));
906         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
907         /* Indent this deeper to emphasize the "inner" nature of this solver. */
908         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2));
909         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2));
910         PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix));
911 
912         /* Set DM for new solver */
913         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
914         PetscCall(KSPSetDM(kspInner, dmInner));
915         PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE));
916 
917         /* Defaults to PCKSP as preconditioner */
918         PetscCall(KSPGetPC(kspInner, &pcInner));
919         PetscCall(PCSetType(pcInner, PCKSP));
920         PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp));
921       } else {
922         /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or
923           * PCFieldSplitSetIS_FieldSplit. We don't want KSPPREONLY because it makes the Schur complement inexact,
924           * preventing Schur complement reduction to be an accurate solve. Usually when an iterative solver is used for
925           * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make
926           * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used
927           * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. */
928         PetscCall(KSPSetType(jac->head->ksp, KSPGMRES));
929         PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp));
930       }
931       PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0]));
932       PetscCall(KSPSetFromOptions(jac->head->ksp));
933       PetscCall(MatSetFromOptions(jac->schur));
934 
935       PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg));
936       if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */
937         KSP kspInner;
938         PC  pcInner;
939 
940         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
941         PetscCall(KSPGetPC(kspInner, &pcInner));
942         PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg));
943         if (flg) {
944           KSP ksp;
945 
946           PetscCall(PCKSPGetKSP(pcInner, &ksp));
947           if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE));
948         }
949       }
950       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname));
951       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, &flg));
952       if (flg) {
953         DM dmInner;
954 
955         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
956         PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper));
957         PetscCall(KSPSetNestLevel(jac->kspupper, pc->kspnestlevel));
958         PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure));
959         PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix));
960         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1));
961         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1));
962         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
963         PetscCall(KSPSetDM(jac->kspupper, dmInner));
964         PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE));
965         PetscCall(KSPSetFromOptions(jac->kspupper));
966         PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0]));
967         PetscCall(VecDuplicate(jac->head->x, &jac->head->z));
968       } else {
969         jac->kspupper = jac->head->ksp;
970         PetscCall(PetscObjectReference((PetscObject)jac->head->ksp));
971       }
972 
973       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
974       PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur));
975       PetscCall(KSPSetNestLevel(jac->kspschur, pc->kspnestlevel));
976       PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure));
977       PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1));
978       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) {
979         PC pcschur;
980         PetscCall(KSPGetPC(jac->kspschur, &pcschur));
981         PetscCall(PCSetType(pcschur, PCNONE));
982         /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */
983       } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) {
984         PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
985       }
986       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
987       PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix));
988       PetscCall(KSPSetOptionsPrefix(jac->kspschur, Dprefix));
989       /* propagate DM */
990       {
991         DM sdm;
992         PetscCall(KSPGetDM(jac->head->next->ksp, &sdm));
993         if (sdm) {
994           PetscCall(KSPSetDM(jac->kspschur, sdm));
995           PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE));
996         }
997       }
998       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
999       /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */
1000       PetscCall(KSPSetFromOptions(jac->kspschur));
1001     }
1002     PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY));
1003     PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY));
1004 
1005     /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */
1006     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname));
1007     PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
1008     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
1009     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", (PetscObject)LSC_L));
1010     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname));
1011     PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
1012     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
1013     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", (PetscObject)LSC_L));
1014   } else if (jac->type == PC_COMPOSITE_GKB) {
1015     IS       ccis;
1016     PetscInt rstart, rend;
1017 
1018     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields");
1019 
1020     ilink = jac->head;
1021 
1022     /* When extracting off-diagonal submatrices, we take complements from this range */
1023     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
1024 
1025     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1026     if (jac->offdiag_use_amat) {
1027       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1028     } else {
1029       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1030     }
1031     PetscCall(ISDestroy(&ccis));
1032     /* Create work vectors for GKB algorithm */
1033     PetscCall(VecDuplicate(ilink->x, &jac->u));
1034     PetscCall(VecDuplicate(ilink->x, &jac->Hu));
1035     PetscCall(VecDuplicate(ilink->x, &jac->w2));
1036     ilink = ilink->next;
1037     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1038     if (jac->offdiag_use_amat) {
1039       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1040     } else {
1041       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1042     }
1043     PetscCall(ISDestroy(&ccis));
1044     /* Create work vectors for GKB algorithm */
1045     PetscCall(VecDuplicate(ilink->x, &jac->v));
1046     PetscCall(VecDuplicate(ilink->x, &jac->d));
1047     PetscCall(VecDuplicate(ilink->x, &jac->w1));
1048     PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu));
1049     PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz));
1050 
1051     ilink = jac->head;
1052     PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H));
1053     if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1054     /* Create gkb_monitor context */
1055     if (jac->gkbmonitor) {
1056       PetscInt tablevel;
1057       PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer));
1058       PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII));
1059       PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel));
1060       PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel));
1061       PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1));
1062     }
1063   } else {
1064     /* set up the individual splits' PCs */
1065     i     = 0;
1066     ilink = jac->head;
1067     while (ilink) {
1068       PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i]));
1069       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
1070       if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1071       i++;
1072       ilink = ilink->next;
1073     }
1074   }
1075 
1076   /* Set coordinates to the sub PC objects whenever these are set */
1077   if (jac->coordinates_set) {
1078     PC pc_coords;
1079     if (jac->type == PC_COMPOSITE_SCHUR) {
1080       // Head is first block.
1081       PetscCall(KSPGetPC(jac->head->ksp, &pc_coords));
1082       PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords));
1083       // Second one is Schur block, but its KSP object is in kspschur.
1084       PetscCall(KSPGetPC(jac->kspschur, &pc_coords));
1085       PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords));
1086     } else if (jac->type == PC_COMPOSITE_GKB) {
1087       PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB Fieldpslit preconditioner\n"));
1088     } else {
1089       ilink = jac->head;
1090       while (ilink) {
1091         PetscCall(KSPGetPC(ilink->ksp, &pc_coords));
1092         PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords));
1093         ilink = ilink->next;
1094       }
1095     }
1096   }
1097 
1098   jac->suboptionsset = PETSC_TRUE;
1099   PetscFunctionReturn(PETSC_SUCCESS);
1100 }
1101 
/* Scatter the split's portion of the global vector xx into ilink->x, solve with
   the split's KSP, and scatter-ADD the result back into the global vector yy.
   The || chaining short-circuits on the first nonzero (error) PetscErrorCode,
   so the whole expansion evaluates to the first failure (or 0 on success).
   NOTE(review): the expansion references a variable named `pc` (passed to
   KSPCheckSolve()), so this macro may only be used where a PC named `pc` is in
   scope at the expansion site. */
#define FieldSplitSplitSolveAdd(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \
                    KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE)))
1106 
1107 static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y)
1108 {
1109   PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
1110   PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
1111   KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;
1112 
1113   PetscFunctionBegin;
1114   switch (jac->schurfactorization) {
1115   case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
1116     /* [A00 0; 0 -S], positive definite, suitable for MINRES */
1117     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1118     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1119     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1120     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1121     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1122     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1123     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1124     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1125     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1126     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1127     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1128     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1129     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1130     PetscCall(VecScale(ilinkD->y, jac->schurscale));
1131     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1132     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1133     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1134     break;
1135   case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
1136     /* [A00 0; A10 S], suitable for left preconditioning */
1137     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1138     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1139     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1140     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1141     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1142     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1143     PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1144     PetscCall(VecScale(ilinkD->x, -1.));
1145     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1146     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1147     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1148     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1149     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1150     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1151     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1152     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1153     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1154     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1155     break;
1156   case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
1157     /* [A00 A01; 0 S], suitable for right preconditioning */
1158     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1159     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1160     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1161     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1162     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1163     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1164     PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1165     PetscCall(VecScale(ilinkA->x, -1.));
1166     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1167     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1168     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1169     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1170     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1171     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1172     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1173     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1174     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1175     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1176     break;
1177   case PC_FIELDSPLIT_SCHUR_FACT_FULL:
1178     /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */
1179     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1180     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1181     PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1182     PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y));
1183     PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y));
1184     PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1185     PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1186     PetscCall(VecScale(ilinkD->x, -1.0));
1187     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1188     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1189 
1190     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1191     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1192     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1193     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1194     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1195 
1196     if (kspUpper == kspA) {
1197       PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y));
1198       PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
1199       PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1200       PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1201       PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1202       PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1203     } else {
1204       PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1205       PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1206       PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1207       PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1208       PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1209       PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z));
1210       PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z));
1211       PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1212       PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
1213     }
1214     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1215     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1216     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1217   }
1218   PetscFunctionReturn(PETSC_SUCCESS);
1219 }
1220 
/*
  PCApplyTranspose_FieldSplit_Schur - Applies the transpose of the Schur-complement
  block-factorization preconditioner: y = M^{-T} x for the selected factorization.

  The VecScatterBegin/VecScatterEnd pairs are deliberately interleaved with the
  (transpose) solves so that communication overlaps computation; do NOT reorder them.

  Note: kspLower is aliased to kspA below and never reassigned, so in the FULL case
  the (kspLower == kspA) test is currently always true; the else branch mirrors the
  structure of the forward apply (where a separate upper solver jac->kspupper may
  have been configured) and is kept for symmetry.
*/
static PetscErrorCode PCApplyTranspose_FieldSplit_Schur(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; /* the A00 split and the Schur-complement split */
  KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;

  PetscFunctionBegin;
  switch (jac->schurfactorization) {
  case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
    /* [A00 0; 0 -S], positive definite, suitable for MINRES */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* account for the -S block; user-settable via PCFieldSplitSetSchurScale() */
    PetscCall(VecScale(ilinkD->y, jac->schurscale));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* transpose of the upper factorization: forward substitution with A00^T, then
       the Schur right-hand side is formed with B^T and solved with S^T */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* transpose of the lower factorization: transposed Schur solve first, then C^T
       contributes to the A00 right-hand side */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
    PetscCall(VecScale(ilinkA->x, -1.));
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* transpose of [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1]:
       factors are applied in reverse order with each one transposed */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspUpper, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.0));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

    if (kspLower == kspA) {
      /* same solver for the lower factor: fold C^T contribution into the A00^T solve */
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->y));
      PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    } else {
      /* currently unreachable (kspLower is aliased to kspA above); kept to mirror
         the distinct-solver path of the forward apply */
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
      PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(KSPSolveTranspose(kspLower, ilinkA->x, ilinkA->z));
      PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->z));
      PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
    }
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1331 
/*
  PCApply_FieldSplit - Applies the additive (block Jacobi over the splits),
  multiplicative (block Gauss-Seidel), or symmetric multiplicative composition
  of the per-split solves to x, producing y.

  jac->Afield[i] holds the rows of the operator associated with split i and is used
  to update the residual restricted to that split during multiplicative sweeps.
*/
static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          cnt, bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    if (jac->defaultsplit) {
      /* all splits share one strided layout: gather every field at once, solve each
         split independently, and scatter all results back */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* general index-set splits: scatter/solve/scatter-add each split in turn */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) {
    /* hand-unrolled two-split multiplicative sweep */
    PetscCall(VecSet(y, 0.0));
    /* solve on first block for first block variables */
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));

    /* compute the residual only onto second block variables using first block variables */
    PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x));
    ilink = ilink->next;
    PetscCall(VecScale(ilink->x, -1.0));
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));

    /* solve on second block variables */
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
    if (!jac->w1) {
      /* lazily allocate the two residual work vectors */
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
    cnt = 1;
    while (ilink->next) {
      ilink = ilink->next;
      /* compute the residual only over the part of the vector needed */
      PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x));
      PetscCall(VecScale(ilink->x, -1.0));
      PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
      PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
      PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    }
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* cnt now equals nsplits; skip the last split (just solved above) and sweep
         backwards over the remaining splits */
      cnt -= 2;
      while (ilink->previous) {
        ilink = ilink->previous;
        /* compute the residual only over the part of the vector needed */
        PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x));
        PetscCall(VecScale(ilink->x, -1.0));
        PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      }
    }
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1429 
/*
  PCApply_FieldSplit_GKB - Applies the preconditioner via generalized Golub-Kahan
  bidiagonalization for a 2x2 block system, following [Ar13] (referenced inline
  below), with optional augmented-Lagrangian regularization H = A + nu*B*B'.

  The bidiagonalization iteration runs until the delayed lower-bound error estimate
  (computed over the last jac->gkbdelay values of z) drops below jac->gkbtol, or
  jac->gkbmaxit iterations are reached.
*/
static PetscErrorCode PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; /* primal (A00) split and dual split */
  KSP               ksp = ilinkA->ksp;
  Vec               u, v, Hu, d, work1, work2;
  PetscScalar       alpha, z, nrmz2, *vecz;
  PetscReal         lowbnd, nu, beta;
  PetscInt          j, iterGKB;

  PetscFunctionBegin;
  PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));

  /* local aliases for the work objects created during setup */
  u     = jac->u;
  v     = jac->v;
  Hu    = jac->Hu;
  d     = jac->d;
  work1 = jac->w1;
  work2 = jac->w2;
  vecz  = jac->vecz; /* circular history of z values for the delayed stopping test */

  /* Change RHS to comply with matrix regularization H = A + nu*B*B' */
  /* Add q = q + nu*B*b */
  if (jac->gkbnu) {
    nu = jac->gkbnu;
    PetscCall(VecScale(ilinkD->x, jac->gkbnu));
    PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */
  } else {
    /* Situation when no augmented Lagrangian is used. Then we set inner  */
    /* matrix N = I in [Ar13], and thus nu = 1.                           */
    nu = 1;
  }

  /* Transform rhs from [q,tilde{b}] to [0,b] */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y));
  PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1));
  PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x        */

  /* First step of algorithm */
  PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c)*/
  KSPCheckDot(ksp, beta);
  beta = PetscSqrtReal(nu) * beta;
  PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c      */
  PetscCall(MatMult(jac->B, v, work2));          /* u = H^{-1}*B*v      */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(KSPSolve(ksp, work2, u));
  PetscCall(KSPCheckSolve(ksp, pc, u));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u      */
  PetscCall(VecDot(Hu, u, &alpha));
  KSPCheckDot(ksp, alpha);
  PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
  alpha = PetscSqrtReal(PetscAbsScalar(alpha));
  PetscCall(VecScale(u, 1.0 / alpha));
  PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* d = v/alpha         */

  z       = beta / alpha;
  vecz[1] = z; /* seed the z history used by the delayed lower-bound estimate */

  /* Computation of first iterate x(1) and p(1) */
  PetscCall(VecAXPY(ilinkA->y, z, u));
  PetscCall(VecCopy(d, ilinkD->y));
  PetscCall(VecScale(ilinkD->y, -z));

  iterGKB = 1;
  lowbnd  = 2 * jac->gkbtol; /* ensure at least one iteration of the loop below */
  if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));

  while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) {
    iterGKB += 1;
    PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */
    PetscCall(VecAXPBY(v, nu, -alpha, work1));
    PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = sqrt(nu)*v'*v      */
    beta = beta / PetscSqrtReal(nu);
    PetscCall(VecScale(v, 1.0 / beta));
    PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */
    PetscCall(MatMult(jac->H, u, Hu));
    PetscCall(VecAXPY(work2, -beta, Hu));
    PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(KSPSolve(ksp, work2, u));
    PetscCall(KSPCheckSolve(ksp, pc, u));
    PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u            */
    PetscCall(VecDot(Hu, u, &alpha));
    KSPCheckDot(ksp, alpha);
    PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
    alpha = PetscSqrtReal(PetscAbsScalar(alpha));
    PetscCall(VecScale(u, 1.0 / alpha));

    z       = -beta / alpha * z; /* z <- beta/alpha*z     */
    vecz[0] = z;

    /* Computation of new iterate x(i+1) and p(i+1) */
    PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */
    PetscCall(VecAXPY(ilinkA->y, z, u));                   /* r = r + z*u          */
    PetscCall(VecAXPY(ilinkD->y, -z, d));                  /* p = p - z*d          */
    PetscCall(MatMult(jac->H, ilinkA->y, Hu));             /* ||u||_H = u'*H*u     */
    PetscCall(VecDot(Hu, ilinkA->y, &nrmz2));

    /* Compute Lower Bound estimate (only meaningful once the z history is full) */
    if (iterGKB > jac->gkbdelay) {
      lowbnd = 0.0;
      for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]);
      lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2));
    }

    /* shift the z history so the next iteration writes the newest value at vecz[0] */
    for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2];
    if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));
  }

  /* scatter the converged split solutions back into the global vector */
  PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

  PetscFunctionReturn(PETSC_SUCCESS);
}
1553 
/* Scatter xx into the split's work vector ilink->y, run the split's transpose solve
   into ilink->x, and scatter-add the result into yy.  Written as a single macro
   expression so it can be wrapped in PetscCall(); note that it relies on a variable
   named `pc` being in scope at the expansion site (used by KSPCheckSolve()). */
#define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \
                    KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE)))
1558 
1559 static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y)
1560 {
1561   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1562   PC_FieldSplitLink ilink = jac->head;
1563   PetscInt          bs;
1564 
1565   PetscFunctionBegin;
1566   if (jac->type == PC_COMPOSITE_ADDITIVE) {
1567     if (jac->defaultsplit) {
1568       PetscCall(VecGetBlockSize(x, &bs));
1569       PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
1570       PetscCall(VecGetBlockSize(y, &bs));
1571       PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
1572       PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
1573       while (ilink) {
1574         PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1575         PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y));
1576         PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
1577         PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1578         ilink = ilink->next;
1579       }
1580       PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
1581     } else {
1582       PetscCall(VecSet(y, 0.0));
1583       while (ilink) {
1584         PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
1585         ilink = ilink->next;
1586       }
1587     }
1588   } else {
1589     if (!jac->w1) {
1590       PetscCall(VecDuplicate(x, &jac->w1));
1591       PetscCall(VecDuplicate(x, &jac->w2));
1592     }
1593     PetscCall(VecSet(y, 0.0));
1594     if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
1595       PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
1596       while (ilink->next) {
1597         ilink = ilink->next;
1598         PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
1599         PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
1600         PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
1601       }
1602       while (ilink->previous) {
1603         ilink = ilink->previous;
1604         PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
1605         PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
1606         PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
1607       }
1608     } else {
1609       while (ilink->next) { /* get to last entry in linked list */
1610         ilink = ilink->next;
1611       }
1612       PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
1613       while (ilink->previous) {
1614         ilink = ilink->previous;
1615         PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
1616         PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
1617         PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
1618       }
1619     }
1620   }
1621   PetscFunctionReturn(PETSC_SUCCESS);
1622 }
1623 
1624 static PetscErrorCode PCReset_FieldSplit(PC pc)
1625 {
1626   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1627   PC_FieldSplitLink ilink = jac->head, next;
1628 
1629   PetscFunctionBegin;
1630   while (ilink) {
1631     PetscCall(KSPDestroy(&ilink->ksp));
1632     PetscCall(VecDestroy(&ilink->x));
1633     PetscCall(VecDestroy(&ilink->y));
1634     PetscCall(VecDestroy(&ilink->z));
1635     PetscCall(VecScatterDestroy(&ilink->sctx));
1636     PetscCall(ISDestroy(&ilink->is));
1637     PetscCall(ISDestroy(&ilink->is_col));
1638     PetscCall(PetscFree(ilink->splitname));
1639     PetscCall(PetscFree(ilink->fields));
1640     PetscCall(PetscFree(ilink->fields_col));
1641     next = ilink->next;
1642     PetscCall(PetscFree(ilink));
1643     ilink = next;
1644   }
1645   jac->head = NULL;
1646   PetscCall(PetscFree2(jac->x, jac->y));
1647   if (jac->mat && jac->mat != jac->pmat) {
1648     PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat));
1649   } else if (jac->mat) {
1650     jac->mat = NULL;
1651   }
1652   if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat));
1653   if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield));
1654   jac->nsplits = 0;
1655   PetscCall(VecDestroy(&jac->w1));
1656   PetscCall(VecDestroy(&jac->w2));
1657   PetscCall(MatDestroy(&jac->schur));
1658   PetscCall(MatDestroy(&jac->schurp));
1659   PetscCall(MatDestroy(&jac->schur_user));
1660   PetscCall(KSPDestroy(&jac->kspschur));
1661   PetscCall(KSPDestroy(&jac->kspupper));
1662   PetscCall(MatDestroy(&jac->B));
1663   PetscCall(MatDestroy(&jac->C));
1664   PetscCall(MatDestroy(&jac->H));
1665   PetscCall(VecDestroy(&jac->u));
1666   PetscCall(VecDestroy(&jac->v));
1667   PetscCall(VecDestroy(&jac->Hu));
1668   PetscCall(VecDestroy(&jac->d));
1669   PetscCall(PetscFree(jac->vecz));
1670   PetscCall(PetscViewerDestroy(&jac->gkbviewer));
1671   jac->isrestrict = PETSC_FALSE;
1672   PetscFunctionReturn(PETSC_SUCCESS);
1673 }
1674 
1675 static PetscErrorCode PCDestroy_FieldSplit(PC pc)
1676 {
1677   PetscFunctionBegin;
1678   PetscCall(PCReset_FieldSplit(pc));
1679   PetscCall(PetscFree(pc->data));
1680   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
1681   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL));
1682   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL));
1683   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL));
1684   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL));
1685   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL));
1686   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL));
1687   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1688 
1689   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
1690   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
1691   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
1692   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
1693   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1694   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
1695   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
1696   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
1697   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
1698   PetscFunctionReturn(PETSC_SUCCESS);
1699 }
1700 
/*
  PCSetFromOptions_FieldSplit - Processes the options database for a PCFIELDSPLIT preconditioner

  Handles the generic fieldsplit options (DM-based splits, block size, composition type,
  Amat vs Pmat block extraction, saddle-point detection), the options specific to the
  Schur and GKB composition types, and, on calls after the first PCSetUp_FieldSplit(),
  re-applies the options database to the already-created sub-solvers.
*/
static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PetscInt        bs;
  PetscBool       flg;
  PC_FieldSplit  *jac = (PC_FieldSplit *)pc->data;
  PCCompositeType ctype;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options");
  PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL));
  PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg));
  if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs));
  /* the PC-level flag pc->useAmat (-pc_use_amat) provides the default for both flags below */
  jac->diag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL));
  jac->offdiag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL));
  PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL));
  PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */
  PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg));
  if (flg) PetscCall(PCFieldSplitSetType(pc, ctype));
  /* Only setup fields once */
  if ((jac->bs > 0) && (jac->nsplits == 0)) {
    /* only allow user to set fields from command line if bs is already known.
       otherwise user can set them in PCFieldSplitSetDefaults() */
    PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
    if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
  }
  if (jac->type == PC_COMPOSITE_SCHUR) {
    /* -pc_fieldsplit_schur_factorization_type is the deprecated spelling of -pc_fieldsplit_schur_fact_type */
    PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg));
    if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n"));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL));
    PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL));
  } else if (jac->type == PC_COMPOSITE_GKB) {
    PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitGKBTol", jac->gkbtol, &jac->gkbtol, NULL));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL));
    PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitGKBNu", jac->gkbnu, &jac->gkbnu, NULL));
    PetscCheck(jac->gkbnu >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "nu cannot be less than 0: value %g", (double)jac->gkbnu);
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", "Maximum allowed number of iterations", "PCFieldSplitGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL));
    PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL));
  }
  /*
    In the initial call to this routine the sub-solver data structures do not exist so we cannot call KSPSetFromOptions() on them yet.
    But after the initial setup of ALL the layers of sub-solvers is completed we do want to call KSPSetFromOptions() on the sub-solvers every time it
    is called on the outer solver in case changes were made in the options database

    But even after PCSetUp_FieldSplit() is called all the options inside the inner levels of sub-solvers may still not have been set thus we only call the KSPSetFromOptions()
    if we know that the entire stack of sub-solvers below this has been completely instantiated; we check this by seeing if any solver iterations are complete.
    Without this extra check test p2p1fetidp_olof_full and others fail with incorrect matrix types.

    There could be a negative side effect of calling the KSPSetFromOptions() below.

    If one captured the PetscObjectState of the options database one could skip these calls if the database has not changed from the previous call
  */
  if (jac->issetup) {
    PC_FieldSplitLink ilink = jac->head;
    if (jac->type == PC_COMPOSITE_SCHUR) {
      if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper));
      if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur));
    }
    while (ilink) {
      if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp));
      ilink = ilink->next;
    }
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
1769 
1770 static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
1771 {
1772   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
1773   PC_FieldSplitLink ilink, next = jac->head;
1774   char              prefix[128];
1775   PetscInt          i;
1776 
1777   PetscFunctionBegin;
1778   if (jac->splitdefined) {
1779     PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
1780     PetscFunctionReturn(PETSC_SUCCESS);
1781   }
1782   for (i = 0; i < n; i++) {
1783     PetscCheck(fields[i] < jac->bs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", fields[i], jac->bs);
1784     PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]);
1785   }
1786   PetscCall(PetscNew(&ilink));
1787   if (splitname) {
1788     PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
1789   } else {
1790     PetscCall(PetscMalloc1(3, &ilink->splitname));
1791     PetscCall(PetscSNPrintf(ilink->splitname, 2, "%" PetscInt_FMT, jac->nsplits));
1792   }
1793   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split great than 4 gets logged in the 4th split */
1794   PetscCall(PetscMalloc1(n, &ilink->fields));
1795   PetscCall(PetscArraycpy(ilink->fields, fields, n));
1796   PetscCall(PetscMalloc1(n, &ilink->fields_col));
1797   PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n));
1798 
1799   ilink->nfields = n;
1800   ilink->next    = NULL;
1801   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
1802   PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
1803   PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
1804   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
1805   PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
1806 
1807   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
1808   PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
1809 
1810   if (!next) {
1811     jac->head       = ilink;
1812     ilink->previous = NULL;
1813   } else {
1814     while (next->next) next = next->next;
1815     next->next      = ilink;
1816     ilink->previous = next;
1817   }
1818   jac->nsplits++;
1819   PetscFunctionReturn(PETSC_SUCCESS);
1820 }
1821 
1822 static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1823 {
1824   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1825 
1826   PetscFunctionBegin;
1827   *subksp = NULL;
1828   if (n) *n = 0;
1829   if (jac->type == PC_COMPOSITE_SCHUR) {
1830     PetscInt nn;
1831 
1832     PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()");
1833     PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits);
1834     nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 1 : 0);
1835     PetscCall(PetscMalloc1(nn, subksp));
1836     (*subksp)[0] = jac->head->ksp;
1837     (*subksp)[1] = jac->kspschur;
1838     if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper;
1839     if (n) *n = nn;
1840   }
1841   PetscFunctionReturn(PETSC_SUCCESS);
1842 }
1843 
/*
  PCFieldSplitGetSubKSP_FieldSplit_Schur - PCFieldSplitGetSubKSP() implementation used when
  the composition type is PC_COMPOSITE_SCHUR.

  Returns an array of jac->nsplits KSPs: slot 0 is filled by MatSchurComplementGetKSP() with
  the solver embedded in the Schur complement matrix, slot 1 is the solver for the Schur
  complement system. The caller is expected to free the array with PetscFree().
*/
static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()");
  PetscCall(PetscMalloc1(jac->nsplits, subksp));
  PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp)); /* fills slot 0 */

  (*subksp)[1] = jac->kspschur;
  if (n) *n = jac->nsplits;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1857 
1858 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1859 {
1860   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1861   PetscInt          cnt   = 0;
1862   PC_FieldSplitLink ilink = jac->head;
1863 
1864   PetscFunctionBegin;
1865   PetscCall(PetscMalloc1(jac->nsplits, subksp));
1866   while (ilink) {
1867     (*subksp)[cnt++] = ilink->ksp;
1868     ilink            = ilink->next;
1869   }
1870   PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits);
1871   if (n) *n = jac->nsplits;
1872   PetscFunctionReturn(PETSC_SUCCESS);
1873 }
1874 
/*@C
  PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`.

  Collective

  Input Parameters:
+ pc  - the preconditioner context
- isy - the index set that defines the indices to which the fieldsplit is to be restricted

  Level: advanced

  Developer Notes:
  It seems the resulting `IS`s will not cover the entire space, so
  how can they define a convergent preconditioner? Needs explaining.

.seealso: [](sec_block_matrices), `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(isy, IS_CLASSID, 2);
  /* silently does nothing if pc is not a PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1898 
/*
  PCFieldSplitRestrictIS_FieldSplit - Implementation of PCFieldSplitRestrictIS().

  Replaces each split's row and column IS with its embedding into isy, renumbered into the
  global numbering induced by isy, and recursively restricts any nested PCFIELDSPLIT found
  inside a split's KSP.
*/
static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;
  PetscInt          localsize, size, sizez, i;
  const PetscInt   *ind, *indz;
  PetscInt         *indc, *indcz;
  PetscBool         flg;

  PetscFunctionBegin;
  /* size = this rank's starting offset in the global numbering of isy (exclusive prefix sum) */
  PetscCall(ISGetLocalSize(isy, &localsize));
  PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy)));
  size -= localsize;
  while (ilink) {
    IS isrl, isr;
    PC subpc;
    /* isrl = local positions within isy of this split's entries */
    PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl));
    PetscCall(ISGetLocalSize(isrl, &localsize));
    PetscCall(PetscMalloc1(localsize, &indc));
    PetscCall(ISGetIndices(isrl, &ind));
    PetscCall(PetscArraycpy(indc, ind, localsize));
    PetscCall(ISRestoreIndices(isrl, &ind));
    PetscCall(ISDestroy(&isrl));
    /* shift the local positions by the rank offset to obtain global indices */
    for (i = 0; i < localsize; i++) *(indc + i) += size;
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr));
    /* isr becomes both the row and the column IS of the split; each slot takes its own
       reference and the final ISDestroy() drops the creation reference */
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is));
    ilink->is = isr;
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is_col));
    ilink->is_col = isr;
    PetscCall(ISDestroy(&isr));
    PetscCall(KSPGetPC(ilink->ksp, &subpc));
    PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg));
    if (flg) {
      IS       iszl, isz;
      MPI_Comm comm;
      /* restrict the nested fieldsplit to the portion of isy that falls inside this split */
      PetscCall(ISGetLocalSize(ilink->is, &localsize));
      comm = PetscObjectComm((PetscObject)ilink->is);
      PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl));
      PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm));
      sizez -= localsize;
      PetscCall(ISGetLocalSize(iszl, &localsize));
      PetscCall(PetscMalloc1(localsize, &indcz));
      PetscCall(ISGetIndices(iszl, &indz));
      PetscCall(PetscArraycpy(indcz, indz, localsize));
      PetscCall(ISRestoreIndices(iszl, &indz));
      PetscCall(ISDestroy(&iszl));
      for (i = 0; i < localsize; i++) *(indcz + i) += sizez;
      PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz));
      PetscCall(PCFieldSplitRestrictIS(subpc, isz));
      PetscCall(ISDestroy(&isz));
    }
    next  = ilink->next;
    ilink = next;
  }
  jac->isrestrict = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1958 
/*
  PCFieldSplitSetIS_FieldSplit - Implementation of PCFieldSplitSetIS(); appends a new split,
  defined by an explicit index set, to the end of the linked list of splits.

  Takes a reference on is for both the row (is) and the column (is_col) index set of the
  split, and creates the split's KSP (defaulting to KSPPREONLY) with the options prefix
  <pcprefix>fieldsplit_<splitname>_.
*/
static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink, next = jac->head;
  char              prefix[128];

  PetscFunctionBegin;
  if (jac->splitdefined) {
    PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(PetscNew(&ilink));
  if (splitname) {
    PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
  } else {
    /* unnamed splits are named after their number */
    PetscCall(PetscMalloc1(8, &ilink->splitname));
    PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits));
  }
  ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split greater than 4 gets logged in the 4th split */
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is));
  ilink->is = is;
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is_col));
  ilink->is_col = is;
  ilink->next   = NULL;
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
  PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
  PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
  PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));

  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
  PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));

  /* append to the end of the split list */
  if (!next) {
    jac->head       = ilink;
    ilink->previous = NULL;
  } else {
    while (next->next) next = next->next;
    next->next      = ilink;
    ilink->previous = next;
  }
  jac->nsplits++;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2005 
2006 /*@C
2007   PCFieldSplitSetFields - Sets the fields that define one particular split in `PCFIELDSPLIT`
2008 
2009   Logically Collective
2010 
2011   Input Parameters:
2012 + pc         - the preconditioner context
2013 . splitname  - name of this split, if `NULL` the number of the split is used
2014 . n          - the number of fields in this split
2015 . fields     - the fields in this split
2016 - fields_col - generally the same as fields, if it does not match fields then the matrix block that is solved for this set of fields comes from an off-diagonal block
2017                  of the matrix and fields_col provides the column indices for that block
2018 
2019   Level: intermediate
2020 
2021   Notes:
2022   Use `PCFieldSplitSetIS()` to set a  general set of indices as a split.
2023 
2024   `PCFieldSplitSetFields()` is for defining fields as strided blocks. For example, if the block
2025   size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean
2026   0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x1x3x5x7.. x12x45x78x....
2027   where the numbered entries indicate what is in the split.
2028 
2029   This function is called once per split (it creates a new split each time).  Solve options
2030   for this split will be available under the prefix `-fieldsplit_SPLITNAME_`.
2031 
2032   `PCFieldSplitSetIS()` does not support having a fields_col different from fields
2033 
2034   Developer Notes:
2035   This routine does not actually create the `IS` representing the split, that is delayed
2036   until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be
2037   available when this routine is called.
2038 
2039 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()`
2040 @*/
2041 PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
2042 {
2043   PetscFunctionBegin;
2044   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2045   PetscAssertPointer(splitname, 2);
2046   PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname);
2047   PetscAssertPointer(fields, 4);
2048   PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col));
2049   PetscFunctionReturn(PETSC_SUCCESS);
2050 }
2051 
2052 /*@
2053   PCFieldSplitSetDiagUseAmat - set flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2054   the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.
2055 
2056   Logically Collective
2057 
2058   Input Parameters:
2059 + pc  - the preconditioner object
2060 - flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2061 
2062   Options Database Key:
2063 . -pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks
2064 
2065   Level: intermediate
2066 
2067 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT`
2068 @*/
2069 PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg)
2070 {
2071   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2072   PetscBool      isfs;
2073 
2074   PetscFunctionBegin;
2075   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2076   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2077   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2078   jac->diag_use_amat = flg;
2079   PetscFunctionReturn(PETSC_SUCCESS);
2080 }
2081 
2082 /*@
2083   PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2084   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.
2085 
2086   Logically Collective
2087 
2088   Input Parameter:
2089 . pc - the preconditioner object
2090 
2091   Output Parameter:
2092 . flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2093 
2094   Level: intermediate
2095 
2096 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT`
2097 @*/
2098 PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg)
2099 {
2100   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2101   PetscBool      isfs;
2102 
2103   PetscFunctionBegin;
2104   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2105   PetscAssertPointer(flg, 2);
2106   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2107   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2108   *flg = jac->diag_use_amat;
2109   PetscFunctionReturn(PETSC_SUCCESS);
2110 }
2111 
2112 /*@
2113   PCFieldSplitSetOffDiagUseAmat - set flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2114   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.
2115 
2116   Logically Collective
2117 
2118   Input Parameters:
2119 + pc  - the preconditioner object
2120 - flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2121 
2122   Options Database Key:
2123 . -pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks
2124 
2125   Level: intermediate
2126 
2127 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT`
2128 @*/
2129 PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg)
2130 {
2131   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2132   PetscBool      isfs;
2133 
2134   PetscFunctionBegin;
2135   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2136   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2137   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2138   jac->offdiag_use_amat = flg;
2139   PetscFunctionReturn(PETSC_SUCCESS);
2140 }
2141 
2142 /*@
2143   PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2144   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.
2145 
2146   Logically Collective
2147 
2148   Input Parameter:
2149 . pc - the preconditioner object
2150 
2151   Output Parameter:
2152 . flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2153 
2154   Level: intermediate
2155 
2156 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT`
2157 @*/
2158 PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg)
2159 {
2160   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2161   PetscBool      isfs;
2162 
2163   PetscFunctionBegin;
2164   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2165   PetscAssertPointer(flg, 2);
2166   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2167   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2168   *flg = jac->offdiag_use_amat;
2169   PetscFunctionReturn(PETSC_SUCCESS);
2170 }
2171 
/*@C
  PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT`

  Logically Collective

  Input Parameters:
+ pc        - the preconditioner context
. splitname - name of this split, if `NULL` the number of the split is used
- is        - the index set that defines the elements in this split

  Level: intermediate

  Notes:
  Use `PCFieldSplitSetFields()`, for splits defined by strided types.

  This function is called once per split (it creates a new split each time).  Solve options
  for this split will be available under the prefix -fieldsplit_SPLITNAME_.

  The preconditioner takes its own reference on `is`, so the caller remains free to destroy it.

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`
@*/
PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (splitname) PetscAssertPointer(splitname, 2);
  PetscValidHeaderSpecific(is, IS_CLASSID, 3);
  /* silently does nothing if pc is not a PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2201 
2202 /*@C
2203   PCFieldSplitGetIS - Retrieves the elements for a split as an `IS`
2204 
2205   Logically Collective
2206 
2207   Input Parameters:
2208 + pc        - the preconditioner context
2209 - splitname - name of this split
2210 
2211   Output Parameter:
2212 . is - the index set that defines the elements in this split, or `NULL` if the split is not found
2213 
2214   Level: intermediate
2215 
2216 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`
2217 @*/
2218 PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is)
2219 {
2220   PetscFunctionBegin;
2221   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2222   PetscAssertPointer(splitname, 2);
2223   PetscAssertPointer(is, 3);
2224   {
2225     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2226     PC_FieldSplitLink ilink = jac->head;
2227     PetscBool         found;
2228 
2229     *is = NULL;
2230     while (ilink) {
2231       PetscCall(PetscStrcmp(ilink->splitname, splitname, &found));
2232       if (found) {
2233         *is = ilink->is;
2234         break;
2235       }
2236       ilink = ilink->next;
2237     }
2238   }
2239   PetscFunctionReturn(PETSC_SUCCESS);
2240 }
2241 
2242 /*@C
2243   PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS`
2244 
2245   Logically Collective
2246 
2247   Input Parameters:
2248 + pc    - the preconditioner context
2249 - index - index of this split
2250 
2251   Output Parameter:
2252 . is - the index set that defines the elements in this split
2253 
2254   Level: intermediate
2255 
2256 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`
2257 @*/
2258 PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is)
2259 {
2260   PetscFunctionBegin;
2261   PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index);
2262   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2263   PetscAssertPointer(is, 3);
2264   {
2265     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2266     PC_FieldSplitLink ilink = jac->head;
2267     PetscInt          i     = 0;
2268     PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits);
2269 
2270     while (i < index) {
2271       ilink = ilink->next;
2272       ++i;
2273     }
2274     PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is));
2275   }
2276   PetscFunctionReturn(PETSC_SUCCESS);
2277 }
2278 
/*@
  PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the
  fieldsplit preconditioner when calling `PCFieldSplitSetIS()`. If not set the matrix block size is used.

  Logically Collective

  Input Parameters:
+ pc - the preconditioner context
- bs - the block size

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, bs, 2);
  /* silently does nothing if pc is not a PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2301 
/*@C
  PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits

  Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ n      - the number of splits
- subksp - the array of `KSP` contexts

  Level: advanced

  Notes:
  After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
  (not the `KSP`, just the array that contains them).

  You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`.

  If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the
  Schur complement and the `KSP` object used to iterate over the Schur complement.
  To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`.

  If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the
  inner linear system defined by the matrix H in each loop.

  Fortran Notes:
  You must pass in a `KSP` array that is large enough to contain all the `KSP`s.
  You can call `PCFieldSplitGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
  `KSP` array must be.

  Developer Notes:
  There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`

  The Fortran interface should be modernized to return directly the array of values.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()`
@*/
PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2);
  /* PetscUseMethod() raises an error if the PC type does not provide this method */
  PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2349 
/*@C
  PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT`

  Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ n      - the number of splits
- subksp - the array of `KSP` contexts

  Level: advanced

  Notes:
  After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
  (not the `KSP` just the array that contains them).

  You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`.

  If the fieldsplit type is of type `PC_COMPOSITE_SCHUR`, it returns (in order)
+  1  - the `KSP` used for the (1,1) block
.  2  - the `KSP` used for the Schur complement (not the one used for the interior Schur solver)
-  3  - the `KSP` used for the (1,1) block in the upper triangular factor (if different from that of the (1,1) block).

  It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`.

  Fortran Notes:
  You must pass in a `KSP` array that is large enough to contain all the local `KSP`s.
  You can call `PCFieldSplitSchurGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
  `KSP` array must be.

  Developer Notes:
  There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`

  Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged?

  The Fortran interface should be modernized to return directly the array of values.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()`
@*/
PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2);
  /* PetscUseMethod() raises an error if the PC type does not provide this method */
  PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2399 
2400 /*@
  PCFieldSplitSetSchurPre -  Indicates from what operator the preconditioner is constructed for the Schur complement.
2402   The default is the A11 matrix.
2403 
2404   Collective
2405 
2406   Input Parameters:
2407 + pc    - the preconditioner context
2408 . ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default),
2409               `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`,
2410               `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL`
2411 - pre   - matrix to use for preconditioning, or `NULL`
2412 
2413   Options Database Keys:
2414 + -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`. See notes for meaning of various arguments
2415 - -fieldsplit_1_pc_type <pctype>                               - the preconditioner algorithm that is used to construct the preconditioner from the operator
2416 
2417   Level: intermediate
2418 
2419   Notes:
2420   If ptype is
2421 +     a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner
2422   matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix
2423 .     self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix:
2424   The only preconditioner that currently works with this symbolic representation matrix object is the `PCLSC`
2425   preconditioner
2426 .     user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument
2427   to this function).
2428 .     selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation Sp = A11 - A10 inv(diag(A00)) A01
2429   This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be
2430   lumped before extracting the diagonal using the additional option `-fieldsplit_1_mat_schur_complement_ainv_type lump`
2431 -     full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation
2432   computed internally by `PCFIELDSPLIT` (this is expensive)
2433   useful mostly as a test that the Schur complement approach can work for your problem
2434 
2435   When solving a saddle point problem, where the A11 block is identically zero, using `a11` as the ptype only makes sense
2436   with the additional option `-fieldsplit_1_pc_type none`. Usually for saddle point problems one would use a ptype of self and
2437   `-fieldsplit_1_pc_type lsc` which uses the least squares commutator to compute a preconditioner for the Schur complement.
2438 
2439 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`,
2440           `MatSchurComplementSetAinvType()`, `PCLSC`,
2441 
2442 @*/
PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscTryMethod() is a no-op when the method is not composed (PC not a Schur-type PCFIELDSPLIT) */
  PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2450 
/* Deprecated name for PCFieldSplitSetSchurPre(); kept only for backward compatibility of existing callers */
PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  return PCFieldSplitSetSchurPre(pc, ptype, pre);
} /* Deprecated name */
2455 
2456 /*@
2457   PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be
2458   preconditioned.  See `PCFieldSplitSetSchurPre()` for details.
2459 
2460   Logically Collective
2461 
2462   Input Parameter:
2463 . pc - the preconditioner context
2464 
2465   Output Parameters:
2466 + ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`
2467 - pre   - matrix to use for preconditioning (with `PC_FIELDSPLIT_SCHUR_PRE_USER`), or `NULL`
2468 
2469   Level: intermediate
2470 
2471 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC`
2472 
2473 @*/
PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscUseMethod() errors if this PC is not a PCFIELDSPLIT configured for a Schur factorization */
  PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2481 
2482 /*@
2483   PCFieldSplitSchurGetS -  extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately
2484 
2485   Not Collective
2486 
2487   Input Parameter:
2488 . pc - the preconditioner context
2489 
2490   Output Parameter:
2491 . S - the Schur complement matrix
2492 
2493   Level: advanced
2494 
2495   Note:
2496   This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`.
2497 
2498 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`,
2499           `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()`
2500 @*/
2501 PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S)
2502 {
2503   const char    *t;
2504   PetscBool      isfs;
2505   PC_FieldSplit *jac;
2506 
2507   PetscFunctionBegin;
2508   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2509   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2510   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2511   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2512   jac = (PC_FieldSplit *)pc->data;
2513   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2514   if (S) *S = jac->schur;
2515   PetscFunctionReturn(PETSC_SUCCESS);
2516 }
2517 
2518 /*@
2519   PCFieldSplitSchurRestoreS -  returns the `MATSCHURCOMPLEMENT` matrix used by this `PC`
2520 
2521   Not Collective
2522 
2523   Input Parameters:
2524 + pc - the preconditioner context
2525 - S  - the Schur complement matrix
2526 
2527   Level: advanced
2528 
2529 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MatSchurComplement`, `PCFieldSplitSchurGetS()`
2530 @*/
2531 PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S)
2532 {
2533   const char    *t;
2534   PetscBool      isfs;
2535   PC_FieldSplit *jac;
2536 
2537   PetscFunctionBegin;
2538   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2539   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2540   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2541   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2542   jac = (PC_FieldSplit *)pc->data;
2543   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2544   PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten");
2545   PetscFunctionReturn(PETSC_SUCCESS);
2546 }
2547 
2548 static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2549 {
2550   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2551 
2552   PetscFunctionBegin;
2553   jac->schurpre = ptype;
2554   if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) {
2555     PetscCall(MatDestroy(&jac->schur_user));
2556     jac->schur_user = pre;
2557     PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
2558   }
2559   PetscFunctionReturn(PETSC_SUCCESS);
2560 }
2561 
/* Implementation of PCFieldSplitGetSchurPre(): report the current Schur preconditioning choice.
   Either output pointer may be NULL if that value is not wanted. */
static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  if (ptype) *ptype = jac->schurpre;
  if (pre) *pre = jac->schur_user; /* the user-supplied matrix, if one was set via PCFieldSplitSetSchurPre() */
  PetscFunctionReturn(PETSC_SUCCESS);
}
2571 
2572 /*@
2573   PCFieldSplitSetSchurFactType -  sets which blocks of the approximate block factorization to retain in the preconditioner
2574 
2575   Collective
2576 
2577   Input Parameters:
2578 + pc    - the preconditioner context
2579 - ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default
2580 
2581   Options Database Key:
2582 . -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is `full`
2583 
2584   Level: intermediate
2585 
2586   Notes:
2587   The FULL factorization is
2588 
2589 .vb
2590    (A   B)  = (1       0) (A   0) (1  Ainv*B)  = L D U
2591    (C   E)    (C*Ainv  1) (0   S) (0       1)
.ve
2593     where S = E - C*Ainv*B. In practice, the full factorization is applied via block triangular solves with the grouping L*(D*U). UPPER uses D*U, LOWER uses L*D,
    and DIAG is the diagonal part with the sign of S flipped (because this makes the preconditioner positive definite for many formulations, thus allowing the use of `KSPMINRES`).
2595     Sign flipping of S can be turned off with `PCFieldSplitSetSchurScale()`.
2596 
2597     If A and S are solved exactly
2598 .vb
2599       *) FULL factorization is a direct solver.
2600       *) The preconditioned operator with LOWER or UPPER has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations.
2601       *) With DIAG, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations.
2602 .ve
2603 
2604   If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner
2605   application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice.
2606 
2607   For symmetric problems in which A is positive definite and S is negative definite, DIAG can be used with `KSPMINRES`.
2608 
2609   A flexible method like `KSPFGMRES` or `KSPGCR` must be used if the fieldsplit preconditioner is nonlinear (e.g. a few iterations of a Krylov method is used to solve with A or S).
2610 
2611   References:
2612 +   * - Murphy, Golub, and Wathen, A note on preconditioning indefinite linear systems, SIAM J. Sci. Comput., 21 (2000).
2613 -   * - Ipsen, A note on preconditioning nonsymmetric matrices, SIAM J. Sci. Comput., 23 (2001).
2614 
2615 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`
2616 @*/
PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscTryMethod() is a no-op when the method is not composed (PC not a Schur-type PCFIELDSPLIT) */
  PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2624 
/* Implementation of PCFieldSplitSetSchurFactType(): store which blocks of the factorization to retain */
static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurfactorization = ftype;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2633 
2634 /*@
2635   PCFieldSplitSetSchurScale -  Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`.
2636 
2637   Collective
2638 
2639   Input Parameters:
2640 + pc    - the preconditioner context
2641 - scale - scaling factor for the Schur complement
2642 
2643   Options Database Key:
2644 . -pc_fieldsplit_schur_scale - default is -1.0
2645 
2646   Level: intermediate
2647 
2648 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurFactType()`
2649 @*/
PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveScalar(pc, scale, 2); /* all ranks must pass the same scale */
  /* PetscTryMethod() is a no-op when the method is not composed (PC not a Schur-type PCFIELDSPLIT) */
  PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2658 
/* Implementation of PCFieldSplitSetSchurScale(): store the scaling applied to S for the DIAG factorization */
static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurscale = scale;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2667 
2668 /*@C
2669   PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement
2670 
2671   Collective
2672 
2673   Input Parameter:
2674 . pc - the preconditioner context
2675 
2676   Output Parameters:
2677 + A00 - the (0,0) block
2678 . A01 - the (0,1) block
2679 . A10 - the (1,0) block
2680 - A11 - the (1,1) block
2681 
2682   Level: advanced
2683 
2684 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()`
2685 @*/
PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach.");
  /* All returned matrices are borrowed references; any output pointer may be NULL if not wanted */
  if (A00) *A00 = jac->pmat[0]; /* preconditioning matrix of the first split */
  if (A01) *A01 = jac->B;
  if (A10) *A10 = jac->C;
  if (A11) *A11 = jac->pmat[1]; /* preconditioning matrix of the second split */
  PetscFunctionReturn(PETSC_SUCCESS);
}
2699 
2700 /*@
2701   PCFieldSplitSetGKBTol -  Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner in `PCFIELDSPLIT`
2702 
2703   Collective
2704 
2705   Input Parameters:
2706 + pc        - the preconditioner context
2707 - tolerance - the solver tolerance
2708 
2709   Options Database Key:
2710 . -pc_fieldsplit_gkb_tol - default is 1e-5
2711 
2712   Level: intermediate
2713 
2714   Note:
2715   The generalized GKB algorithm uses a lower bound estimate of the error in energy norm as stopping criterion.
2716   It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than
  this estimate, the stopping criterion is satisfactory in practical cases [Ar13].
2718 
2719   References:
2720   [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013.
2721 
2722 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()`
2723 @*/
PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, tolerance, 2); /* all ranks must pass the same tolerance */
  /* PetscTryMethod() is a no-op when the method is not composed (PC not a GKB-type PCFIELDSPLIT) */
  PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2732 
/* Implementation of PCFieldSplitSetGKBTol(): store the GKB solver tolerance */
static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbtol = tolerance;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2741 
2742 /*@
2743   PCFieldSplitSetGKBMaxit -  Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner in `PCFIELDSPLIT`
2744 
2745   Collective
2746 
2747   Input Parameters:
2748 + pc    - the preconditioner context
2749 - maxit - the maximum number of iterations
2750 
2751   Options Database Key:
2752 . -pc_fieldsplit_gkb_maxit - default is 100
2753 
2754   Level: intermediate
2755 
2756 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()`
2757 @*/
PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, maxit, 2); /* all ranks must pass the same maxit */
  /* PetscTryMethod() is a no-op when the method is not composed (PC not a GKB-type PCFIELDSPLIT) */
  PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2766 
/* Implementation of PCFieldSplitSetGKBMaxit(): store the GKB iteration limit */
static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbmaxit = maxit;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2775 
2776 /*@
2777   PCFieldSplitSetGKBDelay -  Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization in `PCFIELDSPLIT`
2778   preconditioner.
2779 
2780   Collective
2781 
2782   Input Parameters:
2783 + pc    - the preconditioner context
2784 - delay - the delay window in the lower bound estimate
2785 
2786   Options Database Key:
2787 . -pc_fieldsplit_gkb_delay - default is 5
2788 
2789   Level: intermediate
2790 
2791   Note:
2792   The algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. The lower bound of the error ||u-u^k||_H
2793   is expressed as a truncated sum. The error at iteration k can only be measured at iteration (k + delay), and thus the algorithm needs
2794   at least (delay + 1) iterations to stop. For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to
2795 
2796   References:
2797   [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013.
2798 
2799 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2800 @*/
PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, delay, 2); /* all ranks must pass the same delay */
  /* PetscTryMethod() is a no-op when the method is not composed (PC not a GKB-type PCFIELDSPLIT) */
  PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2809 
/* Implementation of PCFieldSplitSetGKBDelay(): store the delay window of the GKB error estimate */
static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbdelay = delay;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2818 
2819 /*@
2820   PCFieldSplitSetGKBNu -  Sets the scalar value nu >= 0 in the transformation H = A00 + nu*A01*A01' of the (1,1) block in the Golub-Kahan bidiagonalization preconditioner
2821   in `PCFIELDSPLIT`
2822 
2823   Collective
2824 
2825   Input Parameters:
2826 + pc - the preconditioner context
2827 - nu - the shift parameter
2828 
2829   Options Database Key:
2830 . -pc_fieldsplit_gkb_nu - default is 1
2831 
2832   Level: intermediate
2833 
2834   Notes:
2835   This shift is in general done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by choosing nu sufficiently big. However,
2836   if nu is chosen too big, the matrix H might be badly conditioned and the solution of the linear system Hx = b in the inner loop becomes difficult. It is therefore
2837   necessary to find a good balance in between the convergence of the inner and outer loop.
2838 
2839   For nu = 0, no shift is done. In this case A00 has to be positive definite. The matrix N in [Ar13] is then chosen as identity.
2840 
2841   References:
2842   [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013.
2843 
2844 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2845 @*/
PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, nu, 2); /* all ranks must pass the same nu */
  /* PetscTryMethod() is a no-op when the method is not composed (PC not a GKB-type PCFIELDSPLIT) */
  PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2854 
/* Implementation of PCFieldSplitSetGKBNu(): store the shift nu used in H = A00 + nu*A01*A01' */
static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbnu = nu;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2863 
2864 static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type)
2865 {
2866   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2867 
2868   PetscFunctionBegin;
2869   jac->type = type;
2870   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
2871   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
2872   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
2873   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
2874   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
2875   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
2876   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
2877   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
2878   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
2879 
2880   if (type == PC_COMPOSITE_SCHUR) {
2881     pc->ops->apply          = PCApply_FieldSplit_Schur;
2882     pc->ops->applytranspose = PCApplyTranspose_FieldSplit_Schur;
2883     pc->ops->view           = PCView_FieldSplit_Schur;
2884 
2885     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur));
2886     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit));
2887     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit));
2888     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit));
2889     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit));
2890   } else if (type == PC_COMPOSITE_GKB) {
2891     pc->ops->apply = PCApply_FieldSplit_GKB;
2892     pc->ops->view  = PCView_FieldSplit_GKB;
2893 
2894     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
2895     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit));
2896     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit));
2897     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit));
2898     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit));
2899   } else {
2900     pc->ops->apply = PCApply_FieldSplit;
2901     pc->ops->view  = PCView_FieldSplit;
2902 
2903     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
2904   }
2905   PetscFunctionReturn(PETSC_SUCCESS);
2906 }
2907 
/* Implementation of PCFieldSplitSetBlockSize(): record the block size that defines the default fields.
   The block size may be set only once (setting the same value again is allowed). */
static PetscErrorCode PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" PetscInt_FMT, bs);
  PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs);
  jac->bs = bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2918 
2919 static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
2920 {
2921   PC_FieldSplit    *jac           = (PC_FieldSplit *)pc->data;
2922   PC_FieldSplitLink ilink_current = jac->head;
2923   IS                is_owned;
2924 
2925   PetscFunctionBegin;
2926   jac->coordinates_set = PETSC_TRUE; // Internal flag
2927   PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, NULL));
2928 
2929   while (ilink_current) {
2930     // For each IS, embed it to get local coords indces
2931     IS              is_coords;
2932     PetscInt        ndofs_block;
2933     const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block
2934 
2935     // Setting drop to true for safety. It should make no difference.
2936     PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords));
2937     PetscCall(ISGetLocalSize(is_coords, &ndofs_block));
2938     PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration));
2939 
2940     // Allocate coordinates vector and set it directly
2941     PetscCall(PetscMalloc1(ndofs_block * dim, &(ilink_current->coords)));
2942     for (PetscInt dof = 0; dof < ndofs_block; ++dof) {
2943       for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d];
2944     }
2945     ilink_current->dim   = dim;
2946     ilink_current->ndofs = ndofs_block;
2947     PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration));
2948     PetscCall(ISDestroy(&is_coords));
2949     ilink_current = ilink_current->next;
2950   }
2951   PetscCall(ISDestroy(&is_owned));
2952   PetscFunctionReturn(PETSC_SUCCESS);
2953 }
2954 
2955 /*@
2956   PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
2957 
2958   Collective
2959 
2960   Input Parameters:
2961 + pc   - the preconditioner context
2962 - type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
2963 
2964   Options Database Key:
2965 . -pc_fieldsplit_type <type: one of multiplicative, additive, symmetric_multiplicative, special, schur> - Sets fieldsplit preconditioner type
2966 
2967   Level: intermediate
2968 
2969 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
2970           `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
2971 @*/
PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscTryMethod() is a no-op when the PC is not a PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2979 
2980 /*@
2981   PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
2982 
2983   Not collective
2984 
2985   Input Parameter:
2986 . pc - the preconditioner context
2987 
2988   Output Parameter:
2989 . type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
2990 
2991   Level: intermediate
2992 
2993 .seealso: [](sec_block_matrices), `PC`, `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
2994           `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
2995 @*/
PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type)
{
  /* NOTE(review): pc->data is read before the header check and without verifying the PC is a
     PCFIELDSPLIT, matching the convention used elsewhere in this file — confirm callers only
     invoke this on a PCFIELDSPLIT */
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(type, 2);
  *type = jac->type;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3006 
3007 /*@
3008   PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3009 
3010   Logically Collective
3011 
3012   Input Parameters:
3013 + pc  - the preconditioner context
3014 - flg - boolean indicating whether to use field splits defined by the `DM`
3015 
3016   Options Database Key:
3017 . -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM`
3018 
3019   Level: intermediate
3020 
.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
3022 @*/
3023 PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg)
3024 {
3025   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3026   PetscBool      isfs;
3027 
3028   PetscFunctionBegin;
3029   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3030   PetscValidLogicalCollectiveBool(pc, flg, 2);
3031   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3032   if (isfs) jac->dm_splits = flg;
3033   PetscFunctionReturn(PETSC_SUCCESS);
3034 }
3035 
3036 /*@
3037   PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3038 
3039   Logically Collective
3040 
3041   Input Parameter:
3042 . pc - the preconditioner context
3043 
3044   Output Parameter:
3045 . flg - boolean indicating whether to use field splits defined by the `DM`
3046 
3047   Level: intermediate
3048 
.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
3050 @*/
3051 PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg)
3052 {
3053   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3054   PetscBool      isfs;
3055 
3056   PetscFunctionBegin;
3057   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3058   PetscAssertPointer(flg, 2);
3059   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3060   if (isfs) {
3061     if (flg) *flg = jac->dm_splits;
3062   }
3063   PetscFunctionReturn(PETSC_SUCCESS);
3064 }
3065 
3066 /*@
3067   PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3068 
3069   Logically Collective
3070 
3071   Input Parameter:
3072 . pc - the preconditioner context
3073 
3074   Output Parameter:
3075 . flg - boolean indicating whether to detect fields or not
3076 
3077   Level: intermediate
3078 
3079 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()`
3080 @*/
3081 PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg)
3082 {
3083   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3084 
3085   PetscFunctionBegin;
3086   *flg = jac->detect;
3087   PetscFunctionReturn(PETSC_SUCCESS);
3088 }
3089 
3090 /*@
3091   PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3092 
3093   Logically Collective
3094 
3095   Input Parameter:
3096 . pc - the preconditioner context
3097 
3098   Output Parameter:
3099 . flg - boolean indicating whether to detect fields or not
3100 
3101   Options Database Key:
3102 . -pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point
3103 
3104   Level: intermediate
3105 
3106   Note:
3107   Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`).
3108 
3109 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`
3110 @*/
3111 PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg)
3112 {
3113   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3114 
3115   PetscFunctionBegin;
3116   jac->detect = flg;
3117   if (jac->detect) {
3118     PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
3119     PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
3120   }
3121   PetscFunctionReturn(PETSC_SUCCESS);
3122 }
3123 
3124 /*MC
3125    PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual
3126    collections of variables (that may overlap) called splits. See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details.
3127 
3128    Options Database Keys:
3129 +   -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split
3130 .   -pc_fieldsplit_default - automatically add any fields to additional splits that have not
3131                               been supplied explicitly by `-pc_fieldsplit_%d_fields`
3132 .   -pc_fieldsplit_block_size <bs> - size of block that defines fields (i.e. there are bs fields)
3133 .   -pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting
3134 .   -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`; see `PCFieldSplitSetSchurPre()`
3135 .   -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - set factorization type when using `-pc_fieldsplit_type schur`; see `PCFieldSplitSetSchurFactType()`
3136 -   -pc_fieldsplit_detect_saddle_point - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver
3137 
3138      Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` .
     The options prefix for the inner solver when using the Golub-Kahan bidiagonalization preconditioner is `-fieldsplit_0_`
3140      For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields.
3141 
3142      To set options on the solvers for each block append `-fieldsplit_` to all the `PC`
3143      options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1`
3144 
3145      To set the options on the solvers separate for each block call `PCFieldSplitGetSubKSP()`
3146       and set the options directly on the resulting `KSP` object
3147 
3148     Level: intermediate
3149 
3150    Notes:
3151     Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries and `PCFieldSplitSetIS()`
3152      to define a split by an arbitrary collection of entries.
3153 
3154       If no splits are set the default is used. The splits are defined by entries strided by bs,
3155       beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`,
3156       if this is not called the block size defaults to the blocksize of the second matrix passed
3157       to `KSPSetOperators()`/`PCSetOperators()`.
3158 
3159       For the Schur complement preconditioner if
3160 
3161       ```{math}
3162       J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right]
3163       ```
3164 
3165       the preconditioner using `full` factorization is logically
3166       ```{math}
3167       \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right]
3168       ```
3169      where the action of $\text{inv}(A_{00})$ is applied using the KSP solver with prefix `-fieldsplit_0_`.  $S$ is the Schur complement
3170      ```{math}
3171      S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01}
3172      ```
3173      which is usually dense and not stored explicitly.  The action of $\text{ksp}(S)$ is computed using the KSP solver with prefix `-fieldsplit_splitname_` (where `splitname` was given
3174      in providing the SECOND split or 1 if not given). For `PCFieldSplitGetSubKSP()` when field number is 0,
3175      it returns the KSP associated with `-fieldsplit_0_` while field number 1 gives `-fieldsplit_1_` KSP. By default
3176      $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$.
3177 
3178      The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above,
3179      `diag` gives
3180       ```{math}
3181       \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\  0 & -\text{ksp}(S) \end{array}\right]
3182       ```
3183      Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$  so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip
3184      can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of
3185       ```{math}
3186       \left[\begin{array}{cc} A_{00} & 0 \\  A_{10} & S \end{array}\right]
3187       ```
3188      where the inverses of A_{00} and S are applied using KSPs. The upper factorization is the inverse of
3189       ```{math}
3190       \left[\begin{array}{cc} A_{00} & A_{01} \\  0 & S \end{array}\right]
3191       ```
3192      where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s.
3193 
3194      If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS`
3195      is used automatically for a second block.
3196 
3197      The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1.
3198      Generally it should be used with the `MATAIJ` format.
3199 
3200      The forms of these preconditioners are closely related if not identical to forms derived as "Distributive Iterations", see,
3201      for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`wesseling2009`.
3202      One can also use `PCFIELDSPLIT`
3203      inside a smoother resulting in "Distributive Smoothers".
3204 
3205      See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`.
3206 
3207      The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the
3208      residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables.
3209 
3210      The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape
3211      ```{math}
3212      \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right]
3213      ```
     with $A_{00}$ positive semi-definite. The implementation follows {cite}`arioli2013`. Therein, we choose $N := 1/\nu * I$ and the $(1,1)$-block of the matrix is modified to $H = A_{00} + \nu A_{01} A_{01}'$.
3215      A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`.
3216 
3217    Developer Note:
3218    The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their
3219    user API.
3220 
3221 .seealso: [](sec_block_matrices), `PC`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`,
3222           `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`,
3223           `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`,
3224           `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()`
3225 M*/
3226 
3227 PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc)
3228 {
3229   PC_FieldSplit *jac;
3230 
3231   PetscFunctionBegin;
3232   PetscCall(PetscNew(&jac));
3233 
3234   jac->bs                 = -1;
3235   jac->nsplits            = 0;
3236   jac->type               = PC_COMPOSITE_MULTIPLICATIVE;
3237   jac->schurpre           = PC_FIELDSPLIT_SCHUR_PRE_USER; /* Try user preconditioner first, fall back on diagonal */
3238   jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL;
3239   jac->schurscale         = -1.0;
3240   jac->dm_splits          = PETSC_TRUE;
3241   jac->detect             = PETSC_FALSE;
3242   jac->gkbtol             = 1e-5;
3243   jac->gkbdelay           = 5;
3244   jac->gkbnu              = 1;
3245   jac->gkbmaxit           = 100;
3246   jac->gkbmonitor         = PETSC_FALSE;
3247   jac->coordinates_set    = PETSC_FALSE;
3248 
3249   pc->data = (void *)jac;
3250 
3251   pc->ops->apply           = PCApply_FieldSplit;
3252   pc->ops->applytranspose  = PCApplyTranspose_FieldSplit;
3253   pc->ops->setup           = PCSetUp_FieldSplit;
3254   pc->ops->reset           = PCReset_FieldSplit;
3255   pc->ops->destroy         = PCDestroy_FieldSplit;
3256   pc->ops->setfromoptions  = PCSetFromOptions_FieldSplit;
3257   pc->ops->view            = PCView_FieldSplit;
3258   pc->ops->applyrichardson = NULL;
3259 
3260   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit));
3261   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
3262   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit));
3263   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit));
3264   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit));
3265   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit));
3266   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit));
3267   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit));
3268   PetscFunctionReturn(PETSC_SUCCESS);
3269 }
3270