xref: /petsc/src/ksp/pc/impls/fieldsplit/fieldsplit.c (revision 8ea0bac9d10baee98247a9f5030ae4007889014c)
1 #include <petsc/private/pcimpl.h>  /*I "petscpc.h" I*/
2 #include <petsc/private/kspimpl.h> /*  This is needed to provide the appropriate PETSC_EXTERN for KSP_Solve_FS ....*/
3 #include <petscdm.h>
4 
/* String names for the Schur preconditioning-matrix and factorization-type enums.
   The trailing three entries (enum type name, option prefix, NULL) follow the PETSc
   convention for enum string arrays used with the options database. */
const char *const PCFieldSplitSchurPreTypes[]  = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL};
const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL};

/* Logging events for the inner solves performed by the fieldsplit preconditioner */
PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4;
9 
/* One node in the doubly linked list of splits; each split carries its own solver,
   index sets, scatter context, and work vectors */
typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
struct _PC_FieldSplitLink {
  KSP               ksp;                  /* solver associated with this split's block */
  Vec               x, y, z;              /* per-split work vectors (exact roles not visible in this chunk) */
  char             *splitname;            /* name of the split, e.g. as given to PCFieldSplitSetFields()/SetIS() */
  PetscInt          nfields;              /* number of entries in fields[] and fields_col[] */
  PetscInt         *fields, *fields_col;  /* row and column field indices within a block of size bs */
  VecScatter        sctx;                 /* scatter context; presumably maps between the full vector and this split — confirm in setup/apply code */
  IS                is, is_col;           /* row and column index sets defining the split */
  PC_FieldSplitLink next, previous;       /* linked-list neighbors */
  PetscLogEvent     event;                /* per-split logging event */

  /* Used only when setting coordinates with PCSetCoordinates */
  PetscInt   dim;
  PetscInt   ndofs;
  PetscReal *coords;
};
27 
/* Private data for PCFIELDSPLIT */
typedef struct {
  PCCompositeType type;
  PetscBool       defaultsplit; /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */
  PetscBool       splitdefined; /* Flag is set after the splits have been defined, to prevent more splits from being added */
  PetscInt        bs;           /* Block size for IS and Mat structures */
  PetscInt        nsplits;      /* Number of field divisions defined */
  Vec            *x, *y, w1, w2; /* Work vectors: per-split arrays x, y and whole-vector temporaries w1, w2 (usage not visible in this chunk) */
  Mat            *mat;    /* The diagonal block for each split */
  Mat            *pmat;   /* The preconditioning diagonal block for each split */
  Mat            *Afield; /* The rows of the matrix associated with each split */
  PetscBool       issetup; /* Set once PCSetUp_FieldSplit() has created the index sets/submatrices */

  /* Only used when Schur complement preconditioning is used */
  Mat                       B;          /* The (0,1) block */
  Mat                       C;          /* The (1,0) block */
  Mat                       schur;      /* The Schur complement S = A11 - A10 A00^{-1} A01, the KSP here, kspinner, is H_1 in [El08] */
  Mat                       schurp;     /* Assembled approximation to S built by MatSchurComplement to be used as a preconditioning matrix when solving with S */
  Mat                       schur_user; /* User-provided preconditioning matrix for the Schur complement */
  PCFieldSplitSchurPreType  schurpre;   /* Determines which preconditioning matrix is used for the Schur complement */
  PCFieldSplitSchurFactType schurfactorization; /* DIAG, LOWER, UPPER, or FULL block factorization */
  KSP                       kspschur;   /* The solver for S */
  KSP                       kspupper;   /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */
  PetscScalar               schurscale; /* Scaling factor for the Schur complement solution with DIAG factorization */

  /* Only used when Golub-Kahan bidiagonalization preconditioning is used */
  Mat          H;           /* The modified matrix H = A00 + nu*A01*A01'              */
  PetscReal    gkbtol;      /* Stopping tolerance for lower bound estimate            */
  PetscInt     gkbdelay;    /* The delay window for the stopping criterion            */
  PetscReal    gkbnu;       /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */
  PetscInt     gkbmaxit;    /* Maximum number of iterations for outer loop            */
  PetscBool    gkbmonitor;  /* Monitor for gkb iterations and the lower bound error   */
  PetscViewer  gkbviewer;   /* Viewer context for gkbmonitor                          */
  Vec          u, v, d, Hu; /* Work vectors for the GKB algorithm                     */
  PetscScalar *vecz;        /* Contains intermediate values, eg for lower bound       */

  PC_FieldSplitLink head;              /* Head of the linked list of splits */
  PetscBool         isrestrict;       /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */
  PetscBool         suboptionsset;    /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */
  PetscBool         dm_splits;        /* Whether to use DMCreateFieldDecomposition() whenever possible */
  PetscBool         diag_use_amat;    /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         detect;           /* Whether to form 2-way split by finding zero diagonal entries */
  PetscBool         coordinates_set;  /* Whether PCSetCoordinates has been called */
} PC_FieldSplit;
72 
/*
   Note:
   There is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit rather than
   inside each PC_FieldSplitLink; this is just historical. If you want to be able to add new fields after
   the PC has already been used, you could change this.
*/
79 
80 /* This helper is so that setting a user-provided preconditioning matrix is orthogonal to choosing to use it.  This way the
81 * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices. */
82 static Mat FieldSplitSchurPre(PC_FieldSplit *jac)
83 {
84   switch (jac->schurpre) {
85   case PC_FIELDSPLIT_SCHUR_PRE_SELF:
86     return jac->schur;
87   case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
88     return jac->schurp;
89   case PC_FIELDSPLIT_SCHUR_PRE_A11:
90     return jac->pmat[1];
91   case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */
92   case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */
93   default:
94     return jac->schur_user ? jac->schur_user : jac->pmat[1];
95   }
96 }
97 
#include <petscdraw.h>
/* Viewer for the additive/multiplicative (non-Schur, non-GKB) fieldsplit PC:
   prints the composition type, each split's field definition, and each split's KSP */
static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for each split is in the following KSP objects:\n"));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        /* Split was defined by an explicit field list: print it comma-separated on one line
           (tabs disabled so the entries are not re-indented) */
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      PetscCall(KSPView(ilink->ksp, viewer));
      ilink = ilink->next;
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    /* Lay the split KSPs out horizontally, centered on the current draw point */
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
156 
/* Viewer for the Schur-complement variant of the fieldsplit PC: reports the factorization
   type, how the preconditioner for the Schur complement is formed, the split definitions,
   and the KSPs for A00, the (optional separate) upper-factor A00 solve, and S */
static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer)
{
  PC_FieldSplit             *jac = (PC_FieldSplit *)pc->data;
  PetscBool                  iascii, isdraw;
  PetscInt                   i, j;
  PC_FieldSplitLink          ilink = jac->head;
  MatSchurComplementAinvType atype;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    /* Describe which matrix is used to build the preconditioner for the Schur complement */
    switch (jac->schurpre) {
    case PC_FIELDSPLIT_SCHUR_PRE_SELF:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from S itself\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
      if (jac->schur) {
        PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype));
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's "))));
      }
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_A11:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_FULL:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from the exact Schur complement\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_USER:
      /* USER falls back to A11 when no user matrix was actually supplied */
      if (jac->schur_user) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from user provided matrix\n"));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      }
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre);
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Split info:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        /* Tabs disabled so the comma-separated field list stays on one line */
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      ilink = ilink->next;
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->head) {
      PetscCall(KSPView(jac->head->ksp, viewer));
    } else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    PetscCall(PetscViewerASCIIPopTab(viewer));
    /* A separate upper-factor A00 solver is shown only when it differs from the main A00 solver */
    if (jac->head && jac->kspupper != jac->head->ksp) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor \n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01 \n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->kspschur) {
      PetscCall(KSPView(jac->kspschur, viewer));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    }
    PetscCall(PetscViewerASCIIPopTab(viewer));
    PetscCall(PetscViewerASCIIPopTab(viewer));
  } else if (isdraw && jac->head) {
    PetscDraw draw;
    PetscReal x, y, w, wd, h;
    PetscInt  cnt = 2; /* number of KSP boxes to draw: A00 and S, plus the upper A00 if distinct */
    char      str[32];

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (jac->kspupper != jac->head->ksp) cnt++;
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (cnt + 1);

    /* Two header boxes (factorization type and Schur preconditioner source), then the KSPs below */
    PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) {
      /* Report the effective fallback (A11) when no user matrix was supplied */
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11]));
    } else {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre]));
    }
    PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    x = x - wd * (cnt - 1) / 2.0;

    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->head->ksp, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
    if (jac->kspupper != jac->head->ksp) {
      x += wd;
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(jac->kspupper, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
    }
    x += wd;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->kspschur, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
280 
/* Viewer for the Golub-Kahan bidiagonalization variant of the fieldsplit PC:
   prints the GKB parameters and the solver for H = A00 + nu*A01*A01' (the first split) */
static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));

    PetscCall(PetscViewerASCIIPrintf(viewer, "  Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for H = A00 + nu*A01*A01' matrix:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));

    /* Only the first split (the H solver) is detailed in ASCII output */
    if (ilink->fields) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields "));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      for (j = 0; j < ilink->nfields; j++) {
        if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
        PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
      }
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n"));
    }
    PetscCall(KSPView(ilink->ksp, viewer));

    PetscCall(PetscViewerASCIIPopTab(viewer));
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    /* Lay the split KSPs out horizontally, centered on the current draw point */
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
341 
342 /* Precondition: jac->bs is set to a meaningful value */
343 static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc)
344 {
345   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
346   PetscInt       i, nfields, *ifields, nfields_col, *ifields_col;
347   PetscBool      flg, flg_col;
348   char           optionname[128], splitname[8], optionname_col[128];
349 
350   PetscFunctionBegin;
351   PetscCall(PetscMalloc1(jac->bs, &ifields));
352   PetscCall(PetscMalloc1(jac->bs, &ifields_col));
353   for (i = 0, flg = PETSC_TRUE;; i++) {
354     PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
355     PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
356     PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i));
357     nfields     = jac->bs;
358     nfields_col = jac->bs;
359     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
360     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col));
361     if (!flg) break;
362     else if (flg && !flg_col) {
363       PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
364       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields));
365     } else {
366       PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
367       PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match");
368       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col));
369     }
370   }
371   if (i > 0) {
372     /* Makes command-line setting of splits take precedence over setting them in code.
373        Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would
374        create new splits, which would probably not be what the user wanted. */
375     jac->splitdefined = PETSC_TRUE;
376   }
377   PetscCall(PetscFree(ifields));
378   PetscCall(PetscFree(ifields_col));
379   PetscFunctionReturn(PETSC_SUCCESS);
380 }
381 
/* Ensure at least two splits are defined before setup, deriving them from (in priority order)
   the attached DM's field decomposition, zero-diagonal detection, off-block-diagonal coupling
   detection, runtime options, a MATNEST structure, or one split per block-size component.
   If exactly one split was given, its complement becomes the second split. */
static PetscErrorCode PCFieldSplitSetDefaults(PC pc)
{
  PC_FieldSplit    *jac                = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink              = jac->head;
  PetscBool         fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE;
  PetscInt          i;

  PetscFunctionBegin;
  /*
   Kinda messy, but at least this now uses DMCreateFieldDecomposition().
   Should probably be rewritten.
   */
  if (!ilink) {
    /* No splits defined yet: derive them */
    PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL));
    if (pc->dm && jac->dm_splits && !jac->detect && !coupling) {
      /* Use the DM's field decomposition, possibly regrouped via -pc_fieldsplit_<i>_fields */
      PetscInt  numFields, f, i, j;
      char    **fieldNames;
      IS       *fields;
      DM       *dms;
      DM        subdm[128];
      PetscBool flg;

      PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms));
      /* Allow the user to prescribe the splits */
      for (i = 0, flg = PETSC_TRUE;; i++) {
        PetscInt ifields[128];
        IS       compField;
        char     optionname[128], splitname[8];
        PetscInt nfields = numFields;

        PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
        PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
        if (!flg) break;
        PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields);
        PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i]));
        if (nfields == 1) {
          /* A single-field split keeps its DM field name; a multi-field split is named by its index */
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField));
        } else {
          PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
          PetscCall(PCFieldSplitSetIS(pc, splitname, compField));
        }
        PetscCall(ISDestroy(&compField));
        /* Release the decomposition entries consumed by this split */
        for (j = 0; j < nfields; ++j) {
          f = ifields[j];
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      }
      if (i == 0) {
        /* No options-prescribed splits: one split per DM field */
        for (f = 0; f < numFields; ++f) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f]));
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      } else {
        /* Replace the per-field DMs with the sub-DMs created above */
        for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j));
        PetscCall(PetscFree(dms));
        PetscCall(PetscMalloc1(i, &dms));
        for (j = 0; j < i; ++j) dms[j] = subdm[j];
      }
      PetscCall(PetscFree(fieldNames));
      PetscCall(PetscFree(fields));
      if (dms) {
        PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n"));
        /* Attach each split's DM to its KSP (inactive, operators inherited from pc->dm) */
        for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) {
          const char *prefix;
          PetscCall(PetscObjectGetOptionsPrefix((PetscObject)(ilink->ksp), &prefix));
          PetscCall(PetscObjectSetOptionsPrefix((PetscObject)(dms[i]), prefix));
          PetscCall(KSPSetDM(ilink->ksp, dms[i]));
          PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE));
          {
            PetscErrorCode (*func)(KSP, Mat, Mat, void *);
            void *ctx;

            /* Propagate the outer DM's ComputeOperators callback to the sub-DM */
            PetscCall(DMKSPGetComputeOperators(pc->dm, &func, &ctx));
            PetscCall(DMKSPSetComputeOperators(dms[i], func, ctx));
          }
          PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0));
          PetscCall(DMDestroy(&dms[i]));
        }
        PetscCall(PetscFree(dms));
      }
    } else {
      /* No (usable) DM: fall back to block size, detection, or options-based splits */
      if (jac->bs <= 0) {
        if (pc->pmat) {
          PetscCall(MatGetBlockSize(pc->pmat, &jac->bs));
        } else jac->bs = 1;
      }

      if (jac->detect) {
        /* 2-way split: rows with zero diagonal entries vs. the rest */
        IS       zerodiags, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->diag_use_amat) {
          PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags));
        } else {
          PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags));
        }
        PetscCall(ISComplement(zerodiags, nmin, nmax, &rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags));
        PetscCall(ISDestroy(&zerodiags));
        PetscCall(ISDestroy(&rest));
      } else if (coupling) {
        /* 2-way split: rows with off-block-diagonal entries vs. all rows */
        IS       coupling, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->offdiag_use_amat) {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling));
        } else {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling));
        }
        PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest));
        PetscCall(ISSetIdentity(rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", coupling));
        PetscCall(ISDestroy(&coupling));
        PetscCall(ISDestroy(&rest));
      } else {
        PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL));
        if (!fieldsplit_default) {
          /* Allow user to set fields from command line,  if bs was known at the time of PCSetFromOptions_FieldSplit()
           then it is set there. This is not ideal because we should only have options set in XXSetFromOptions(). */
          PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
          if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
        }
        if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) {
          Mat       M = pc->pmat;
          PetscBool isnest;

          PetscCall(PetscInfo(pc, "Using default splitting of fields\n"));
          /* Prefer MATNEST structure (from pmat, then mat) to define the splits */
          PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest));
          if (!isnest) {
            M = pc->mat;
            PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest));
          }
          if (isnest) {
            IS      *fields;
            PetscInt nf;

            PetscCall(MatNestGetSize(M, &nf, NULL));
            PetscCall(PetscMalloc1(nf, &fields));
            PetscCall(MatNestGetISs(M, fields, NULL));
            for (i = 0; i < nf; i++) PetscCall(PCFieldSplitSetIS(pc, NULL, fields[i]));
            PetscCall(PetscFree(fields));
          } else {
            /* One split per scalar field of the block: bs splits of one field each */
            for (i = 0; i < jac->bs; i++) {
              char splitname[8];
              PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
              PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i));
            }
            jac->defaultsplit = PETSC_TRUE;
          }
        }
      }
    }
  } else if (jac->nsplits == 1) {
    /* Exactly one split was supplied: define the second split as its complement */
    IS       is2;
    PetscInt nmin, nmax;

    PetscCheck(ilink->is, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()");
    PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
    PetscCall(ISComplement(ilink->is, nmin, nmax, &is2));
    PetscCall(PCFieldSplitSetIS(pc, "1", is2));
    PetscCall(ISDestroy(&is2));
  }

  PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits);
  PetscFunctionReturn(PETSC_SUCCESS);
}
554 
555 static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu)
556 {
557   Mat       BT, T;
558   PetscReal nrmT, nrmB;
559 
560   PetscFunctionBegin;
561   PetscCall(MatHermitianTranspose(C, MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */
562   PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN));
563   PetscCall(MatNorm(T, NORM_1, &nrmT));
564   PetscCall(MatNorm(B, NORM_1, &nrmB));
565   PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/hermitian, GKB is not applicable.");
566 
567   /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */
568   /* setting N := 1/nu*I in [Ar13].                                                 */
569   PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT));
570   PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_DEFAULT, H)); /* H = A01*A01'          */
571   PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN));        /* H = A00 + nu*A01*A01' */
572 
573   PetscCall(MatDestroy(&BT));
574   PetscCall(MatDestroy(&T));
575   PetscFunctionReturn(PETSC_SUCCESS);
576 }
577 
578 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *value[], PetscBool *flg);
579 
580 static PetscErrorCode PCSetUp_FieldSplit(PC pc)
581 {
582   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
583   PC_FieldSplitLink ilink;
584   PetscInt          i, nsplit;
585   PetscBool         sorted, sorted_col;
586 
587   PetscFunctionBegin;
588   pc->failedreason = PC_NOERROR;
589   PetscCall(PCFieldSplitSetDefaults(pc));
590   nsplit = jac->nsplits;
591   ilink  = jac->head;
592 
593   /* get the matrices for each split */
594   if (!jac->issetup) {
595     PetscInt rstart, rend, nslots, bs;
596 
597     jac->issetup = PETSC_TRUE;
598 
599     /* This is done here instead of in PCFieldSplitSetFields() because may not have matrix at that point */
600     if (jac->defaultsplit || !ilink->is) {
601       if (jac->bs <= 0) jac->bs = nsplit;
602     }
603 
604     /*  MatCreateSubMatrix() for [S]BAIJ matrices can only work if the indices include entire blocks of the matrix */
605     PetscCall(MatGetBlockSize(pc->pmat, &bs));
606     if (bs > 1 && (jac->bs <= bs || jac->bs % bs)) {
607       PetscBool blk;
608 
609       PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &blk, MATBAIJ, MATSBAIJ, MATSEQBAIJ, MATSEQSBAIJ, MATMPIBAIJ, MATMPISBAIJ, NULL));
610       PetscCheck(!blk, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Cannot use MATBAIJ with PCFIELDSPLIT and currently set matrix and PC blocksizes");
611     }
612 
613     bs = jac->bs;
614     PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend));
615     nslots = (rend - rstart) / bs;
616     for (i = 0; i < nsplit; i++) {
617       if (jac->defaultsplit) {
618         PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is));
619         PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
620       } else if (!ilink->is) {
621         if (ilink->nfields > 1) {
622           PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col;
623           PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii));
624           PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj));
625           for (j = 0; j < nslots; j++) {
626             for (k = 0; k < nfields; k++) {
627               ii[nfields * j + k] = rstart + bs * j + fields[k];
628               jj[nfields * j + k] = rstart + bs * j + fields_col[k];
629             }
630           }
631           PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is));
632           PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col));
633           PetscCall(ISSetBlockSize(ilink->is, nfields));
634           PetscCall(ISSetBlockSize(ilink->is_col, nfields));
635         } else {
636           PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is));
637           PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col));
638         }
639       }
640       PetscCall(ISSorted(ilink->is, &sorted));
641       if (ilink->is_col) PetscCall(ISSorted(ilink->is_col, &sorted_col));
642       PetscCheck(sorted && sorted_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Fields must be sorted when creating split");
643       ilink = ilink->next;
644     }
645   }
646 
647   ilink = jac->head;
648   if (!jac->pmat) {
649     Vec xtmp;
650 
651     PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL));
652     PetscCall(PetscMalloc1(nsplit, &jac->pmat));
653     PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y));
654     for (i = 0; i < nsplit; i++) {
655       MatNullSpace sp;
656 
657       /* Check for preconditioning matrix attached to IS */
658       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i]));
659       if (jac->pmat[i]) {
660         PetscCall(PetscObjectReference((PetscObject)jac->pmat[i]));
661         if (jac->type == PC_COMPOSITE_SCHUR) {
662           jac->schur_user = jac->pmat[i];
663 
664           PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
665         }
666       } else {
667         const char *prefix;
668         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i]));
669         PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix));
670         PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix));
671         PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view"));
672       }
673       /* create work vectors for each split */
674       PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i]));
675       ilink->x = jac->x[i];
676       ilink->y = jac->y[i];
677       ilink->z = NULL;
678       /* compute scatter contexts needed by multiplicative versions and non-default splits */
679       PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx));
680       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp));
681       if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp));
682       ilink = ilink->next;
683     }
684     PetscCall(VecDestroy(&xtmp));
685   } else {
686     MatReuse scall;
687     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
688       for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i]));
689       scall = MAT_INITIAL_MATRIX;
690     } else scall = MAT_REUSE_MATRIX;
691 
692     for (i = 0; i < nsplit; i++) {
693       Mat pmat;
694 
695       /* Check for preconditioning matrix attached to IS */
696       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat));
697       if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i]));
698       ilink = ilink->next;
699     }
700   }
701   if (jac->diag_use_amat) {
702     ilink = jac->head;
703     if (!jac->mat) {
704       PetscCall(PetscMalloc1(nsplit, &jac->mat));
705       for (i = 0; i < nsplit; i++) {
706         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i]));
707         ilink = ilink->next;
708       }
709     } else {
710       MatReuse scall;
711       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
712         for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i]));
713         scall = MAT_INITIAL_MATRIX;
714       } else scall = MAT_REUSE_MATRIX;
715 
716       for (i = 0; i < nsplit; i++) {
717         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i]));
718         ilink = ilink->next;
719       }
720     }
721   } else {
722     jac->mat = jac->pmat;
723   }
724 
725   /* Check for null space attached to IS */
726   ilink = jac->head;
727   for (i = 0; i < nsplit; i++) {
728     MatNullSpace sp;
729 
730     PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp));
731     if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp));
732     ilink = ilink->next;
733   }
734 
735   if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) {
736     /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */
737     /* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? */
738     ilink = jac->head;
739     if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) {
740       /* special case need where Afield[0] is not needed and only certain columns of Afield[1] are needed since update is only on those rows of the solution */
741       if (!jac->Afield) {
742         PetscCall(PetscCalloc1(nsplit, &jac->Afield));
743         if (jac->offdiag_use_amat) {
744           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
745         } else {
746           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
747         }
748       } else {
749         MatReuse scall;
750 
751         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
752           PetscCall(MatDestroy(&jac->Afield[1]));
753           scall = MAT_INITIAL_MATRIX;
754         } else scall = MAT_REUSE_MATRIX;
755 
756         if (jac->offdiag_use_amat) {
757           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
758         } else {
759           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
760         }
761       }
762     } else {
763       if (!jac->Afield) {
764         PetscCall(PetscMalloc1(nsplit, &jac->Afield));
765         for (i = 0; i < nsplit; i++) {
766           if (jac->offdiag_use_amat) {
767             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
768           } else {
769             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
770           }
771           ilink = ilink->next;
772         }
773       } else {
774         MatReuse scall;
775         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
776           for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i]));
777           scall = MAT_INITIAL_MATRIX;
778         } else scall = MAT_REUSE_MATRIX;
779 
780         for (i = 0; i < nsplit; i++) {
781           if (jac->offdiag_use_amat) {
782             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i]));
783           } else {
784             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, &jac->Afield[i]));
785           }
786           ilink = ilink->next;
787         }
788       }
789     }
790   }
791 
792   if (jac->type == PC_COMPOSITE_SCHUR) {
793     IS          ccis;
794     PetscBool   isset, isspd;
795     PetscInt    rstart, rend;
796     char        lscname[256];
797     PetscObject LSC_L;
798 
799     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields");
800 
801     /* If pc->mat is SPD, don't scale by -1 the Schur complement */
802     if (jac->schurscale == (PetscScalar)-1.0) {
803       PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd));
804       jac->schurscale = (isset && isspd) ? 1.0 : -1.0;
805     }
806 
807     /* When extracting off-diagonal submatrices, we take complements from this range */
808     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
809 
810     if (jac->schur) {
811       KSP      kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper;
812       MatReuse scall;
813 
814       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
815         scall = MAT_INITIAL_MATRIX;
816         PetscCall(MatDestroy(&jac->B));
817         PetscCall(MatDestroy(&jac->C));
818       } else scall = MAT_REUSE_MATRIX;
819 
820       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
821       ilink = jac->head;
822       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
823       if (jac->offdiag_use_amat) {
824         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->B));
825       } else {
826         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->B));
827       }
828       PetscCall(ISDestroy(&ccis));
829       ilink = ilink->next;
830       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
831       if (jac->offdiag_use_amat) {
832         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->C));
833       } else {
834         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->C));
835       }
836       PetscCall(ISDestroy(&ccis));
837       PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
838       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) {
839         PetscCall(MatDestroy(&jac->schurp));
840         PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
841       }
842       if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0]));
843       if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0]));
844       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
845     } else {
846       const char  *Dprefix;
847       char         schurprefix[256], schurmatprefix[256];
848       char         schurtestoption[256];
849       MatNullSpace sp;
850       PetscBool    flg;
851       KSP          kspt;
852 
853       /* extract the A01 and A10 matrices */
854       ilink = jac->head;
855       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
856       if (jac->offdiag_use_amat) {
857         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
858       } else {
859         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
860       }
861       PetscCall(ISDestroy(&ccis));
862       ilink = ilink->next;
863       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
864       if (jac->offdiag_use_amat) {
865         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
866       } else {
867         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
868       }
869       PetscCall(ISDestroy(&ccis));
870 
871       /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */
872       PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur));
873       PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT));
874       PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
875       PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
876       PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix));
877       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt));
878       PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix));
879 
880       /* Note: this is not true in general */
881       PetscCall(MatGetNullSpace(jac->mat[1], &sp));
882       if (sp) PetscCall(MatSetNullSpace(jac->schur, sp));
883 
884       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname));
885       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, &flg));
886       if (flg) {
887         DM  dmInner;
888         KSP kspInner;
889         PC  pcInner;
890 
891         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
892         PetscCall(KSPReset(kspInner));
893         PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0]));
894         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
895         /* Indent this deeper to emphasize the "inner" nature of this solver. */
896         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2));
897         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2));
898         PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix));
899 
900         /* Set DM for new solver */
901         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
902         PetscCall(KSPSetDM(kspInner, dmInner));
903         PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE));
904 
905         /* Defaults to PCKSP as preconditioner */
906         PetscCall(KSPGetPC(kspInner, &pcInner));
907         PetscCall(PCSetType(pcInner, PCKSP));
908         PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp));
909       } else {
910         /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or
911           * PCFieldSplitSetIS_FieldSplit. We don't want KSPPREONLY because it makes the Schur complement inexact,
912           * preventing Schur complement reduction to be an accurate solve. Usually when an iterative solver is used for
913           * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make
914           * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used
915           * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. */
916         PetscCall(KSPSetType(jac->head->ksp, KSPGMRES));
917         PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp));
918       }
919       PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0]));
920       PetscCall(KSPSetFromOptions(jac->head->ksp));
921       PetscCall(MatSetFromOptions(jac->schur));
922 
923       PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg));
924       if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */
925         KSP kspInner;
926         PC  pcInner;
927 
928         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
929         PetscCall(KSPGetPC(kspInner, &pcInner));
930         PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg));
931         if (flg) {
932           KSP ksp;
933 
934           PetscCall(PCKSPGetKSP(pcInner, &ksp));
935           if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE));
936         }
937       }
938       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname));
939       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, &flg));
940       if (flg) {
941         DM dmInner;
942 
943         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
944         PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper));
945         PetscCall(KSPSetNestLevel(jac->kspupper, pc->kspnestlevel));
946         PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure));
947         PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix));
948         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1));
949         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1));
950         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
951         PetscCall(KSPSetDM(jac->kspupper, dmInner));
952         PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE));
953         PetscCall(KSPSetFromOptions(jac->kspupper));
954         PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0]));
955         PetscCall(VecDuplicate(jac->head->x, &jac->head->z));
956       } else {
957         jac->kspupper = jac->head->ksp;
958         PetscCall(PetscObjectReference((PetscObject)jac->head->ksp));
959       }
960 
961       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
962       PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur));
963       PetscCall(KSPSetNestLevel(jac->kspschur, pc->kspnestlevel));
964       PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure));
965       PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1));
966       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) {
967         PC pcschur;
968         PetscCall(KSPGetPC(jac->kspschur, &pcschur));
969         PetscCall(PCSetType(pcschur, PCNONE));
970         /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */
971       } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) {
972         PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
973       }
974       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
975       PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix));
976       PetscCall(KSPSetOptionsPrefix(jac->kspschur, Dprefix));
977       /* propagate DM */
978       {
979         DM sdm;
980         PetscCall(KSPGetDM(jac->head->next->ksp, &sdm));
981         if (sdm) {
982           PetscCall(KSPSetDM(jac->kspschur, sdm));
983           PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE));
984         }
985       }
986       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
987       /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */
988       PetscCall(KSPSetFromOptions(jac->kspschur));
989     }
990     PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY));
991     PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY));
992 
993     /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */
994     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname));
995     PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
996     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
997     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", (PetscObject)LSC_L));
998     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname));
999     PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
1000     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
1001     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", (PetscObject)LSC_L));
1002   } else if (jac->type == PC_COMPOSITE_GKB) {
1003     IS       ccis;
1004     PetscInt rstart, rend;
1005 
1006     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields");
1007 
1008     ilink = jac->head;
1009 
1010     /* When extracting off-diagonal submatrices, we take complements from this range */
1011     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
1012 
1013     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1014     if (jac->offdiag_use_amat) {
1015       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1016     } else {
1017       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1018     }
1019     PetscCall(ISDestroy(&ccis));
1020     /* Create work vectors for GKB algorithm */
1021     PetscCall(VecDuplicate(ilink->x, &jac->u));
1022     PetscCall(VecDuplicate(ilink->x, &jac->Hu));
1023     PetscCall(VecDuplicate(ilink->x, &jac->w2));
1024     ilink = ilink->next;
1025     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1026     if (jac->offdiag_use_amat) {
1027       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1028     } else {
1029       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1030     }
1031     PetscCall(ISDestroy(&ccis));
1032     /* Create work vectors for GKB algorithm */
1033     PetscCall(VecDuplicate(ilink->x, &jac->v));
1034     PetscCall(VecDuplicate(ilink->x, &jac->d));
1035     PetscCall(VecDuplicate(ilink->x, &jac->w1));
1036     PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu));
1037     PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz));
1038 
1039     ilink = jac->head;
1040     PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H));
1041     if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1042     /* Create gkb_monitor context */
1043     if (jac->gkbmonitor) {
1044       PetscInt tablevel;
1045       PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer));
1046       PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII));
1047       PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel));
1048       PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel));
1049       PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1));
1050     }
1051   } else {
1052     /* set up the individual splits' PCs */
1053     i     = 0;
1054     ilink = jac->head;
1055     while (ilink) {
1056       PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i]));
1057       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
1058       if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1059       i++;
1060       ilink = ilink->next;
1061     }
1062   }
1063 
1064   /* Set coordinates to the sub PC objects whenever these are set */
1065   if (jac->coordinates_set) {
1066     PC pc_coords;
1067     if (jac->type == PC_COMPOSITE_SCHUR) {
1068       // Head is first block.
1069       PetscCall(KSPGetPC(jac->head->ksp, &pc_coords));
1070       PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords));
1071       // Second one is Schur block, but its KSP object is in kspschur.
1072       PetscCall(KSPGetPC(jac->kspschur, &pc_coords));
1073       PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords));
1074     } else if (jac->type == PC_COMPOSITE_GKB) {
1075       PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB Fieldpslit preconditioner\n"));
1076     } else {
1077       ilink = jac->head;
1078       while (ilink) {
1079         PetscCall(KSPGetPC(ilink->ksp, &pc_coords));
1080         PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords));
1081         ilink = ilink->next;
1082       }
1083     }
1084   }
1085 
1086   jac->suboptionsset = PETSC_TRUE;
1087   PetscFunctionReturn(PETSC_SUCCESS);
1088 }
1089 
/* FieldSplitSplitSolveAdd - scatter the global vector xx into the split's work vector,
   solve with the split's KSP (the solve is wrapped in the split's log event), and
   scatter-ADD the result back into the global vector yy. Expands to a single
   PetscErrorCode expression chained with || so evaluation short-circuits at the first
   failing call; relies on a variable `pc` being in scope at the expansion site
   (consumed by KSPCheckSolve). */
#define FieldSplitSplitSolveAdd(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \
                    KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE)))
1094 
1095 static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y)
1096 {
1097   PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
1098   PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
1099   KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;
1100 
1101   PetscFunctionBegin;
1102   switch (jac->schurfactorization) {
1103   case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
1104     /* [A00 0; 0 -S], positive definite, suitable for MINRES */
1105     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1106     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1107     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1108     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1109     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1110     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1111     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1112     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1113     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1114     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1115     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1116     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1117     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1118     PetscCall(VecScale(ilinkD->y, jac->schurscale));
1119     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1120     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1121     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1122     break;
1123   case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
1124     /* [A00 0; A10 S], suitable for left preconditioning */
1125     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1126     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1127     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1128     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1129     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1130     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1131     PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1132     PetscCall(VecScale(ilinkD->x, -1.));
1133     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1134     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1135     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1136     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1137     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1138     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1139     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1140     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1141     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1142     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1143     break;
1144   case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
1145     /* [A00 A01; 0 S], suitable for right preconditioning */
1146     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1147     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1148     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1149     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1150     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1151     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1152     PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1153     PetscCall(VecScale(ilinkA->x, -1.));
1154     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1155     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1156     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1157     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1158     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1159     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1160     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1161     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1162     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1163     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1164     break;
1165   case PC_FIELDSPLIT_SCHUR_FACT_FULL:
1166     /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */
1167     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1168     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1169     PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1170     PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y));
1171     PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y));
1172     PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1173     PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1174     PetscCall(VecScale(ilinkD->x, -1.0));
1175     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1176     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1177 
1178     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1179     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1180     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1181     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1182     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1183 
1184     if (kspUpper == kspA) {
1185       PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y));
1186       PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
1187       PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1188       PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1189       PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1190       PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1191     } else {
1192       PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1193       PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1194       PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1195       PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1196       PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1197       PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z));
1198       PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z));
1199       PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1200       PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
1201     }
1202     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1203     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1204     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1205   }
1206   PetscFunctionReturn(PETSC_SUCCESS);
1207 }
1208 
/* Apply the transpose of the Schur-complement fieldsplit preconditioner: y = M^{-T} x.

   Mirrors the forward Schur apply with every KSPSolve() replaced by KSPSolveTranspose()
   and the roles of the off-diagonal blocks jac->B (A01) and jac->C (A10) exchanged,
   since (L D U)^T = U^T D^T L^T.  The VecScatterBegin()/VecScatterEnd() pairs are
   deliberately interleaved with computation so communication can overlap useful
   work; do not reorder them. */
static PetscErrorCode PCApplyTranspose_FieldSplit_Schur(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; /* first split: A00 block; second split: Schur-complement block */
  KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;

  PetscFunctionBegin;
  switch (jac->schurfactorization) {
  case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
    /* [A00 0; 0 -S], positive definite, suitable for MINRES */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* schurscale (set via -pc_fieldsplit_schur_scale) carries the sign/scaling of the -S block */
    PetscCall(VecScale(ilinkD->y, jac->schurscale));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* transpose of the upper factorization: solve A00^T first, then build the Schur RHS with B^T */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* transpose of the lower factorization: solve S^T first, then build the A00 RHS with C^T */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
    PetscCall(VecScale(ilinkA->x, -1.));
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* transpose of [1 0; A10 A00^{-1} 1][A00 0; 0 S][1 A00^{-1}A01; 0 1]: apply U^{-T} first */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspUpper, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.0));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

    /* NOTE(review): kspLower is initialized to kspA above and never reassigned in this
       function, so the else branch below appears unreachable as written — confirm
       against the rest of the file before relying on it. */
    if (kspLower == kspA) {
      /* fold the L^{-T} application into a single A00^T solve on the updated RHS */
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->y));
      PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    } else {
      /* separate lower-factor solver: solve with A00^T, then subtract the L^{-T} correction */
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
      PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(KSPSolveTranspose(kspLower, ilinkA->x, ilinkA->z));
      PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->z));
      PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
    }
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1319 
/* Apply the fieldsplit preconditioner y = M^{-1} x for the additive (block-Jacobi),
   multiplicative (block Gauss-Seidel), and symmetric multiplicative compositions.
   The Schur and GKB compositions are handled by their own apply routines. */
static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          cnt, bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    if (jac->defaultsplit) {
      /* all splits share one strided layout: gather every field at once, solve, scatter back */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* general index sets: each split scatters, solves, and adds its result into y */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) {
    /* specialized 2-split Gauss-Seidel: only the needed part of the residual is formed */
    PetscCall(VecSet(y, 0.0));
    /* solve on first block for first block variables */
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));

    /* compute the residual only onto second block variables using first block variables */
    PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x));
    ilink = ilink->next;
    PetscCall(VecScale(ilink->x, -1.0));
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));

    /* solve on second block variables */
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
    /* general multiplicative sweep: forward pass over the splits; the symmetric
       variant then repeats the sweep backwards */
    if (!jac->w1) {
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
    cnt = 1; /* index into jac->Afield, the per-split row blocks used for residuals */
    while (ilink->next) {
      ilink = ilink->next;
      /* compute the residual only over the part of the vector needed */
      PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x));
      PetscCall(VecScale(ilink->x, -1.0));
      PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
      PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
      PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    }
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      cnt -= 2; /* skip the last split (just solved) and step back through the rest */
      while (ilink->previous) {
        ilink = ilink->previous;
        /* compute the residual only over the part of the vector needed */
        PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x));
        PetscCall(VecScale(ilink->x, -1.0));
        PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      }
    }
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1417 
/* Apply the Golub-Kahan bidiagonalization (GKB) fieldsplit preconditioner.

   The first split carries the (optionally augmented-Lagrangian regularized)
   H = A + nu*B*B' solve, the second the constraint block; [Ar13] is the reference
   cited in the inline comments below.  Iterates until the lower-bound error
   estimate (computed over a delay window of jac->gkbdelay recent z values) drops
   below jac->gkbtol, or jac->gkbmaxit iterations are reached. */
static PetscErrorCode PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
  KSP               ksp = ilinkA->ksp;
  Vec               u, v, Hu, d, work1, work2;
  PetscScalar       alpha, z, nrmz2, *vecz;
  PetscReal         lowbnd, nu, beta;
  PetscInt          j, iterGKB;

  PetscFunctionBegin;
  PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));

  /* work objects were allocated at setup time and live in the PC context */
  u     = jac->u;
  v     = jac->v;
  Hu    = jac->Hu;
  d     = jac->d;
  work1 = jac->w1;
  work2 = jac->w2;
  vecz  = jac->vecz;

  /* Change RHS to comply with matrix regularization H = A + nu*B*B' */
  /* Add q = q + nu*B*b */
  if (jac->gkbnu) {
    nu = jac->gkbnu;
    PetscCall(VecScale(ilinkD->x, jac->gkbnu));
    PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */
  } else {
    /* Situation when no augmented Lagrangian is used. Then we set inner  */
    /* matrix N = I in [Ar13], and thus nu = 1.                           */
    nu = 1;
  }

  /* Transform rhs from [q,tilde{b}] to [0,b] */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y));
  PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1));
  PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x        */

  /* First step of algorithm */
  PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c)*/
  KSPCheckDot(ksp, beta);
  beta = PetscSqrtReal(nu) * beta;
  PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c      */
  PetscCall(MatMult(jac->B, v, work2));          /* u = H^{-1}*B*v      */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(KSPSolve(ksp, work2, u));
  PetscCall(KSPCheckSolve(ksp, pc, u));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u      */
  PetscCall(VecDot(Hu, u, &alpha));
  KSPCheckDot(ksp, alpha);
  PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
  alpha = PetscSqrtReal(PetscAbsScalar(alpha));
  PetscCall(VecScale(u, 1.0 / alpha));
  PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* d = v/alpha         */

  z       = beta / alpha;
  vecz[1] = z;

  /* Computation of first iterate x(1) and p(1) */
  PetscCall(VecAXPY(ilinkA->y, z, u));
  PetscCall(VecCopy(d, ilinkD->y));
  PetscCall(VecScale(ilinkD->y, -z));

  iterGKB = 1;
  lowbnd  = 2 * jac->gkbtol; /* force at least one pass through the loop below */
  if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));

  while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) {
    iterGKB += 1;
    PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */
    PetscCall(VecAXPBY(v, nu, -alpha, work1));
    PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = sqrt(nu)*v'*v      */
    beta = beta / PetscSqrtReal(nu);
    PetscCall(VecScale(v, 1.0 / beta));
    PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */
    PetscCall(MatMult(jac->H, u, Hu));
    PetscCall(VecAXPY(work2, -beta, Hu));
    PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(KSPSolve(ksp, work2, u));
    PetscCall(KSPCheckSolve(ksp, pc, u));
    PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u            */
    PetscCall(VecDot(Hu, u, &alpha));
    KSPCheckDot(ksp, alpha);
    PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
    alpha = PetscSqrtReal(PetscAbsScalar(alpha));
    PetscCall(VecScale(u, 1.0 / alpha));

    z       = -beta / alpha * z; /* z <- beta/alpha*z     */
    vecz[0] = z;

    /* Computation of new iterate x(i+1) and p(i+1) */
    PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */
    PetscCall(VecAXPY(ilinkA->y, z, u));                   /* r = r + z*u          */
    PetscCall(VecAXPY(ilinkD->y, -z, d));                  /* p = p - z*d          */
    PetscCall(MatMult(jac->H, ilinkA->y, Hu));             /* ||u||_H = u'*H*u     */
    PetscCall(VecDot(Hu, ilinkA->y, &nrmz2));

    /* Compute Lower Bound estimate once the delay window of z values has filled */
    if (iterGKB > jac->gkbdelay) {
      lowbnd = 0.0;
      for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]);
      lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2));
    }

    /* shift the window of recent z values; the newest entry is written to vecz[0] above */
    for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2];
    if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));
  }

  PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

  PetscFunctionReturn(PETSC_SUCCESS);
}
1541 
/* Transposed analogue of FieldSplitSplitSolveAdd(): scatter xx into the split,
   run KSPSolveTranspose() on the split's KSP, and scatter-ADD the result into yy.
   Written as one short-circuiting || chain of PetscErrorCode-returning calls so
   the first failure aborts the remaining steps; `pc` must be in scope at the
   expansion site for KSPCheckSolve().  Note that here ilink->y holds the
   scattered input and ilink->x the solution (roles swapped vs the forward solve). */
#define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \
                    KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE)))
1546 
/* Apply the transpose of the fieldsplit preconditioner: y = M^{-T} x.

   Additive composition is its own transpose structurally (each split uses
   KSPSolveTranspose()).  For the multiplicative compositions, the transpose of a
   forward sweep is a backward sweep, so the splits are visited in reverse order;
   residuals are formed with the full matrix via MatMultTranspose(pc->mat, ...). */
static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    if (jac->defaultsplit) {
      /* strided layout: gather all fields at once, transpose-solve each, scatter back */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* general index sets: each split contributes additively */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else {
    if (!jac->w1) {
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* symmetric sweep: forward pass, then backward pass; w2 = x - A^T y is the residual */
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->next) {
        ilink = ilink->next;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    } else {
      /* plain multiplicative transpose: start from the LAST split and sweep backwards */
      while (ilink->next) { /* get to last entry in linked list */
        ilink = ilink->next;
      }
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1611 
1612 static PetscErrorCode PCReset_FieldSplit(PC pc)
1613 {
1614   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1615   PC_FieldSplitLink ilink = jac->head, next;
1616 
1617   PetscFunctionBegin;
1618   while (ilink) {
1619     PetscCall(KSPDestroy(&ilink->ksp));
1620     PetscCall(VecDestroy(&ilink->x));
1621     PetscCall(VecDestroy(&ilink->y));
1622     PetscCall(VecDestroy(&ilink->z));
1623     PetscCall(VecScatterDestroy(&ilink->sctx));
1624     PetscCall(ISDestroy(&ilink->is));
1625     PetscCall(ISDestroy(&ilink->is_col));
1626     PetscCall(PetscFree(ilink->splitname));
1627     PetscCall(PetscFree(ilink->fields));
1628     PetscCall(PetscFree(ilink->fields_col));
1629     next = ilink->next;
1630     PetscCall(PetscFree(ilink));
1631     ilink = next;
1632   }
1633   jac->head = NULL;
1634   PetscCall(PetscFree2(jac->x, jac->y));
1635   if (jac->mat && jac->mat != jac->pmat) {
1636     PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat));
1637   } else if (jac->mat) {
1638     jac->mat = NULL;
1639   }
1640   if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat));
1641   if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield));
1642   jac->nsplits = 0;
1643   PetscCall(VecDestroy(&jac->w1));
1644   PetscCall(VecDestroy(&jac->w2));
1645   PetscCall(MatDestroy(&jac->schur));
1646   PetscCall(MatDestroy(&jac->schurp));
1647   PetscCall(MatDestroy(&jac->schur_user));
1648   PetscCall(KSPDestroy(&jac->kspschur));
1649   PetscCall(KSPDestroy(&jac->kspupper));
1650   PetscCall(MatDestroy(&jac->B));
1651   PetscCall(MatDestroy(&jac->C));
1652   PetscCall(MatDestroy(&jac->H));
1653   PetscCall(VecDestroy(&jac->u));
1654   PetscCall(VecDestroy(&jac->v));
1655   PetscCall(VecDestroy(&jac->Hu));
1656   PetscCall(VecDestroy(&jac->d));
1657   PetscCall(PetscFree(jac->vecz));
1658   PetscCall(PetscViewerDestroy(&jac->gkbviewer));
1659   jac->isrestrict = PETSC_FALSE;
1660   PetscFunctionReturn(PETSC_SUCCESS);
1661 }
1662 
1663 static PetscErrorCode PCDestroy_FieldSplit(PC pc)
1664 {
1665   PetscFunctionBegin;
1666   PetscCall(PCReset_FieldSplit(pc));
1667   PetscCall(PetscFree(pc->data));
1668   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
1669   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL));
1670   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL));
1671   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL));
1672   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL));
1673   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL));
1674   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL));
1675   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1676 
1677   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
1678   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
1679   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
1680   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
1681   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1682   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
1683   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
1684   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
1685   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
1686   PetscFunctionReturn(PETSC_SUCCESS);
1687 }
1688 
1689 static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems *PetscOptionsObject)
1690 {
1691   PetscInt        bs;
1692   PetscBool       flg;
1693   PC_FieldSplit  *jac = (PC_FieldSplit *)pc->data;
1694   PCCompositeType ctype;
1695 
1696   PetscFunctionBegin;
1697   PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options");
1698   PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL));
1699   PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg));
1700   if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs));
1701   jac->diag_use_amat = pc->useAmat;
1702   PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL));
1703   jac->offdiag_use_amat = pc->useAmat;
1704   PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL));
1705   PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL));
1706   PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */
1707   PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg));
1708   if (flg) PetscCall(PCFieldSplitSetType(pc, ctype));
1709   /* Only setup fields once */
1710   if ((jac->bs > 0) && (jac->nsplits == 0)) {
1711     /* only allow user to set fields from command line if bs is already known.
1712        otherwise user can set them in PCFieldSplitSetDefaults() */
1713     PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
1714     if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
1715   }
1716   if (jac->type == PC_COMPOSITE_SCHUR) {
1717     PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg));
1718     if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n"));
1719     PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL));
1720     PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL));
1721     PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL));
1722   } else if (jac->type == PC_COMPOSITE_GKB) {
1723     PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitGKBTol", jac->gkbtol, &jac->gkbtol, NULL));
1724     PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL));
1725     PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitGKBNu", jac->gkbnu, &jac->gkbnu, NULL));
1726     PetscCheck(jac->gkbnu >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "nu cannot be less than 0: value %g", (double)jac->gkbnu);
1727     PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", "Maximum allowed number of iterations", "PCFieldSplitGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL));
1728     PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL));
1729   }
1730   /*
1731     In the initial call to this routine the sub-solver data structures do not exist so we cannot call KSPSetFromOptions() on them yet.
1732     But after the initial setup of ALL the layers of sub-solvers is completed we do want to call KSPSetFromOptions() on the sub-solvers every time it
1733     is called on the outer solver in case changes were made in the options database
1734 
1735     But even after PCSetUp_FieldSplit() is called all the options inside the inner levels of sub-solvers may still not have been set thus we only call the KSPSetFromOptions()
1736     if we know that the entire stack of sub-solvers below this have been complete instantiated, we check this by seeing if any solver iterations are complete.
1737     Without this extra check test p2p1fetidp_olof_full and others fail with incorrect matrix types.
1738 
1739     There could be a negative side effect of calling the KSPSetFromOptions() below.
1740 
1741     If one captured the PetscObjectState of the options database one could skip these calls if the database has not changed from the previous call
1742   */
1743   if (jac->issetup) {
1744     PC_FieldSplitLink ilink = jac->head;
1745     if (jac->type == PC_COMPOSITE_SCHUR) {
1746       if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper));
1747       if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur));
1748     }
1749     while (ilink) {
1750       if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp));
1751       ilink = ilink->next;
1752     }
1753   }
1754   PetscOptionsHeadEnd();
1755   PetscFunctionReturn(PETSC_SUCCESS);
1756 }
1757 
1758 static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
1759 {
1760   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
1761   PC_FieldSplitLink ilink, next = jac->head;
1762   char              prefix[128];
1763   PetscInt          i;
1764 
1765   PetscFunctionBegin;
1766   if (jac->splitdefined) {
1767     PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
1768     PetscFunctionReturn(PETSC_SUCCESS);
1769   }
1770   for (i = 0; i < n; i++) {
1771     PetscCheck(fields[i] < jac->bs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", fields[i], jac->bs);
1772     PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]);
1773   }
1774   PetscCall(PetscNew(&ilink));
1775   if (splitname) {
1776     PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
1777   } else {
1778     PetscCall(PetscMalloc1(3, &ilink->splitname));
1779     PetscCall(PetscSNPrintf(ilink->splitname, 2, "%" PetscInt_FMT, jac->nsplits));
1780   }
1781   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split great than 4 gets logged in the 4th split */
1782   PetscCall(PetscMalloc1(n, &ilink->fields));
1783   PetscCall(PetscArraycpy(ilink->fields, fields, n));
1784   PetscCall(PetscMalloc1(n, &ilink->fields_col));
1785   PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n));
1786 
1787   ilink->nfields = n;
1788   ilink->next    = NULL;
1789   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
1790   PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
1791   PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
1792   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
1793   PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
1794 
1795   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
1796   PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
1797 
1798   if (!next) {
1799     jac->head       = ilink;
1800     ilink->previous = NULL;
1801   } else {
1802     while (next->next) next = next->next;
1803     next->next      = ilink;
1804     ilink->previous = next;
1805   }
1806   jac->nsplits++;
1807   PetscFunctionReturn(PETSC_SUCCESS);
1808 }
1809 
1810 static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1811 {
1812   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1813 
1814   PetscFunctionBegin;
1815   *subksp = NULL;
1816   if (n) *n = 0;
1817   if (jac->type == PC_COMPOSITE_SCHUR) {
1818     PetscInt nn;
1819 
1820     PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()");
1821     PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits);
1822     nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 1 : 0);
1823     PetscCall(PetscMalloc1(nn, subksp));
1824     (*subksp)[0] = jac->head->ksp;
1825     (*subksp)[1] = jac->kspschur;
1826     if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper;
1827     if (n) *n = nn;
1828   }
1829   PetscFunctionReturn(PETSC_SUCCESS);
1830 }
1831 
1832 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp)
1833 {
1834   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1835 
1836   PetscFunctionBegin;
1837   PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()");
1838   PetscCall(PetscMalloc1(jac->nsplits, subksp));
1839   PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp));
1840 
1841   (*subksp)[1] = jac->kspschur;
1842   if (n) *n = jac->nsplits;
1843   PetscFunctionReturn(PETSC_SUCCESS);
1844 }
1845 
1846 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1847 {
1848   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1849   PetscInt          cnt   = 0;
1850   PC_FieldSplitLink ilink = jac->head;
1851 
1852   PetscFunctionBegin;
1853   PetscCall(PetscMalloc1(jac->nsplits, subksp));
1854   while (ilink) {
1855     (*subksp)[cnt++] = ilink->ksp;
1856     ilink            = ilink->next;
1857   }
1858   PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits);
1859   if (n) *n = jac->nsplits;
1860   PetscFunctionReturn(PETSC_SUCCESS);
1861 }
1862 
1863 /*@C
1864   PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`.
1865 
1866   Input Parameters:
1867 + pc  - the preconditioner context
1868 - isy - the index set that defines the indices to which the fieldsplit is to be restricted
1869 
1870   Level: advanced
1871 
1872   Developer Notes:
1873   It seems the resulting `IS`s will not cover the entire space, so
1874   how can they define a convergent preconditioner? Needs explaining.
1875 
1876 .seealso: [](sec_block_matrices), `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
1877 @*/
PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(isy, IS_CLASSID, 2);
  /* dispatch through the method table; PetscTryMethod() is a no-op when the PC type does not register "PCFieldSplitRestrictIS_C" */
  PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1886 
static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;
  PetscInt          localsize, size, sizez, i;
  const PetscInt   *ind, *indz;
  PetscInt         *indc, *indcz;
  PetscBool         flg;

  PetscFunctionBegin;
  /* Replace each split's IS by its restriction to isy, renumbered in the global numbering induced by isy */
  PetscCall(ISGetLocalSize(isy, &localsize));
  /* exclusive prefix sum: 'size' becomes the offset of this rank's first isy entry in the new global numbering */
  PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy)));
  size -= localsize;
  while (ilink) {
    IS isrl, isr;
    PC subpc;
    /* isrl = local positions (within isy) of this split's entries that also appear in isy */
    PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl));
    PetscCall(ISGetLocalSize(isrl, &localsize));
    PetscCall(PetscMalloc1(localsize, &indc));
    PetscCall(ISGetIndices(isrl, &ind));
    PetscCall(PetscArraycpy(indc, ind, localsize));
    PetscCall(ISRestoreIndices(isrl, &ind));
    PetscCall(ISDestroy(&isrl));
    /* shift local positions by this rank's offset to obtain global indices */
    for (i = 0; i < localsize; i++) *(indc + i) += size;
    /* isr takes ownership of indc (PETSC_OWN_POINTER) */
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr));
    /* install isr as both the row and the column IS of the split: one extra reference per slot, then drop the creation reference */
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is));
    ilink->is = isr;
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is_col));
    ilink->is_col = isr;
    PetscCall(ISDestroy(&isr));
    PetscCall(KSPGetPC(ilink->ksp, &subpc));
    PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg));
    if (flg) {
      /* nested fieldsplit: restrict it recursively using the embedding of isy into this split's IS */
      IS       iszl, isz;
      MPI_Comm comm;
      PetscCall(ISGetLocalSize(ilink->is, &localsize));
      comm = PetscObjectComm((PetscObject)ilink->is);
      PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl));
      /* exclusive prefix sum over this split's IS to get the offset for the nested numbering */
      PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm));
      sizez -= localsize;
      PetscCall(ISGetLocalSize(iszl, &localsize));
      PetscCall(PetscMalloc1(localsize, &indcz));
      PetscCall(ISGetIndices(iszl, &indz));
      PetscCall(PetscArraycpy(indcz, indz, localsize));
      PetscCall(ISRestoreIndices(iszl, &indz));
      PetscCall(ISDestroy(&iszl));
      for (i = 0; i < localsize; i++) *(indcz + i) += sizez;
      PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz));
      PetscCall(PCFieldSplitRestrictIS(subpc, isz));
      PetscCall(ISDestroy(&isz));
    }
    next  = ilink->next;
    ilink = next;
  }
  /* record that a restriction was applied so later setup stages can account for it */
  jac->isrestrict = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1946 
1947 static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is)
1948 {
1949   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
1950   PC_FieldSplitLink ilink, next = jac->head;
1951   char              prefix[128];
1952 
1953   PetscFunctionBegin;
1954   if (jac->splitdefined) {
1955     PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
1956     PetscFunctionReturn(PETSC_SUCCESS);
1957   }
1958   PetscCall(PetscNew(&ilink));
1959   if (splitname) {
1960     PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
1961   } else {
1962     PetscCall(PetscMalloc1(8, &ilink->splitname));
1963     PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits));
1964   }
1965   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split great than 4 gets logged in the 4th split */
1966   PetscCall(PetscObjectReference((PetscObject)is));
1967   PetscCall(ISDestroy(&ilink->is));
1968   ilink->is = is;
1969   PetscCall(PetscObjectReference((PetscObject)is));
1970   PetscCall(ISDestroy(&ilink->is_col));
1971   ilink->is_col = is;
1972   ilink->next   = NULL;
1973   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
1974   PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
1975   PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
1976   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
1977   PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
1978 
1979   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
1980   PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
1981 
1982   if (!next) {
1983     jac->head       = ilink;
1984     ilink->previous = NULL;
1985   } else {
1986     while (next->next) next = next->next;
1987     next->next      = ilink;
1988     ilink->previous = next;
1989   }
1990   jac->nsplits++;
1991   PetscFunctionReturn(PETSC_SUCCESS);
1992 }
1993 
1994 /*@C
1995   PCFieldSplitSetFields - Sets the fields that define one particular split in `PCFIELDSPLIT`
1996 
1997   Logically Collective
1998 
1999   Input Parameters:
2000 + pc         - the preconditioner context
2001 . splitname  - name of this split, if `NULL` the number of the split is used
2002 . n          - the number of fields in this split
2003 . fields     - the fields in this split
2004 - fields_col - generally the same as fields, if it does not match fields then the matrix block that is solved for this set of fields comes from an off-diagonal block
2005                  of the matrix and fields_col provides the column indices for that block
2006 
2007   Level: intermediate
2008 
2009   Notes:
  Use `PCFieldSplitSetIS()` to set a general set of indices as a split.
2011 
2012   `PCFieldSplitSetFields()` is for defining fields as strided blocks. For example, if the block
2013   size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean
2014   0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x1x3x5x7.. x12x45x78x....
2015   where the numbered entries indicate what is in the split.
2016 
2017   This function is called once per split (it creates a new split each time).  Solve options
2018   for this split will be available under the prefix `-fieldsplit_SPLITNAME_`.
2019 
2020   `PCFieldSplitSetIS()` does not support having a fields_col different from fields
2021 
2022   Developer Notes:
2023   This routine does not actually create the `IS` representing the split, that is delayed
2024   until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be
2025   available when this routine is called.
2026 
2027 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()`
2028 @*/
2029 PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
2030 {
2031   PetscFunctionBegin;
2032   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2033   PetscAssertPointer(splitname, 2);
2034   PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname);
2035   PetscAssertPointer(fields, 4);
2036   PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col));
2037   PetscFunctionReturn(PETSC_SUCCESS);
2038 }
2039 
2040 /*@
2041   PCFieldSplitSetDiagUseAmat - set flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2042   the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.
2043 
2044   Logically Collective
2045 
2046   Input Parameters:
2047 + pc  - the preconditioner object
2048 - flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2049 
2050   Options Database Key:
2051 . -pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks
2052 
2053   Level: intermediate
2054 
2055 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT`
2056 @*/
2057 PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg)
2058 {
2059   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2060   PetscBool      isfs;
2061 
2062   PetscFunctionBegin;
2063   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2064   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2065   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2066   jac->diag_use_amat = flg;
2067   PetscFunctionReturn(PETSC_SUCCESS);
2068 }
2069 
2070 /*@
2071   PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2072   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.
2073 
2074   Logically Collective
2075 
2076   Input Parameter:
2077 . pc - the preconditioner object
2078 
2079   Output Parameter:
2080 . flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2081 
2082   Level: intermediate
2083 
2084 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT`
2085 @*/
2086 PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg)
2087 {
2088   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2089   PetscBool      isfs;
2090 
2091   PetscFunctionBegin;
2092   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2093   PetscAssertPointer(flg, 2);
2094   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2095   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2096   *flg = jac->diag_use_amat;
2097   PetscFunctionReturn(PETSC_SUCCESS);
2098 }
2099 
2100 /*@
2101   PCFieldSplitSetOffDiagUseAmat - set flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2102   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.
2103 
2104   Logically Collective
2105 
2106   Input Parameters:
2107 + pc  - the preconditioner object
2108 - flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2109 
2110   Options Database Key:
2111 . -pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks
2112 
2113   Level: intermediate
2114 
2115 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT`
2116 @*/
2117 PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg)
2118 {
2119   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2120   PetscBool      isfs;
2121 
2122   PetscFunctionBegin;
2123   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2124   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2125   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2126   jac->offdiag_use_amat = flg;
2127   PetscFunctionReturn(PETSC_SUCCESS);
2128 }
2129 
2130 /*@
2131   PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2132   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.
2133 
2134   Logically Collective
2135 
2136   Input Parameter:
2137 . pc - the preconditioner object
2138 
2139   Output Parameter:
2140 . flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2141 
2142   Level: intermediate
2143 
2144 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT`
2145 @*/
2146 PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg)
2147 {
2148   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2149   PetscBool      isfs;
2150 
2151   PetscFunctionBegin;
2152   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2153   PetscAssertPointer(flg, 2);
2154   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2155   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2156   *flg = jac->offdiag_use_amat;
2157   PetscFunctionReturn(PETSC_SUCCESS);
2158 }
2159 
2160 /*@C
2161   PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT`
2162 
2163   Logically Collective
2164 
2165   Input Parameters:
2166 + pc        - the preconditioner context
2167 . splitname - name of this split, if `NULL` the number of the split is used
2168 - is        - the index set that defines the elements in this split
2169 
2170   Level: intermediate
2171 
2172   Notes:
2173   Use `PCFieldSplitSetFields()`, for splits defined by strided types.
2174 
2175   This function is called once per split (it creates a new split each time).  Solve options
2176   for this split will be available under the prefix -fieldsplit_SPLITNAME_.
2177 
2178 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`
2179 @*/
PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* splitname is optional: NULL means "use the split number as the name" */
  if (splitname) PetscAssertPointer(splitname, 2);
  PetscValidHeaderSpecific(is, IS_CLASSID, 3);
  /* dispatch through the method table; PetscTryMethod() is a no-op for non-fieldsplit PC types */
  PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2189 
2190 /*@C
2191   PCFieldSplitGetIS - Retrieves the elements for a split as an `IS`
2192 
2193   Logically Collective
2194 
2195   Input Parameters:
2196 + pc        - the preconditioner context
2197 - splitname - name of this split
2198 
2199   Output Parameter:
2200 . is - the index set that defines the elements in this split, or `NULL` if the split is not found
2201 
2202   Level: intermediate
2203 
2204 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`
2205 @*/
2206 PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is)
2207 {
2208   PetscFunctionBegin;
2209   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2210   PetscAssertPointer(splitname, 2);
2211   PetscAssertPointer(is, 3);
2212   {
2213     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2214     PC_FieldSplitLink ilink = jac->head;
2215     PetscBool         found;
2216 
2217     *is = NULL;
2218     while (ilink) {
2219       PetscCall(PetscStrcmp(ilink->splitname, splitname, &found));
2220       if (found) {
2221         *is = ilink->is;
2222         break;
2223       }
2224       ilink = ilink->next;
2225     }
2226   }
2227   PetscFunctionReturn(PETSC_SUCCESS);
2228 }
2229 
2230 /*@C
2231   PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS`
2232 
2233   Logically Collective
2234 
2235   Input Parameters:
2236 + pc    - the preconditioner context
2237 - index - index of this split
2238 
2239   Output Parameter:
2240 . is - the index set that defines the elements in this split
2241 
2242   Level: intermediate
2243 
2244 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`
2245 @*/
2246 PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is)
2247 {
2248   PetscFunctionBegin;
2249   PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index);
2250   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2251   PetscAssertPointer(is, 3);
2252   {
2253     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2254     PC_FieldSplitLink ilink = jac->head;
2255     PetscInt          i     = 0;
2256     PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits);
2257 
2258     while (i < index) {
2259       ilink = ilink->next;
2260       ++i;
2261     }
2262     PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is));
2263   }
2264   PetscFunctionReturn(PETSC_SUCCESS);
2265 }
2266 
2267 /*@
2268   PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the
2269   fieldsplit preconditioner when calling `PCFieldSplitSetIS()`. If not set the matrix block size is used.
2270 
2271   Logically Collective
2272 
2273   Input Parameters:
2274 + pc - the preconditioner context
2275 - bs - the block size
2276 
2277   Level: intermediate
2278 
2279 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
2280 @*/
PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* bs must be identical on all ranks of the PC's communicator */
  PetscValidLogicalCollectiveInt(pc, bs, 2);
  /* dispatch through the method table; PetscTryMethod() is a no-op for non-fieldsplit PC types */
  PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2289 
2290 /*@C
2291   PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits
2292 
2293   Collective
2294 
2295   Input Parameter:
2296 . pc - the preconditioner context
2297 
2298   Output Parameters:
2299 + n      - the number of splits
2300 - subksp - the array of `KSP` contexts
2301 
2302   Level: advanced
2303 
2304   Notes:
2305   After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2306   (not the `KSP`, just the array that contains them).
2307 
2308   You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`.
2309 
2310   If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the
2311   Schur complement and the `KSP` object used to iterate over the Schur complement.
2312   To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`.
2313 
2314   If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the
2315   inner linear system defined by the matrix H in each loop.
2316 
2317   Fortran Notes:
2318   You must pass in a `KSP` array that is large enough to contain all the `KSP`s.
2319   You can call `PCFieldSplitGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
2320   `KSP` array must be.
2321 
2322   Developer Notes:
2323   There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2324 
2325   The Fortran interface should be modernized to return directly the array of values.
2326 
2327 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()`
2328 @*/
PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2);
  /* PetscUseMethod (unlike PetscTryMethod) raises an error if the PC type does not provide this operation */
  PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2337 
2338 /*@C
2339   PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT`
2340 
2341   Collective
2342 
2343   Input Parameter:
2344 . pc - the preconditioner context
2345 
2346   Output Parameters:
2347 + n      - the number of splits
2348 - subksp - the array of `KSP` contexts
2349 
2350   Level: advanced
2351 
2352   Notes:
2353   After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2354   (not the `KSP` just the array that contains them).
2355 
2356   You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`.
2357 
2358   If the fieldsplit type is of type `PC_COMPOSITE_SCHUR`, it returns (in order)
2359 +  1  - the `KSP` used for the (1,1) block
2360 .  2  - the `KSP` used for the Schur complement (not the one used for the interior Schur solver)
2361 -  3  - the `KSP` used for the (1,1) block in the upper triangular factor (if different from that of the (1,1) block).
2362 
2363   It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`.
2364 
2365   Fortran Notes:
2366   You must pass in a `KSP` array that is large enough to contain all the local `KSP`s.
2367   You can call `PCFieldSplitSchurGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
2368   `KSP` array must be.
2369 
2370   Developer Notes:
2371   There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2372 
2373   Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged?
2374 
2375   The Fortran interface should be modernized to return directly the array of values.
2376 
2377 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()`
2378 @*/
PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2);
  /* PetscUseMethod (unlike PetscTryMethod) raises an error if the PC type does not provide this operation */
  PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2387 
2388 /*@
  PCFieldSplitSetSchurPre - Indicates from what operator the preconditioner is constructed for the Schur complement.
2390   The default is the A11 matrix.
2391 
2392   Collective
2393 
2394   Input Parameters:
2395 + pc    - the preconditioner context
2396 . ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default),
2397               `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`,
2398               `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL`
2399 - pre   - matrix to use for preconditioning, or `NULL`
2400 
2401   Options Database Keys:
2402 + -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`. See notes for meaning of various arguments
2403 - -fieldsplit_1_pc_type <pctype>                               - the preconditioner algorithm that is used to construct the preconditioner from the operator
2404 
2405   Level: intermediate
2406 
2407   Notes:
2408   If ptype is
2409 +     a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner
2410   matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix
2411 .     self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix:
2412   The only preconditioner that currently works with this symbolic representation matrix object is the `PCLSC`
2413   preconditioner
2414 .     user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument
2415   to this function).
2416 .     selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation Sp = A11 - A10 inv(diag(A00)) A01
2417   This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be
2418   lumped before extracting the diagonal using the additional option `-fieldsplit_1_mat_schur_complement_ainv_type lump`
2419 -     full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation
2420   computed internally by `PCFIELDSPLIT` (this is expensive)
2421   useful mostly as a test that the Schur complement approach can work for your problem
2422 
2423   When solving a saddle point problem, where the A11 block is identically zero, using `a11` as the ptype only makes sense
2424   with the additional option `-fieldsplit_1_pc_type none`. Usually for saddle point problems one would use a ptype of self and
2425   `-fieldsplit_1_pc_type lsc` which uses the least squares commutator to compute a preconditioner for the Schur complement.
2426 
2427 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`,
2428           `MatSchurComplementSetAinvType()`, `PCLSC`,
2429 
2430 @*/
PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscTryMethod silently does nothing when the method is not composed on pc
     (e.g. pc is not PCFIELDSPLIT or not using the Schur composite type) */
  PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2438 
/* Deprecated: old name retained for backward compatibility; forwards directly to PCFieldSplitSetSchurPre() */
PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  return PCFieldSplitSetSchurPre(pc, ptype, pre);
} /* Deprecated name */
2443 
2444 /*@
2445   PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be
2446   preconditioned.  See `PCFieldSplitSetSchurPre()` for details.
2447 
2448   Logically Collective
2449 
2450   Input Parameter:
2451 . pc - the preconditioner context
2452 
2453   Output Parameters:
2454 + ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`
2455 - pre   - matrix to use for preconditioning (with `PC_FIELDSPLIT_SCHUR_PRE_USER`), or `NULL`
2456 
2457   Level: intermediate
2458 
2459 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC`
2460 
2461 @*/
PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscUseMethod errors if the method is not composed, i.e. pc is not a Schur-type PCFIELDSPLIT */
  PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2469 
2470 /*@
2471   PCFieldSplitSchurGetS -  extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately
2472 
2473   Not Collective
2474 
2475   Input Parameter:
2476 . pc - the preconditioner context
2477 
2478   Output Parameter:
2479 . S - the Schur complement matrix
2480 
2481   Level: advanced
2482 
2483   Note:
2484   This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`.
2485 
2486 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`,
2487           `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()`
2488 @*/
2489 PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S)
2490 {
2491   const char    *t;
2492   PetscBool      isfs;
2493   PC_FieldSplit *jac;
2494 
2495   PetscFunctionBegin;
2496   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2497   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2498   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2499   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2500   jac = (PC_FieldSplit *)pc->data;
2501   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2502   if (S) *S = jac->schur;
2503   PetscFunctionReturn(PETSC_SUCCESS);
2504 }
2505 
2506 /*@
2507   PCFieldSplitSchurRestoreS -  returns the `MATSCHURCOMPLEMENT` matrix used by this `PC`
2508 
2509   Not Collective
2510 
2511   Input Parameters:
2512 + pc - the preconditioner context
2513 - S  - the Schur complement matrix
2514 
2515   Level: advanced
2516 
2517 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MatSchurComplement`, `PCFieldSplitSchurGetS()`
2518 @*/
2519 PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S)
2520 {
2521   const char    *t;
2522   PetscBool      isfs;
2523   PC_FieldSplit *jac;
2524 
2525   PetscFunctionBegin;
2526   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2527   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2528   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2529   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2530   jac = (PC_FieldSplit *)pc->data;
2531   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2532   PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten");
2533   PetscFunctionReturn(PETSC_SUCCESS);
2534 }
2535 
2536 static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2537 {
2538   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2539 
2540   PetscFunctionBegin;
2541   jac->schurpre = ptype;
2542   if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) {
2543     PetscCall(MatDestroy(&jac->schur_user));
2544     jac->schur_user = pre;
2545     PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
2546   }
2547   PetscFunctionReturn(PETSC_SUCCESS);
2548 }
2549 
static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* Both outputs are optional; pass NULL for any that is not wanted */
  if (ptype) *ptype = jac->schurpre;
  if (pre) *pre = jac->schur_user; /* borrowed: no new reference is taken here */
  PetscFunctionReturn(PETSC_SUCCESS);
}
2559 
2560 /*@
2561   PCFieldSplitSetSchurFactType -  sets which blocks of the approximate block factorization to retain in the preconditioner
2562 
2563   Collective
2564 
2565   Input Parameters:
2566 + pc    - the preconditioner context
2567 - ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default
2568 
2569   Options Database Key:
2570 . -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is `full`
2571 
2572   Level: intermediate
2573 
2574   Notes:
2575   The FULL factorization is
2576 
2577 .vb
2578    (A   B)  = (1       0) (A   0) (1  Ainv*B)  = L D U
2579    (C   E)    (C*Ainv  1) (0   S) (0       1)
.ve
    where S = E - C*Ainv*B. In practice, the full factorization is applied via block triangular solves with the grouping L*(D*U). UPPER uses D*U, LOWER uses L*D,
    and DIAG is the diagonal part with the sign of S flipped (because this makes the preconditioner positive definite for many formulations, thus allowing the use of `KSPMINRES`).
2583     Sign flipping of S can be turned off with `PCFieldSplitSetSchurScale()`.
2584 
2585     If A and S are solved exactly
2586 .vb
2587       *) FULL factorization is a direct solver.
2588       *) The preconditioned operator with LOWER or UPPER has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations.
2589       *) With DIAG, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations.
2590 .ve
2591 
2592   If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner
2593   application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice.
2594 
2595   For symmetric problems in which A is positive definite and S is negative definite, DIAG can be used with `KSPMINRES`.
2596 
2597   A flexible method like `KSPFGMRES` or `KSPGCR` must be used if the fieldsplit preconditioner is nonlinear (e.g. a few iterations of a Krylov method is used to solve with A or S).
2598 
2599   References:
2600 +   * - Murphy, Golub, and Wathen, A note on preconditioning indefinite linear systems, SIAM J. Sci. Comput., 21 (2000).
2601 -   * - Ipsen, A note on preconditioning nonsymmetric matrices, SIAM J. Sci. Comput., 23 (2001).
2602 
2603 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`
2604 @*/
PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscTryMethod is a no-op unless pc is a PCFIELDSPLIT of Schur composite type */
  PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2612 
/* Implementation of PCFieldSplitSetSchurFactType(): simply record the requested factorization form */
static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurfactorization = ftype;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2621 
2622 /*@
2623   PCFieldSplitSetSchurScale -  Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`.
2624 
2625   Collective
2626 
2627   Input Parameters:
2628 + pc    - the preconditioner context
2629 - scale - scaling factor for the Schur complement
2630 
2631   Options Database Key:
2632 . -pc_fieldsplit_schur_scale - default is -1.0
2633 
2634   Level: intermediate
2635 
2636 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurFactType()`
2637 @*/
PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* scale must be the same on all ranks of the PC's communicator */
  PetscValidLogicalCollectiveScalar(pc, scale, 2);
  PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2646 
/* Implementation of PCFieldSplitSetSchurScale(): record the scaling applied to S in the DIAG factorization */
static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurscale = scale;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2655 
2656 /*@C
2657   PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement
2658 
2659   Collective
2660 
2661   Input Parameter:
2662 . pc - the preconditioner context
2663 
2664   Output Parameters:
2665 + A00 - the (0,0) block
2666 . A01 - the (0,1) block
2667 . A10 - the (1,0) block
2668 - A11 - the (1,1) block
2669 
2670   Level: advanced
2671 
2672 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()`
2673 @*/
2674 PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11)
2675 {
2676   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2677 
2678   PetscFunctionBegin;
2679   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2680   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach.");
2681   if (A00) *A00 = jac->pmat[0];
2682   if (A01) *A01 = jac->B;
2683   if (A10) *A10 = jac->C;
2684   if (A11) *A11 = jac->pmat[1];
2685   PetscFunctionReturn(PETSC_SUCCESS);
2686 }
2687 
2688 /*@
2689   PCFieldSplitSetGKBTol -  Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner in `PCFIELDSPLIT`
2690 
2691   Collective
2692 
2693   Input Parameters:
2694 + pc        - the preconditioner context
2695 - tolerance - the solver tolerance
2696 
2697   Options Database Key:
2698 . -pc_fieldsplit_gkb_tol - default is 1e-5
2699 
2700   Level: intermediate
2701 
2702   Note:
2703   The generalized GKB algorithm uses a lower bound estimate of the error in energy norm as stopping criterion.
2704   It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than
  this estimate, the stopping criterion is satisfactory in practical cases [Ar13].
2706 
2707   References:
2708   [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013.
2709 
2710 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()`
2711 @*/
PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* tolerance must be the same on all ranks of the PC's communicator */
  PetscValidLogicalCollectiveReal(pc, tolerance, 2);
  PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2720 
/* Implementation of PCFieldSplitSetGKBTol(): store the GKB stopping tolerance */
static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbtol = tolerance;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2729 
2730 /*@
2731   PCFieldSplitSetGKBMaxit -  Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner in `PCFIELDSPLIT`
2732 
2733   Collective
2734 
2735   Input Parameters:
2736 + pc    - the preconditioner context
2737 - maxit - the maximum number of iterations
2738 
2739   Options Database Key:
2740 . -pc_fieldsplit_gkb_maxit - default is 100
2741 
2742   Level: intermediate
2743 
2744 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()`
2745 @*/
PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* maxit must be the same on all ranks of the PC's communicator */
  PetscValidLogicalCollectiveInt(pc, maxit, 2);
  PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2754 
/* Implementation of PCFieldSplitSetGKBMaxit(): store the GKB iteration limit */
static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbmaxit = maxit;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2763 
2764 /*@
2765   PCFieldSplitSetGKBDelay -  Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization in `PCFIELDSPLIT`
2766   preconditioner.
2767 
2768   Collective
2769 
2770   Input Parameters:
2771 + pc    - the preconditioner context
2772 - delay - the delay window in the lower bound estimate
2773 
2774   Options Database Key:
2775 . -pc_fieldsplit_gkb_delay - default is 5
2776 
2777   Level: intermediate
2778 
2779   Note:
2780   The algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. The lower bound of the error ||u-u^k||_H
2781   is expressed as a truncated sum. The error at iteration k can only be measured at iteration (k + delay), and thus the algorithm needs
2782   at least (delay + 1) iterations to stop. For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to
2783 
2784   References:
2785   [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013.
2786 
2787 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2788 @*/
PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* delay must be the same on all ranks of the PC's communicator */
  PetscValidLogicalCollectiveInt(pc, delay, 2);
  PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2797 
/* Implementation of PCFieldSplitSetGKBDelay(): store the delay window of the lower-bound error estimate */
static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbdelay = delay;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2806 
2807 /*@
2808   PCFieldSplitSetGKBNu -  Sets the scalar value nu >= 0 in the transformation H = A00 + nu*A01*A01' of the (1,1) block in the Golub-Kahan bidiagonalization preconditioner
2809   in `PCFIELDSPLIT`
2810 
2811   Collective
2812 
2813   Input Parameters:
2814 + pc - the preconditioner context
2815 - nu - the shift parameter
2816 
2817   Options Database Key:
2818 . -pc_fieldsplit_gkb_nu - default is 1
2819 
2820   Level: intermediate
2821 
2822   Notes:
2823   This shift is in general done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by choosing nu sufficiently big. However,
2824   if nu is chosen too big, the matrix H might be badly conditioned and the solution of the linear system Hx = b in the inner loop becomes difficult. It is therefore
2825   necessary to find a good balance in between the convergence of the inner and outer loop.
2826 
2827   For nu = 0, no shift is done. In this case A00 has to be positive definite. The matrix N in [Ar13] is then chosen as identity.
2828 
2829   References:
2830   [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013.
2831 
2832 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2833 @*/
PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* nu must be the same on all ranks of the PC's communicator */
  PetscValidLogicalCollectiveReal(pc, nu, 2);
  PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2842 
/* Implementation of PCFieldSplitSetGKBNu(): store the shift parameter nu used in H = A00 + nu*A01*A01' */
static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbnu = nu;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2851 
2852 static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type)
2853 {
2854   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2855 
2856   PetscFunctionBegin;
2857   jac->type = type;
2858   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
2859   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
2860   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
2861   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
2862   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
2863   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
2864   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
2865   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
2866   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
2867 
2868   if (type == PC_COMPOSITE_SCHUR) {
2869     pc->ops->apply          = PCApply_FieldSplit_Schur;
2870     pc->ops->applytranspose = PCApplyTranspose_FieldSplit_Schur;
2871     pc->ops->view           = PCView_FieldSplit_Schur;
2872 
2873     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur));
2874     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit));
2875     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit));
2876     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit));
2877     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit));
2878   } else if (type == PC_COMPOSITE_GKB) {
2879     pc->ops->apply = PCApply_FieldSplit_GKB;
2880     pc->ops->view  = PCView_FieldSplit_GKB;
2881 
2882     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
2883     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit));
2884     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit));
2885     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit));
2886     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit));
2887   } else {
2888     pc->ops->apply = PCApply_FieldSplit;
2889     pc->ops->view  = PCView_FieldSplit;
2890 
2891     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
2892   }
2893   PetscFunctionReturn(PETSC_SUCCESS);
2894 }
2895 
/* Implementation of PCFieldSplitSetBlockSize(): record the block size that defines the default strided splits */
static PetscErrorCode PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* The block size may only be set once: reject nonpositive values and any attempt
     to change it after it has been set to a positive value */
  PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" PetscInt_FMT, bs);
  PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs);
  jac->bs = bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2906 
2907 static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
2908 {
2909   PC_FieldSplit    *jac           = (PC_FieldSplit *)pc->data;
2910   PC_FieldSplitLink ilink_current = jac->head;
2911   IS                is_owned;
2912 
2913   PetscFunctionBegin;
2914   jac->coordinates_set = PETSC_TRUE; // Internal flag
2915   PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, NULL));
2916 
2917   while (ilink_current) {
2918     // For each IS, embed it to get local coords indces
2919     IS              is_coords;
2920     PetscInt        ndofs_block;
2921     const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block
2922 
2923     // Setting drop to true for safety. It should make no difference.
2924     PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords));
2925     PetscCall(ISGetLocalSize(is_coords, &ndofs_block));
2926     PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration));
2927 
2928     // Allocate coordinates vector and set it directly
2929     PetscCall(PetscMalloc1(ndofs_block * dim, &(ilink_current->coords)));
2930     for (PetscInt dof = 0; dof < ndofs_block; ++dof) {
2931       for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d];
2932     }
2933     ilink_current->dim   = dim;
2934     ilink_current->ndofs = ndofs_block;
2935     PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration));
2936     PetscCall(ISDestroy(&is_coords));
2937     ilink_current = ilink_current->next;
2938   }
2939   PetscCall(ISDestroy(&is_owned));
2940   PetscFunctionReturn(PETSC_SUCCESS);
2941 }
2942 
2943 /*@
2944   PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
2945 
2946   Collective
2947 
2948   Input Parameters:
2949 + pc   - the preconditioner context
2950 - type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
2951 
2952   Options Database Key:
2953 . -pc_fieldsplit_type <type: one of multiplicative, additive, symmetric_multiplicative, special, schur> - Sets fieldsplit preconditioner type
2954 
2955   Level: intermediate
2956 
2957 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
2958           `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
2959 @*/
PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscTryMethod is a no-op when pc is not a PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2967 
2968 /*@
2969   PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
2970 
2971   Not collective
2972 
2973   Input Parameter:
2974 . pc - the preconditioner context
2975 
2976   Output Parameter:
2977 . type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
2978 
2979   Level: intermediate
2980 
2981 .seealso: [](sec_block_matrices), `PC`, `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
2982           `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
2983 @*/
2984 PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type)
2985 {
2986   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2987 
2988   PetscFunctionBegin;
2989   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2990   PetscAssertPointer(type, 2);
2991   *type = jac->type;
2992   PetscFunctionReturn(PETSC_SUCCESS);
2993 }
2994 
2995 /*@
2996   PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
2997 
2998   Logically Collective
2999 
3000   Input Parameters:
3001 + pc  - the preconditioner context
3002 - flg - boolean indicating whether to use field splits defined by the `DM`
3003 
3004   Options Database Key:
3005 . -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM`
3006 
3007   Level: intermediate
3008 
.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
3010 @*/
3011 PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg)
3012 {
3013   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3014   PetscBool      isfs;
3015 
3016   PetscFunctionBegin;
3017   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3018   PetscValidLogicalCollectiveBool(pc, flg, 2);
3019   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3020   if (isfs) jac->dm_splits = flg;
3021   PetscFunctionReturn(PETSC_SUCCESS);
3022 }
3023 
3024 /*@
3025   PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3026 
3027   Logically Collective
3028 
3029   Input Parameter:
3030 . pc - the preconditioner context
3031 
3032   Output Parameter:
3033 . flg - boolean indicating whether to use field splits defined by the `DM`
3034 
3035   Level: intermediate
3036 
.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
3038 @*/
3039 PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg)
3040 {
3041   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3042   PetscBool      isfs;
3043 
3044   PetscFunctionBegin;
3045   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3046   PetscAssertPointer(flg, 2);
3047   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3048   if (isfs) {
3049     if (flg) *flg = jac->dm_splits;
3050   }
3051   PetscFunctionReturn(PETSC_SUCCESS);
3052 }
3053 
3054 /*@
3055   PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3056 
3057   Logically Collective
3058 
3059   Input Parameter:
3060 . pc - the preconditioner context
3061 
3062   Output Parameter:
3063 . flg - boolean indicating whether to detect fields or not
3064 
3065   Level: intermediate
3066 
3067 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()`
3068 @*/
3069 PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg)
3070 {
3071   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3072 
3073   PetscFunctionBegin;
3074   *flg = jac->detect;
3075   PetscFunctionReturn(PETSC_SUCCESS);
3076 }
3077 
3078 /*@
3079   PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3080 
3081   Logically Collective
3082 
  Input Parameters:
+ pc  - the preconditioner context
- flg - boolean indicating whether to detect fields or not
3088 
3089   Options Database Key:
3090 . -pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point
3091 
3092   Level: intermediate
3093 
3094   Note:
3095   Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`).
3096 
3097 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`
3098 @*/
3099 PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg)
3100 {
3101   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3102 
3103   PetscFunctionBegin;
3104   jac->detect = flg;
3105   if (jac->detect) {
3106     PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
3107     PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
3108   }
3109   PetscFunctionReturn(PETSC_SUCCESS);
3110 }
3111 
3112 /*MC
3113    PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual
3114    collections of variables (that may overlap) called splits. See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details.
3115 
3116    Options Database Keys:
3117 +   -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split
3118 .   -pc_fieldsplit_default - automatically add any fields to additional splits that have not
3119                               been supplied explicitly by `-pc_fieldsplit_%d_fields`
3120 .   -pc_fieldsplit_block_size <bs> - size of block that defines fields (i.e. there are bs fields)
3121 .   -pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting
3122 .   -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`; see `PCFieldSplitSetSchurPre()`
3123 .   -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - set factorization type when using `-pc_fieldsplit_type schur`; see `PCFieldSplitSetSchurFactType()`
3124 -   -pc_fieldsplit_detect_saddle_point - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver
3125 
3126      Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` .
3127      The options prefix for the inner solver when using the Golub-Kahan bidiagonalization preconditioner is `-fieldsplit_0_`
3128      For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields.
3129 
3130      To set options on the solvers for each block append `-fieldsplit_` to all the `PC`
3131      options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1`
3132 
3133      To set the options on the solvers separate for each block call `PCFieldSplitGetSubKSP()`
3134       and set the options directly on the resulting `KSP` object
3135 
3136     Level: intermediate
3137 
3138    Notes:
3139     Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries and `PCFieldSplitSetIS()`
3140      to define a split by an arbitrary collection of entries.
3141 
3142       If no splits are set the default is used. The splits are defined by entries strided by bs,
3143       beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`,
3144       if this is not called the block size defaults to the blocksize of the second matrix passed
3145       to `KSPSetOperators()`/`PCSetOperators()`.
3146 
3147       For the Schur complement preconditioner if
3148 
3149       ```{math}
3150       J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right]
3151       ```
3152 
3153       the preconditioner using `full` factorization is logically
3154       ```{math}
3155       \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right]
3156       ```
3157      where the action of $\text{inv}(A_{00})$ is applied using the KSP solver with prefix `-fieldsplit_0_`.  $S$ is the Schur complement
3158      ```{math}
3159      S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01}
3160      ```
3161      which is usually dense and not stored explicitly.  The action of $\text{ksp}(S)$ is computed using the KSP solver with prefix `-fieldsplit_splitname_` (where `splitname` was given
3162      in providing the SECOND split or 1 if not given). For `PCFieldSplitGetSubKSP()` when field number is 0,
3163      it returns the KSP associated with `-fieldsplit_0_` while field number 1 gives `-fieldsplit_1_` KSP. By default
3164      $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$.
3165 
3166      The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above,
3167      `diag` gives
3168       ```{math}
3169       \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\  0 & -\text{ksp}(S) \end{array}\right]
3170       ```
3171      Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$  so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip
3172      can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of
3173       ```{math}
3174       \left[\begin{array}{cc} A_{00} & 0 \\  A_{10} & S \end{array}\right]
3175       ```
3176      where the inverses of $A_{00}$ and $S$ are applied using `KSP`s. The `upper` factorization is the inverse of
3177       ```{math}
3178       \left[\begin{array}{cc} A_{00} & A_{01} \\  0 & S \end{array}\right]
3179       ```
3180      where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s.
3181 
3182      If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS`
3183      is used automatically for a second block.
3184 
3185      The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1.
3186      Generally it should be used with the `MATAIJ` format.
3187 
3188      The forms of these preconditioners are closely related if not identical to forms derived as "Distributive Iterations", see,
3189      for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`wesseling2009`.
3190      One can also use `PCFIELDSPLIT`
3191      inside a smoother resulting in "Distributive Smoothers".
3192 
3193      See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`.
3194 
3195      The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the
3196      residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables.
3197 
3198      The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape
3199      ```{math}
3200      \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right]
3201      ```
3202      with $A_{00}$ positive semi-definite. The implementation follows {cite}`arioli2013`. Therein, we choose $N := 1/\nu * I$ and the $(1,1)$-block of the matrix is modified to $H = A_{00} + \nu A_{01} A_{01}'$.
3203      A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`.
3204 
3205    Developer Note:
3206    The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their
3207    user API.
3208 
3209 .seealso: [](sec_block_matrices), `PC`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`,
3210           `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`,
3211           `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`,
3212           `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()`
3213 M*/
3214 
3215 PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc)
3216 {
3217   PC_FieldSplit *jac;
3218 
3219   PetscFunctionBegin;
3220   PetscCall(PetscNew(&jac));
3221 
3222   jac->bs                 = -1;
3223   jac->nsplits            = 0;
3224   jac->type               = PC_COMPOSITE_MULTIPLICATIVE;
3225   jac->schurpre           = PC_FIELDSPLIT_SCHUR_PRE_USER; /* Try user preconditioner first, fall back on diagonal */
3226   jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL;
3227   jac->schurscale         = -1.0;
3228   jac->dm_splits          = PETSC_TRUE;
3229   jac->detect             = PETSC_FALSE;
3230   jac->gkbtol             = 1e-5;
3231   jac->gkbdelay           = 5;
3232   jac->gkbnu              = 1;
3233   jac->gkbmaxit           = 100;
3234   jac->gkbmonitor         = PETSC_FALSE;
3235   jac->coordinates_set    = PETSC_FALSE;
3236 
3237   pc->data = (void *)jac;
3238 
3239   pc->ops->apply           = PCApply_FieldSplit;
3240   pc->ops->applytranspose  = PCApplyTranspose_FieldSplit;
3241   pc->ops->setup           = PCSetUp_FieldSplit;
3242   pc->ops->reset           = PCReset_FieldSplit;
3243   pc->ops->destroy         = PCDestroy_FieldSplit;
3244   pc->ops->setfromoptions  = PCSetFromOptions_FieldSplit;
3245   pc->ops->view            = PCView_FieldSplit;
3246   pc->ops->applyrichardson = NULL;
3247 
3248   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit));
3249   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
3250   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit));
3251   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit));
3252   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit));
3253   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit));
3254   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit));
3255   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit));
3256   PetscFunctionReturn(PETSC_SUCCESS);
3257 }
3258