xref: /petsc/src/ksp/pc/impls/fieldsplit/fieldsplit.c (revision e0b7e82fd3cf27fce84cc3e37e8d70a5c36a2d4e)
1 #include <petsc/private/pcimpl.h>  /*I "petscpc.h" I*/
2 #include <petsc/private/kspimpl.h> /*  This is needed to provide the appropriate PETSC_EXTERN for KSP_Solve_FS ....*/
3 #include <petscdm.h>
4 
/* Human-readable names for the Schur preconditioner and factorization enums; the trailing
   three entries (enum type name, options prefix, NULL) follow the PetscOptionsEnum() convention */
const char *const PCFieldSplitSchurPreTypes[]  = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL};
const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL};

/* Logging events for the individual sub-solves performed while applying the fieldsplit PC */
PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4;
9 
typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
/* Linked-list node describing one split (one field block) of the preconditioner */
struct _PC_FieldSplitLink {
  KSP               ksp;                 /* solver for this split's diagonal block */
  Vec               x, y, z;             /* work vectors sized to this split */
  char             *splitname;           /* user-visible name of the split */
  PetscInt          nfields;             /* number of fields in this split */
  PetscInt         *fields, *fields_col; /* row and column field indices (used when the split is defined by fields, not by an IS) */
  VecScatter        sctx;                /* scatter between the full vector and this split's subvector */
  IS                is, is_col;          /* row and column index sets defining the split */
  PC_FieldSplitLink next, previous;      /* doubly-linked list of splits */
  PetscLogEvent     event;               /* logging event for solves on this split */

  /* Used only when setting coordinates with PCSetCoordinates */
  PetscInt   dim;    /* spatial dimension of the coordinates */
  PetscInt   ndofs;  /* number of dofs that carry coordinates */
  PetscReal *coords; /* coordinate array for this split */
};
27 
/* Private context for the PCFIELDSPLIT preconditioner */
typedef struct {
  PCCompositeType type;          /* how the split solves are composed (additive, multiplicative, Schur, GKB, ...) */
  PetscBool       defaultsplit;  /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */
  PetscBool       splitdefined;  /* Flag is set after the splits have been defined, to prevent more splits from being added */
  PetscInt        bs;            /* Block size for IS and Mat structures */
  PetscInt        nsplits;       /* Number of field divisions defined */
  Vec            *x, *y, w1, w2; /* per-split work vectors (x,y) and two whole-system work vectors (w1,w2) */
  Mat            *mat;           /* The diagonal block for each split */
  Mat            *pmat;          /* The preconditioning diagonal block for each split */
  Mat            *Afield;        /* The rows of the matrix associated with each split */
  PetscBool       issetup;       /* PCSetUp_FieldSplit() has extracted the submatrices/index sets */

  /* Only used when Schur complement preconditioning is used */
  Mat                       B;          /* The (0,1) block */
  Mat                       C;          /* The (1,0) block */
  Mat                       schur;      /* The Schur complement S = A11 - A10 A00^{-1} A01, the KSP here, kspinner, is H_1 in [El08] */
  Mat                       schurp;     /* Assembled approximation to S built by MatSchurComplement to be used as a preconditioning matrix when solving with S */
  Mat                       schur_user; /* User-provided preconditioning matrix for the Schur complement */
  PCFieldSplitSchurPreType  schurpre;   /* Determines which preconditioning matrix is used for the Schur complement */
  PCFieldSplitSchurFactType schurfactorization; /* which block factorization (DIAG/LOWER/UPPER/FULL) is applied */
  KSP                       kspschur;   /* The solver for S */
  KSP                       kspupper;   /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */
  PetscScalar               schurscale; /* Scaling factor for the Schur complement solution with DIAG factorization */

  /* Only used when Golub-Kahan bidiagonalization preconditioning is used */
  Mat          H;           /* The modified matrix H = A00 + nu*A01*A01'              */
  PetscReal    gkbtol;      /* Stopping tolerance for lower bound estimate            */
  PetscInt     gkbdelay;    /* The delay window for the stopping criterion            */
  PetscReal    gkbnu;       /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */
  PetscInt     gkbmaxit;    /* Maximum number of iterations for outer loop            */
  PetscBool    gkbmonitor;  /* Monitor for gkb iterations and the lower bound error   */
  PetscViewer  gkbviewer;   /* Viewer context for gkbmonitor                          */
  Vec          u, v, d, Hu; /* Work vectors for the GKB algorithm                     */
  PetscScalar *vecz;        /* Contains intermediate values, eg for lower bound       */

  PC_FieldSplitLink head;             /* first node of the linked list of splits */
  PetscBool         isrestrict;       /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */
  PetscBool         suboptionsset;    /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */
  PetscBool         dm_splits;        /* Whether to use DMCreateFieldDecomposition() whenever possible */
  PetscBool         diag_use_amat;    /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         detect;           /* Whether to form 2-way split by finding zero diagonal entries */
  PetscBool         coordinates_set;  /* Whether PCSetCoordinates has been called */
} PC_FieldSplit;
72 
/*
    Note:
    There is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit instead of
    inside PC_FieldSplitLink; the layout is purely historical. If you want to be able to add new fields
    after the PC has already been used, you could change this.
*/
79 
80 /* This helper is so that setting a user-provided preconditioning matrix is orthogonal to choosing to use it.  This way the
81 * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices. */
82 static Mat FieldSplitSchurPre(PC_FieldSplit *jac)
83 {
84   switch (jac->schurpre) {
85   case PC_FIELDSPLIT_SCHUR_PRE_SELF:
86     return jac->schur;
87   case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
88     return jac->schurp;
89   case PC_FIELDSPLIT_SCHUR_PRE_A11:
90     return jac->pmat[1];
91   case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */
92   case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */
93   default:
94     return jac->schur_user ? jac->schur_user : jac->pmat[1];
95   }
96 }
97 
98 #include <petscdraw.h>
/* Viewer for PCFIELDSPLIT with non-Schur, non-GKB composition: prints the composition type,
   the fields (or IS) defining each split, and each split's KSP, for ASCII and draw viewers */
static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head; /* walk the linked list of splits */

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for each split is in the following KSP objects:\n"));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        /* Split was defined by explicit field numbers: print them comma-separated */
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      PetscCall(KSPView(ilink->ksp, viewer));
      ilink = ilink->next;
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    /* Lay out one KSP drawing per split, side by side, centered on the current point */
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
156 
/* Viewer for PCFIELDSPLIT with Schur-complement composition: reports the factorization type,
   which matrix preconditions the Schur complement, the split definitions, and the A00/upper/Schur KSPs */
static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer)
{
  PC_FieldSplit             *jac = (PC_FieldSplit *)pc->data;
  PetscBool                  iascii, isdraw;
  PetscInt                   i, j;
  PC_FieldSplitLink          ilink = jac->head;
  MatSchurComplementAinvType atype;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    /* Describe the source of the Schur-complement preconditioning matrix (mirrors FieldSplitSchurPre()) */
    switch (jac->schurpre) {
    case PC_FIELDSPLIT_SCHUR_PRE_SELF:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from S itself\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
      if (jac->schur) {
        PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype));
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's "))));
      }
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_A11:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_FULL:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from the exact Schur complement\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_USER:
      /* USER falls back to A11 when no matrix has been supplied yet */
      if (jac->schur_user) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from user provided matrix\n"));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      }
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre);
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Split info:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      ilink = ilink->next;
    }
    /* The sub-KSPs may not exist yet if the PC has not been set up */
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->head) {
      PetscCall(KSPView(jac->head->ksp, viewer));
    } else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    PetscCall(PetscViewerASCIIPopTab(viewer));
    /* Only show the upper solver separately when it differs from the A00 solver */
    if (jac->head && jac->kspupper != jac->head->ksp) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->kspschur) {
      PetscCall(KSPView(jac->kspschur, viewer));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    }
    PetscCall(PetscViewerASCIIPopTab(viewer));
    PetscCall(PetscViewerASCIIPopTab(viewer));
  } else if (isdraw && jac->head) {
    PetscDraw draw;
    PetscReal x, y, w, wd, h;
    PetscInt  cnt = 2; /* number of KSP drawings: A00 and Schur, plus optionally the upper A00 solver */
    char      str[32];

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (jac->kspupper != jac->head->ksp) cnt++;
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (cnt + 1);

    PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) {
      /* USER with no matrix supplied behaves like A11; label it accordingly */
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11]));
    } else {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre]));
    }
    PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    x = x - wd * (cnt - 1) / 2.0;

    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->head->ksp, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
    if (jac->kspupper != jac->head->ksp) {
      x += wd;
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(jac->kspupper, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
    }
    x += wd;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->kspschur, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
280 
/* Viewer for PCFIELDSPLIT with Golub-Kahan bidiagonalization composition: prints the GKB
   parameters and the solver for the augmented operator H = A00 + nu*A01*A01' (split 0) */
static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));

    PetscCall(PetscViewerASCIIPrintf(viewer, "  Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for H = A00 + nu*A01*A01' matrix:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));

    /* GKB only ever solves with the first split's KSP (the H operator) */
    if (ilink->fields) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields "));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      for (j = 0; j < ilink->nfields; j++) {
        if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
        PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
      }
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n"));
    }
    PetscCall(KSPView(ilink->ksp, viewer));

    PetscCall(PetscViewerASCIIPopTab(viewer));
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    /* Lay out one KSP drawing per split, side by side, centered on the current point */
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
341 
342 /* Precondition: jac->bs is set to a meaningful value or MATNEST */
343 static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc)
344 {
345   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
346   PetscInt       bs, i, nfields, *ifields, nfields_col, *ifields_col;
347   PetscBool      flg, flg_col, mnest;
348   char           optionname[128], splitname[8], optionname_col[128];
349 
350   PetscFunctionBegin;
351   PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &mnest));
352   if (mnest) {
353     PetscCall(MatNestGetSize(pc->pmat, &bs, NULL));
354   } else {
355     bs = jac->bs;
356   }
357   PetscCall(PetscMalloc2(bs, &ifields, bs, &ifields_col));
358   for (i = 0, flg = PETSC_TRUE;; i++) {
359     PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
360     PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
361     PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i));
362     nfields     = bs;
363     nfields_col = bs;
364     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
365     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col));
366     if (!flg) break;
367     else if (flg && !flg_col) {
368       PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
369       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields));
370     } else {
371       PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
372       PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match");
373       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col));
374     }
375   }
376   if (i > 0) {
377     /* Makes command-line setting of splits take precedence over setting them in code.
378        Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would
379        create new splits, which would probably not be what the user wanted. */
380     jac->splitdefined = PETSC_TRUE;
381   }
382   PetscCall(PetscFree2(ifields, ifields_col));
383   PetscFunctionReturn(PETSC_SUCCESS);
384 }
385 
/* Ensures at least two splits are defined before setup, trying in order:
   (1) a DM field decomposition, (2) zero-diagonal detection (-pc_fieldsplit_detect_coupling
   or jac->detect), (3) options-database splits, (4) one split per block/nest field.
   If exactly one split was given, its complement becomes the second split. */
static PetscErrorCode PCFieldSplitSetDefaults(PC pc)
{
  PC_FieldSplit    *jac                = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink              = jac->head;
  PetscBool         fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE;
  PetscInt          i;

  PetscFunctionBegin;
  /*
   Kinda messy, but at least this now uses DMCreateFieldDecomposition().
   Should probably be rewritten.
   */
  if (!ilink) {
    PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL));
    if (pc->dm && jac->dm_splits && !jac->detect && !coupling) {
      /* Derive the splits from the DM's field decomposition */
      PetscInt  numFields, f, i, j;
      char    **fieldNames;
      IS       *fields;
      DM       *dms;
      DM        subdm[128]; /* NOTE(review): only numFields is checked against 128 below, not the split counter i — presumably i <= numFields always holds; confirm */
      PetscBool flg;

      PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms));
      /* Allow the user to prescribe the splits */
      for (i = 0, flg = PETSC_TRUE;; i++) {
        PetscInt ifields[128];
        IS       compField;
        char     optionname[128], splitname[8];
        PetscInt nfields = numFields;

        PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
        PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
        if (!flg) break;
        PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields);
        PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i]));
        if (nfields == 1) {
          /* A single-field split inherits that field's name */
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField));
        } else {
          PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
          PetscCall(PCFieldSplitSetIS(pc, splitname, compField));
        }
        PetscCall(ISDestroy(&compField));
        /* The names/ISs of the fields consumed by this split are no longer needed */
        for (j = 0; j < nfields; ++j) {
          f = ifields[j];
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      }
      if (i == 0) {
        /* No user-prescribed splits: one split per DM field */
        for (f = 0; f < numFields; ++f) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f]));
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      } else {
        /* Replace the per-field DMs with the sub-DMs created for the user's splits */
        for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j));
        PetscCall(PetscFree(dms));
        PetscCall(PetscMalloc1(i, &dms));
        for (j = 0; j < i; ++j) dms[j] = subdm[j];
      }
      PetscCall(PetscFree(fieldNames));
      PetscCall(PetscFree(fields));
      if (dms) {
        PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n"));
        /* Attach each DM to the corresponding split's KSP (inactive, operators come from pc->dm) */
        for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) {
          const char *prefix;
          PetscCall(PetscObjectGetOptionsPrefix((PetscObject)ilink->ksp, &prefix));
          PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dms[i], prefix));
          PetscCall(KSPSetDM(ilink->ksp, dms[i]));
          PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE));
          {
            PetscErrorCode (*func)(KSP, Mat, Mat, void *);
            void *ctx;

            /* Propagate the operator-assembly callback from the outer DM to the sub-DM */
            PetscCall(DMKSPGetComputeOperators(pc->dm, &func, &ctx));
            PetscCall(DMKSPSetComputeOperators(dms[i], func, ctx));
          }
          PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0));
          PetscCall(DMDestroy(&dms[i]));
        }
        PetscCall(PetscFree(dms));
      }
    } else {
      if (jac->bs <= 0) {
        if (pc->pmat) {
          PetscCall(MatGetBlockSize(pc->pmat, &jac->bs));
        } else jac->bs = 1;
      }

      if (jac->detect) {
        /* 2-way split: rows with zero diagonal entries vs. the rest */
        IS       zerodiags, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->diag_use_amat) {
          PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags));
        } else {
          PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags));
        }
        PetscCall(ISComplement(zerodiags, nmin, nmax, &rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags));
        PetscCall(ISDestroy(&zerodiags));
        PetscCall(ISDestroy(&rest));
      } else if (coupling) {
        /* 2-way split: rows with off-block-diagonal entries vs. all rows */
        IS       coupling, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->offdiag_use_amat) {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling));
        } else {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling));
        }
        PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest));
        PetscCall(ISSetIdentity(rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", coupling));
        PetscCall(ISDestroy(&coupling));
        PetscCall(ISDestroy(&rest));
      } else {
        PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL));
        if (!fieldsplit_default) {
          /* Allow user to set fields from command line,  if bs was known at the time of PCSetFromOptions_FieldSplit()
           then it is set there. This is not ideal because we should only have options set in XXSetFromOptions(). */
          PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
          if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
        }
        if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) {
          /* Fall back to one split per field: bs splits, or one per nest block for MATNEST */
          Mat       M = pc->pmat;
          PetscBool isnest;
          PetscInt  nf;

          PetscCall(PetscInfo(pc, "Using default splitting of fields\n"));
          PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest));
          if (!isnest) {
            M = pc->mat;
            PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest));
          }
          if (!isnest) nf = jac->bs;
          else PetscCall(MatNestGetSize(M, &nf, NULL));
          for (i = 0; i < nf; i++) {
            char splitname[8];

            PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
            PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i));
          }
          jac->defaultsplit = PETSC_TRUE;
        }
      }
    }
  } else if (jac->nsplits == 1) {
    /* Exactly one split given: define the second split as its complement */
    IS       is2;
    PetscInt nmin, nmax;

    PetscCheck(ilink->is, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()");
    PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
    PetscCall(ISComplement(ilink->is, nmin, nmax, &is2));
    PetscCall(PCFieldSplitSetIS(pc, "1", is2));
    PetscCall(ISDestroy(&is2));
  }

  PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits);
  PetscFunctionReturn(PETSC_SUCCESS);
}
551 
552 static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu)
553 {
554   Mat       BT, T;
555   PetscReal nrmT, nrmB;
556 
557   PetscFunctionBegin;
558   PetscCall(MatHermitianTranspose(C, MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */
559   PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN));
560   PetscCall(MatNorm(T, NORM_1, &nrmT));
561   PetscCall(MatNorm(B, NORM_1, &nrmB));
562   PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/hermitian, GKB is not applicable.");
563 
564   /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */
565   /* setting N := 1/nu*I in [Ar13].                                                 */
566   PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT));
567   PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_DEFAULT, H)); /* H = A01*A01'          */
568   PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN));        /* H = A00 + nu*A01*A01' */
569 
570   PetscCall(MatDestroy(&BT));
571   PetscCall(MatDestroy(&T));
572   PetscFunctionReturn(PETSC_SUCCESS);
573 }
574 
575 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *option[], const char *value[], PetscBool *flg);
576 
577 static PetscErrorCode PCSetUp_FieldSplit(PC pc)
578 {
579   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
580   PC_FieldSplitLink ilink;
581   PetscInt          i, nsplit;
582   PetscBool         sorted, sorted_col, matnest = PETSC_FALSE;
583 
584   PetscFunctionBegin;
585   pc->failedreason = PC_NOERROR;
586   PetscCall(PCFieldSplitSetDefaults(pc));
587   nsplit = jac->nsplits;
588   ilink  = jac->head;
589   if (pc->pmat) PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
590 
591   /* get the matrices for each split */
592   if (!jac->issetup) {
593     PetscInt rstart, rend, nslots, bs;
594 
595     jac->issetup = PETSC_TRUE;
596 
597     /* This is done here instead of in PCFieldSplitSetFields() because may not have matrix at that point */
598     if (jac->defaultsplit || !ilink->is) {
599       if (jac->bs <= 0) jac->bs = nsplit;
600     }
601 
602     /*  MatCreateSubMatrix() for [S]BAIJ matrices can only work if the indices include entire blocks of the matrix */
603     PetscCall(MatGetBlockSize(pc->pmat, &bs));
604     if (bs > 1 && (jac->bs <= bs || jac->bs % bs)) {
605       PetscBool blk;
606 
607       PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &blk, MATBAIJ, MATSBAIJ, MATSEQBAIJ, MATSEQSBAIJ, MATMPIBAIJ, MATMPISBAIJ, NULL));
608       PetscCheck(!blk, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Cannot use MATBAIJ with PCFIELDSPLIT and currently set matrix and PC blocksizes");
609     }
610 
611     if (!matnest) { /* use the matrix blocksize and stride IS to determine the index sets that define the submatrices */
612       bs = jac->bs;
613       PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend));
614       nslots = (rend - rstart) / bs;
615       for (i = 0; i < nsplit; i++) {
616         if (jac->defaultsplit) {
617           PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is));
618           PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
619         } else if (!ilink->is) {
620           if (ilink->nfields > 1) {
621             PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col;
622 
623             PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii));
624             PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj));
625             for (j = 0; j < nslots; j++) {
626               for (k = 0; k < nfields; k++) {
627                 ii[nfields * j + k] = rstart + bs * j + fields[k];
628                 jj[nfields * j + k] = rstart + bs * j + fields_col[k];
629               }
630             }
631             PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is));
632             PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col));
633             PetscCall(ISSetBlockSize(ilink->is, nfields));
634             PetscCall(ISSetBlockSize(ilink->is_col, nfields));
635           } else {
636             PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is));
637             PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col));
638           }
639         }
640         PetscCall(ISSorted(ilink->is, &sorted));
641         if (ilink->is_col) PetscCall(ISSorted(ilink->is_col, &sorted_col));
642         PetscCheck(sorted && sorted_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Fields must be sorted when creating split");
643         ilink = ilink->next;
644       }
645     } else { /* use the IS that define the MATNEST to determine the index sets that define the submatrices */
646       IS      *rowis, *colis, *ises = NULL;
647       PetscInt mis, nis;
648 
649       PetscCall(MatNestGetSize(pc->pmat, &mis, &nis));
650       PetscCall(PetscMalloc2(mis, &rowis, nis, &colis));
651       PetscCall(MatNestGetISs(pc->pmat, rowis, colis));
652       if (!jac->defaultsplit) PetscCall(PetscMalloc1(mis, &ises));
653 
654       for (i = 0; i < nsplit; i++) {
655         if (jac->defaultsplit) {
656           PetscCall(ISDuplicate(rowis[i], &ilink->is));
657           PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
658         } else if (!ilink->is) {
659           if (ilink->nfields > 1) {
660             for (PetscInt j = 0; j < ilink->nfields; j++) ises[j] = rowis[ilink->fields[j]];
661             PetscCall(ISConcatenate(PetscObjectComm((PetscObject)pc), ilink->nfields, ises, &ilink->is));
662           } else {
663             PetscCall(ISDuplicate(rowis[ilink->fields[0]], &ilink->is));
664           }
665           PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
666         }
667         ilink = ilink->next;
668       }
669       PetscCall(PetscFree2(rowis, colis));
670       PetscCall(PetscFree(ises));
671     }
672   }
673 
674   ilink = jac->head;
675   if (!jac->pmat) {
676     Vec xtmp;
677 
678     PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL));
679     PetscCall(PetscMalloc1(nsplit, &jac->pmat));
680     PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y));
681     for (i = 0; i < nsplit; i++) {
682       MatNullSpace sp;
683 
684       /* Check for preconditioning matrix attached to IS */
685       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i]));
686       if (jac->pmat[i]) {
687         PetscCall(PetscObjectReference((PetscObject)jac->pmat[i]));
688         if (jac->type == PC_COMPOSITE_SCHUR) {
689           jac->schur_user = jac->pmat[i];
690 
691           PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
692         }
693       } else {
694         const char *prefix;
695         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i]));
696         PetscCall(MatGetOptionsPrefix(jac->pmat[i], &prefix));
697         if (!prefix) {
698           PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix));
699           PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix));
700         }
701         PetscCall(MatSetFromOptions(jac->pmat[i]));
702         PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view"));
703       }
704       /* create work vectors for each split */
705       PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i]));
706       ilink->x = jac->x[i];
707       ilink->y = jac->y[i];
708       ilink->z = NULL;
709       /* compute scatter contexts needed by multiplicative versions and non-default splits */
710       PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx));
711       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp));
712       if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp));
713       ilink = ilink->next;
714     }
715     PetscCall(VecDestroy(&xtmp));
716   } else {
717     MatReuse      scall;
718     MatNullSpace *nullsp = NULL;
719 
720     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
721       PetscCall(MatGetNullSpaces(nsplit, jac->pmat, &nullsp));
722       for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i]));
723       scall = MAT_INITIAL_MATRIX;
724     } else scall = MAT_REUSE_MATRIX;
725 
726     for (i = 0; i < nsplit; i++) {
727       Mat pmat;
728 
729       /* Check for preconditioning matrix attached to IS */
730       PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat));
731       if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i]));
732       ilink = ilink->next;
733     }
734     if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->pmat, &nullsp));
735   }
736   if (jac->diag_use_amat) {
737     ilink = jac->head;
738     if (!jac->mat) {
739       PetscCall(PetscMalloc1(nsplit, &jac->mat));
740       for (i = 0; i < nsplit; i++) {
741         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i]));
742         ilink = ilink->next;
743       }
744     } else {
745       MatReuse      scall;
746       MatNullSpace *nullsp = NULL;
747 
748       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
749         PetscCall(MatGetNullSpaces(nsplit, jac->mat, &nullsp));
750         for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i]));
751         scall = MAT_INITIAL_MATRIX;
752       } else scall = MAT_REUSE_MATRIX;
753 
754       for (i = 0; i < nsplit; i++) {
755         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i]));
756         ilink = ilink->next;
757       }
758       if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->mat, &nullsp));
759     }
760   } else {
761     jac->mat = jac->pmat;
762   }
763 
764   /* Check for null space attached to IS */
765   ilink = jac->head;
766   for (i = 0; i < nsplit; i++) {
767     MatNullSpace sp;
768 
769     PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp));
770     if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp));
771     ilink = ilink->next;
772   }
773 
774   if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) {
775     /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */
776     /* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? */
777     ilink = jac->head;
778     if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) {
779       /* special case need where Afield[0] is not needed and only certain columns of Afield[1] are needed since update is only on those rows of the solution */
780       if (!jac->Afield) {
781         PetscCall(PetscCalloc1(nsplit, &jac->Afield));
782         if (jac->offdiag_use_amat) {
783           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
784         } else {
785           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
786         }
787       } else {
788         MatReuse scall;
789 
790         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
791           PetscCall(MatDestroy(&jac->Afield[1]));
792           scall = MAT_INITIAL_MATRIX;
793         } else scall = MAT_REUSE_MATRIX;
794 
795         if (jac->offdiag_use_amat) {
796           PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
797         } else {
798           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
799         }
800       }
801     } else {
802       if (!jac->Afield) {
803         PetscCall(PetscMalloc1(nsplit, &jac->Afield));
804         for (i = 0; i < nsplit; i++) {
805           if (jac->offdiag_use_amat) {
806             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
807           } else {
808             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
809           }
810           ilink = ilink->next;
811         }
812       } else {
813         MatReuse scall;
814         if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
815           for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i]));
816           scall = MAT_INITIAL_MATRIX;
817         } else scall = MAT_REUSE_MATRIX;
818 
819         for (i = 0; i < nsplit; i++) {
820           if (jac->offdiag_use_amat) {
821             PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i]));
822           } else {
823             PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, &jac->Afield[i]));
824           }
825           ilink = ilink->next;
826         }
827       }
828     }
829   }
830 
831   if (jac->type == PC_COMPOSITE_SCHUR) {
832     IS          ccis;
833     PetscBool   isset, isspd;
834     PetscInt    rstart, rend;
835     char        lscname[256];
836     PetscObject LSC_L;
837     PetscBool   set, flg;
838 
839     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields");
840 
841     /* If pc->mat is SPD, don't scale by -1 the Schur complement */
842     if (jac->schurscale == (PetscScalar)-1.0) {
843       PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd));
844       jac->schurscale = (isset && isspd) ? 1.0 : -1.0;
845     }
846 
847     /* When extracting off-diagonal submatrices, we take complements from this range */
848     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
849     PetscCall(PetscObjectTypeCompareAny(jac->offdiag_use_amat ? (PetscObject)pc->mat : (PetscObject)pc->pmat, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
850 
851     if (jac->schur) {
852       KSP      kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper;
853       MatReuse scall;
854 
855       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
856         scall = MAT_INITIAL_MATRIX;
857         PetscCall(MatDestroy(&jac->B));
858         PetscCall(MatDestroy(&jac->C));
859       } else scall = MAT_REUSE_MATRIX;
860 
861       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
862       ilink = jac->head;
863       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
864       if (jac->offdiag_use_amat) {
865         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->B));
866       } else {
867         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->B));
868       }
869       PetscCall(ISDestroy(&ccis));
870       if (!flg) {
871         ilink = ilink->next;
872         PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
873         if (jac->offdiag_use_amat) {
874           PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->C));
875         } else {
876           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->C));
877         }
878         PetscCall(ISDestroy(&ccis));
879       } else {
880         PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &set, &flg));
881         if (set && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
882         else PetscCall(MatCreateTranspose(jac->B, &jac->C));
883       }
884       PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
885       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) {
886         PetscCall(MatDestroy(&jac->schurp));
887         PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
888       }
889       if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0]));
890       if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0]));
891       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
892     } else {
893       const char  *Dprefix;
894       char         schurprefix[256], schurmatprefix[256];
895       char         schurtestoption[256];
896       MatNullSpace sp;
897       KSP          kspt;
898 
899       /* extract the A01 and A10 matrices */
900       ilink = jac->head;
901       PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
902       if (jac->offdiag_use_amat) {
903         PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
904       } else {
905         PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
906       }
907       PetscCall(ISDestroy(&ccis));
908       ilink = ilink->next;
909       if (!flg) {
910         PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
911         if (jac->offdiag_use_amat) {
912           PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
913         } else {
914           PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
915         }
916         PetscCall(ISDestroy(&ccis));
917       } else {
918         PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &set, &flg));
919         if (set && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
920         else PetscCall(MatCreateTranspose(jac->B, &jac->C));
921       }
922       /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */
923       PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur));
924       PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT));
925       PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
926       PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
927       PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix));
928       PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt));
929       PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix));
930 
931       /* Note: this is not true in general */
932       PetscCall(MatGetNullSpace(jac->mat[1], &sp));
933       if (sp) PetscCall(MatSetNullSpace(jac->schur, sp));
934 
935       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname));
936       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
937       if (flg) {
938         DM  dmInner;
939         KSP kspInner;
940         PC  pcInner;
941 
942         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
943         PetscCall(KSPReset(kspInner));
944         PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0]));
945         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
946         /* Indent this deeper to emphasize the "inner" nature of this solver. */
947         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2));
948         PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2));
949         PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix));
950 
951         /* Set DM for new solver */
952         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
953         PetscCall(KSPSetDM(kspInner, dmInner));
954         PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE));
955 
956         /* Defaults to PCKSP as preconditioner */
957         PetscCall(KSPGetPC(kspInner, &pcInner));
958         PetscCall(PCSetType(pcInner, PCKSP));
959         PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp));
960       } else {
961         /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or
962           * PCFieldSplitSetIS_FieldSplit. We don't want KSPPREONLY because it makes the Schur complement inexact,
963           * preventing Schur complement reduction to be an accurate solve. Usually when an iterative solver is used for
964           * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make
965           * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used
966           * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. */
967         PetscCall(KSPSetType(jac->head->ksp, KSPGMRES));
968         PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp));
969       }
970       PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0]));
971       PetscCall(KSPSetFromOptions(jac->head->ksp));
972       PetscCall(MatSetFromOptions(jac->schur));
973 
974       PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg));
975       if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */
976         KSP kspInner;
977         PC  pcInner;
978 
979         PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
980         PetscCall(KSPGetPC(kspInner, &pcInner));
981         PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg));
982         if (flg) {
983           KSP ksp;
984 
985           PetscCall(PCKSPGetKSP(pcInner, &ksp));
986           if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE));
987         }
988       }
989       PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname));
990       PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
991       if (flg) {
992         DM dmInner;
993 
994         PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
995         PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper));
996         PetscCall(KSPSetNestLevel(jac->kspupper, pc->kspnestlevel));
997         PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure));
998         PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix));
999         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1));
1000         PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1));
1001         PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
1002         PetscCall(KSPSetDM(jac->kspupper, dmInner));
1003         PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE));
1004         PetscCall(KSPSetFromOptions(jac->kspupper));
1005         PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0]));
1006         PetscCall(VecDuplicate(jac->head->x, &jac->head->z));
1007       } else {
1008         jac->kspupper = jac->head->ksp;
1009         PetscCall(PetscObjectReference((PetscObject)jac->head->ksp));
1010       }
1011 
1012       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
1013       PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur));
1014       PetscCall(KSPSetNestLevel(jac->kspschur, pc->kspnestlevel));
1015       PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure));
1016       PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1));
1017       if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) {
1018         PC pcschur;
1019         PetscCall(KSPGetPC(jac->kspschur, &pcschur));
1020         PetscCall(PCSetType(pcschur, PCNONE));
1021         /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */
1022       } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) {
1023         PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
1024       }
1025       PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
1026       PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix));
1027       PetscCall(KSPSetOptionsPrefix(jac->kspschur, Dprefix));
1028       /* propagate DM */
1029       {
1030         DM sdm;
1031         PetscCall(KSPGetDM(jac->head->next->ksp, &sdm));
1032         if (sdm) {
1033           PetscCall(KSPSetDM(jac->kspschur, sdm));
1034           PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE));
1035         }
1036       }
1037       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
1038       /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */
1039       PetscCall(KSPSetFromOptions(jac->kspschur));
1040     }
1041     PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY));
1042     PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY));
1043 
1044     /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */
1045     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname));
1046     PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
1047     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
1048     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", (PetscObject)LSC_L));
1049     PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname));
1050     PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L));
1051     if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L));
1052     if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", (PetscObject)LSC_L));
1053   } else if (jac->type == PC_COMPOSITE_GKB) {
1054     IS       ccis;
1055     PetscInt rstart, rend;
1056 
1057     PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields");
1058 
1059     ilink = jac->head;
1060 
1061     /* When extracting off-diagonal submatrices, we take complements from this range */
1062     PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend));
1063 
1064     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1065     if (jac->offdiag_use_amat) {
1066       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1067     } else {
1068       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B));
1069     }
1070     PetscCall(ISDestroy(&ccis));
1071     /* Create work vectors for GKB algorithm */
1072     PetscCall(VecDuplicate(ilink->x, &jac->u));
1073     PetscCall(VecDuplicate(ilink->x, &jac->Hu));
1074     PetscCall(VecDuplicate(ilink->x, &jac->w2));
1075     ilink = ilink->next;
1076     PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis));
1077     if (jac->offdiag_use_amat) {
1078       PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1079     } else {
1080       PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C));
1081     }
1082     PetscCall(ISDestroy(&ccis));
1083     /* Create work vectors for GKB algorithm */
1084     PetscCall(VecDuplicate(ilink->x, &jac->v));
1085     PetscCall(VecDuplicate(ilink->x, &jac->d));
1086     PetscCall(VecDuplicate(ilink->x, &jac->w1));
1087     PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu));
1088     PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz));
1089 
1090     ilink = jac->head;
1091     PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H));
1092     if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1093     /* Create gkb_monitor context */
1094     if (jac->gkbmonitor) {
1095       PetscInt tablevel;
1096       PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer));
1097       PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII));
1098       PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel));
1099       PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel));
1100       PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1));
1101     }
1102   } else {
1103     /* set up the individual splits' PCs */
1104     i     = 0;
1105     ilink = jac->head;
1106     while (ilink) {
1107       PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i]));
1108       /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
1109       if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1110       i++;
1111       ilink = ilink->next;
1112     }
1113   }
1114 
1115   /* Set coordinates to the sub PC objects whenever these are set */
1116   if (jac->coordinates_set) {
1117     PC pc_coords;
1118     if (jac->type == PC_COMPOSITE_SCHUR) {
1119       // Head is first block.
1120       PetscCall(KSPGetPC(jac->head->ksp, &pc_coords));
1121       PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords));
1122       // Second one is Schur block, but its KSP object is in kspschur.
1123       PetscCall(KSPGetPC(jac->kspschur, &pc_coords));
1124       PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords));
1125     } else if (jac->type == PC_COMPOSITE_GKB) {
1126       PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB Fieldpslit preconditioner\n"));
1127     } else {
1128       ilink = jac->head;
1129       while (ilink) {
1130         PetscCall(KSPGetPC(ilink->ksp, &pc_coords));
1131         PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords));
1132         ilink = ilink->next;
1133       }
1134     }
1135   }
1136 
1137   jac->suboptionsset = PETSC_TRUE;
1138   PetscFunctionReturn(PETSC_SUCCESS);
1139 }
1140 
/* Solve one split additively: scatter xx into the split's work vector, solve with the
   split's KSP (bracketed by its log event), and scatter-ADD the result into yy.
   The chained || short-circuits on the first nonzero (error) return code.
   NOTE(review): the macro references a variable named `pc` (in KSPCheckSolve) that must
   exist in the caller's scope — only usable inside PC implementation functions. */
#define FieldSplitSplitSolveAdd(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \
                    KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE)))
1145 
1146 static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y)
1147 {
1148   PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
1149   PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
1150   KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;
1151 
1152   PetscFunctionBegin;
1153   switch (jac->schurfactorization) {
1154   case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
1155     /* [A00 0; 0 -S], positive definite, suitable for MINRES */
1156     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1157     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1158     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1159     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1160     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1161     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1162     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1163     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1164     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1165     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1166     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1167     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1168     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1169     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1170     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1171     PetscCall(VecScale(ilinkD->y, jac->schurscale));
1172     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1173     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1174     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1175     break;
1176   case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
1177     /* [A00 0; A10 S], suitable for left preconditioning */
1178     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1179     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1180     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1181     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1182     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1183     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1184     PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1185     PetscCall(VecScale(ilinkD->x, -1.));
1186     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1187     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1188     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1189     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1190     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1191     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1192     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1193     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1194     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1195     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1196     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1197     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1198     break;
1199   case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
1200     /* [A00 A01; 0 S], suitable for right preconditioning */
1201     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1202     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1203     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1204     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1205     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1206     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1207     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1208     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1209     PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1210     PetscCall(VecScale(ilinkA->x, -1.));
1211     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1212     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1213     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1214     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1215     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1216     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1217     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1218     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1219     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1220     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1221     break;
1222   case PC_FIELDSPLIT_SCHUR_FACT_FULL:
1223     /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */
1224     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1225     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1226     PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1227     PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y));
1228     PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y));
1229     PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1230     PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1231     PetscCall(VecScale(ilinkD->x, -1.0));
1232     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1233     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1234 
1235     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1236     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1237     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1238     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1239     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1240     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1241     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1242 
1243     if (kspUpper == kspA) {
1244       PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y));
1245       PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
1246       PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1247       PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1248       PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1249       PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1250     } else {
1251       PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1252       PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1253       PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1254       PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1255       PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1256       PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z));
1257       PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z));
1258       PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1259       PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
1260     }
1261     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1262     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1263     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1264   }
1265   PetscFunctionReturn(PETSC_SUCCESS);
1266 }
1267 
/* Applies the transpose of the block-factored Schur-complement preconditioner.
   Each case is the transpose of the corresponding case in the forward apply:
   the off-diagonal blocks are applied with MatMultTranspose (so jac->B and
   jac->C exchange roles) and every inner solve uses KSPSolveTranspose.
   VecScatterBegin/End pairs are deliberately interleaved with other work so
   that scatter communication can overlap computation; the statement order is
   therefore significant. */
static PetscErrorCode PCApplyTranspose_FieldSplit_Schur(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; /* first split (A00 block) and second split (Schur block) */
  KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;

  PetscFunctionBegin;
  switch (jac->schurfactorization) {
  case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
    /* [A00 0; 0 -S], positive definite, suitable for MINRES */
    /* The two diagonal solves are independent; the A00 solve's reverse scatter
       is started before the Schur solve so the two overlap. */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* NOTE(review): the tab level adjusted around the Schur solve (here and
       below) is that of kspA, not jac->kspschur — presumably intended to
       indent the nested solver's output; confirm. */
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScale(ilinkD->y, jac->schurscale)); /* accounts for the -S sign/scaling of the (1,1) block */
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* Transpose of [A00 A01; 0 S] is block lower triangular: solve with A00^T
       first, subtract A01^T times that solution from the second block's rhs,
       then solve with S^T. */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* Transpose of [A00 0; A10 S] is block upper triangular: solve with S^T
       first, subtract A10^T times that solution from the first block's rhs,
       then solve with A00^T. */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
    PetscCall(VecScale(ilinkA->x, -1.));
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* Transpose of the full LDU factorization: apply the transposed upper
       factor, the transposed diagonal (A00^T and S^T solves), then the
       transposed lower factor. */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspUpper, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.0));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

    /* NOTE(review): kspLower is initialized to kspA above and never
       reassigned, so this test is always true and the else branch looks
       unreachable here — confirm against the forward apply, where kspUpper
       can differ from kspA. */
    if (kspLower == kspA) {
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->y));
      PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    } else {
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
      PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(KSPSolveTranspose(kspLower, ilinkA->x, ilinkA->z));
      PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->z));
      PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
    }
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1386 
/* Applies the fieldsplit preconditioner y = M^{-1} x for the ADDITIVE,
   MULTIPLICATIVE, and SYMMETRIC_MULTIPLICATIVE compositions; the Schur and
   GKB variants have their own apply routines.  Errors for any other
   composition type. */
static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head; /* linked list of splits, walked in order */
  PetscInt          cnt, bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    PetscBool matnest;

    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
    if (jac->defaultsplit && !matnest) {
      /* Default strided splits: gather all sub-vectors in one pass, run each
         split's solve on its own piece, then scatter all solutions back. */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* General (IS-based) splits: each split scatters its piece of x, solves,
         and adds its contribution into y. */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) {
    /* Specialized two-split block Gauss-Seidel sweep */
    PetscCall(VecSet(y, 0.0));
    /* solve on first block for first block variables */
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));

    /* compute the residual only onto second block variables using first block variables */
    /* jac->Afield[1] holds the rows of the matrix belonging to the second split */
    PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x));
    ilink = ilink->next;
    PetscCall(VecScale(ilink->x, -1.0));
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));

    /* solve on second block variables */
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
    /* General Gauss-Seidel: forward sweep over all splits; the symmetric
       variant follows with a backward sweep over the same splits. */
    if (!jac->w1) {
      /* lazily create work vectors on first application */
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
    cnt = 1; /* index into jac->Afield for the next split's rows */
    while (ilink->next) {
      ilink = ilink->next;
      /* compute the residual only over the part of the vector needed */
      PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x));
      PetscCall(VecScale(ilink->x, -1.0));
      PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
      PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
      PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    }
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* skip the last split (just solved) and walk the list backwards */
      cnt -= 2;
      while (ilink->previous) {
        ilink = ilink->previous;
        /* compute the residual only over the part of the vector needed */
        PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x));
        PetscCall(VecScale(ilink->x, -1.0));
        PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      }
    }
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1487 
/* Applies the Golub-Kahan bidiagonalization (GKB) preconditioner for
   two-by-two block (saddle-point-like) systems; [Ar13] refers to the paper
   cited in the inline comments below.  The iteration builds bidiagonalization
   vectors u, v and accumulates the two split solutions in ilinkA->y and
   ilinkD->y, stopping when a lower-bound error estimate (computed over a
   delay window of the last jac->gkbdelay values of z) drops below
   jac->gkbtol, or after jac->gkbmaxit iterations. */
static PetscErrorCode PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; /* first (A/H) split and second split */
  KSP               ksp = ilinkA->ksp;                         /* inner solver for the (regularized) H block */
  Vec               u, v, Hu, d, work1, work2;
  PetscScalar       alpha, z, nrmz2, *vecz;
  PetscReal         lowbnd, nu, beta;
  PetscInt          j, iterGKB;

  PetscFunctionBegin;
  /* Pull both pieces of the input vector into the split work vectors */
  PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));

  /* Local aliases for the work objects created at setup time */
  u     = jac->u;
  v     = jac->v;
  Hu    = jac->Hu;
  d     = jac->d;
  work1 = jac->w1;
  work2 = jac->w2;
  vecz  = jac->vecz; /* delay window of recent z values for the stopping bound */

  /* Change RHS to comply with matrix regularization H = A + nu*B*B' */
  /* Add q = q + nu*B*b */
  if (jac->gkbnu) {
    nu = jac->gkbnu;
    PetscCall(VecScale(ilinkD->x, jac->gkbnu));
    PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */
  } else {
    /* Situation when no augmented Lagrangian is used. Then we set inner  */
    /* matrix N = I in [Ar13], and thus nu = 1.                           */
    nu = 1;
  }

  /* Transform rhs from [q,tilde{b}] to [0,b] */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y));
  PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1));
  PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x        */

  /* First step of algorithm */
  PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c)*/
  KSPCheckDot(ksp, beta);
  beta = PetscSqrtReal(nu) * beta;
  PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c      */
  PetscCall(MatMult(jac->B, v, work2));          /* u = H^{-1}*B*v      */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(KSPSolve(ksp, work2, u));
  PetscCall(KSPCheckSolve(ksp, pc, u));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u      */
  PetscCall(VecDot(Hu, u, &alpha));
  KSPCheckDot(ksp, alpha);
  PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
  alpha = PetscSqrtReal(PetscAbsScalar(alpha));
  PetscCall(VecScale(u, 1.0 / alpha));
  PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* d = v/alpha         */

  z       = beta / alpha;
  vecz[1] = z; /* seed the delay window with the first z */

  /* Computation of first iterate x(1) and p(1) */
  PetscCall(VecAXPY(ilinkA->y, z, u));
  PetscCall(VecCopy(d, ilinkD->y));
  PetscCall(VecScale(ilinkD->y, -z));

  iterGKB = 1;
  lowbnd  = 2 * jac->gkbtol; /* force at least one pass through the loop */
  if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));

  while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) {
    iterGKB += 1;
    PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */
    PetscCall(VecAXPBY(v, nu, -alpha, work1));
    PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = ||v||/sqrt(nu)     */
    beta = beta / PetscSqrtReal(nu);
    PetscCall(VecScale(v, 1.0 / beta));
    PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */
    PetscCall(MatMult(jac->H, u, Hu));
    PetscCall(VecAXPY(work2, -beta, Hu));
    PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(KSPSolve(ksp, work2, u));
    PetscCall(KSPCheckSolve(ksp, pc, u));
    PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u            */
    PetscCall(VecDot(Hu, u, &alpha));
    KSPCheckDot(ksp, alpha);
    PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
    alpha = PetscSqrtReal(PetscAbsScalar(alpha));
    PetscCall(VecScale(u, 1.0 / alpha));

    z       = -beta / alpha * z; /* z <- beta/alpha*z     */
    vecz[0] = z;

    /* Computation of new iterate x(i+1) and p(i+1) */
    PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */
    PetscCall(VecAXPY(ilinkA->y, z, u));                   /* r = r + z*u          */
    PetscCall(VecAXPY(ilinkD->y, -z, d));                  /* p = p - z*d          */
    PetscCall(MatMult(jac->H, ilinkA->y, Hu));             /* nrmz2 = r'*H*r       */
    PetscCall(VecDot(Hu, ilinkA->y, &nrmz2));

    /* Compute Lower Bound estimate */
    if (iterGKB > jac->gkbdelay) {
      lowbnd = 0.0;
      for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]);
      lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2));
    }

    /* shift the delay window one slot so vecz[0] is free for the next z */
    for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2];
    if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));
  }

  /* Push both split solutions back into the global output vector */
  PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1610 
/* Scatters xx into ilink->y, applies the transpose solve of the split's KSP
   (note the x/y work vectors swap roles relative to the forward solve:
   ilink->y is the rhs, ilink->x the solution), and scatter-adds the result
   into yy.  Written as a ||-chain of PetscErrorCode values so the whole
   sequence can be wrapped in a single PetscCall(); relies on a variable
   named 'pc' being in scope at the point of use (for KSPCheckSolve). */
#define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \
                    KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE)))
1615 
1616 static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y)
1617 {
1618   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1619   PC_FieldSplitLink ilink = jac->head;
1620   PetscInt          bs;
1621 
1622   PetscFunctionBegin;
1623   if (jac->type == PC_COMPOSITE_ADDITIVE) {
1624     PetscBool matnest;
1625 
1626     PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
1627     if (jac->defaultsplit && !matnest) {
1628       PetscCall(VecGetBlockSize(x, &bs));
1629       PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
1630       PetscCall(VecGetBlockSize(y, &bs));
1631       PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
1632       PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
1633       while (ilink) {
1634         PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1635         PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y));
1636         PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
1637         PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1638         ilink = ilink->next;
1639       }
1640       PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
1641     } else {
1642       PetscCall(VecSet(y, 0.0));
1643       while (ilink) {
1644         PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
1645         ilink = ilink->next;
1646       }
1647     }
1648   } else {
1649     if (!jac->w1) {
1650       PetscCall(VecDuplicate(x, &jac->w1));
1651       PetscCall(VecDuplicate(x, &jac->w2));
1652     }
1653     PetscCall(VecSet(y, 0.0));
1654     if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
1655       PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
1656       while (ilink->next) {
1657         ilink = ilink->next;
1658         PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
1659         PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
1660         PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
1661       }
1662       while (ilink->previous) {
1663         ilink = ilink->previous;
1664         PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
1665         PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
1666         PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
1667       }
1668     } else {
1669       while (ilink->next) { /* get to last entry in linked list */
1670         ilink = ilink->next;
1671       }
1672       PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
1673       while (ilink->previous) {
1674         ilink = ilink->previous;
1675         PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
1676         PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
1677         PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
1678       }
1679     }
1680   }
1681   PetscFunctionReturn(PETSC_SUCCESS);
1682 }
1683 
/*
   PCReset_FieldSplit - Releases every object created during setup of the fieldsplit
   preconditioner so the PC can be set up again (possibly with different splits).

   The PC_FieldSplit context itself is not freed here; that happens in PCDestroy_FieldSplit().
*/
static PetscErrorCode PCReset_FieldSplit(PC pc)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;

  PetscFunctionBegin;
  /* destroy each split in the linked list along with its solver, work vectors, scatter, and index sets */
  while (ilink) {
    PetscCall(KSPDestroy(&ilink->ksp));
    PetscCall(VecDestroy(&ilink->x));
    PetscCall(VecDestroy(&ilink->y));
    PetscCall(VecDestroy(&ilink->z));
    PetscCall(VecScatterDestroy(&ilink->sctx));
    PetscCall(ISDestroy(&ilink->is));
    PetscCall(ISDestroy(&ilink->is_col));
    PetscCall(PetscFree(ilink->splitname));
    PetscCall(PetscFree(ilink->fields));
    PetscCall(PetscFree(ilink->fields_col));
    next = ilink->next; /* save the successor before freeing the link itself */
    PetscCall(PetscFree(ilink));
    ilink = next;
  }
  jac->head = NULL;
  PetscCall(PetscFree2(jac->x, jac->y));
  /* when mat aliases pmat the array is destroyed once below through pmat; here we only drop the alias */
  if (jac->mat && jac->mat != jac->pmat) {
    PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat));
  } else if (jac->mat) {
    jac->mat = NULL;
  }
  if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat));
  if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield));
  jac->nsplits = 0;
  PetscCall(VecDestroy(&jac->w1));
  PetscCall(VecDestroy(&jac->w2));
  /* objects used by the Schur-complement composition */
  PetscCall(MatDestroy(&jac->schur));
  PetscCall(MatDestroy(&jac->schurp));
  PetscCall(MatDestroy(&jac->schur_user));
  PetscCall(KSPDestroy(&jac->kspschur));
  PetscCall(KSPDestroy(&jac->kspupper));
  /* NOTE(review): B, C, H, u, v, Hu, d appear to be GKB work objects (gkbviewer clearly is) — confirm in PCSetUp */
  PetscCall(MatDestroy(&jac->B));
  PetscCall(MatDestroy(&jac->C));
  PetscCall(MatDestroy(&jac->H));
  PetscCall(VecDestroy(&jac->u));
  PetscCall(VecDestroy(&jac->v));
  PetscCall(VecDestroy(&jac->Hu));
  PetscCall(VecDestroy(&jac->d));
  PetscCall(PetscFree(jac->vecz));
  PetscCall(PetscViewerDestroy(&jac->gkbviewer));
  jac->isrestrict = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1734 
1735 static PetscErrorCode PCDestroy_FieldSplit(PC pc)
1736 {
1737   PetscFunctionBegin;
1738   PetscCall(PCReset_FieldSplit(pc));
1739   PetscCall(PetscFree(pc->data));
1740   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
1741   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL));
1742   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL));
1743   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL));
1744   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL));
1745   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL));
1746   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL));
1747   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1748 
1749   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
1750   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
1751   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
1752   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
1753   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1754   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
1755   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
1756   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
1757   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
1758   PetscFunctionReturn(PETSC_SUCCESS);
1759 }
1760 
/*
   PCSetFromOptions_FieldSplit - Processes the options database entries for a PCFIELDSPLIT:
   block size, composition type, Amat/Pmat block extraction, saddle-point detection, the
   Schur- and GKB-specific parameters, and (once set up) the options of the sub-solvers.
*/
static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PetscInt        bs;
  PetscBool       flg;
  PC_FieldSplit  *jac = (PC_FieldSplit *)pc->data;
  PCCompositeType ctype;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options");
  PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL));
  PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg));
  if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs));
  /* default the per-block choices to the PC-wide useAmat flag before reading the specific options */
  jac->diag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL));
  jac->offdiag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL));
  PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL));
  PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */
  PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg));
  if (flg) PetscCall(PCFieldSplitSetType(pc, ctype));
  /* Only setup fields once */
  if ((jac->bs > 0) && (jac->nsplits == 0)) {
    /* only allow user to set fields from command line.
       otherwise user can set them in PCFieldSplitSetDefaults() */
    PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
    if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
  }
  if (jac->type == PC_COMPOSITE_SCHUR) {
    /* the old option name is still honored, but its use is reported as deprecated */
    PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg));
    if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n"));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL));
    PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL));
  } else if (jac->type == PC_COMPOSITE_GKB) {
    PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitSetGKBTol", jac->gkbtol, &jac->gkbtol, NULL));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitSetGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL));
    PetscCall(PetscOptionsBoundedReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitSetGKBNu", jac->gkbnu, &jac->gkbnu, NULL, 0.0));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", "Maximum allowed number of iterations", "PCFieldSplitSetGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL));
    PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL));
  }
  /*
    In the initial call to this routine the sub-solver data structures do not exist so we cannot call KSPSetFromOptions() on them yet.
    But after the initial setup of ALL the layers of sub-solvers is completed we do want to call KSPSetFromOptions() on the sub-solvers every time it
    is called on the outer solver in case changes were made in the options database

    But even after PCSetUp_FieldSplit() is called all the options inside the inner levels of sub-solvers may still not have been set thus we only call the KSPSetFromOptions()
    if we know that the entire stack of sub-solvers below this has been completely instantiated; we check this by seeing if any solver iterations are complete.
    Without this extra check test p2p1fetidp_olof_full and others fail with incorrect matrix types.

    There could be a negative side effect of calling the KSPSetFromOptions() below.

    If one captured the PetscObjectState of the options database one could skip these calls if the database has not changed from the previous call
  */
  if (jac->issetup) {
    PC_FieldSplitLink ilink = jac->head;
    if (jac->type == PC_COMPOSITE_SCHUR) {
      if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper));
      if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur));
    }
    while (ilink) {
      if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp));
      ilink = ilink->next;
    }
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
1828 
1829 static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
1830 {
1831   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
1832   PC_FieldSplitLink ilink, next = jac->head;
1833   char              prefix[128];
1834   PetscInt          i;
1835 
1836   PetscFunctionBegin;
1837   if (jac->splitdefined) {
1838     PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
1839     PetscFunctionReturn(PETSC_SUCCESS);
1840   }
1841   for (i = 0; i < n; i++) { PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]); }
1842   PetscCall(PetscNew(&ilink));
1843   if (splitname) {
1844     PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
1845   } else {
1846     PetscCall(PetscMalloc1(3, &ilink->splitname));
1847     PetscCall(PetscSNPrintf(ilink->splitname, 2, "%" PetscInt_FMT, jac->nsplits));
1848   }
1849   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split great than 4 gets logged in the 4th split */
1850   PetscCall(PetscMalloc1(n, &ilink->fields));
1851   PetscCall(PetscArraycpy(ilink->fields, fields, n));
1852   PetscCall(PetscMalloc1(n, &ilink->fields_col));
1853   PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n));
1854 
1855   ilink->nfields = n;
1856   ilink->next    = NULL;
1857   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
1858   PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
1859   PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
1860   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
1861   PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
1862 
1863   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
1864   PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
1865 
1866   if (!next) {
1867     jac->head       = ilink;
1868     ilink->previous = NULL;
1869   } else {
1870     while (next->next) next = next->next;
1871     next->next      = ilink;
1872     ilink->previous = next;
1873   }
1874   jac->nsplits++;
1875   PetscFunctionReturn(PETSC_SUCCESS);
1876 }
1877 
1878 static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1879 {
1880   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1881 
1882   PetscFunctionBegin;
1883   *subksp = NULL;
1884   if (n) *n = 0;
1885   if (jac->type == PC_COMPOSITE_SCHUR) {
1886     PetscInt nn;
1887 
1888     PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()");
1889     PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits);
1890     nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 1 : 0);
1891     PetscCall(PetscMalloc1(nn, subksp));
1892     (*subksp)[0] = jac->head->ksp;
1893     (*subksp)[1] = jac->kspschur;
1894     if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper;
1895     if (n) *n = nn;
1896   }
1897   PetscFunctionReturn(PETSC_SUCCESS);
1898 }
1899 
1900 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp)
1901 {
1902   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1903 
1904   PetscFunctionBegin;
1905   PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()");
1906   PetscCall(PetscMalloc1(jac->nsplits, subksp));
1907   PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp));
1908 
1909   (*subksp)[1] = jac->kspschur;
1910   if (n) *n = jac->nsplits;
1911   PetscFunctionReturn(PETSC_SUCCESS);
1912 }
1913 
1914 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1915 {
1916   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1917   PetscInt          cnt   = 0;
1918   PC_FieldSplitLink ilink = jac->head;
1919 
1920   PetscFunctionBegin;
1921   PetscCall(PetscMalloc1(jac->nsplits, subksp));
1922   while (ilink) {
1923     (*subksp)[cnt++] = ilink->ksp;
1924     ilink            = ilink->next;
1925   }
1926   PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits);
1927   if (n) *n = jac->nsplits;
1928   PetscFunctionReturn(PETSC_SUCCESS);
1929 }
1930 
1931 /*@
1932   PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`.
1933 
1934   Input Parameters:
1935 + pc  - the preconditioner context
1936 - isy - the index set that defines the indices to which the fieldsplit is to be restricted
1937 
1938   Level: advanced
1939 
1940   Developer Notes:
1941   It seems the resulting `IS`s will not cover the entire space, so
1942   how can they define a convergent preconditioner? Needs explaining.
1943 
1944 .seealso: [](sec_block_matrices), `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
1945 @*/
PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(isy, IS_CLASSID, 2);
  /* dispatches to the composed implementation (PCFieldSplitRestrictIS_FieldSplit());
     a no-op for PC types that have not composed this method */
  PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1954 
/*
   PCFieldSplitRestrictIS_FieldSplit - Implementation of PCFieldSplitRestrictIS()

   Replaces each split's row and column IS by its embedding into isy, shifted by this
   rank's starting offset within isy so the restricted indices form a valid parallel IS.
   The restriction is applied recursively to nested PCFIELDSPLIT sub-preconditioners.
*/
static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;
  PetscInt          localsize, size, sizez, i;
  const PetscInt   *ind, *indz;
  PetscInt         *indc, *indcz;
  PetscBool         flg;

  PetscFunctionBegin;
  PetscCall(ISGetLocalSize(isy, &localsize));
  /* inclusive scan minus own contribution = this rank's global starting offset in isy */
  PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy)));
  size -= localsize;
  while (ilink) {
    IS isrl, isr;
    PC subpc;
    /* positions of this split's indices within isy; entries of is not present in isy are dropped */
    PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl));
    PetscCall(ISGetLocalSize(isrl, &localsize));
    PetscCall(PetscMalloc1(localsize, &indc));
    PetscCall(ISGetIndices(isrl, &ind));
    PetscCall(PetscArraycpy(indc, ind, localsize));
    PetscCall(ISRestoreIndices(isrl, &ind));
    PetscCall(ISDestroy(&isrl));
    /* shift the local embedding by the rank offset to obtain global indices */
    for (i = 0; i < localsize; i++) *(indc + i) += size;
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr));
    /* install isr as both the row IS and the column IS of this split (one reference each) */
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is));
    ilink->is = isr;
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is_col));
    ilink->is_col = isr;
    PetscCall(ISDestroy(&isr));
    PetscCall(KSPGetPC(ilink->ksp, &subpc));
    PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg));
    if (flg) {
      /* recurse: restrict the nested fieldsplit by the embedding of isy into this split's (new) IS */
      IS       iszl, isz;
      MPI_Comm comm;
      PetscCall(ISGetLocalSize(ilink->is, &localsize));
      comm = PetscObjectComm((PetscObject)ilink->is);
      PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl));
      PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm));
      sizez -= localsize;
      PetscCall(ISGetLocalSize(iszl, &localsize));
      PetscCall(PetscMalloc1(localsize, &indcz));
      PetscCall(ISGetIndices(iszl, &indz));
      PetscCall(PetscArraycpy(indcz, indz, localsize));
      PetscCall(ISRestoreIndices(iszl, &indz));
      PetscCall(ISDestroy(&iszl));
      for (i = 0; i < localsize; i++) *(indcz + i) += sizez;
      PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz));
      PetscCall(PCFieldSplitRestrictIS(subpc, isz));
      PetscCall(ISDestroy(&isz));
    }
    next  = ilink->next;
    ilink = next;
  }
  jac->isrestrict = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2014 
2015 static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is)
2016 {
2017   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
2018   PC_FieldSplitLink ilink, next = jac->head;
2019   char              prefix[128];
2020 
2021   PetscFunctionBegin;
2022   if (jac->splitdefined) {
2023     PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
2024     PetscFunctionReturn(PETSC_SUCCESS);
2025   }
2026   PetscCall(PetscNew(&ilink));
2027   if (splitname) {
2028     PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
2029   } else {
2030     PetscCall(PetscMalloc1(8, &ilink->splitname));
2031     PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits));
2032   }
2033   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split great than 4 gets logged in the 4th split */
2034   PetscCall(PetscObjectReference((PetscObject)is));
2035   PetscCall(ISDestroy(&ilink->is));
2036   ilink->is = is;
2037   PetscCall(PetscObjectReference((PetscObject)is));
2038   PetscCall(ISDestroy(&ilink->is_col));
2039   ilink->is_col = is;
2040   ilink->next   = NULL;
2041   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
2042   PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
2043   PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
2044   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
2045   PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
2046 
2047   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
2048   PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
2049 
2050   if (!next) {
2051     jac->head       = ilink;
2052     ilink->previous = NULL;
2053   } else {
2054     while (next->next) next = next->next;
2055     next->next      = ilink;
2056     ilink->previous = next;
2057   }
2058   jac->nsplits++;
2059   PetscFunctionReturn(PETSC_SUCCESS);
2060 }
2061 
2062 /*@C
2063   PCFieldSplitSetFields - Sets the fields that define one particular split in `PCFIELDSPLIT`
2064 
2065   Logically Collective
2066 
2067   Input Parameters:
2068 + pc         - the preconditioner context
2069 . splitname  - name of this split, if `NULL` the number of the split is used
2070 . n          - the number of fields in this split
2071 . fields     - the fields in this split
2072 - fields_col - generally the same as `fields`, if it does not match `fields` then the submatrix that is solved for this set of fields comes from an off-diagonal block
2073                of the matrix and `fields_col` provides the column indices for that block
2074 
2075   Options Database Key:
2076 . -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split
2077 
2078   Level: intermediate
2079 
2080   Notes:
2081   Use `PCFieldSplitSetIS()` to set a  general set of indices as a split.
2082 
2083   If the matrix used to construct the preconditioner is `MATNEST` then field i refers to the `is_row[i]` `IS` passed to `MatCreateNest()`.
2084 
2085   If the matrix used to construct the preconditioner is not `MATNEST` then
  `PCFieldSplitSetFields()` is for defining fields as strided blocks (based on the block size provided to the matrix with `MatSetBlockSize()` or
2087   to the `PC` with `PCFieldSplitSetBlockSize()`). For example, if the block
2088   size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean
2089   0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x1x3x5x7.. x12x45x78x....
2090   where the numbered entries indicate what is in the split.
2091 
2092   This function is called once per split (it creates a new split each time).  Solve options
2093   for this split will be available under the prefix `-fieldsplit_SPLITNAME_`.
2094 
2095   `PCFieldSplitSetIS()` does not support having a `fields_col` different from `fields`
2096 
2097   Developer Notes:
2098   This routine does not actually create the `IS` representing the split, that is delayed
2099   until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be
2100   available when this routine is called.
2101 
2102 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()`,
          `MatSetBlockSize()`, `MatCreateNest()`
2104 @*/
2105 PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt fields[], const PetscInt fields_col[])
2106 {
2107   PetscFunctionBegin;
2108   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2109   PetscAssertPointer(splitname, 2);
2110   PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname);
2111   PetscAssertPointer(fields, 4);
2112   PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col));
2113   PetscFunctionReturn(PETSC_SUCCESS);
2114 }
2115 
2116 /*@
2117   PCFieldSplitSetDiagUseAmat - set flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2118   the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2119 
2120   Logically Collective
2121 
2122   Input Parameters:
2123 + pc  - the preconditioner object
2124 - flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2125 
2126   Options Database Key:
2127 . -pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks
2128 
2129   Level: intermediate
2130 
2131 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT`
2132 @*/
2133 PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg)
2134 {
2135   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2136   PetscBool      isfs;
2137 
2138   PetscFunctionBegin;
2139   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2140   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2141   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2142   jac->diag_use_amat = flg;
2143   PetscFunctionReturn(PETSC_SUCCESS);
2144 }
2145 
2146 /*@
2147   PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2148   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2149 
2150   Logically Collective
2151 
2152   Input Parameter:
2153 . pc - the preconditioner object
2154 
2155   Output Parameter:
2156 . flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2157 
2158   Level: intermediate
2159 
2160 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT`
2161 @*/
2162 PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg)
2163 {
2164   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2165   PetscBool      isfs;
2166 
2167   PetscFunctionBegin;
2168   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2169   PetscAssertPointer(flg, 2);
2170   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2171   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2172   *flg = jac->diag_use_amat;
2173   PetscFunctionReturn(PETSC_SUCCESS);
2174 }
2175 
2176 /*@
2177   PCFieldSplitSetOffDiagUseAmat - set flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2178   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2179 
2180   Logically Collective
2181 
2182   Input Parameters:
2183 + pc  - the preconditioner object
2184 - flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2185 
2186   Options Database Key:
2187 . -pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks
2188 
2189   Level: intermediate
2190 
2191 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT`
2192 @*/
2193 PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg)
2194 {
2195   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2196   PetscBool      isfs;
2197 
2198   PetscFunctionBegin;
2199   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2200   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2201   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2202   jac->offdiag_use_amat = flg;
2203   PetscFunctionReturn(PETSC_SUCCESS);
2204 }
2205 
2206 /*@
2207   PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2208   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2209 
2210   Logically Collective
2211 
2212   Input Parameter:
2213 . pc - the preconditioner object
2214 
2215   Output Parameter:
2216 . flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2217 
2218   Level: intermediate
2219 
2220 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT`
2221 @*/
2222 PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg)
2223 {
2224   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2225   PetscBool      isfs;
2226 
2227   PetscFunctionBegin;
2228   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2229   PetscAssertPointer(flg, 2);
2230   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2231   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2232   *flg = jac->offdiag_use_amat;
2233   PetscFunctionReturn(PETSC_SUCCESS);
2234 }
2235 
2236 /*@
2237   PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT`
2238 
2239   Logically Collective
2240 
2241   Input Parameters:
2242 + pc        - the preconditioner context
2243 . splitname - name of this split, if `NULL` the number of the split is used
2244 - is        - the index set that defines the elements in this split
2245 
2246   Level: intermediate
2247 
2248   Notes:
  Use `PCFieldSplitSetFields()` for splits defined by strided `IS` based on the matrix block size or the `is_rows[]` passed into `MATNEST`
2250 
2251   This function is called once per split (it creates a new split each time).  Solve options
2252   for this split will be available under the prefix -fieldsplit_SPLITNAME_.
2253 
2254 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetFields()`
2255 @*/
PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (splitname) PetscAssertPointer(splitname, 2); /* splitname is optional; NULL means the split number is used as name */
  PetscValidHeaderSpecific(is, IS_CLASSID, 3);
  /* dispatches to the composed implementation (PCFieldSplitSetIS_FieldSplit());
     a no-op for PC types that have not composed this method */
  PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2265 
2266 /*@
2267   PCFieldSplitGetIS - Retrieves the elements for a split as an `IS`
2268 
2269   Logically Collective
2270 
2271   Input Parameters:
2272 + pc        - the preconditioner context
2273 - splitname - name of this split
2274 
2275   Output Parameter:
2276 . is - the index set that defines the elements in this split, or `NULL` if the split is not found
2277 
2278   Level: intermediate
2279 
2280 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`, `PCFieldSplitGetISByIndex()`
2281 @*/
2282 PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is)
2283 {
2284   PetscFunctionBegin;
2285   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2286   PetscAssertPointer(splitname, 2);
2287   PetscAssertPointer(is, 3);
2288   {
2289     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2290     PC_FieldSplitLink ilink = jac->head;
2291     PetscBool         found;
2292 
2293     *is = NULL;
2294     while (ilink) {
2295       PetscCall(PetscStrcmp(ilink->splitname, splitname, &found));
2296       if (found) {
2297         *is = ilink->is;
2298         break;
2299       }
2300       ilink = ilink->next;
2301     }
2302   }
2303   PetscFunctionReturn(PETSC_SUCCESS);
2304 }
2305 
2306 /*@
2307   PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS`
2308 
2309   Logically Collective
2310 
2311   Input Parameters:
2312 + pc    - the preconditioner context
2313 - index - index of this split
2314 
2315   Output Parameter:
2316 . is - the index set that defines the elements in this split
2317 
2318   Level: intermediate
2319 
2320 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`,
2321 
2322 @*/
2323 PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is)
2324 {
2325   PetscFunctionBegin;
2326   PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index);
2327   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2328   PetscAssertPointer(is, 3);
2329   {
2330     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2331     PC_FieldSplitLink ilink = jac->head;
2332     PetscInt          i     = 0;
2333     PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits);
2334 
2335     while (i < index) {
2336       ilink = ilink->next;
2337       ++i;
2338     }
2339     PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is));
2340   }
2341   PetscFunctionReturn(PETSC_SUCCESS);
2342 }
2343 
2344 /*@
2345   PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the
2346   fieldsplit preconditioner when calling `PCFieldSplitSetFields()`. If not set the matrix block size is used.
2347 
2348   Logically Collective
2349 
2350   Input Parameters:
2351 + pc - the preconditioner context
2352 - bs - the block size
2353 
2354   Level: intermediate
2355 
2356   Note:
2357   If the matrix is a `MATNEST` then the `is_rows[]` passed to `MatCreateNest()` determines the fields.
2358 
2359 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
2360 @*/
PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, bs, 2); /* bs must be the same on all MPI ranks */
  /* dispatches to the composed implementation; a no-op for PC types that have not composed this method */
  PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2369 
/*@C
  PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits

  Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ n      - the number of splits
- subksp - the array of `KSP` contexts

  Level: advanced

  Notes:
  After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
  (not the `KSP`, just the array that contains them).

  You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`.

  If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the
  Schur complement and the `KSP` object used to iterate over the Schur complement.
  To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`.

  If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the
  inner linear system defined by the matrix H in each loop.

  Fortran Notes:
  You must pass in a `KSP` array that is large enough to contain all the `KSP`s.
  You can call `PCFieldSplitGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
  `KSP` array must be.

  Developer Notes:
  There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`

  The Fortran interface should be modernized to return directly the array of values.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()`
@*/
PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2);
  /* PetscUseMethod raises an error if the method is not composed on pc (e.g. pc is not a PCFIELDSPLIT) */
  PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2417 
/*@C
  PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT`

  Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ n      - the number of splits
- subksp - the array of `KSP` contexts

  Level: advanced

  Notes:
  After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
  (not the `KSP` just the array that contains them).

  You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`.

  If the fieldsplit type is of type `PC_COMPOSITE_SCHUR`, it returns (in order)
+  1  - the `KSP` used for the (1,1) block
.  2  - the `KSP` used for the Schur complement (not the one used for the interior Schur solver)
-  3  - the `KSP` used for the (1,1) block in the upper triangular factor (if different from that of the (1,1) block).

  It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`.

  Fortran Notes:
  You must pass in a `KSP` array that is large enough to contain all the local `KSP`s.
  You can call `PCFieldSplitSchurGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
  `KSP` array must be.

  Developer Notes:
  There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`

  Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged?

  The Fortran interface should be modernized to return directly the array of values.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()`
@*/
PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2);
  /* PetscUseMethod raises an error if the method is not composed on pc (composed only for the SCHUR composite type) */
  PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2467 
/*@
  PCFieldSplitSetSchurPre -  Indicates from what operator the preconditioner is constructed for the Schur complement.
  The default is the A11 matrix.

  Collective

  Input Parameters:
+ pc    - the preconditioner context
. ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default),
              `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`,
              `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL`
- pre   - matrix to use for preconditioning, or `NULL`

  Options Database Keys:
+ -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`. See notes for meaning of various arguments
- -fieldsplit_1_pc_type <pctype>                               - the preconditioner algorithm that is used to construct the preconditioner from the operator

  Level: intermediate

  Notes:
  If ptype is
+     a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner
  matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix
.     self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix:
  The only preconditioners that currently work with this symbolic representation matrix object are `PCLSC` and `PCHPDDM`
.     user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument
  to this function).
.     selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation $ Sp = A11 - A10 inv(diag(A00)) A01 $
  This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be
  lumped before extracting the diagonal using the additional option `-fieldsplit_1_mat_schur_complement_ainv_type lump`
-     full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation
  computed internally by `PCFIELDSPLIT` (this is expensive)
  useful mostly as a test that the Schur complement approach can work for your problem

  When solving a saddle point problem, where the A11 block is identically zero, using `a11` as the ptype only makes sense
  with the additional option `-fieldsplit_1_pc_type none`. Usually for saddle point problems one would use a `ptype` of `self` and
  `-fieldsplit_1_pc_type lsc` which uses the least squares commutator to compute a preconditioner for the Schur complement.

  Developer Note:
  The name of this function and the option `-pc_fieldsplit_schur_precondition` are inconsistent; precondition should be used everywhere.

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`,
          `MatSchurComplementSetAinvType()`, `PCLSC`, `PCFieldSplitSetSchurFactType()`
@*/
PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Dispatches to PCFieldSplitSetSchurPre_FieldSplit() when the SCHUR composite type is active; a no-op otherwise (PetscTryMethod) */
  PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2519 
/* PCFieldSplitSchurPrecondition - Deprecated name retained for backward compatibility; use PCFieldSplitSetSchurPre() instead */
PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  return PCFieldSplitSetSchurPre(pc, ptype, pre);
} /* Deprecated name */
2524 
/*@
  PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be
  preconditioned.  See `PCFieldSplitSetSchurPre()` for details.

  Logically Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`
- pre   - matrix to use for preconditioning (with `PC_FIELDSPLIT_SCHUR_PRE_USER`), or `NULL`

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC`
@*/
PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscUseMethod raises an error if the method is not composed on pc (composed only for the SCHUR composite type) */
  PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2549 
2550 /*@
2551   PCFieldSplitSchurGetS -  extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately
2552 
2553   Not Collective
2554 
2555   Input Parameter:
2556 . pc - the preconditioner context
2557 
2558   Output Parameter:
2559 . S - the Schur complement matrix
2560 
2561   Level: advanced
2562 
2563   Note:
2564   This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`.
2565 
2566 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`,
2567           `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()`
2568 @*/
2569 PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S)
2570 {
2571   const char    *t;
2572   PetscBool      isfs;
2573   PC_FieldSplit *jac;
2574 
2575   PetscFunctionBegin;
2576   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2577   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2578   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2579   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2580   jac = (PC_FieldSplit *)pc->data;
2581   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2582   if (S) *S = jac->schur;
2583   PetscFunctionReturn(PETSC_SUCCESS);
2584 }
2585 
2586 /*@
2587   PCFieldSplitSchurRestoreS -  returns the `MATSCHURCOMPLEMENT` matrix used by this `PC`
2588 
2589   Not Collective
2590 
2591   Input Parameters:
2592 + pc - the preconditioner context
2593 - S  - the Schur complement matrix
2594 
2595   Level: advanced
2596 
2597 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MatSchurComplement`, `PCFieldSplitSchurGetS()`
2598 @*/
2599 PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S)
2600 {
2601   const char    *t;
2602   PetscBool      isfs;
2603   PC_FieldSplit *jac;
2604 
2605   PetscFunctionBegin;
2606   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2607   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2608   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2609   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2610   jac = (PC_FieldSplit *)pc->data;
2611   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2612   PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten");
2613   PetscFunctionReturn(PETSC_SUCCESS);
2614 }
2615 
2616 static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2617 {
2618   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2619 
2620   PetscFunctionBegin;
2621   jac->schurpre = ptype;
2622   if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) {
2623     PetscCall(MatDestroy(&jac->schur_user));
2624     jac->schur_user = pre;
2625     PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
2626   }
2627   PetscFunctionReturn(PETSC_SUCCESS);
2628 }
2629 
2630 static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
2631 {
2632   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2633 
2634   PetscFunctionBegin;
2635   if (ptype) *ptype = jac->schurpre;
2636   if (pre) *pre = jac->schur_user;
2637   PetscFunctionReturn(PETSC_SUCCESS);
2638 }
2639 
/*@
  PCFieldSplitSetSchurFactType -  sets which blocks of the approximate block factorization to retain in the preconditioner {cite}`murphy2000note` and {cite}`ipsen2001note`

  Collective

  Input Parameters:
+ pc    - the preconditioner context
- ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default

  Options Database Key:
. -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is `full`

  Level: intermediate

  Notes:
  The FULL factorization is

  ```{math}
  \left(\begin{array}{cc} A & B \\
  C & E \\
  \end{array}\right) =
  \left(\begin{array}{cc} 1 & 0 \\
  C*A^{-1} & I \\
  \end{array}\right)
  \left(\begin{array}{cc} A & 0 \\
  0 & S \\
  \end{array}\right)
  \left(\begin{array}{cc} I & A^{-1}B \\
  0 & I \\
  \end{array}\right) = L D U.
  ```

  where $ S = E - C*A^{-1}*B $. In practice, the full factorization is applied via block triangular solves with the grouping $L*(D*U)$. UPPER uses $D*U$, LOWER uses $L*D$,
  and DIAG is the diagonal part with the sign of $ S $ flipped (because this makes the preconditioner positive definite for many formulations,
  thus allowing the use of `KSPMINRES`). Sign flipping of $ S $ can be turned off with `PCFieldSplitSetSchurScale()`.

  If $A$ and $S$ are solved exactly
+  1 - FULL factorization is a direct solver.
.  2 - The preconditioned operator with LOWER or UPPER has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations.
-  3 -  With DIAG, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations.

  If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner
  application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice.

  For symmetric problems in which $A$ is positive definite and $S$ is negative definite, DIAG can be used with `KSPMINRES`.

  A flexible method like `KSPFGMRES` or `KSPGCR`, [](sec_flexibleksp), must be used if the fieldsplit preconditioner is nonlinear (e.g. a few iterations of a Krylov method is used to solve with A or S).

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`,
          [](sec_flexibleksp), `PCFieldSplitSetSchurPre()`
@*/
PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Dispatches to PCFieldSplitSetSchurFactType_FieldSplit() when the SCHUR composite type is active; a no-op otherwise (PetscTryMethod) */
  PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2698 
2699 static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype)
2700 {
2701   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2702 
2703   PetscFunctionBegin;
2704   jac->schurfactorization = ftype;
2705   PetscFunctionReturn(PETSC_SUCCESS);
2706 }
2707 
/*@
  PCFieldSplitSetSchurScale -  Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`.

  Collective

  Input Parameters:
+ pc    - the preconditioner context
- scale - scaling factor for the Schur complement

  Options Database Key:
. -pc_fieldsplit_schur_scale <scale> - default is -1.0

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurFactType()`
@*/
PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveScalar(pc, scale, 2);
  /* Dispatches to PCFieldSplitSetSchurScale_FieldSplit() when the SCHUR composite type is active; a no-op otherwise (PetscTryMethod) */
  PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2732 
2733 static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale)
2734 {
2735   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2736 
2737   PetscFunctionBegin;
2738   jac->schurscale = scale;
2739   PetscFunctionReturn(PETSC_SUCCESS);
2740 }
2741 
2742 /*@C
2743   PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement
2744 
2745   Collective
2746 
2747   Input Parameter:
2748 . pc - the preconditioner context
2749 
2750   Output Parameters:
2751 + A00 - the (0,0) block
2752 . A01 - the (0,1) block
2753 . A10 - the (1,0) block
2754 - A11 - the (1,1) block
2755 
2756   Level: advanced
2757 
2758 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()`
2759 @*/
2760 PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11)
2761 {
2762   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2763 
2764   PetscFunctionBegin;
2765   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2766   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach.");
2767   if (A00) *A00 = jac->pmat[0];
2768   if (A01) *A01 = jac->B;
2769   if (A10) *A10 = jac->C;
2770   if (A11) *A11 = jac->pmat[1];
2771   PetscFunctionReturn(PETSC_SUCCESS);
2772 }
2773 
/*@
  PCFieldSplitSetGKBTol -  Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc        - the preconditioner context
- tolerance - the solver tolerance

  Options Database Key:
. -pc_fieldsplit_gkb_tol <tolerance> - default is 1e-5

  Level: intermediate

  Note:
  The generalized GKB algorithm {cite}`arioli2013` uses a lower bound estimate of the error in energy norm as stopping criterion.
  It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than
  this estimate, the stopping criterion is satisfactory in practical cases.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()`
@*/
PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, tolerance, 2);
  /* Dispatches to PCFieldSplitSetGKBTol_FieldSplit() when the GKB composite type is active; a no-op otherwise (PetscTryMethod) */
  PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2803 
2804 static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance)
2805 {
2806   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2807 
2808   PetscFunctionBegin;
2809   jac->gkbtol = tolerance;
2810   PetscFunctionReturn(PETSC_SUCCESS);
2811 }
2812 
/*@
  PCFieldSplitSetGKBMaxit -  Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc    - the preconditioner context
- maxit - the maximum number of iterations

  Options Database Key:
. -pc_fieldsplit_gkb_maxit <maxit> - default is 100

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()`
@*/
PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, maxit, 2);
  /* Dispatches to PCFieldSplitSetGKBMaxit_FieldSplit() when the GKB composite type is active; a no-op otherwise (PetscTryMethod) */
  PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2837 
2838 static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit)
2839 {
2840   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2841 
2842   PetscFunctionBegin;
2843   jac->gkbmaxit = maxit;
2844   PetscFunctionReturn(PETSC_SUCCESS);
2845 }
2846 
/*@
  PCFieldSplitSetGKBDelay -  Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization {cite}`arioli2013` in `PCFIELDSPLIT`
  preconditioner.

  Collective

  Input Parameters:
+ pc    - the preconditioner context
- delay - the delay window in the lower bound estimate

  Options Database Key:
. -pc_fieldsplit_gkb_delay <delay> - default is 5

  Level: intermediate

  Notes:
  The algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. The lower bound of the error $ ||u-u^k||_H $
  is expressed as a truncated sum. The error at iteration k can only be measured at iteration (k + `delay`), and thus the algorithm needs
  at least (`delay` + 1) iterations to stop.

  For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to {cite}`arioli2013`

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
@*/
PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, delay, 2);
  /* Dispatches to PCFieldSplitSetGKBDelay_FieldSplit() when the GKB composite type is active; a no-op otherwise (PetscTryMethod) */
  PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2879 
2880 static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay)
2881 {
2882   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2883 
2884   PetscFunctionBegin;
2885   jac->gkbdelay = delay;
2886   PetscFunctionReturn(PETSC_SUCCESS);
2887 }
2888 
/*@
  PCFieldSplitSetGKBNu -  Sets the scalar value nu >= 0 in the transformation H = A00 + nu*A01*A01' of the (1,1) block in the
  Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc - the preconditioner context
- nu - the shift parameter

  Options Database Key:
. -pc_fieldsplit_gkb_nu <nu> - default is 1

  Level: intermediate

  Notes:
  This shift is in general done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by choosing `nu` sufficiently large. However,
  if `nu` is chosen too large, the matrix H might be badly conditioned and the solution of the linear system $Hx = b$ in the inner loop becomes difficult. It is therefore
  necessary to find a good balance in between the convergence of the inner and outer loop.

  For `nu` = 0, no shift is done. In this case A00 has to be positive definite. The matrix N in {cite}`arioli2013` is then chosen as identity.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
@*/
PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, nu, 2);
  /* Dispatches to PCFieldSplitSetGKBNu_FieldSplit() when the GKB composite type is active; a no-op otherwise (PetscTryMethod) */
  PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2921 
2922 static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu)
2923 {
2924   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2925 
2926   PetscFunctionBegin;
2927   jac->gkbnu = nu;
2928   PetscFunctionReturn(PETSC_SUCCESS);
2929 }
2930 
/* Set the composite type of the fieldsplit and install the apply/view callbacks
   and composed methods that are specific to the chosen type */
static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->type = type;
  /* First clear ALL type-specific composed methods so that switching types never
     leaves a stale method from the previously selected type behind */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));

  if (type == PC_COMPOSITE_SCHUR) {
    /* Schur complement: dedicated apply/view plus the Schur-only setters */
    pc->ops->apply          = PCApply_FieldSplit_Schur;
    pc->ops->applytranspose = PCApplyTranspose_FieldSplit_Schur;
    pc->ops->view           = PCView_FieldSplit_Schur;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit));
  } else if (type == PC_COMPOSITE_GKB) {
    /* Golub-Kahan bidiagonalization: dedicated apply/view plus the GKB-only setters;
       note no applytranspose is installed for this type */
    pc->ops->apply = PCApply_FieldSplit_GKB;
    pc->ops->view  = PCView_FieldSplit_GKB;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit));
  } else {
    /* Additive / (symmetric) multiplicative and all remaining types share the generic apply/view */
    pc->ops->apply = PCApply_FieldSplit;
    pc->ops->view  = PCView_FieldSplit;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2974 
2975 static PetscErrorCode PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs)
2976 {
2977   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2978 
2979   PetscFunctionBegin;
2980   PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" PetscInt_FMT, bs);
2981   PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs);
2982   jac->bs = bs;
2983   PetscFunctionReturn(PETSC_SUCCESS);
2984 }
2985 
2986 static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
2987 {
2988   PC_FieldSplit    *jac           = (PC_FieldSplit *)pc->data;
2989   PC_FieldSplitLink ilink_current = jac->head;
2990   IS                is_owned;
2991 
2992   PetscFunctionBegin;
2993   jac->coordinates_set = PETSC_TRUE; // Internal flag
2994   PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, NULL));
2995 
2996   while (ilink_current) {
2997     // For each IS, embed it to get local coords indces
2998     IS              is_coords;
2999     PetscInt        ndofs_block;
3000     const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block
3001 
3002     // Setting drop to true for safety. It should make no difference.
3003     PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords));
3004     PetscCall(ISGetLocalSize(is_coords, &ndofs_block));
3005     PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration));
3006 
3007     // Allocate coordinates vector and set it directly
3008     PetscCall(PetscMalloc1(ndofs_block * dim, &ilink_current->coords));
3009     for (PetscInt dof = 0; dof < ndofs_block; ++dof) {
3010       for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d];
3011     }
3012     ilink_current->dim   = dim;
3013     ilink_current->ndofs = ndofs_block;
3014     PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration));
3015     PetscCall(ISDestroy(&is_coords));
3016     ilink_current = ilink_current->next;
3017   }
3018   PetscCall(ISDestroy(&is_owned));
3019   PetscFunctionReturn(PETSC_SUCCESS);
3020 }
3021 
/*@
  PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc   - the preconditioner context
- type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`,
         `PC_COMPOSITE_GKB`

  Options Database Key:
. -pc_fieldsplit_type <one of multiplicative, additive, symmetric_multiplicative, special, schur> - Sets fieldsplit preconditioner type

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
          `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`, `PCFieldSplitSetSchurFactType()`
@*/
PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Dispatches to PCFieldSplitSetType_FieldSplit() when pc is a PCFIELDSPLIT; a no-op for other PC types (PetscTryMethod) */
  PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3047 
/*@
  PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT`

  Not collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
          `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
@*/
PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type)
{
  /* NOTE(review): pc->data is cast without verifying pc is a PCFIELDSPLIT (unlike
     PCFieldSplitSchurGetS()); the caller is responsible for passing the right PC type */
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(type, 2);
  *type = jac->type;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3074 
3075 /*@
3076   PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3077 
3078   Logically Collective
3079 
3080   Input Parameters:
3081 + pc  - the preconditioner context
3082 - flg - boolean indicating whether to use field splits defined by the `DM`
3083 
3084   Options Database Key:
3085 . -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM`
3086 
3087   Level: intermediate
3088 
3089   Developer Note:
3090   The name should be `PCFieldSplitSetUseDMSplits()`, similar change to options database
3091 
.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
3093 @*/
3094 PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg)
3095 {
3096   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3097   PetscBool      isfs;
3098 
3099   PetscFunctionBegin;
3100   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3101   PetscValidLogicalCollectiveBool(pc, flg, 2);
3102   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3103   if (isfs) jac->dm_splits = flg;
3104   PetscFunctionReturn(PETSC_SUCCESS);
3105 }
3106 
3107 /*@
3108   PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3109 
3110   Logically Collective
3111 
3112   Input Parameter:
3113 . pc - the preconditioner context
3114 
3115   Output Parameter:
3116 . flg - boolean indicating whether to use field splits defined by the `DM`
3117 
3118   Level: intermediate
3119 
3120   Developer Note:
3121   The name should be `PCFieldSplitGetUseDMSplits()`
3122 
.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
3124 @*/
3125 PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg)
3126 {
3127   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3128   PetscBool      isfs;
3129 
3130   PetscFunctionBegin;
3131   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
3132   PetscAssertPointer(flg, 2);
3133   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3134   if (isfs) {
3135     if (flg) *flg = jac->dm_splits;
3136   }
3137   PetscFunctionReturn(PETSC_SUCCESS);
3138 }
3139 
3140 /*@
3141   PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3142 
3143   Logically Collective
3144 
3145   Input Parameter:
3146 . pc - the preconditioner context
3147 
3148   Output Parameter:
3149 . flg - boolean indicating whether to detect fields or not
3150 
3151   Level: intermediate
3152 
3153 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()`
3154 @*/
3155 PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg)
3156 {
3157   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3158 
3159   PetscFunctionBegin;
3160   *flg = jac->detect;
3161   PetscFunctionReturn(PETSC_SUCCESS);
3162 }
3163 
3164 /*@
3165   PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3166 
3167   Logically Collective
3168 
  Input Parameters:
+ pc  - the preconditioner context
- flg - boolean indicating whether to detect fields or not
3174 
3175   Options Database Key:
3176 . -pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point
3177 
3178   Level: intermediate
3179 
3180   Note:
3181   Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`).
3182 
3183 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`
3184 @*/
3185 PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg)
3186 {
3187   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3188 
3189   PetscFunctionBegin;
3190   jac->detect = flg;
3191   if (jac->detect) {
3192     PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
3193     PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
3194   }
3195   PetscFunctionReturn(PETSC_SUCCESS);
3196 }
3197 
3198 /*MC
3199   PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual
3200   collections of variables (that may overlap) called splits. See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details.
3201 
3202   Options Database Keys:
3203 +   -pc_fieldsplit_%d_fields <a,b,..>                                                - indicates the fields to be used in the `%d`'th split
3204 .   -pc_fieldsplit_default                                                           - automatically add any fields to additional splits that have not
3205                                                                                        been supplied explicitly by `-pc_fieldsplit_%d_fields`
3206 .   -pc_fieldsplit_block_size <bs>                                                   - size of block that defines fields (i.e. there are bs fields)
3207                                                                                        when the matrix is not of `MatType` `MATNEST`
3208 .   -pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting
3209 .   -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full>                     - default is `a11`; see `PCFieldSplitSetSchurPre()`
3210 .   -pc_fieldsplit_schur_fact_type <diag,lower,upper,full>                           - set factorization type when using `-pc_fieldsplit_type schur`;
3211                                                                                        see `PCFieldSplitSetSchurFactType()`
3212 .   -pc_fieldsplit_dm_splits <true,false> (default is true)                          - Whether to use `DMCreateFieldDecomposition()` for splits
3213 -   -pc_fieldsplit_detect_saddle_point                                               - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver
3214 
3215   Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` .
  The options prefix for the inner solver when using the Golub-Kahan bidiagonalization preconditioner is `-fieldsplit_0_`
3217   For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields.
3218 
3219   To set options on the solvers for each block append `-fieldsplit_` to all the `PC`
3220   options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1`
3221 
3222   To set the options on the solvers separate for each block call `PCFieldSplitGetSubKSP()`
3223   and set the options directly on the resulting `KSP` object
3224 
3225   Level: intermediate
3226 
3227   Notes:
3228   Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries or with a `MATNEST` and `PCFieldSplitSetIS()`
3229   to define a split by an arbitrary collection of entries.
3230 
3231   If no splits are set, the default is used. If a `DM` is associated with the `PC` and it supports
3232   `DMCreateFieldDecomposition()`, then that is used for the default. Otherwise if the matrix is not `MATNEST`, the splits are defined by entries strided by bs,
3233   beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`,
3234   if this is not called the block size defaults to the blocksize of the second matrix passed
3235   to `KSPSetOperators()`/`PCSetOperators()`.
3236 
3237   For the Schur complement preconditioner if
3238   ```{math}
3239     J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right]
3240   ```
3241 
3242   the preconditioner using `full` factorization is logically
3243   ```{math}
3244     \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) A_{01} \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right]
  ```
3246   where the action of $\text{inv}(A_{00})$ is applied using the KSP solver with prefix `-fieldsplit_0_`.  $S$ is the Schur complement
3247   ```{math}
3248      S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01}
3249   ```
3250   which is usually dense and not stored explicitly.  The action of $\text{ksp}(S)$ is computed using the KSP solver with prefix `-fieldsplit_splitname_` (where `splitname` was given
3251   in providing the SECOND split or 1 if not given). For `PCFieldSplitGetSubKSP()` when field number is 0,
3252   it returns the `KSP` associated with `-fieldsplit_0_` while field number 1 gives `-fieldsplit_1_` KSP. By default
3253   $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$.
3254 
3255   The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above,
3256   `diag` gives
3257   ```{math}
3258     \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\  0 & -\text{ksp}(S) \end{array}\right]
3259   ```
3260   Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$  so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip
3261   can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of
3262   ```{math}
3263     \left[\begin{array}{cc} A_{00} & 0 \\  A_{10} & S \end{array}\right]
3264   ```
3265   where the inverses of A_{00} and S are applied using KSPs. The upper factorization is the inverse of
3266   ```{math}
3267     \left[\begin{array}{cc} A_{00} & A_{01} \\  0 & S \end{array}\right]
3268   ```
3269   where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s.
3270 
3271   If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS`
3272   is used automatically for a second submatrix.
3273 
3274   The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1.
3275   Generally it should be used with the `MATAIJ` or `MATNEST` `MatType`
3276 
3277   The forms of these preconditioners are closely related, if not identical, to forms derived as "Distributive Iterations", see,
3278   for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`wesseling2009`.
3279   One can also use `PCFIELDSPLIT` inside a smoother resulting in "Distributive Smoothers".
3280 
3281   See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`.
3282 
3283   The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the
3284   residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables.
3285 
3286   The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape
3287   ```{math}
3288     \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right]
3289   ```
  with $A_{00}$ positive semi-definite. The implementation follows {cite}`arioli2013`. Therein, we choose $N := 1/\nu * I$ and the $(1,1)$-block of the matrix is modified to $H = A_{00} + \nu A_{01} A_{01}'$.
3291   A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`.
3292 
3293   Developer Note:
3294   The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their
3295   user API.
3296 
3297 .seealso: [](sec_block_matrices), `PC`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`,
3298           `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`,
3299           `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`,
3300           `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()`
3301 M*/
3302 
PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc)
{
  PC_FieldSplit *jac;

  PetscFunctionBegin;
  /* Allocate and zero the private context; fields not set below keep their zero value */
  PetscCall(PetscNew(&jac));

  jac->bs                 = -1;                            /* block size unknown until the operator is set */
  jac->nsplits            = 0;                             /* no splits defined yet */
  jac->type               = PC_COMPOSITE_MULTIPLICATIVE;   /* default combination of the split solves */
  jac->schurpre           = PC_FIELDSPLIT_SCHUR_PRE_USER;  /* Try user preconditioner first, fall back on diagonal */
  jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL;
  jac->schurscale         = -1.0;                          /* default sign flip of ksp(S) for the DIAG factorization */
  jac->dm_splits          = PETSC_TRUE;                    /* use DMCreateFieldDecomposition() when available */
  jac->detect             = PETSC_FALSE;                   /* do not auto-detect saddle points by default */
  jac->gkbtol             = 1e-5;                          /* GKB (Golub-Kahan bidiagonalization) defaults */
  jac->gkbdelay           = 5;
  jac->gkbnu              = 1;
  jac->gkbmaxit           = 100;
  jac->gkbmonitor         = PETSC_FALSE;
  jac->coordinates_set    = PETSC_FALSE;

  pc->data = (void *)jac;

  /* Wire up the PC virtual function table */
  pc->ops->apply           = PCApply_FieldSplit;
  pc->ops->applytranspose  = PCApplyTranspose_FieldSplit;
  pc->ops->setup           = PCSetUp_FieldSplit;
  pc->ops->reset           = PCReset_FieldSplit;
  pc->ops->destroy         = PCDestroy_FieldSplit;
  pc->ops->setfromoptions  = PCSetFromOptions_FieldSplit;
  pc->ops->view            = PCView_FieldSplit;
  pc->ops->applyrichardson = NULL;

  /* Compose the type-specific API so PCFieldSplitXxx() calls dispatch here via PetscTryMethod()/PetscUseMethod() */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3346