xref: /petsc/src/ksp/pc/impls/fieldsplit/fieldsplit.c (revision d484b38499953fbf56fc48e9f43f331c10de329f)
1 #include <petsc/private/pcimpl.h>  /*I "petscpc.h" I*/
2 #include <petsc/private/kspimpl.h> /*  This is needed to provide the appropriate PETSC_EXTERN for KSP_Solve_FS ....*/
3 #include <petsc/private/matimpl.h> /* MatScatterDense_Private() for PCMatApply() */
4 #include <petscdm.h>
5 #include <petscdevice.h>
6 #if PetscDefined(HAVE_CUDA)
7   #include <petscdevice_cuda.h>
8 #endif
9 #if PetscDefined(HAVE_HIP)
10   #include <petscdevice_hip.h>
11 #endif
12 
/* Human-readable names for the Schur-complement enums; the trailing three non-NULL entries are the
   enum type name and option prefix consumed by PetscOptionsEnum()/PetscEnumFind() */
const char *const PCFieldSplitSchurPreTypes[]  = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL};
const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL};

/* Logging events for the individual inner solves performed while applying the Schur and GKB variants */
PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4;
17 
/* One node of the linked list of splits: each split carries its own sub-KSP, the index
   sets selecting its rows/columns from the full system, and the work objects used to
   move data between the global vectors/matrices and the split subproblem. */
typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
struct _PC_FieldSplitLink {
  KSP               ksp;        /* solver for this split's block */
  Vec               x, y, z;    /* work vectors for this split (gathered rhs/solution; z presumably scratch — TODO confirm usage) */
  Mat               X, Y;       /* dense work matrices, used by PCMatApply() paths */
  char             *splitname;  /* name used to build this split's options prefix */
  PetscInt          nfields;    /* number of entries in fields[]/fields_col[] */
  PetscInt         *fields, *fields_col; /* field indices (row and column) when the split is defined by fields rather than an IS */
  VecScatter        sctx;       /* scatter between the global vector and this split's sub-vector */
  IS                is, is_col; /* row and column index sets defining the split */
  PC_FieldSplitLink next, previous; /* doubly-linked list of all splits */
  PetscLogEvent     event;      /* per-split solve logging event */

  /* Used only when setting coordinates with PCSetCoordinates */
  PetscInt   dim;    /* spatial dimension of the supplied coordinates */
  PetscInt   ndofs;  /* number of local dofs with coordinates */
  PetscReal *coords; /* coordinate array for this split's dofs */
};
36 
/* Private data for PCFIELDSPLIT: the list of splits plus the extra state needed by the
   additive/multiplicative, Schur-complement, and Golub-Kahan-bidiagonalization variants. */
typedef struct {
  PCCompositeType type;         /* How the splits are combined: additive, multiplicative, Schur, GKB, ... */
  PetscBool       defaultsplit; /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */
  PetscBool       splitdefined; /* Flag is set after the splits have been defined, to prevent more splits from being added */
  PetscInt        bs;           /* Block size for IS and Mat structures */
  PetscInt        nsplits;      /* Number of field divisions defined */
  Vec            *x, *y, w1, w2; /* Per-split work vectors (x,y) and whole-system scratch vectors (w1,w2) */
  Mat            *mat;    /* The diagonal block for each split */
  Mat            *pmat;   /* The preconditioning diagonal block for each split */
  Mat            *Afield; /* The rows of the matrix associated with each split */
  PetscBool       issetup; /* PCSetUp_FieldSplit() has run at least once */

  /* Only used when Schur complement preconditioning is used */
  Mat                       B;          /* The (0,1) block */
  Mat                       C;          /* The (1,0) block */
  Mat                       schur;      /* The Schur complement S = A11 - A10 A00^{-1} A01, the KSP here, kspinner, is H_1 in [El08] */
  Mat                       schurp;     /* Assembled approximation to S built by MatSchurComplement to be used as a matrix for constructing the preconditioner when solving with S */
  Mat                       schur_user; /* User-provided matrix for constructing the preconditioner for the Schur complement */
  PCFieldSplitSchurPreType  schurpre;   /* Determines which matrix is used for the Schur complement */
  PCFieldSplitSchurFactType schurfactorization;
  KSP                       kspschur;   /* The solver for S */
  KSP                       kspupper;   /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */
  PetscScalar               schurscale; /* Scaling factor for the Schur complement solution with DIAG factorization */

  /* Only used when Golub-Kahan bidiagonalization preconditioning is used */
  Mat          H;           /* The modified matrix H = A00 + nu*A01*A01'              */
  PetscReal    gkbtol;      /* Stopping tolerance for lower bound estimate            */
  PetscInt     gkbdelay;    /* The delay window for the stopping criterion            */
  PetscReal    gkbnu;       /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */
  PetscInt     gkbmaxit;    /* Maximum number of iterations for outer loop            */
  PetscBool    gkbmonitor;  /* Monitor for gkb iterations and the lower bound error   */
  PetscViewer  gkbviewer;   /* Viewer context for gkbmonitor                          */
  Vec          u, v, d, Hu; /* Work vectors for the GKB algorithm                     */
  PetscScalar *vecz;        /* Contains intermediate values, eg for lower bound       */

  PC_FieldSplitLink head;   /* First split in the linked list of splits */
  PetscBool         isrestrict;       /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */
  PetscBool         suboptionsset;    /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */
  PetscBool         dm_splits;        /* Whether to use DMCreateFieldDecomposition() whenever possible */
  PetscBool         diag_use_amat;    /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         detect;           /* Whether to form 2-way split by finding zero diagonal entries */
  PetscBool         coordinates_set;  /* Whether PCSetCoordinates has been called */
} PC_FieldSplit;
81 
/*
    Note:
    There is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit instead of
    inside PC_FieldSplitLink; it is just historical. If you want to be able to add new fields after already
    using the PC, you could change this.
*/
88 
89 /* This helper is so that setting a user-provided matrix is orthogonal to choosing to use it.  This way the
90 * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices. */
91 static Mat FieldSplitSchurPre(PC_FieldSplit *jac)
92 {
93   switch (jac->schurpre) {
94   case PC_FIELDSPLIT_SCHUR_PRE_SELF:
95     return jac->schur;
96   case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
97     return jac->schurp;
98   case PC_FIELDSPLIT_SCHUR_PRE_A11:
99     return jac->pmat[1];
100   case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */
101   case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */
102   default:
103     return jac->schur_user ? jac->schur_user : jac->pmat[1];
104   }
105 }
106 
107 #include <petscdraw.h>
/* Viewer for the additive/multiplicative fieldsplit PC: on an ASCII viewer prints the
   composition type, each split's field list (or "Defined by IS"), and each split's KSP;
   on a draw viewer lays the sub-KSP drawings out side by side. */
static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         isascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for each split is in the following KSP objects:\n"));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        /* Disable tabbing so the comma-separated field list stays on one line */
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      PetscCall(KSPView(ilink->ksp, viewer));
      ilink = ilink->next;
    }
  }

  if (isdraw) {
    /* A viewer matches at most one of ASCII/DRAW, so ilink still points at jac->head here */
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* Center the row of sub-KSP drawings about the current x, fitting within [0,1] */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
165 
/* Viewer for the Schur-complement variant: reports the factorization type, which matrix is
   used to build the preconditioner for S (per jac->schurpre), the split definitions, and the
   A00, upper-A00 (if distinct), and Schur KSPs. Draw viewers get a boxed diagram instead. */
static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer)
{
  PC_FieldSplit             *jac = (PC_FieldSplit *)pc->data;
  PetscBool                  isascii, isdraw;
  PetscInt                   i, j;
  PC_FieldSplitLink          ilink = jac->head;
  MatSchurComplementAinvType atype;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    switch (jac->schurpre) {
    case PC_FIELDSPLIT_SCHUR_PRE_SELF:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from S itself\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
      /* jac->schur may not exist yet (before setup), in which case nothing is printed */
      if (jac->schur) {
        PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype));
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's "))));
      }
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_A11:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_FULL:
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from the exact Schur complement\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_USER:
      /* USER falls back to A11 when no user matrix was actually supplied */
      if (jac->schur_user) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from user provided matrix\n"));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Preconditioner for the Schur complement formed from A11\n"));
      }
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre);
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Split info:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        /* Disable tabbing so the comma-separated field list stays on one line */
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      ilink = ilink->next;
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->head) PetscCall(KSPView(jac->head->ksp, viewer));
    else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    PetscCall(PetscViewerASCIIPopTab(viewer));
    /* Only show the upper A00 solver when it is a distinct KSP (UPPER/FULL factorization with -pc_fieldsplit_schur_precondition options) */
    if (jac->head && jac->kspupper != jac->head->ksp) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->kspschur) {
      PetscCall(KSPView(jac->kspschur, viewer));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  not yet available\n"));
    }
    PetscCall(PetscViewerASCIIPopTab(viewer));
    PetscCall(PetscViewerASCIIPopTab(viewer));
  } else if (isdraw && jac->head) {
    PetscDraw draw;
    PetscReal x, y, w, wd, h;
    PetscInt  cnt = 2; /* number of KSP boxes to draw: A00 and Schur, plus upper A00 if distinct */
    char      str[32];

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (jac->kspupper != jac->head->ksp) cnt++;
    /* Center the row of boxes about the current x, fitting within [0,1] */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (cnt + 1);

    PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    /* Mirror the ASCII branch: USER with no user matrix actually uses A11 */
    if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11]));
    } else {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre]));
    }
    PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    x = x - wd * (cnt - 1) / 2.0;

    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->head->ksp, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
    if (jac->kspupper != jac->head->ksp) {
      x += wd;
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(jac->kspupper, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
    }
    x += wd;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->kspschur, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
288 
/* Viewer for the Golub-Kahan-bidiagonalization variant: prints the GKB parameters
   (tolerance, delay, max iterations) and the solver for H = A00 + nu*A01*A01'. */
static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         isascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat (not Pmat) as operator for off-diagonal blocks\n"));

    PetscCall(PetscViewerASCIIPrintf(viewer, "  Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Solver info for H = A00 + nu*A01*A01' matrix:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));

    /* NOTE(review): ilink is dereferenced without a NULL check here — assumes at least one
       split exists by the time the PC is viewed; TODO confirm callers guarantee this */
    if (ilink->fields) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields "));
      /* Disable tabbing so the comma-separated field list stays on one line */
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      for (j = 0; j < ilink->nfields; j++) {
        if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
        PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
      }
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n"));
    }
    PetscCall(KSPView(ilink->ksp, viewer));

    PetscCall(PetscViewerASCIIPopTab(viewer));
  }

  if (isdraw) {
    /* A viewer matches at most one of ASCII/DRAW, so ilink still points at jac->head here */
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* Center the row of sub-KSP drawings about the current x, fitting within [0,1] */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
349 
350 /* Precondition: jac->bs is set to a meaningful value or MATNEST */
351 static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc)
352 {
353   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
354   PetscInt       bs, i, nfields, *ifields, nfields_col, *ifields_col;
355   PetscBool      flg, flg_col, mnest;
356   char           optionname[128], splitname[8], optionname_col[128];
357 
358   PetscFunctionBegin;
359   PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &mnest));
360   if (mnest) {
361     PetscCall(MatNestGetSize(pc->pmat, &bs, NULL));
362   } else {
363     bs = jac->bs;
364   }
365   PetscCall(PetscMalloc2(bs, &ifields, bs, &ifields_col));
366   for (i = 0, flg = PETSC_TRUE;; i++) {
367     PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
368     PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
369     PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i));
370     nfields     = bs;
371     nfields_col = bs;
372     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
373     PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col));
374     if (!flg) break;
375     else if (flg && !flg_col) {
376       PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
377       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields));
378     } else {
379       PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
380       PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match");
381       PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col));
382     }
383   }
384   if (i > 0) {
385     /* Makes command-line setting of splits take precedence over setting them in code.
386        Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would
387        create new splits, which would probably not be what the user wanted. */
388     jac->splitdefined = PETSC_TRUE;
389   }
390   PetscCall(PetscFree2(ifields, ifields_col));
391   PetscFunctionReturn(PETSC_SUCCESS);
392 }
393 
/* Ensures at least two splits are defined before setup, trying in order:
   (1) a DM field decomposition (possibly restricted by -pc_fieldsplit_<i>_fields),
   (2) zero-diagonal detection (-pc_fieldsplit_detect_saddle_point style, jac->detect),
   (3) off-block-diagonal coupling detection (-pc_fieldsplit_detect_coupling),
   (4) runtime options / a default one-field-per-split decomposition, or
   (5) completing a single user-supplied IS with its complement. */
static PetscErrorCode PCFieldSplitSetDefaults(PC pc)
{
  PC_FieldSplit    *jac                = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink              = jac->head;
  PetscBool         fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE;
  PetscInt          i;

  PetscFunctionBegin;
  /*
   Kinda messy, but at least this now uses DMCreateFieldDecomposition().
   Should probably be rewritten.
   */
  if (!ilink) { /* no splits defined yet */
    PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL));
    if (pc->dm && jac->dm_splits && !jac->detect && !coupling) {
      /* Case 1: derive the splits from the DM's field decomposition */
      PetscInt  numFields, f, i, j;
      char    **fieldNames;
      IS       *fields;
      DM       *dms;
      DM        subdm[128];
      PetscBool flg;

      PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms));
      /* Allow the user to prescribe the splits */
      for (i = 0, flg = PETSC_TRUE;; i++) {
        PetscInt ifields[128];
        IS       compField;
        char     optionname[128], splitname[8];
        PetscInt nfields = numFields;

        PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
        PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
        if (!flg) break;
        /* NOTE(review): only numFields is checked against 128 here, while the split index i
           also indexes subdm[128]; presumably the number of splits cannot exceed the number
           of fields — TODO confirm */
        PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields);
        PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i]));
        if (nfields == 1) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField));
        } else {
          PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
          PetscCall(PCFieldSplitSetIS(pc, splitname, compField));
        }
        PetscCall(ISDestroy(&compField));
        /* The fields consumed by this split are no longer needed individually */
        for (j = 0; j < nfields; ++j) {
          f = ifields[j];
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      }
      if (i == 0) {
        /* No command-line prescription: one split per DM field */
        for (f = 0; f < numFields; ++f) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f]));
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      } else {
        /* Splits were prescribed: replace the per-field DMs with the sub-DMs built above */
        for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j));
        PetscCall(PetscFree(dms));
        PetscCall(PetscMalloc1(i, &dms));
        for (j = 0; j < i; ++j) dms[j] = subdm[j];
      }
      PetscCall(PetscFree(fieldNames));
      PetscCall(PetscFree(fields));
      if (dms) {
        PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n"));
        /* Attach each sub-DM to its split's KSP (inactive, just available for PCs below) */
        for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) {
          const char *prefix;
          PetscCall(PetscObjectGetOptionsPrefix((PetscObject)ilink->ksp, &prefix));
          PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dms[i], prefix));
          PetscCall(KSPSetDM(ilink->ksp, dms[i]));
          PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE));
          PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0));
          PetscCall(DMDestroy(&dms[i]));
        }
        PetscCall(PetscFree(dms));
      }
    } else {
      if (jac->bs <= 0) {
        if (pc->pmat) PetscCall(MatGetBlockSize(pc->pmat, &jac->bs));
        else jac->bs = 1;
      }

      if (jac->detect) {
        /* Case 2: 2-way split separating zero-diagonal rows (saddle-point detection) */
        IS       zerodiags, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->diag_use_amat) {
          PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags));
        } else {
          PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags));
        }
        PetscCall(ISComplement(zerodiags, nmin, nmax, &rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags));
        PetscCall(ISDestroy(&zerodiags));
        PetscCall(ISDestroy(&rest));
      } else if (coupling) {
        /* Case 3: 2-way split where split 1 holds rows with off-block-diagonal entries */
        IS       coupling, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->offdiag_use_amat) {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling));
        } else {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling));
        }
        PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest));
        PetscCall(ISSetIdentity(rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", coupling));
        PetscCall(ISDestroy(&coupling));
        PetscCall(ISDestroy(&rest));
      } else {
        /* Case 4: runtime option splits, else default one-field-per-split */
        PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL));
        if (!fieldsplit_default) {
          /* Allow user to set fields from command line,  if bs was known at the time of PCSetFromOptions_FieldSplit()
           then it is set there. This is not ideal because we should only have options set in XXSetFromOptions(). */
          PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
          if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
        }
        if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) {
          Mat       M = pc->pmat;
          PetscBool isnest;
          PetscInt  nf;

          PetscCall(PetscInfo(pc, "Using default splitting of fields\n"));
          /* Prefer whichever of pmat/mat is a MATNEST to determine the field count */
          PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest));
          if (!isnest) {
            M = pc->mat;
            PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest));
          }
          if (!isnest) nf = jac->bs;
          else PetscCall(MatNestGetSize(M, &nf, NULL));
          for (i = 0; i < nf; i++) {
            char splitname[8];

            PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
            PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i));
          }
          jac->defaultsplit = PETSC_TRUE;
        }
      }
    }
  } else if (jac->nsplits == 1) {
    /* Case 5: one split given by IS — construct the second split as its complement */
    IS       is2;
    PetscInt nmin, nmax;

    PetscCheck(ilink->is, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()");
    PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
    PetscCall(ISComplement(ilink->is, nmin, nmax, &is2));
    PetscCall(PCFieldSplitSetIS(pc, "1", is2));
    PetscCall(ISDestroy(&is2));
  }

  PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits);
  PetscFunctionReturn(PETSC_SUCCESS);
}
551 
552 static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu)
553 {
554   Mat       BT, T;
555   PetscReal nrmT, nrmB;
556 
557   PetscFunctionBegin;
558   PetscCall(MatHermitianTranspose(C, MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */
559   PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN));
560   PetscCall(MatNorm(T, NORM_1, &nrmT));
561   PetscCall(MatNorm(B, NORM_1, &nrmB));
562   PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/Hermitian, GKB is not applicable.");
563 
564   /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */
565   /* setting N := 1/nu*I in [Ar13].                                                 */
566   PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT));
567   PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_CURRENT, H)); /* H = A01*A01'          */
568   PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN));        /* H = A00 + nu*A01*A01' */
569 
570   PetscCall(MatDestroy(&BT));
571   PetscCall(MatDestroy(&T));
572   PetscFunctionReturn(PETSC_SUCCESS);
573 }
574 
575 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *option[], const char *value[], PetscBool *flg);
576 
/*
  PCSetUp_FieldSplit - Performs all one-time and per-solve setup for PCFIELDSPLIT.

  In order it:
    1. on first setup, builds the row/column index sets (IS) that define each split,
       either from the matrix block size (strided/general ISs) or from the ISs of a
       MATNEST matrix;
    2. extracts (or refreshes) the diagonal blocks jac->pmat[i] from pc->pmat, creates
       the per-split work vectors and scatters;
    3. optionally extracts diagonal blocks jac->mat[i] from pc->mat (-pc_fieldsplit_diag_use_amat);
    4. attaches any user-provided null spaces found composed on the ISs;
    5. extracts the off-diagonal row blocks jac->Afield[] needed by the multiplicative variants;
    6. for PC_COMPOSITE_SCHUR, builds/updates the Schur complement matrix, its inner,
       upper and Schur KSPs; for PC_COMPOSITE_GKB, builds the Golub-Kahan operator and
       work vectors; otherwise sets the operators on each split's KSP;
    7. propagates coordinates to the sub-PCs when PCSetCoordinates() was called.
*/
static PetscErrorCode PCSetUp_FieldSplit(PC pc)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink;
  PetscInt          i, nsplit;
  PetscBool         matnest = PETSC_FALSE;

  PetscFunctionBegin;
  pc->failedreason = PC_NOERROR;
  PetscCall(PCFieldSplitSetDefaults(pc));
  nsplit = jac->nsplits;
  ilink  = jac->head;
  if (pc->pmat) PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));

  /* get the matrices for each split */
  if (!jac->issetup) {
    /* First-time setup only: construct the ISs that define each split */
    PetscInt rstart, rend, nslots, bs;

    jac->issetup = PETSC_TRUE;

    /* This is done here instead of in PCFieldSplitSetFields() because may not have matrix at that point */
    if (jac->defaultsplit || !ilink->is) {
      if (jac->bs <= 0) jac->bs = nsplit;
    }

    /*  MatCreateSubMatrix() for [S]BAIJ matrices can only work if the indices include entire blocks of the matrix */
    PetscCall(MatGetBlockSize(pc->pmat, &bs));
    if (bs > 1 && (jac->bs <= bs || jac->bs % bs)) {
      PetscBool blk;

      PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &blk, MATBAIJ, MATSBAIJ, MATSEQBAIJ, MATSEQSBAIJ, MATMPIBAIJ, MATMPISBAIJ, NULL));
      PetscCheck(!blk, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Cannot use MATBAIJ with PCFIELDSPLIT and currently set matrix and PC blocksizes");
    }

    if (!matnest) { /* use the matrix blocksize and stride IS to determine the index sets that define the submatrices */
      bs = jac->bs;
      PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend));
      nslots = (rend - rstart) / bs;
      for (i = 0; i < nsplit; i++) {
        if (jac->defaultsplit) {
          /* default split: field i owns every i-th entry of each block; row and column ISs are shared */
          PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is));
          PetscCall(PetscObjectReference((PetscObject)ilink->is));
          ilink->is_col = ilink->is;
        } else if (!ilink->is) {
          PetscBool same_fields = PETSC_TRUE;

          for (PetscInt k = 0; k < ilink->nfields; k++) {
            if (ilink->fields[k] != ilink->fields_col[k]) same_fields = PETSC_FALSE;
          }

          if (ilink->nfields > 1) {
            /* multiple fields per split: enumerate the indices explicitly (jj only needed when rows != columns) */
            PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col;

            PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii));
            if (!same_fields) PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj));
            for (j = 0; j < nslots; j++) {
              for (k = 0; k < nfields; k++) {
                ii[nfields * j + k] = rstart + bs * j + fields[k];
                if (!same_fields) jj[nfields * j + k] = rstart + bs * j + fields_col[k];
              }
            }
            PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is));
            if (!same_fields) PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col));
            else {
              PetscCall(PetscObjectReference((PetscObject)ilink->is));
              ilink->is_col = ilink->is;
            }
            PetscCall(ISSetBlockSize(ilink->is, nfields));
            PetscCall(ISSetBlockSize(ilink->is_col, nfields));
          } else {
            /* single field per split: a stride IS suffices */
            PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is));
            if (!same_fields) PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col));
            else {
              PetscCall(PetscObjectReference((PetscObject)ilink->is));
              ilink->is_col = ilink->is;
            }
          }
        }
        ilink = ilink->next;
      }
    } else { /* use the IS that define the MATNEST to determine the index sets that define the submatrices */
      IS      *rowis, *colis, *ises = NULL;
      PetscInt mis, nis;

      PetscCall(MatNestGetSize(pc->pmat, &mis, &nis));
      PetscCall(PetscMalloc2(mis, &rowis, nis, &colis));
      PetscCall(MatNestGetISs(pc->pmat, rowis, colis));
      if (!jac->defaultsplit) PetscCall(PetscMalloc1(mis, &ises));

      for (i = 0; i < nsplit; i++) {
        if (jac->defaultsplit) {
          PetscCall(ISDuplicate(rowis[i], &ilink->is));
          PetscCall(PetscObjectReference((PetscObject)ilink->is));
          ilink->is_col = ilink->is;
        } else if (!ilink->is) {
          if (ilink->nfields > 1) {
            /* a split made of several nest blocks: concatenate their row ISs */
            for (PetscInt j = 0; j < ilink->nfields; j++) ises[j] = rowis[ilink->fields[j]];
            PetscCall(ISConcatenate(PetscObjectComm((PetscObject)pc), ilink->nfields, ises, &ilink->is));
          } else {
            PetscCall(ISDuplicate(rowis[ilink->fields[0]], &ilink->is));
          }
          PetscCall(PetscObjectReference((PetscObject)ilink->is));
          ilink->is_col = ilink->is;
        }
        ilink = ilink->next;
      }
      PetscCall(PetscFree2(rowis, colis));
      PetscCall(PetscFree(ises));
    }
  }

  ilink = jac->head;
  if (!jac->pmat) {
    /* first setup: extract the diagonal blocks of pc->pmat and create work vectors/scatters */
    Vec xtmp;

    PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL));
    PetscCall(PetscMalloc1(nsplit, &jac->pmat));
    PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y));
    for (i = 0; i < nsplit; i++) {
      MatNullSpace sp;

      /* Check for matrix attached to IS */
      PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i]));
      if (jac->pmat[i]) {
        PetscCall(PetscObjectReference((PetscObject)jac->pmat[i]));
        if (jac->type == PC_COMPOSITE_SCHUR) {
          /* a user-attached block doubles as the user-provided Schur preconditioning matrix */
          jac->schur_user = jac->pmat[i];

          PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
        }
      } else {
        const char *prefix;
        PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i]));
        PetscCall(MatGetOptionsPrefix(jac->pmat[i], &prefix));
        if (!prefix) {
          PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix));
          PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix));
        }
        PetscCall(MatSetFromOptions(jac->pmat[i]));
        PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view"));
      }
      /* create work vectors for each split */
      PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i]));
      ilink->x = jac->x[i];
      ilink->y = jac->y[i];
      ilink->z = NULL;
      /* compute scatter contexts needed by multiplicative versions and non-default splits */
      PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx));
      PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp));
      if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp));
      ilink = ilink->next;
    }
    PetscCall(VecDestroy(&xtmp));
  } else {
    /* subsequent setups: refresh the diagonal blocks.  If the nonzero pattern changed the old
       blocks are destroyed and recreated (their null spaces are saved and restored around that). */
    MatReuse      scall;
    MatNullSpace *nullsp = NULL;

    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      PetscCall(MatGetNullSpaces(nsplit, jac->pmat, &nullsp));
      for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i]));
      scall = MAT_INITIAL_MATRIX;
    } else scall = MAT_REUSE_MATRIX;

    for (i = 0; i < nsplit; i++) {
      Mat pmat;

      /* Check for matrix attached to IS */
      PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat));
      /* NOTE(review): blocks attached to the IS by the user are left untouched here —
         presumably the user keeps them up to date; confirm against callers */
      if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i]));
      ilink = ilink->next;
    }
    if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->pmat, &nullsp));
  }
  /* Optionally define the sub-KSP operators from Amat (pc->mat) instead of Pmat */
  if (jac->diag_use_amat) {
    ilink = jac->head;
    if (!jac->mat) {
      PetscCall(PetscMalloc1(nsplit, &jac->mat));
      for (i = 0; i < nsplit; i++) {
        PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i]));
        ilink = ilink->next;
      }
    } else {
      MatReuse      scall;
      MatNullSpace *nullsp = NULL;

      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        PetscCall(MatGetNullSpaces(nsplit, jac->mat, &nullsp));
        for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i]));
        scall = MAT_INITIAL_MATRIX;
      } else scall = MAT_REUSE_MATRIX;

      for (i = 0; i < nsplit; i++) {
        PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i]));
        ilink = ilink->next;
      }
      if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->mat, &nullsp));
    }
  } else {
    jac->mat = jac->pmat;
  }

  /* Check for null space attached to IS */
  ilink = jac->head;
  for (i = 0; i < nsplit; i++) {
    MatNullSpace sp;

    PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp));
    if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp));
    ilink = ilink->next;
  }

  if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) {
    /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */
    /* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? */
    ilink = jac->head;
    if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) {
      /* special case need where Afield[0] is not needed and only certain columns of Afield[1] are needed since update is only on those rows of the solution */
      if (!jac->Afield) {
        PetscCall(PetscCalloc1(nsplit, &jac->Afield));
        if (jac->offdiag_use_amat) {
          PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
        } else {
          PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
        }
      } else {
        MatReuse scall;

        if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
          PetscCall(MatDestroy(&jac->Afield[1]));
          scall = MAT_INITIAL_MATRIX;
        } else scall = MAT_REUSE_MATRIX;

        if (jac->offdiag_use_amat) {
          PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
        } else {
          PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
        }
      }
    } else {
      /* general case: each split needs its full block row (all columns, NULL column IS) */
      if (!jac->Afield) {
        PetscCall(PetscMalloc1(nsplit, &jac->Afield));
        for (i = 0; i < nsplit; i++) {
          if (jac->offdiag_use_amat) {
            PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
          } else {
            PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
          }
          ilink = ilink->next;
        }
      } else {
        MatReuse scall;
        if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
          for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i]));
          scall = MAT_INITIAL_MATRIX;
        } else scall = MAT_REUSE_MATRIX;

        for (i = 0; i < nsplit; i++) {
          if (jac->offdiag_use_amat) {
            PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i]));
          } else {
            PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, &jac->Afield[i]));
          }
          ilink = ilink->next;
        }
      }
    }
  }

  if (jac->type == PC_COMPOSITE_SCHUR) {
    PetscBool   isset, isspd = PETSC_FALSE, issym = PETSC_FALSE, flg;
    char        lscname[256];
    PetscObject LSC_L;

    PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields");

    /* If pc->mat is SPD, don't scale by -1 the Schur complement */
    PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd));
    if (jac->schurscale == (PetscScalar)-1.0) jac->schurscale = (isset && isspd) ? 1.0 : -1.0;
    PetscCall(MatIsSymmetricKnown(pc->pmat, &isset, &issym));

    /* (S)BAIJ stores only one triangle, so A10 cannot be extracted directly below;
       flg records that case so A10 is formed as the (Hermitian) transpose of A01 instead */
    PetscCall(PetscObjectTypeCompareAny(jac->offdiag_use_amat ? (PetscObject)pc->mat : (PetscObject)pc->pmat, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));

    if (jac->schur) {
      /* Schur machinery already exists: refresh the off-diagonal blocks and re-set operators */
      KSP      kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper;
      MatReuse scall;

      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        scall = MAT_INITIAL_MATRIX;
        PetscCall(MatDestroy(&jac->B));
        PetscCall(MatDestroy(&jac->C));
      } else scall = MAT_REUSE_MATRIX;

      PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
      ilink = jac->head;
      PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->is, ilink->next->is, scall, &jac->B));
      if (!flg) PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->next->is, ilink->is, scall, &jac->C));
      else {
        PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &isset, &flg));
        if (isset && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
        else PetscCall(MatCreateTranspose(jac->B, &jac->C));
      }
      ilink = ilink->next;
      PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
      if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) {
        PetscCall(MatDestroy(&jac->schurp));
        PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
      } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL && jac->kspupper != jac->head->ksp) {
        PetscCall(MatDestroy(&jac->schur_user));
        PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
      }
      if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0]));
      if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0]));
      PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
    } else {
      /* first Schur setup: create the Schur complement matrix and its solvers */
      const char  *Dprefix;
      char         schurprefix[256], schurmatprefix[256];
      char         schurtestoption[256];
      MatNullSpace sp;
      KSP          kspt;

      /* extract the A01 and A10 matrices */
      ilink = jac->head;
      PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->is, ilink->next->is, MAT_INITIAL_MATRIX, &jac->B));
      if (!flg) PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->C));
      else {
        PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &isset, &flg));
        if (isset && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
        else PetscCall(MatCreateTranspose(jac->B, &jac->C));
      }
      ilink = ilink->next;
      /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */
      PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur));
      PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT));
      PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
      PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
      PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix));
      PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt));
      PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix));

      /* Note: this is not true in general */
      PetscCall(MatGetNullSpace(jac->mat[1], &sp));
      if (sp) PetscCall(MatSetNullSpace(jac->schur, sp));

      /* if the user gave options with the "inner" prefix, build a dedicated inner solver for A00^{-1} */
      PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname));
      PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
      if (flg) {
        DM  dmInner;
        KSP kspInner;
        PC  pcInner;

        PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
        PetscCall(KSPReset(kspInner));
        PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0]));
        PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
        /* Indent this deeper to emphasize the "inner" nature of this solver. */
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2));
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2));
        PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix));

        /* Set DM for new solver */
        PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
        PetscCall(KSPSetDM(kspInner, dmInner));
        PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE));

        /* Defaults to PCKSP as preconditioner */
        PetscCall(KSPGetPC(kspInner, &pcInner));
        PetscCall(PCSetType(pcInner, PCKSP));
        PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp));
      } else {
        /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or
          * PCFieldSplitSetIS_FieldSplit. We don't want KSPPREONLY because it makes the Schur complement inexact,
          * preventing Schur complement reduction to be an accurate solve. Usually when an iterative solver is used for
          * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make
          * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used
          * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. */
        PetscCall(KSPSetType(jac->head->ksp, KSPGMRES));
        PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp));
      }
      PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0]));
      PetscCall(KSPSetFromOptions(jac->head->ksp));
      PetscCall(MatSetFromOptions(jac->schur));

      PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg));
      if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */
        KSP kspInner;
        PC  pcInner;

        PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
        PetscCall(KSPGetPC(kspInner, &pcInner));
        PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg));
        if (flg) {
          KSP ksp;

          PetscCall(PCKSPGetKSP(pcInner, &ksp));
          if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE));
        }
      }
      /* if the user gave options with the "upper" prefix, build a separate solver for the upper-triangular factor */
      PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname));
      PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
      if (flg) {
        DM dmInner;

        PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
        PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper));
        PetscCall(KSPSetNestLevel(jac->kspupper, pc->kspnestlevel));
        PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure));
        PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix));
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1));
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1));
        PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
        PetscCall(KSPSetDM(jac->kspupper, dmInner));
        PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE));
        PetscCall(KSPSetFromOptions(jac->kspupper));
        PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0]));
        PetscCall(VecDuplicate(jac->head->x, &jac->head->z));
      } else {
        /* no separate upper solver: reuse the head solver (extra reference for the shared use) */
        jac->kspupper = jac->head->ksp;
        PetscCall(PetscObjectReference((PetscObject)jac->head->ksp));
      }

      if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
      PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur));
      PetscCall(KSPSetNestLevel(jac->kspschur, pc->kspnestlevel));
      PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1));
      if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) {
        PC pcschur;
        PetscCall(KSPGetPC(jac->kspschur, &pcschur));
        PetscCall(PCSetType(pcschur, PCNONE));
        /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */
      } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) {
        if (jac->schurfactorization != PC_FIELDSPLIT_SCHUR_FACT_FULL || jac->kspupper != jac->head->ksp) PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
      }
      PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
      /* the Schur solver inherits the options prefix of the second split */
      PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix));
      PetscCall(KSPSetOptionsPrefix(jac->kspschur, Dprefix));
      /* propagate DM */
      {
        DM sdm;
        PetscCall(KSPGetDM(jac->head->next->ksp, &sdm));
        if (sdm) {
          PetscCall(KSPSetDM(jac->kspschur, sdm));
          PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE));
        }
      }
      /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
      /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */
      PetscCall(KSPSetFromOptions(jac->kspschur));
    }
    PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY));
    if (issym) PetscCall(MatSetOption(jac->schur, MAT_SYMMETRIC, PETSC_TRUE));
    if (isspd) PetscCall(MatSetOption(jac->schur, MAT_SPD, PETSC_TRUE));

    /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */
    PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname));
    PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, &LSC_L));
    if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, &LSC_L));
    if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", LSC_L));
    PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname));
    PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, &LSC_L));
    if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, &LSC_L));
    if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", LSC_L));
  } else if (jac->type == PC_COMPOSITE_GKB) {
    PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields");
    ilink = jac->head;
    PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->is, ilink->next->is, MAT_INITIAL_MATRIX, &jac->B));
    /* Create work vectors for GKB algorithm */
    PetscCall(VecDuplicate(ilink->x, &jac->u));
    PetscCall(VecDuplicate(ilink->x, &jac->Hu));
    PetscCall(VecDuplicate(ilink->x, &jac->w2));
    PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->C));
    ilink = ilink->next;
    /* Create work vectors for GKB algorithm */
    PetscCall(VecDuplicate(ilink->x, &jac->v));
    PetscCall(VecDuplicate(ilink->x, &jac->d));
    PetscCall(VecDuplicate(ilink->x, &jac->w1));
    PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu));
    PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz));

    ilink = jac->head;
    PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H));
    if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
    /* Create gkb_monitor context */
    if (jac->gkbmonitor) {
      PetscInt tablevel;
      /* NOTE(review): viewer is created on PETSC_COMM_WORLD rather than
         PetscObjectComm((PetscObject)pc) — confirm this is intended when the PC
         lives on a subcommunicator */
      PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer));
      PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII));
      PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel));
      PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1));
    }
  } else {
    /* set up the individual splits' PCs */
    i     = 0;
    ilink = jac->head;
    while (ilink) {
      PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i]));
      /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
      if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
      i++;
      ilink = ilink->next;
    }
  }

  /* Set coordinates to the sub PC objects whenever these are set */
  if (jac->coordinates_set) {
    PC pc_coords;
    if (jac->type == PC_COMPOSITE_SCHUR) {
      // Head is first block.
      PetscCall(KSPGetPC(jac->head->ksp, &pc_coords));
      PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords));
      // Second one is Schur block, but its KSP object is in kspschur.
      PetscCall(KSPGetPC(jac->kspschur, &pc_coords));
      PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords));
    } else if (jac->type == PC_COMPOSITE_GKB) {
      /* NOTE(review): "Fieldpslit" typo in the runtime info string below; left untouched here
         because this edit changes comments only */
      PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB Fieldpslit preconditioner\n"));
    } else {
      ilink = jac->head;
      while (ilink) {
        PetscCall(KSPGetPC(ilink->ksp, &pc_coords));
        PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords));
        ilink = ilink->next;
      }
    }
  }

  jac->suboptionsset = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1107 
/*
  PCSetUpOnBlocks_FieldSplit_Schur - Runs KSPSetUp()/KSPSetUpOnBlocks() on the solvers
  used by the Schur-complement variant: the upper solver (when distinct from the head
  solver and a FULL factorization is used), the head (A00) solver, and the Schur solver.

  For the PC_FIELDSPLIT_SCHUR_PRE_FULL + FULL-factorization + shared-upper-solver case
  it instead pre-allocates a dense work matrix, composed on jac->schur under the key
  "AinvB", sized with one extra column beyond the number of columns of jac->B.
*/
static PetscErrorCode PCSetUpOnBlocks_FieldSplit_Schur(PC pc)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head;
  KSP               kspA = ilinkA->ksp, kspUpper = jac->kspupper;

  PetscFunctionBegin;
  if (jac->schurfactorization == PC_FIELDSPLIT_SCHUR_FACT_FULL && kspUpper != kspA) {
    PetscCall(KSPSetUp(kspUpper));
    PetscCall(KSPSetUpOnBlocks(kspUpper));
  }
  PetscCall(KSPSetUp(kspA));
  PetscCall(KSPSetUpOnBlocks(kspA));
  if (jac->schurpre != PC_FIELDSPLIT_SCHUR_PRE_FULL) {
    PetscCall(KSPSetUp(jac->kspschur));
    PetscCall(KSPSetUpOnBlocks(jac->kspschur));
  } else if (kspUpper == kspA && jac->schurfactorization == PC_FIELDSPLIT_SCHUR_FACT_FULL) {
    Mat          A;
    PetscInt     m, M, N;
    VecType      vtype;
    PetscMemType mtype;
    PetscScalar *array;

    PetscCall(MatGetSize(jac->B, &M, &N));
    PetscCall(MatGetLocalSize(jac->B, &m, NULL));
    PetscCall(MatGetVecType(jac->B, &vtype));
    /* get/restore pair is used solely to discover the memory type (host/CUDA/HIP) of the work vector */
    PetscCall(VecGetArrayAndMemType(ilinkA->x, &array, &mtype));
    PetscCall(VecRestoreArrayAndMemType(ilinkA->x, &array));
    PetscCall(PetscObjectQuery((PetscObject)jac->schur, "AinvB", (PetscObject *)&A));
    if (!A) {
      /* allocate the backing buffer in the same memory space as the work vector */
      if (PetscMemTypeHost(mtype) || (!PetscDefined(HAVE_CUDA) && !PetscDefined(HAVE_HIP))) PetscCall(PetscMalloc1(m * (N + 1), &array));
#if PetscDefined(HAVE_CUDA)
      else if (PetscMemTypeCUDA(mtype)) PetscCallCUDA(cudaMalloc((void **)&array, sizeof(PetscScalar) * m * (N + 1)));
#endif
#if PetscDefined(HAVE_HIP)
      else if (PetscMemTypeHIP(mtype)) PetscCallHIP(hipMalloc((void **)&array, sizeof(PetscScalar) * m * (N + 1)));
#endif
      /* NOTE(review): `array` is handed to the dense Mat as a user-provided buffer;
         confirm who frees it when "AinvB" is destroyed, otherwise this allocation leaks */
      PetscCall(MatCreateDenseFromVecType(PetscObjectComm((PetscObject)jac->schur), vtype, m, PETSC_DECIDE, M, N + 1, -1, array, &A)); // number of columns of the Schur complement plus one
      /* composing on jac->schur keeps a reference, so the local handle can be dropped */
      PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", (PetscObject)A));
      PetscCall(MatDestroy(&A));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1152 
1153 static PetscErrorCode PCSetUpOnBlocks_FieldSplit(PC pc)
1154 {
1155   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1156   PC_FieldSplitLink ilink = jac->head;
1157 
1158   PetscFunctionBegin;
1159   while (ilink) {
1160     PetscCall(KSPSetUp(ilink->ksp));
1161     PetscCall(KSPSetUpOnBlocks(ilink->ksp));
1162     ilink = ilink->next;
1163   }
1164   PetscFunctionReturn(PETSC_SUCCESS);
1165 }
1166 
1167 static PetscErrorCode PCSetUpOnBlocks_FieldSplit_GKB(PC pc)
1168 {
1169   PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
1170   PC_FieldSplitLink ilinkA = jac->head;
1171   KSP               ksp    = ilinkA->ksp;
1172 
1173   PetscFunctionBegin;
1174   PetscCall(KSPSetUp(ksp));
1175   PetscCall(KSPSetUpOnBlocks(ksp));
1176   PetscFunctionReturn(PETSC_SUCCESS);
1177 }
1178 
1179 static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y)
1180 {
1181   PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
1182   PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
1183   KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;
1184   Mat               AinvB = NULL;
1185   PetscInt          N, P;
1186 
1187   PetscFunctionBegin;
1188   switch (jac->schurfactorization) {
1189   case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
1190     /* [A00 0; 0 -S], positive definite, suitable for MINRES */
1191     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1192     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1193     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1194     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1195     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1196     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1197     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1198     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1199     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1200     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1201     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1202     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1203     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1204     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1205     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1206     PetscCall(VecScale(ilinkD->y, jac->schurscale));
1207     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1208     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1209     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1210     break;
1211   case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
1212     /* [A00 0; A10 S], suitable for left preconditioning */
1213     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1214     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1215     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1216     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1217     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1218     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1219     PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1220     PetscCall(VecScale(ilinkD->x, -1.));
1221     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1222     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1223     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1224     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1225     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1226     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1227     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1228     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1229     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1230     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1231     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1232     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1233     break;
1234   case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
1235     /* [A00 A01; 0 S], suitable for right preconditioning */
1236     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1237     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1238     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1239     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1240     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1241     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1242     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1243     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1244     PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1245     PetscCall(VecScale(ilinkA->x, -1.));
1246     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1247     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1248     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1249     PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1250     PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1251     PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1252     PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1253     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1254     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1255     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1256     break;
1257   case PC_FIELDSPLIT_SCHUR_FACT_FULL:
1258     /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */
1259     PetscCall(MatGetSize(jac->B, NULL, &P));
1260     N = P;
1261     PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1262     PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1263     PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1264     if (kspUpper == kspA) {
1265       PetscCall(PetscObjectQuery((PetscObject)jac->schur, "AinvB", (PetscObject *)&AinvB));
1266       if (AinvB) {
1267         PetscCall(MatGetSize(AinvB, NULL, &N));
1268         if (N > P) { // first time PCApply_FieldSplit_Schur() is called
1269           PetscMemType mtype;
1270           Vec          c = NULL;
1271           PetscScalar *array;
1272           PetscInt     m, M;
1273 
1274           PetscCall(MatGetSize(jac->B, &M, NULL));
1275           PetscCall(MatGetLocalSize(jac->B, &m, NULL));
1276           PetscCall(MatDenseGetArrayAndMemType(AinvB, &array, &mtype));
1277           if (PetscMemTypeHost(mtype) || (!PetscDefined(HAVE_CUDA) && !PetscDefined(HAVE_HIP))) PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c));
1278 #if PetscDefined(HAVE_CUDA)
1279           else if (PetscMemTypeCUDA(mtype)) PetscCall(VecCreateMPICUDAWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c));
1280 #endif
1281 #if PetscDefined(HAVE_HIP)
1282           else if (PetscMemTypeHIP(mtype)) PetscCall(VecCreateMPIHIPWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c));
1283 #endif
1284           PetscCall(MatDenseRestoreArrayAndMemType(AinvB, &array));
1285           PetscCall(VecCopy(ilinkA->x, c));
1286           PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
1287           PetscCall(KSPSetOperators(jac->kspschur, jac->schur, jac->schur_user));
1288           PetscCall(VecCopy(c, ilinkA->y)); // retrieve the solution as the last column of the composed Mat
1289           PetscCall(VecDestroy(&c));
1290         }
1291       }
1292     }
1293     if (N == P) PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y));
1294     PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y));
1295     PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1296     PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1297     PetscCall(VecScale(ilinkD->x, -1.0));
1298     PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1299     PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1300 
1301     PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1302     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1303     PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1304     PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1305     PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1306     PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1307     PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1308 
1309     if (kspUpper == kspA) {
1310       if (!AinvB) {
1311         PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y));
1312         PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
1313         PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1314         PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1315         PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1316         PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1317       } else PetscCall(MatMultAdd(AinvB, ilinkD->y, ilinkA->y, ilinkA->y));
1318     } else {
1319       PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1320       PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1321       PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1322       PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1323       PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1324       PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z));
1325       PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z));
1326       PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1327       PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
1328     }
1329     PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1330     PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1331     PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1332   }
1333   PetscFunctionReturn(PETSC_SUCCESS);
1334 }
1335 
1336 /*
1337   PCFieldSplitCreateWorkMats_Private - Allocate per-field dense work matrices for multi-RHS
1338 
1339   Input Parameters:
1340 + pc - the PC context
1341 - X  - matrix to copy column-layout from
1342 
1343   Notes:
1344   If matrices already exist with correct column count, they are reused.
1345   If column count changed, old matrices are destroyed and new ones created.
1346 */
1347 static PetscErrorCode PCFieldSplitCreateWorkMats_Private(PC pc, Mat X)
1348 {
1349   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1350   PC_FieldSplitLink ilink = jac->head;
1351   PetscInt          mx, Mx, my, My, N;
1352 
1353   PetscFunctionBegin;
1354   while (ilink) {
1355     /* check if reallocation needed (previous allocation with wrong column count) */
1356     if (ilink->X) {
1357       PetscCall(MatGetSize(ilink->X, NULL, &N));
1358       if (N != X->cmap->N) {
1359         PetscCall(MatDestroy(&ilink->X));
1360         PetscCall(MatDestroy(&ilink->Y));
1361       }
1362     }
1363     /* create if needed */
1364     if (!ilink->X) {
1365       VecType xtype, ytype;
1366 
1367       PetscCall(VecGetType(ilink->x, &xtype));
1368       PetscCall(VecGetType(ilink->y, &ytype));
1369       PetscCall(VecGetLocalSize(ilink->x, &mx));
1370       PetscCall(VecGetSize(ilink->x, &Mx));
1371       PetscCall(VecGetLocalSize(ilink->y, &my));
1372       PetscCall(VecGetSize(ilink->y, &My));
1373       /* use default lda */
1374       PetscCall(MatCreateDenseFromVecType(PetscObjectComm((PetscObject)pc), xtype, mx, X->cmap->n, Mx, X->cmap->N, -1, NULL, &ilink->X));
1375       PetscCall(MatCreateDenseFromVecType(PetscObjectComm((PetscObject)pc), ytype, my, X->cmap->n, My, X->cmap->N, -1, NULL, &ilink->Y));
1376     }
1377     ilink = ilink->next;
1378   }
1379   PetscFunctionReturn(PETSC_SUCCESS);
1380 }
1381 
/*
  PCApplyTranspose_FieldSplit_Schur - Apply the transpose of the Schur-factorization preconditioner, y = M^{-T} x

  Mirrors PCApply_FieldSplit_Schur() with every solve replaced by its transpose
  (KSPSolveTranspose()/MatMultTranspose()); consequently the LOWER and UPPER cases
  exchange their sweep order, since the transpose of a lower factor is upper.
*/
static PetscErrorCode PCApplyTranspose_FieldSplit_Schur(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; /* A00 split and Schur-complement split */
  KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;

  PetscFunctionBegin;
  switch (jac->schurfactorization) {
  case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
    /* [A00 0; 0 -S], positive definite, suitable for MINRES */
    /* both forward scatters are started before either is finished so their communication overlaps */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    /* overlap the reverse scatter of the A00 result with the Schur solve below */
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* tab-level bump so the nested Schur KSP output is indented below the outer solver */
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScale(ilinkD->y, jac->schurscale));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* transpose of [A00 A01; 0 S]: A00^{-T} solve first, then propagate with B^T and solve S^{-T}
       (the sweep order of the non-transposed LOWER case) */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.));
    /* x_D - B^T A00^{-T} x_A, assembled by ADD_VALUES on top of the negated product */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* transpose of [A00 0; A10 S]: S^{-T} solve first, then propagate with C^T and solve A00^{-T}
       (the sweep order of the non-transposed UPPER case) */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
    PetscCall(VecScale(ilinkA->x, -1.));
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* transpose of the full factorization: the three factors are applied in reverse
       order, each transposed: upper^T, then diag^T (A00 and S), then lower^T */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspUpper, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.0));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* overlap the reverse scatter of the Schur solution with the final A00 sweep */
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

    if (kspLower == kspA) {
      /* lower and A00 solvers coincide: fold C^T correction into the rhs and solve once */
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->y));
      PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    } else {
      /* distinct lower solver: solve A00^T, then apply the lower correction separately */
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
      PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(KSPSolveTranspose(kspLower, ilinkA->x, ilinkA->z));
      PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->z));
      PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
    }
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1500 
/* Solve one additive split: restrict xx onto the split, run the split's KSP, and
   accumulate (ADD_VALUES) the sub-solution back into yy. The sub-expressions are
   chained with || so the first nonzero PetscErrorCode short-circuits the rest.
   NOTE: the macro references the variable `pc` from the caller's scope (for
   KSPCheckSolve()), so it can only be used inside PC apply routines. */
#define FieldSplitSplitSolveAdd(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \
                    KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE)))
1505 
/*
  PCApply_FieldSplit - Apply the fieldsplit preconditioner y = M^{-1} x for the
  additive, multiplicative, and symmetric-multiplicative compositions
  (the Schur and GKB variants have their own apply routines).
*/
static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          cnt, bs; /* cnt indexes jac->Afield in the multiplicative sweeps; bs only used for the strided default split */

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    PetscBool matnest;

    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
    if (jac->defaultsplit && !matnest) {
      /* default split on a strided (non-nest) matrix: all splits share the layout,
         so one VecStrideGatherAll/ScatterAll pair replaces the per-split scatters */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* general additive: accumulate each split's contribution into y */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) {
    /* specialized two-split multiplicative path */
    PetscCall(VecSet(y, 0.0));
    /* solve on first block for first block variables */
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));

    /* compute the residual only onto second block variables using first block variables */
    PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x));
    ilink = ilink->next;
    /* negate the coupling term, then add the restriction of x (ADD_VALUES) to form the local residual */
    PetscCall(VecScale(ilink->x, -1.0));
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));

    /* solve on second block variables */
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
    if (!jac->w1) {
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    /* first split solved directly from x; later splits use the updated y */
    PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
    cnt = 1;
    /* forward Gauss-Seidel-like sweep over the remaining splits */
    while (ilink->next) {
      ilink = ilink->next;
      /* compute the residual only over the part of the vector needed */
      PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x));
      PetscCall(VecScale(ilink->x, -1.0));
      PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
      PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
      PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    }
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* backward sweep; cnt -= 2 skips the split just processed at the end of the forward sweep */
      cnt -= 2;
      while (ilink->previous) {
        ilink = ilink->previous;
        /* compute the residual only over the part of the vector needed */
        PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x));
        PetscCall(VecScale(ilink->x, -1.0));
        PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      }
    }
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1606 
1607 static PetscErrorCode PCMatApply_FieldSplit(PC pc, Mat X, Mat Y)
1608 {
1609   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1610   PC_FieldSplitLink ilink = jac->head;
1611   PetscInt          cnt;
1612 
1613   PetscFunctionBegin;
1614   /* create working matrices with the correct number of columns */
1615   PetscCall(PCFieldSplitCreateWorkMats_Private(pc, X));
1616   if (jac->type == PC_COMPOSITE_ADDITIVE) {
1617     PetscCall(MatZeroEntries(Y));
1618     while (ilink) {
1619       PetscCall(MatDenseScatter_Private(ilink->sctx, X, ilink->X, INSERT_VALUES, SCATTER_FORWARD));
1620       PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
1621       PetscCall(KSPMatSolve(ilink->ksp, ilink->X, ilink->Y));
1622       PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
1623       PetscCall(MatDenseScatter_Private(ilink->sctx, ilink->Y, Y, ADD_VALUES, SCATTER_REVERSE));
1624       ilink = ilink->next;
1625     }
1626   } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) {
1627     PetscCall(MatZeroEntries(Y));
1628     PetscCall(MatDenseScatter_Private(ilink->sctx, X, ilink->X, INSERT_VALUES, SCATTER_FORWARD));
1629     PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
1630     PetscCall(KSPMatSolve(ilink->ksp, ilink->X, ilink->Y));
1631     PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
1632     PetscCall(MatDenseScatter_Private(ilink->sctx, ilink->Y, Y, ADD_VALUES, SCATTER_REVERSE));
1633 
1634     /* compute the residual only onto second block variables using first block variables */
1635     PetscCall(MatMatMult(jac->Afield[1], ilink->Y, MAT_REUSE_MATRIX, PETSC_DETERMINE, &ilink->next->X));
1636     ilink = ilink->next;
1637     PetscCall(MatScale(ilink->X, -1.0));
1638     PetscCall(MatDenseScatter_Private(ilink->sctx, X, ilink->X, ADD_VALUES, SCATTER_FORWARD));
1639 
1640     /* solve on second block variables */
1641     PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
1642     PetscCall(KSPMatSolve(ilink->ksp, ilink->X, ilink->Y));
1643     PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
1644     PetscCall(MatDenseScatter_Private(ilink->sctx, ilink->Y, Y, ADD_VALUES, SCATTER_REVERSE));
1645   } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
1646     /* general multiplicative with any number of splits */
1647     PetscCall(MatZeroEntries(Y));
1648     /* first split */
1649     PetscCall(MatDenseScatter_Private(ilink->sctx, X, ilink->X, INSERT_VALUES, SCATTER_FORWARD));
1650     PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
1651     PetscCall(KSPMatSolve(ilink->ksp, ilink->X, ilink->Y));
1652     PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
1653     PetscCall(MatDenseScatter_Private(ilink->sctx, ilink->Y, Y, ADD_VALUES, SCATTER_REVERSE));
1654     cnt = 1;
1655     /* forward sweep */
1656     while (ilink->next) {
1657       ilink = ilink->next;
1658       /* compute the residual only over the part of the vector needed */
1659       PetscCall(MatMatMult(jac->Afield[cnt++], Y, MAT_REUSE_MATRIX, PETSC_DETERMINE, &ilink->X));
1660       PetscCall(MatScale(ilink->X, -1.0));
1661       PetscCall(MatDenseScatter_Private(ilink->sctx, X, ilink->X, ADD_VALUES, SCATTER_FORWARD));
1662       PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
1663       PetscCall(KSPMatSolve(ilink->ksp, ilink->X, ilink->Y));
1664       PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
1665       PetscCall(MatDenseScatter_Private(ilink->sctx, ilink->Y, Y, ADD_VALUES, SCATTER_REVERSE));
1666     }
1667     /* backward sweep for symmetric multiplicative */
1668     if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
1669       cnt -= 2;
1670       while (ilink->previous) {
1671         ilink = ilink->previous;
1672         /* compute the residual only over the part of the vector needed */
1673         PetscCall(MatMatMult(jac->Afield[cnt--], Y, MAT_REUSE_MATRIX, PETSC_DETERMINE, &ilink->X));
1674         PetscCall(MatScale(ilink->X, -1.0));
1675         PetscCall(MatDenseScatter_Private(ilink->sctx, X, ilink->X, ADD_VALUES, SCATTER_FORWARD));
1676         PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
1677         PetscCall(KSPMatSolve(ilink->ksp, ilink->X, ilink->Y));
1678         PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->X, ilink->Y, NULL));
1679         PetscCall(MatDenseScatter_Private(ilink->sctx, ilink->Y, Y, ADD_VALUES, SCATTER_REVERSE));
1680       }
1681     }
1682   } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCMatApply() not implemented for this fieldsplit type");
1683   PetscFunctionReturn(PETSC_SUCCESS);
1684 }
1685 
/*
   PCApply_FieldSplit_GKB - Applies the preconditioner via a Golub-Kahan bidiagonalization (GKB)
   iteration for a two-field saddle-point system, following [Ar13] (see inline references below).

   The right-hand side x is scattered into the primal split (ilinkA, first split) and the dual
   split (ilinkD, second split).  The primal operator is regularized as H = A + nu*B*B' with
   nu = jac->gkbnu, or nu = 1 when no augmented Lagrangian term is used.  The bidiagonalization
   is iterated until a lower-bound error estimate, accumulated over a sliding window of
   jac->gkbdelay values of z, drops below jac->gkbtol, or jac->gkbmaxit iterations are reached;
   the two solution pieces are then scattered back into y.
*/
static PetscErrorCode PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; /* primal and dual splits, in that order */
  KSP               ksp = ilinkA->ksp;                         /* inner solver for the regularized operator H */
  Vec               u, v, Hu, d, work1, work2;
  PetscScalar       alpha, z, nrmz2, *vecz;
  PetscReal         lowbnd, nu, beta;
  PetscInt          j, iterGKB;

  PetscFunctionBegin;
  /* Gather the two pieces of the right-hand side into the split work vectors */
  PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));

  u     = jac->u;
  v     = jac->v;
  Hu    = jac->Hu;
  d     = jac->d;
  work1 = jac->w1;
  work2 = jac->w2;
  vecz  = jac->vecz; /* window of the most recent jac->gkbdelay values of z, used for the lower-bound estimate */

  /* Change RHS to comply with matrix regularization H = A + nu*B*B' */
  /* Add q = q + nu*B*b */
  if (jac->gkbnu) {
    nu = jac->gkbnu;
    PetscCall(VecScale(ilinkD->x, jac->gkbnu));
    PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */
  } else {
    /* Situation when no augmented Lagrangian is used. Then we set inner  */
    /* matrix N = I in [Ar13], and thus nu = 1.                           */
    nu = 1;
  }

  /* Transform rhs from [q,tilde{b}] to [0,b] */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y));
  PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1));
  PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x        */

  /* First step of algorithm */
  PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c)*/
  KSPCheckDot(ksp, beta);
  beta = PetscSqrtReal(nu) * beta;
  PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c      */
  PetscCall(MatMult(jac->B, v, work2));          /* u = H^{-1}*B*v      */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(KSPSolve(ksp, work2, u));
  PetscCall(KSPCheckSolve(ksp, pc, u));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u      */
  PetscCall(VecDot(Hu, u, &alpha));
  KSPCheckDot(ksp, alpha);
  PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
  alpha = PetscSqrtReal(PetscAbsScalar(alpha));
  PetscCall(VecScale(u, 1.0 / alpha));
  PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* d = v/alpha         */

  z       = beta / alpha;
  vecz[1] = z;

  /* Computation of first iterate x(1) and p(1) */
  PetscCall(VecAXPY(ilinkA->y, z, u));
  PetscCall(VecCopy(d, ilinkD->y));
  PetscCall(VecScale(ilinkD->y, -z));

  iterGKB = 1;
  lowbnd  = 2 * jac->gkbtol; /* ensure at least one pass through the loop below */
  if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));

  while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) {
    iterGKB += 1;
    PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */
    PetscCall(VecAXPBY(v, nu, -alpha, work1));
    PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = ||v||/sqrt(nu)     */
    beta = beta / PetscSqrtReal(nu);
    PetscCall(VecScale(v, 1.0 / beta));
    PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */
    PetscCall(MatMult(jac->H, u, Hu));
    PetscCall(VecAXPY(work2, -beta, Hu));
    PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(KSPSolve(ksp, work2, u));
    PetscCall(KSPCheckSolve(ksp, pc, u));
    PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u            */
    PetscCall(VecDot(Hu, u, &alpha));
    KSPCheckDot(ksp, alpha);
    PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
    alpha = PetscSqrtReal(PetscAbsScalar(alpha));
    PetscCall(VecScale(u, 1.0 / alpha));

    z       = -beta / alpha * z; /* z <- -beta/alpha*z    */
    vecz[0] = z;

    /* Computation of new iterate x(i+1) and p(i+1) */
    PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */
    PetscCall(VecAXPY(ilinkA->y, z, u));                   /* r = r + z*u          */
    PetscCall(VecAXPY(ilinkD->y, -z, d));                  /* p = p - z*d          */
    PetscCall(MatMult(jac->H, ilinkA->y, Hu));             /* nrmz2 = r'*H*r, used to scale the lower bound */
    PetscCall(VecDot(Hu, ilinkA->y, &nrmz2));

    /* Compute Lower Bound estimate */
    if (iterGKB > jac->gkbdelay) {
      lowbnd = 0.0;
      for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]);
      lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2));
    }

    /* Shift the window: oldest z values move toward the end of vecz, vecz[0] is overwritten next iteration */
    for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2];
    if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));
  }

  /* Scatter the two solution pieces back into the full vector y */
  PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1808 
/* Scatter xx into the split's work vector, apply the split solver's KSPSolveTranspose(), and ADD
   the result back into yy.  Written as a single expression chained with || so that the first
   nonzero PetscErrorCode short-circuits the remaining calls; note it uses the variable pc from
   the enclosing scope for KSPCheckSolve(). */
#define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \
                    KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \
                    VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE)))
1813 
/*
  PCApplyTranspose_FieldSplit - Applies the transpose of the fieldsplit preconditioner, y = P^T x.

  For PC_COMPOSITE_ADDITIVE each split's KSPSolveTranspose() is applied independently and the
  results are combined; a fast strided gather/scatter path is used when the splits are the
  default interlaced fields and the matrix is not MATNEST.  For the multiplicative variants the
  splits are swept in the reverse order of the forward apply (last split first), updating the
  residual with MatMultTranspose(pc->mat) between split solves; the symmetric-multiplicative
  variant performs a forward sweep followed by a backward sweep.
*/
static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    PetscBool matnest;

    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
    if (jac->defaultsplit && !matnest) {
      /* Fast path: all splits are interlaced strided fields, so a single
         VecStrideGatherAll/VecStrideScatterAll replaces the per-split scatters */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* General additive path: each split scatters, solves transposed, and accumulates into y */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else {
    /* Multiplicative variants need two residual work vectors, created lazily on first use */
    if (!jac->w1) {
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* Forward sweep over the splits ... */
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->next) {
        ilink = ilink->next;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); /* w2 = x - A^T y (current residual) */
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
      /* ... then a backward sweep, making the overall operation symmetric */
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    } else {
      /* Plain multiplicative transpose: sweep from the LAST split back to the first */
      while (ilink->next) { /* get to last entry in linked list */
        ilink = ilink->next;
      }
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1881 
1882 static PetscErrorCode PCReset_FieldSplit(PC pc)
1883 {
1884   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
1885   PC_FieldSplitLink ilink = jac->head, next;
1886 
1887   PetscFunctionBegin;
1888   while (ilink) {
1889     PetscCall(KSPDestroy(&ilink->ksp));
1890     PetscCall(VecDestroy(&ilink->x));
1891     PetscCall(VecDestroy(&ilink->y));
1892     PetscCall(VecDestroy(&ilink->z));
1893     PetscCall(MatDestroy(&ilink->X));
1894     PetscCall(MatDestroy(&ilink->Y));
1895     PetscCall(VecScatterDestroy(&ilink->sctx));
1896     PetscCall(ISDestroy(&ilink->is));
1897     PetscCall(ISDestroy(&ilink->is_col));
1898     PetscCall(PetscFree(ilink->splitname));
1899     PetscCall(PetscFree(ilink->fields));
1900     PetscCall(PetscFree(ilink->fields_col));
1901     next = ilink->next;
1902     PetscCall(PetscFree(ilink));
1903     ilink = next;
1904   }
1905   jac->head = NULL;
1906   PetscCall(PetscFree2(jac->x, jac->y));
1907   if (jac->mat && jac->mat != jac->pmat) {
1908     PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat));
1909   } else if (jac->mat) {
1910     jac->mat = NULL;
1911   }
1912   if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat));
1913   if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield));
1914   jac->nsplits = 0;
1915   PetscCall(VecDestroy(&jac->w1));
1916   PetscCall(VecDestroy(&jac->w2));
1917   if (jac->schur) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", NULL));
1918   PetscCall(MatDestroy(&jac->schur));
1919   PetscCall(MatDestroy(&jac->schurp));
1920   PetscCall(MatDestroy(&jac->schur_user));
1921   PetscCall(KSPDestroy(&jac->kspschur));
1922   PetscCall(KSPDestroy(&jac->kspupper));
1923   PetscCall(MatDestroy(&jac->B));
1924   PetscCall(MatDestroy(&jac->C));
1925   PetscCall(MatDestroy(&jac->H));
1926   PetscCall(VecDestroy(&jac->u));
1927   PetscCall(VecDestroy(&jac->v));
1928   PetscCall(VecDestroy(&jac->Hu));
1929   PetscCall(VecDestroy(&jac->d));
1930   PetscCall(PetscFree(jac->vecz));
1931   PetscCall(PetscViewerDestroy(&jac->gkbviewer));
1932   jac->isrestrict = PETSC_FALSE;
1933   PetscFunctionReturn(PETSC_SUCCESS);
1934 }
1935 
1936 static PetscErrorCode PCDestroy_FieldSplit(PC pc)
1937 {
1938   PetscFunctionBegin;
1939   PetscCall(PCReset_FieldSplit(pc));
1940   PetscCall(PetscFree(pc->data));
1941   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
1942   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL));
1943   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL));
1944   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL));
1945   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL));
1946   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL));
1947   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL));
1948   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1949   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
1950   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
1951   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
1952   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
1953   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
1954   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
1955   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
1956   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
1957   PetscFunctionReturn(PETSC_SUCCESS);
1958 }
1959 
/*
  PCSetFromOptions_FieldSplit - Processes options-database settings for PCFIELDSPLIT: block size,
  composition type, Amat/Pmat block extraction, saddle-point detection, runtime split
  definitions, and the Schur- and GKB-specific parameters.
*/
static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems PetscOptionsObject)
{
  PetscInt        bs;
  PetscBool       flg;
  PC_FieldSplit  *jac = (PC_FieldSplit *)pc->data;
  PCCompositeType ctype;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options");
  PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL));
  PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg));
  if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs));
  /* -pc_use_amat (pc->useAmat) provides the default for both Amat-extraction flags */
  jac->diag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL));
  jac->offdiag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL));
  PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL));
  PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */
  PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg));
  if (flg) PetscCall(PCFieldSplitSetType(pc, ctype));
  /* Only setup fields once */
  if (jac->bs > 0 && jac->nsplits == 0) {
    /* only allow user to set fields from command line.
       otherwise user can set them in PCFieldSplitSetDefaults() */
    PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
    if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
  }
  if (jac->type == PC_COMPOSITE_SCHUR) {
    /* Accept the deprecated option name first so the current name takes precedence if both are given */
    PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg));
    if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n"));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL));
    PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL));
  } else if (jac->type == PC_COMPOSITE_GKB) {
    PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitSetGKBTol", jac->gkbtol, &jac->gkbtol, NULL));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitSetGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL));
    PetscCall(PetscOptionsBoundedReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitSetGKBNu", jac->gkbnu, &jac->gkbnu, NULL, 0.0));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", "Maximum allowed number of iterations", "PCFieldSplitSetGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL));
    PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL));
  }
  /*
    In the initial call to this routine the sub-solver data structures do not exist so we cannot call KSPSetFromOptions() on them yet.
    But after the initial setup of ALL the layers of sub-solvers is completed we do want to call KSPSetFromOptions() on the sub-solvers every time it
    is called on the outer solver in case changes were made in the options database

    But even after PCSetUp_FieldSplit() is called all the options inside the inner levels of sub-solvers may still not have been set thus we only call the KSPSetFromOptions()
    if we know that the entire stack of sub-solvers below this have been complete instantiated, we check this by seeing if any solver iterations are complete.
    Without this extra check test p2p1fetidp_olof_full and others fail with incorrect matrix types.

    There could be a negative side effect of calling the KSPSetFromOptions() below.

    If one captured the PetscObjectState of the options database one could skip these calls if the database has not changed from the previous call
  */
  if (jac->issetup) {
    PC_FieldSplitLink ilink = jac->head;
    if (jac->type == PC_COMPOSITE_SCHUR) {
      if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper));
      if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur));
    }
    while (ilink) {
      if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp));
      ilink = ilink->next;
    }
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
2027 
2028 static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
2029 {
2030   PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
2031   PC_FieldSplitLink ilink, next = jac->head;
2032   char              prefix[128];
2033   PetscInt          i;
2034   PetscLogEvent     nse;
2035 
2036   PetscFunctionBegin;
2037   if (jac->splitdefined) {
2038     PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
2039     PetscFunctionReturn(PETSC_SUCCESS);
2040   }
2041   for (i = 0; i < n; i++) PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]);
2042   PetscCall(PetscNew(&ilink));
2043   if (splitname) {
2044     PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
2045   } else {
2046     PetscCall(PetscMalloc1(3, &ilink->splitname));
2047     PetscCall(PetscSNPrintf(ilink->splitname, 2, "%" PetscInt_FMT, jac->nsplits));
2048   }
2049   PetscCall(PetscMPIIntCast(jac->nsplits, &nse));
2050   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + nse : KSP_Solve_FS_0 + 4; /* Splits greater than 4 logged in 4th split */
2051   PetscCall(PetscMalloc1(n, &ilink->fields));
2052   PetscCall(PetscArraycpy(ilink->fields, fields, n));
2053   PetscCall(PetscMalloc1(n, &ilink->fields_col));
2054   PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n));
2055 
2056   ilink->nfields = n;
2057   ilink->next    = NULL;
2058   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
2059   PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
2060   PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
2061   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
2062   PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
2063 
2064   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
2065   PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
2066 
2067   if (!next) {
2068     jac->head       = ilink;
2069     ilink->previous = NULL;
2070   } else {
2071     while (next->next) next = next->next;
2072     next->next      = ilink;
2073     ilink->previous = next;
2074   }
2075   jac->nsplits++;
2076   PetscFunctionReturn(PETSC_SUCCESS);
2077 }
2078 
/*
  PCFieldSplitSchurGetSubKSP_FieldSplit - Returns a freshly allocated array with the solvers of
  a Schur-complement fieldsplit: entry [0] is the A00 split solver, entry [1] the Schur
  complement solver, and, when the "upper" solver differs from the A00 solver, entry [2] the
  upper solver.  The caller is responsible for freeing the array (not the KSPs).

  For non-Schur composition types *subksp is set to NULL and *n (if requested) to 0.
*/
static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  *subksp = NULL;
  if (n) *n = 0;
  if (jac->type == PC_COMPOSITE_SCHUR) {
    PetscInt nn;

    /* jac->schur is only created in PCSetUp_FieldSplit(), so require setup first */
    PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()");
    PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits);
    nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 1 : 0); /* extra slot only when the upper solver is distinct */
    PetscCall(PetscMalloc1(nn, subksp));
    (*subksp)[0] = jac->head->ksp;
    (*subksp)[1] = jac->kspschur;
    if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper;
    if (n) *n = nn;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2100 
2101 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp)
2102 {
2103   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2104 
2105   PetscFunctionBegin;
2106   PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()");
2107   PetscCall(PetscMalloc1(jac->nsplits, subksp));
2108   PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp));
2109 
2110   (*subksp)[1] = jac->kspschur;
2111   if (n) *n = jac->nsplits;
2112   PetscFunctionReturn(PETSC_SUCCESS);
2113 }
2114 
2115 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
2116 {
2117   PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2118   PetscInt          cnt   = 0;
2119   PC_FieldSplitLink ilink = jac->head;
2120 
2121   PetscFunctionBegin;
2122   PetscCall(PetscMalloc1(jac->nsplits, subksp));
2123   while (ilink) {
2124     (*subksp)[cnt++] = ilink->ksp;
2125     ilink            = ilink->next;
2126   }
2127   PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits);
2128   if (n) *n = jac->nsplits;
2129   PetscFunctionReturn(PETSC_SUCCESS);
2130 }
2131 
2132 /*@
2133   PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`.
2134 
2135   Input Parameters:
2136 + pc  - the preconditioner context
2137 - isy - the index set that defines the indices to which the fieldsplit is to be restricted
2138 
2139   Level: advanced
2140 
2141   Developer Notes:
2142   It seems the resulting `IS`s will not cover the entire space, so
2143   how can they define a convergent preconditioner? Needs explaining.
2144 
2145 .seealso: [](sec_block_matrices), `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
2146 @*/
PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(isy, IS_CLASSID, 2);
  /* Dispatches to PCFieldSplitRestrictIS_FieldSplit() when pc is a PCFIELDSPLIT; no-op otherwise */
  PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2155 
/*
  PCFieldSplitRestrictIS_FieldSplit - Restricts every split's IS to its intersection with isy,
  renumbering the surviving indices into the contiguous numbering induced by isy, and recurses
  into any split whose sub-PC is itself a PCFIELDSPLIT.
*/
static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;
  PetscInt          localsize, size, sizez, i;
  const PetscInt   *ind, *indz;
  PetscInt         *indc, *indcz;
  PetscBool         flg;

  PetscFunctionBegin;
  PetscCall(ISGetLocalSize(isy, &localsize));
  /* Inclusive scan minus the local size = exclusive prefix sum: the global offset of this
     rank's first entry of isy */
  PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy)));
  size -= localsize;
  while (ilink) {
    IS isrl, isr;
    PC subpc;
    /* Local positions (within isy) of the split's indices that survive the restriction */
    PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl));
    PetscCall(ISGetLocalSize(isrl, &localsize));
    PetscCall(PetscMalloc1(localsize, &indc));
    PetscCall(ISGetIndices(isrl, &ind));
    PetscCall(PetscArraycpy(indc, ind, localsize));
    PetscCall(ISRestoreIndices(isrl, &ind));
    PetscCall(ISDestroy(&isrl));
    /* Shift the local positions by the rank offset to obtain global indices */
    for (i = 0; i < localsize; i++) *(indc + i) += size;
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr));
    /* Take one extra reference each for ilink->is and ilink->is_col, then drop the creation
       reference so the two link fields are the only owners of isr */
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is));
    ilink->is = isr;
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is_col));
    ilink->is_col = isr;
    PetscCall(ISDestroy(&isr));
    /* If the split's sub-PC is itself a fieldsplit, restrict its splits recursively using the
       embedding of isy into this split's (already restricted) IS */
    PetscCall(KSPGetPC(ilink->ksp, &subpc));
    PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg));
    if (flg) {
      IS       iszl, isz;
      MPI_Comm comm;
      PetscCall(ISGetLocalSize(ilink->is, &localsize));
      comm = PetscObjectComm((PetscObject)ilink->is);
      PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl));
      /* Same exclusive-prefix-sum offset computation, now relative to the split's IS */
      PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm));
      sizez -= localsize;
      PetscCall(ISGetLocalSize(iszl, &localsize));
      PetscCall(PetscMalloc1(localsize, &indcz));
      PetscCall(ISGetIndices(iszl, &indz));
      PetscCall(PetscArraycpy(indcz, indz, localsize));
      PetscCall(ISRestoreIndices(iszl, &indz));
      PetscCall(ISDestroy(&iszl));
      for (i = 0; i < localsize; i++) *(indcz + i) += sizez;
      PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz));
      PetscCall(PCFieldSplitRestrictIS(subpc, isz));
      PetscCall(ISDestroy(&isz));
    }
    next  = ilink->next;
    ilink = next;
  }
  jac->isrestrict = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2215 
/*
  PCFieldSplitSetIS_FieldSplit - Appends a new split, defined by a general index set, to the
  tail of the linked list of splits and creates its inner solver (default KSPPREONLY) with
  options prefix "<pc prefix>fieldsplit_<splitname>_".

  Does nothing (beyond an informational message) once the splits have been marked as defined.
  If splitname is NULL the current split count is used as the name.  The same IS is used for
  both the row (is) and column (is_col) index sets, each holding its own reference.
*/
static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink, next = jac->head;
  char              prefix[128];
  PetscLogEvent     nse;

  PetscFunctionBegin;
  if (jac->splitdefined) {
    PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(PetscNew(&ilink));
  if (splitname) {
    PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
  } else {
    /* Name the split by its number (up to 7 characters) */
    PetscCall(PetscMalloc1(8, &ilink->splitname));
    PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits));
  }
  PetscCall(PetscMPIIntCast(jac->nsplits, &nse));
  ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + nse : KSP_Solve_FS_0 + 4; /* Splits greater than 4 logged in 4th split */
  /* Reference is twice: once for ilink->is and once for ilink->is_col */
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is));
  ilink->is = is;
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is_col));
  ilink->is_col = is;
  ilink->next   = NULL;
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
  PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
  PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
  PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));

  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
  PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));

  /* Append the new link at the tail of the doubly linked list of splits */
  if (!next) {
    jac->head       = ilink;
    ilink->previous = NULL;
  } else {
    while (next->next) next = next->next;
    next->next      = ilink;
    ilink->previous = next;
  }
  jac->nsplits++;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2264 
2265 /*@
2266   PCFieldSplitSetFields - Sets the fields that define one particular split in `PCFIELDSPLIT`
2267 
2268   Logically Collective
2269 
2270   Input Parameters:
2271 + pc         - the preconditioner context
2272 . splitname  - name of this split, if `NULL` the number of the split is used
2273 . n          - the number of fields in this split
2274 . fields     - the fields in this split
2275 - fields_col - generally the same as `fields`, if it does not match `fields` then the submatrix that is solved for this set of fields comes from an off-diagonal block
2276                of the matrix and `fields_col` provides the column indices for that block
2277 
2278   Options Database Key:
2279 . -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split
2280 
2281   Level: intermediate
2282 
2283   Notes:
  Use `PCFieldSplitSetIS()` to set a general set of indices as a split.
2285 
2286   If the matrix used to construct the preconditioner is `MATNEST` then field i refers to the `is_row[i]` `IS` passed to `MatCreateNest()`.
2287 
2288   If the matrix used to construct the preconditioner is not `MATNEST` then
2289   `PCFieldSplitSetFields()` is for defining fields as strided blocks (based on the block size provided to the matrix with `MatSetBlockSize()` or
2290   to the `PC` with `PCFieldSplitSetBlockSize()`). For example, if the block
2291   size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean
2292   0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x23x56x8.. x12x45x78x....
2293   where the numbered entries indicate what is in the split.
2294 
2295   This function is called once per split (it creates a new split each time).  Solve options
2296   for this split will be available under the prefix `-fieldsplit_SPLITNAME_`.
2297 
2298   `PCFieldSplitSetIS()` does not support having a `fields_col` different from `fields`
2299 
2300   Developer Notes:
2301   This routine does not actually create the `IS` representing the split, that is delayed
2302   until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be
2303   available when this routine is called.
2304 
2305 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()`,
2306           `MatSetBlockSize()`, `MatCreateNest()`
2307 @*/
PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt fields[], const PetscInt fields_col[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* NOTE(review): the manual page above states splitname may be NULL (the split number is then
     used as the name) and PCFieldSplitSetFields_FieldSplit() handles NULL explicitly, yet this
     assertion rejects NULL in debug builds -- confirm which contract is intended */
  PetscAssertPointer(splitname, 2);
  PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname);
  PetscAssertPointer(fields, 4);
  /* No-op for non-PCFIELDSPLIT preconditioners */
  PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2318 
2319 /*@
2320   PCFieldSplitSetDiagUseAmat - set flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2321   the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2322 
2323   Logically Collective
2324 
2325   Input Parameters:
2326 + pc  - the preconditioner object
2327 - flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2328 
2329   Options Database Key:
2330 . -pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks
2331 
2332   Level: intermediate
2333 
2334 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT`
2335 @*/
2336 PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg)
2337 {
2338   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2339   PetscBool      isfs;
2340 
2341   PetscFunctionBegin;
2342   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2343   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2344   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2345   jac->diag_use_amat = flg;
2346   PetscFunctionReturn(PETSC_SUCCESS);
2347 }
2348 
2349 /*@
2350   PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
2351   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2352 
2353   Logically Collective
2354 
2355   Input Parameter:
2356 . pc - the preconditioner object
2357 
2358   Output Parameter:
2359 . flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2360 
2361   Level: intermediate
2362 
2363 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT`
2364 @*/
2365 PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg)
2366 {
2367   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2368   PetscBool      isfs;
2369 
2370   PetscFunctionBegin;
2371   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2372   PetscAssertPointer(flg, 2);
2373   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2374   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2375   *flg = jac->diag_use_amat;
2376   PetscFunctionReturn(PETSC_SUCCESS);
2377 }
2378 
2379 /*@
2380   PCFieldSplitSetOffDiagUseAmat - set flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2381   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2382 
2383   Logically Collective
2384 
2385   Input Parameters:
2386 + pc  - the preconditioner object
2387 - flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2388 
2389   Options Database Key:
2390 . -pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks
2391 
2392   Level: intermediate
2393 
2394 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT`
2395 @*/
2396 PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg)
2397 {
2398   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2399   PetscBool      isfs;
2400 
2401   PetscFunctionBegin;
2402   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2403   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2404   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2405   jac->offdiag_use_amat = flg;
2406   PetscFunctionReturn(PETSC_SUCCESS);
2407 }
2408 
2409 /*@
2410   PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
2411   the sub-matrices associated with each split.  Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2412 
2413   Logically Collective
2414 
2415   Input Parameter:
2416 . pc - the preconditioner object
2417 
2418   Output Parameter:
2419 . flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from
2420 
2421   Level: intermediate
2422 
2423 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT`
2424 @*/
2425 PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg)
2426 {
2427   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2428   PetscBool      isfs;
2429 
2430   PetscFunctionBegin;
2431   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2432   PetscAssertPointer(flg, 2);
2433   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2434   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2435   *flg = jac->offdiag_use_amat;
2436   PetscFunctionReturn(PETSC_SUCCESS);
2437 }
2438 
2439 /*@
2440   PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT`
2441 
2442   Logically Collective
2443 
2444   Input Parameters:
2445 + pc        - the preconditioner context
2446 . splitname - name of this split, if `NULL` the number of the split is used
2447 - is        - the index set that defines the elements in this split
2448 
2449   Level: intermediate
2450 
2451   Notes:
2452   Use `PCFieldSplitSetFields()`, for splits defined by strided `IS` based on the matrix block size or the `is_rows[]` passed into `MATNEST`
2453 
2454   This function is called once per split (it creates a new split each time).  Solve options
  for this split will be available under the prefix `-fieldsplit_SPLITNAME_`.
2456 
2457 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetFields()`
2458 @*/
PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (splitname) PetscAssertPointer(splitname, 2); /* splitname may be NULL: the split number is then used as the name */
  PetscValidHeaderSpecific(is, IS_CLASSID, 3);
  /* Dispatch to the implementation registered under "PCFieldSplitSetIS_C"; no-op for other PC types */
  PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2468 
2469 /*@
2470   PCFieldSplitGetIS - Retrieves the elements for a split as an `IS`
2471 
2472   Logically Collective
2473 
2474   Input Parameters:
2475 + pc        - the preconditioner context
2476 - splitname - name of this split
2477 
2478   Output Parameter:
2479 . is - the index set that defines the elements in this split, or `NULL` if the split is not found
2480 
2481   Level: intermediate
2482 
2483 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`, `PCFieldSplitGetISByIndex()`
2484 @*/
2485 PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is)
2486 {
2487   PetscFunctionBegin;
2488   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2489   PetscAssertPointer(splitname, 2);
2490   PetscAssertPointer(is, 3);
2491   {
2492     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2493     PC_FieldSplitLink ilink = jac->head;
2494     PetscBool         found;
2495 
2496     *is = NULL;
2497     while (ilink) {
2498       PetscCall(PetscStrcmp(ilink->splitname, splitname, &found));
2499       if (found) {
2500         *is = ilink->is;
2501         break;
2502       }
2503       ilink = ilink->next;
2504     }
2505   }
2506   PetscFunctionReturn(PETSC_SUCCESS);
2507 }
2508 
2509 /*@
2510   PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS`
2511 
2512   Logically Collective
2513 
2514   Input Parameters:
2515 + pc    - the preconditioner context
2516 - index - index of this split
2517 
2518   Output Parameter:
2519 . is - the index set that defines the elements in this split
2520 
2521   Level: intermediate
2522 
.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`
2525 @*/
2526 PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is)
2527 {
2528   PetscFunctionBegin;
2529   PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index);
2530   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2531   PetscAssertPointer(is, 3);
2532   {
2533     PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
2534     PC_FieldSplitLink ilink = jac->head;
2535     PetscInt          i     = 0;
2536     PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits);
2537 
2538     while (i < index) {
2539       ilink = ilink->next;
2540       ++i;
2541     }
2542     PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is));
2543   }
2544   PetscFunctionReturn(PETSC_SUCCESS);
2545 }
2546 
2547 /*@
2548   PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the
2549   fieldsplit preconditioner when calling `PCFieldSplitSetFields()`. If not set the matrix block size is used.
2550 
2551   Logically Collective
2552 
2553   Input Parameters:
2554 + pc - the preconditioner context
2555 - bs - the block size
2556 
2557   Level: intermediate
2558 
2559   Note:
2560   If the matrix is a `MATNEST` then the `is_rows[]` passed to `MatCreateNest()` determines the fields.
2561 
2562 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
2563 @*/
PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, bs, 2); /* bs must be the same on every rank of the PC's communicator */
  /* Dispatch to the implementation registered under "PCFieldSplitSetBlockSize_C"; no-op for other PC types */
  PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2572 
2573 /*@C
2574   PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits
2575 
2576   Collective
2577 
2578   Input Parameter:
2579 . pc - the preconditioner context
2580 
2581   Output Parameters:
2582 + n      - the number of splits
2583 - subksp - the array of `KSP` contexts
2584 
2585   Level: advanced
2586 
2587   Notes:
2588   After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2589   (not the `KSP`, just the array that contains them).
2590 
2591   You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`.
2592 
2593   If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the
2594   Schur complement and the `KSP` object used to iterate over the Schur complement.
2595   To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`.
2596 
2597   If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the
2598   inner linear system defined by the matrix H in each loop.
2599 
2600   Fortran Note:
2601   Call `PCFieldSplitRestoreSubKSP()` when the array of `KSP` is no longer needed
2602 
2603   Developer Notes:
2604   There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2605 
2606 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()`
2607 @*/
PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2); /* n may be NULL when the caller does not need the split count */
  /* PetscUseMethod (unlike PetscTryMethod used by the setters) raises an error when pc provides no implementation */
  PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2616 
2617 /*@C
2618   PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT`
2619 
2620   Collective
2621 
2622   Input Parameter:
2623 . pc - the preconditioner context
2624 
2625   Output Parameters:
2626 + n      - the number of splits
2627 - subksp - the array of `KSP` contexts
2628 
2629   Level: advanced
2630 
2631   Notes:
2632   After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2633   (not the `KSP` just the array that contains them).
2634 
2635   You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`.
2636 
2637   If the fieldsplit type is of type `PC_COMPOSITE_SCHUR`, it returns (in order)
2638 +  1  - the `KSP` used for the (1,1) block
2639 .  2  - the `KSP` used for the Schur complement (not the one used for the interior Schur solver)
2640 -  3  - the `KSP` used for the (1,1) block in the upper triangular factor (if different from that of the (1,1) block).
2641 
2642   It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`.
2643 
2644   Fortran Note:
2645   Call `PCFieldSplitSchurRestoreSubKSP()` when the array of `KSP` is no longer needed
2646 
2647   Developer Notes:
2648   There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2649 
2650   Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged?
2651 
2652 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()`
2653 @*/
PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2); /* n may be NULL when the caller does not need the split count */
  /* PetscUseMethod raises an error when pc provides no "PCFieldSplitSchurGetSubKSP_C" implementation */
  PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2662 
2663 /*@
2664   PCFieldSplitSetSchurPre -  Indicates from what operator the preconditioner is constructed for the Schur complement.
2665   The default is the A11 matrix.
2666 
2667   Collective
2668 
2669   Input Parameters:
2670 + pc    - the preconditioner context
2671 . ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default),
2672               `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`,
2673               `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL`
2674 - pre   - matrix to use for preconditioning, or `NULL`
2675 
2676   Options Database Keys:
2677 + -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`. See notes for meaning of various arguments
2678 - -fieldsplit_1_pc_type <pctype>                               - the preconditioner algorithm that is used to construct the preconditioner from the operator
2679 
2680   Level: intermediate
2681 
2682   Notes:
2683   If ptype is
2684 +     a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner
2685   matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix
2686 .     self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix:
2687   The only preconditioners that currently work with this symbolic representation matrix object are `PCLSC` and `PCHPDDM`
2688 .     user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument
2689   to this function).
2690 .     selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation $ Sp = A11 - A10 inv(diag(A00)) A01 $
2691   This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be
2692   lumped before extracting the diagonal using the additional option `-fieldsplit_1_mat_schur_complement_ainv_type lump`
2693 -     full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation
2694   computed internally by `PCFIELDSPLIT` (this is expensive)
2695   useful mostly as a test that the Schur complement approach can work for your problem
2696 
2697   When solving a saddle point problem, where the A11 block is identically zero, using `a11` as the ptype only makes sense
2698   with the additional option `-fieldsplit_1_pc_type none`. Usually for saddle point problems one would use a `ptype` of `self` and
2699   `-fieldsplit_1_pc_type lsc` which uses the least squares commutator to compute a preconditioner for the Schur complement.
2700 
2701   Developer Note:
2702   The name of this function and the option `-pc_fieldsplit_schur_precondition` are inconsistent; precondition should be used everywhere.
2703 
2704 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`,
2705           `MatSchurComplementSetAinvType()`, `PCLSC`, `PCFieldSplitSetSchurFactType()`
2706 @*/
PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Dispatch to the implementation registered under "PCFieldSplitSetSchurPre_C"; no-op for other PC types.
     pre is only meaningful for PC_FIELDSPLIT_SCHUR_PRE_USER and may be NULL otherwise */
  PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2714 
/* Deprecated: old name kept for backward compatibility; forwards directly to PCFieldSplitSetSchurPre() */
PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  return PCFieldSplitSetSchurPre(pc, ptype, pre);
} /* Deprecated name */
2719 
2720 /*@
2721   PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be
2722   preconditioned.  See `PCFieldSplitSetSchurPre()` for details.
2723 
2724   Logically Collective
2725 
2726   Input Parameter:
2727 . pc - the preconditioner context
2728 
2729   Output Parameters:
2730 + ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`
2731 - pre   - matrix to use for preconditioning (with `PC_FIELDSPLIT_SCHUR_PRE_USER`), or `NULL`
2732 
2733   Level: intermediate
2734 
2735 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC`
2736 @*/
PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscUseMethod raises an error when pc provides no "PCFieldSplitGetSchurPre_C" implementation */
  PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2744 
2745 /*@
2746   PCFieldSplitSchurGetS -  extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately
2747 
2748   Not Collective
2749 
2750   Input Parameter:
2751 . pc - the preconditioner context
2752 
2753   Output Parameter:
2754 . S - the Schur complement matrix
2755 
2756   Level: advanced
2757 
2758   Note:
2759   This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`.
2760 
2761 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`,
2762           `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()`
2763 @*/
2764 PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S)
2765 {
2766   const char    *t;
2767   PetscBool      isfs;
2768   PC_FieldSplit *jac;
2769 
2770   PetscFunctionBegin;
2771   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2772   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2773   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2774   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2775   jac = (PC_FieldSplit *)pc->data;
2776   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2777   if (S) *S = jac->schur;
2778   PetscFunctionReturn(PETSC_SUCCESS);
2779 }
2780 
2781 /*@
2782   PCFieldSplitSchurRestoreS -  returns the `MATSCHURCOMPLEMENT` matrix used by this `PC`
2783 
2784   Not Collective
2785 
2786   Input Parameters:
2787 + pc - the preconditioner context
2788 - S  - the Schur complement matrix
2789 
2790   Level: advanced
2791 
.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurGetS()`
2793 @*/
2794 PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S)
2795 {
2796   const char    *t;
2797   PetscBool      isfs;
2798   PC_FieldSplit *jac;
2799 
2800   PetscFunctionBegin;
2801   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2802   PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2803   PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2804   PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2805   jac = (PC_FieldSplit *)pc->data;
2806   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2807   PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten");
2808   PetscFunctionReturn(PETSC_SUCCESS);
2809 }
2810 
2811 static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2812 {
2813   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2814 
2815   PetscFunctionBegin;
2816   jac->schurpre = ptype;
2817   if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) {
2818     PetscCall(MatDestroy(&jac->schur_user));
2819     jac->schur_user = pre;
2820     PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
2821   }
2822   PetscFunctionReturn(PETSC_SUCCESS);
2823 }
2824 
2825 static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
2826 {
2827   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2828 
2829   PetscFunctionBegin;
2830   if (ptype) *ptype = jac->schurpre;
2831   if (pre) *pre = jac->schur_user;
2832   PetscFunctionReturn(PETSC_SUCCESS);
2833 }
2834 
2835 /*@
2836   PCFieldSplitSetSchurFactType -  sets which blocks of the approximate block factorization to retain in the preconditioner {cite}`murphy2000note` and {cite}`ipsen2001note`
2837 
2838   Collective
2839 
2840   Input Parameters:
2841 + pc    - the preconditioner context
2842 - ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default
2843 
2844   Options Database Key:
2845 . -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is `full`
2846 
2847   Level: intermediate
2848 
2849   Notes:
2850   The `full` factorization is
2851 
2852   ```{math}
2853   \left(\begin{array}{cc} A & B \\
2854   C & E \\
2855   \end{array}\right) =
2856   \left(\begin{array}{cc} I & 0 \\
2857   C A^{-1} & I \\
2858   \end{array}\right)
2859   \left(\begin{array}{cc} A & 0 \\
2860   0 & S \\
2861   \end{array}\right)
2862   \left(\begin{array}{cc} I & A^{-1}B \\
2863   0 & I \\
2864   \end{array}\right) = L D U,
2865   ```
2866 
2867   where $ S = E - C A^{-1} B $. In practice, the full factorization is applied via block triangular solves with the grouping $L(DU)$. `upper` uses $DU$, `lower` uses $LD$,
2868   and `diag` is the diagonal part with the sign of $S$ flipped (because this makes the preconditioner positive definite for many formulations,
  thus allowing the use of `KSPMINRES`). Sign flipping of $S$ can be turned off with `PCFieldSplitSetSchurScale()`.
2870 
2871   If $A$ and $S$ are solved exactly
2872 +  1 - `full` factorization is a direct solver.
2873 .  2 - The preconditioned operator with `lower` or `upper` has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations.
2874 -  3 - With `diag`, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations.
2875 
2876   If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner
2877   application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice.
2878 
2879   For symmetric problems in which $A$ is positive definite and $S$ is negative definite, `diag` can be used with `KSPMINRES`.
2880 
2881   A flexible method like `KSPFGMRES` or `KSPGCR`, [](sec_flexibleksp), must be used if the fieldsplit preconditioner is nonlinear (e.g., a few iterations of a Krylov method is used to solve with $A$ or $S$).
2882 
2883 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`,
2884           [](sec_flexibleksp), `PCFieldSplitSetSchurPre()`
2885 @*/
PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Dispatch to the implementation registered under "PCFieldSplitSetSchurFactType_C"; no-op for other PC types */
  PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2893 
/* Implementation of PCFieldSplitSetSchurFactType() for PCFIELDSPLIT: records which factorization blocks to apply */
static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurfactorization = ftype;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2902 
2903 /*@
2904   PCFieldSplitSetSchurScale -  Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`.
2905 
2906   Collective
2907 
2908   Input Parameters:
2909 + pc    - the preconditioner context
2910 - scale - scaling factor for the Schur complement
2911 
2912   Options Database Key:
2913 . -pc_fieldsplit_schur_scale <scale> - default is -1.0
2914 
2915   Level: intermediate
2916 
2917 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurFactType()`
2918 @*/
PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveScalar(pc, scale, 2); /* scale must be the same on every rank */
  /* Dispatch to the implementation registered under "PCFieldSplitSetSchurScale_C"; no-op for other PC types */
  PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2927 
/* Implementation of PCFieldSplitSetSchurScale() for PCFIELDSPLIT: records the scaling applied to S with the DIAG factorization */
static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurscale = scale;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2936 
2937 /*@C
2938   PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement
2939 
2940   Collective
2941 
2942   Input Parameter:
2943 . pc - the preconditioner context
2944 
2945   Output Parameters:
2946 + A00 - the (0,0) block
2947 . A01 - the (0,1) block
2948 . A10 - the (1,0) block
2949 - A11 - the (1,1) block
2950 
2951   Level: advanced
2952 
2953   Note:
2954   Use `NULL` for any unneeded output arguments
2955 
2956 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()`
2957 @*/
2958 PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11)
2959 {
2960   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2961 
2962   PetscFunctionBegin;
2963   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2964   PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach.");
2965   if (A00) *A00 = jac->pmat[0];
2966   if (A01) *A01 = jac->B;
2967   if (A10) *A10 = jac->C;
2968   if (A11) *A11 = jac->pmat[1];
2969   PetscFunctionReturn(PETSC_SUCCESS);
2970 }
2971 
2972 /*@
2973   PCFieldSplitSetGKBTol -  Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2974 
2975   Collective
2976 
2977   Input Parameters:
2978 + pc        - the preconditioner context
2979 - tolerance - the solver tolerance
2980 
2981   Options Database Key:
2982 . -pc_fieldsplit_gkb_tol <tolerance> - default is 1e-5
2983 
2984   Level: intermediate
2985 
2986   Note:
2987   The generalized GKB algorithm {cite}`arioli2013` uses a lower bound estimate of the error in energy norm as stopping criterion.
2988   It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than
2989   this estimate, the stopping criterion is satisfactory in practical cases.
2990 
2991 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()`
2992 @*/
PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, tolerance, 2); /* tolerance must be the same on every rank */
  /* Dispatch to the implementation registered under "PCFieldSplitSetGKBTol_C"; no-op for other PC types */
  PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3001 
/* Implementation of PCFieldSplitSetGKBTol() for PCFIELDSPLIT: records the GKB stopping tolerance */
static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbtol = tolerance;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3010 
3011 /*@
3012   PCFieldSplitSetGKBMaxit -  Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
3013 
3014   Collective
3015 
3016   Input Parameters:
3017 + pc    - the preconditioner context
3018 - maxit - the maximum number of iterations
3019 
3020   Options Database Key:
3021 . -pc_fieldsplit_gkb_maxit <maxit> - default is 100
3022 
3023   Level: intermediate
3024 
3025 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()`
3026 @*/
PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, maxit, 2); /* maxit must be the same on every rank */
  /* Dispatch to the implementation registered under "PCFieldSplitSetGKBMaxit_C"; no-op for other PC types */
  PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3035 
/* Implementation of PCFieldSplitSetGKBMaxit() for PCFIELDSPLIT: records the GKB iteration cap */
static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbmaxit = maxit;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3044 
/*@
  PCFieldSplitSetGKBDelay -  Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization {cite}`arioli2013` in `PCFIELDSPLIT`
  preconditioner.

  Collective

  Input Parameters:
+ pc    - the preconditioner context
- delay - the delay window in the lower bound estimate

  Options Database Key:
. -pc_fieldsplit_gkb_delay <delay> - default is 5

  Level: intermediate

  Notes:
  The algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. The lower bound of the error $ ||u-u^k||_H $
  is expressed as a truncated sum. The error at iteration k can only be measured at iteration (k + `delay`), and thus the algorithm needs
  at least (`delay` + 1) iterations to stop.

  For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to {cite}`arioli2013`

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
@*/
PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, delay, 2);
  /* Forward to the type-specific implementation; a no-op when "PCFieldSplitSetGKBDelay_C" is not composed */
  PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3077 
/* Implementation of PCFieldSplitSetGKBDelay() for PCFIELDSPLIT: records the error-estimate delay window in the context */
static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbdelay = delay; /* value is only stored here; validation happened in the public wrapper */
  PetscFunctionReturn(PETSC_SUCCESS);
}
3086 
/*@
  PCFieldSplitSetGKBNu -  Sets the scalar value nu >= 0 in the transformation H = A00 + nu*A01*A01' of the (1,1) block in the
  Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc - the preconditioner context
- nu - the shift parameter

  Options Database Key:
. -pc_fieldsplit_gkb_nu <nu> - default is 1

  Level: intermediate

  Notes:
  This shift is in general done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by choosing `nu` sufficiently large. However,
  if `nu` is chosen too large, the matrix H might be badly conditioned and the solution of the linear system $Hx = b$ in the inner loop becomes difficult. It is therefore
  necessary to find a good balance in between the convergence of the inner and outer loop.

  For `nu` = 0, no shift is done. In this case A00 has to be positive definite. The matrix N in {cite}`arioli2013` is then chosen as identity.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
@*/
PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, nu, 2);
  /* Forward to the type-specific implementation; a no-op when "PCFieldSplitSetGKBNu_C" is not composed */
  PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3119 
/* Implementation of PCFieldSplitSetGKBNu() for PCFIELDSPLIT: records the (1,1)-block shift parameter in the context */
static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbnu = nu; /* value is only stored here; validation happened in the public wrapper */
  PetscFunctionReturn(PETSC_SUCCESS);
}
3128 
3129 static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type)
3130 {
3131   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3132 
3133   PetscFunctionBegin;
3134   jac->type = type;
3135   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
3136   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
3137   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
3138   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
3139   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
3140   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
3141   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
3142   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
3143   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
3144 
3145   if (type == PC_COMPOSITE_SCHUR) {
3146     pc->ops->apply          = PCApply_FieldSplit_Schur;
3147     pc->ops->applytranspose = PCApplyTranspose_FieldSplit_Schur;
3148     pc->ops->matapply       = NULL;
3149     pc->ops->view           = PCView_FieldSplit_Schur;
3150     pc->ops->setuponblocks  = PCSetUpOnBlocks_FieldSplit_Schur;
3151 
3152     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur));
3153     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit));
3154     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit));
3155     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit));
3156     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit));
3157   } else if (type == PC_COMPOSITE_GKB) {
3158     pc->ops->apply          = PCApply_FieldSplit_GKB;
3159     pc->ops->applytranspose = NULL;
3160     pc->ops->matapply       = NULL;
3161     pc->ops->view           = PCView_FieldSplit_GKB;
3162     pc->ops->setuponblocks  = PCSetUpOnBlocks_FieldSplit_GKB;
3163 
3164     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
3165     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit));
3166     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit));
3167     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit));
3168     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit));
3169   } else {
3170     pc->ops->apply          = PCApply_FieldSplit;
3171     pc->ops->applytranspose = PCApplyTranspose_FieldSplit;
3172     pc->ops->matapply       = PCMatApply_FieldSplit;
3173     pc->ops->view           = PCView_FieldSplit;
3174     pc->ops->setuponblocks  = PCSetUpOnBlocks_FieldSplit;
3175 
3176     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
3177   }
3178   PetscFunctionReturn(PETSC_SUCCESS);
3179 }
3180 
/* Implementation of PCFieldSplitSetBlockSize() for PCFIELDSPLIT: records the block size used to define strided splits */
static PetscErrorCode PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  /* bs must be positive, and once set (jac->bs > 0) it may not be changed to a different value */
  PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" PetscInt_FMT, bs);
  PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs);
  jac->bs = bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3191 
3192 static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
3193 {
3194   PC_FieldSplit    *jac           = (PC_FieldSplit *)pc->data;
3195   PC_FieldSplitLink ilink_current = jac->head;
3196   IS                is_owned;
3197 
3198   PetscFunctionBegin;
3199   jac->coordinates_set = PETSC_TRUE; // Internal flag
3200   PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, NULL));
3201 
3202   while (ilink_current) {
3203     // For each IS, embed it to get local coords indces
3204     IS              is_coords;
3205     PetscInt        ndofs_block;
3206     const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block
3207 
3208     // Setting drop to true for safety. It should make no difference.
3209     PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords));
3210     PetscCall(ISGetLocalSize(is_coords, &ndofs_block));
3211     PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration));
3212 
3213     // Allocate coordinates vector and set it directly
3214     PetscCall(PetscMalloc1(ndofs_block * dim, &ilink_current->coords));
3215     for (PetscInt dof = 0; dof < ndofs_block; ++dof) {
3216       for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d];
3217     }
3218     ilink_current->dim   = dim;
3219     ilink_current->ndofs = ndofs_block;
3220     PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration));
3221     PetscCall(ISDestroy(&is_coords));
3222     ilink_current = ilink_current->next;
3223   }
3224   PetscCall(ISDestroy(&is_owned));
3225   PetscFunctionReturn(PETSC_SUCCESS);
3226 }
3227 
/*@
  PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc   - the preconditioner context
- type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`,
         `PC_COMPOSITE_GKB`

  Options Database Key:
. -pc_fieldsplit_type <one of multiplicative, additive, symmetric_multiplicative, special, schur, gkb> - Sets fieldsplit preconditioner type

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
          `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`, `PCFieldSplitSetSchurFactType()`
@*/
PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Forward to the type-specific implementation; a no-op when "PCFieldSplitSetType_C" is not composed */
  PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3253 
/*@
  PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT`

  Not collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
          `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
@*/
PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type)
{
  /* NOTE(review): pc->data is cast without a type check, so this assumes pc is a PCFIELDSPLIT */
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(type, 2);
  *type = jac->type;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3280 
/*@
  PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.

  Logically Collective

  Input Parameters:
+ pc  - the preconditioner context
- flg - boolean indicating whether to use field splits defined by the `DM`

  Options Database Key:
. -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM`

  Level: intermediate

  Developer Note:
  The name should be `PCFieldSplitSetUseDMSplits()`, similar change to options database

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flg, 2);
  /* Silently ignores the call when pc is not a PCFIELDSPLIT */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  if (isfs) jac->dm_splits = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3312 
/*@
  PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.

  Logically Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. flg - boolean indicating whether to use field splits defined by the `DM`

  Level: intermediate

  Developer Note:
  The name should be `PCFieldSplitGetUseDMSplits()`

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(flg, 2);
  /* *flg is written only when pc is a PCFIELDSPLIT; otherwise it is left untouched.
     The extra NULL guard protects builds where PetscAssertPointer() compiles away. */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  if (isfs) {
    if (flg) *flg = jac->dm_splits;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
3345 
3346 /*@
3347   PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3348 
3349   Logically Collective
3350 
3351   Input Parameter:
3352 . pc - the preconditioner context
3353 
3354   Output Parameter:
3355 . flg - boolean indicating whether to detect fields or not
3356 
3357   Level: intermediate
3358 
3359 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()`
3360 @*/
3361 PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg)
3362 {
3363   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3364 
3365   PetscFunctionBegin;
3366   *flg = jac->detect;
3367   PetscFunctionReturn(PETSC_SUCCESS);
3368 }
3369 
3370 /*@
3371   PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3372 
3373   Logically Collective
3374 
3375   Input Parameter:
3376 . pc - the preconditioner context
3377 
3378   Output Parameter:
3379 . flg - boolean indicating whether to detect fields or not
3380 
3381   Options Database Key:
3382 . -pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point
3383 
3384   Level: intermediate
3385 
3386   Note:
3387   Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`).
3388 
3389 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`
3390 @*/
3391 PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg)
3392 {
3393   PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3394 
3395   PetscFunctionBegin;
3396   jac->detect = flg;
3397   if (jac->detect) {
3398     PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
3399     PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
3400   }
3401   PetscFunctionReturn(PETSC_SUCCESS);
3402 }
3403 
3404 /*MC
3405   PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual
3406   collections of variables (that may overlap) called fields or splits. Each field often represents a different continuum variable
3407   represented on a grid, such as velocity, pressure, or temperature.
3408   In the literature these are sometimes called block preconditioners; but should not be confused with `PCBJACOBI`.
3409   See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details.
3410 
3411   Options Database Keys:
3412 +   -pc_fieldsplit_%d_fields <a,b,..>                                                - indicates the fields to be used in the `%d`'th split
3413 .   -pc_fieldsplit_default                                                           - automatically add any fields to additional splits that have not
3414                                                                                        been supplied explicitly by `-pc_fieldsplit_%d_fields`
3415 .   -pc_fieldsplit_block_size <bs>                                                   - size of block that defines fields (i.e. there are bs fields)
3416                                                                                        when the matrix is not of `MatType` `MATNEST`
3417 .   -pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting
3418 .   -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full>                     - default is `a11`; see `PCFieldSplitSetSchurPre()`
3419 .   -pc_fieldsplit_schur_fact_type <diag,lower,upper,full>                           - set factorization type when using `-pc_fieldsplit_type schur`;
3420                                                                                        see `PCFieldSplitSetSchurFactType()`
3421 .   -pc_fieldsplit_dm_splits <true,false> (default is true)                          - Whether to use `DMCreateFieldDecomposition()` for splits
3422 -   -pc_fieldsplit_detect_saddle_point                                               - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver
3423 
3424   Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` .
  The options prefix for the inner solver when using the Golub-Kahan bidiagonalization preconditioner is `-fieldsplit_0_`
3426   For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields.
3427 
3428   To set options on the solvers for all blocks, prepend `-fieldsplit_` to all the `PC`
3429   options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1`.
3430 
3431   To set the options on the solvers separate for each block call `PCFieldSplitGetSubKSP()`
3432   and set the options directly on the resulting `KSP` object
3433 
3434   Level: intermediate
3435 
3436   Notes:
3437   Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries or with a `MATNEST` and `PCFieldSplitSetIS()`
3438   to define a split by an arbitrary collection of entries.
3439 
3440   If no splits are set, the default is used. If a `DM` is associated with the `PC` and it supports
3441   `DMCreateFieldDecomposition()`, then that is used for the default. Otherwise if the matrix is not `MATNEST`, the splits are defined by entries strided by bs,
3442   beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`,
3443   if this is not called the block size defaults to the blocksize of the second matrix passed
3444   to `KSPSetOperators()`/`PCSetOperators()`.
3445 
3446   For the Schur complement preconditioner if
3447   ```{math}
3448     J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right]
3449   ```
3450 
  the preconditioner using `full` factorization is logically
  ```{math}
    \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) A_{01} \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{ksp}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right]
  ```
3455   where the action of $\text{ksp}(A_{00})$ is applied using the `KSP` solver with prefix `-fieldsplit_0_`.  $S$ is the Schur complement
3456   ```{math}
3457      S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01}
3458   ```
3459   which is usually dense and not stored explicitly.  The action of $\text{ksp}(S)$ is computed using the `KSP` solver with prefix `-fieldsplit_splitname_` (where `splitname`
3460   was given in providing the SECOND split or 1 if not given). Accordingly, if using `PCFieldSplitGetSubKSP()`, the array of sub-`KSP` contexts will hold two `KSP`s: at its
3461   0th index, the `KSP` associated with `-fieldsplit_0_`, and at its 1st index, the `KSP` corresponding to `-fieldsplit_1_`.
3462   By default, $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$.
3463 
3464   The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above,
3465   `diag` gives
3466   ```{math}
3467     \left[\begin{array}{cc} \text{ksp}(A_{00}) & 0 \\  0 & -\text{ksp}(S) \end{array}\right]
3468   ```
3469   Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$  so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip
3470   can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of
3471   ```{math}
3472     \left[\begin{array}{cc} A_{00} & 0 \\  A_{10} & S \end{array}\right]
3473   ```
3474   where the inverses of $A_{00}$ and $S$ are applied using `KSP`s. The upper factorization is the inverse of
3475   ```{math}
3476     \left[\begin{array}{cc} A_{00} & A_{01} \\  0 & S \end{array}\right]
3477   ```
3478   where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s.
3479 
3480   If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS`
3481   is used automatically for a second submatrix.
3482 
3483   The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1.
3484   Generally it should be used with the `MATAIJ` or `MATNEST` `MatType`
3485 
3486   The forms of these preconditioners are closely related, if not identical, to forms derived as "Distributive Iterations", see,
3487   for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`wesseling2009`.
3488   One can also use `PCFIELDSPLIT` inside a smoother resulting in "Distributive Smoothers".
3489 
3490   See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`.
3491 
3492   The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the
3493   residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables.
3494 
3495   The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape
3496   ```{math}
3497     \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right]
3498   ```
  with $A_{00}$ positive semi-definite. The implementation follows {cite}`arioli2013`. Therein, we choose $N := 1/\nu * I$ and the $(1,1)$-block of the matrix is modified to $H = A_{00} + \nu*A_{01}*A_{01}'$.
3500   A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`.
3501 
3502   Some `PCFIELDSPLIT` variants are called physics-based preconditioners, since the preconditioner takes into account the underlying physics of the
3503   problem. But this nomenclature is not well-defined.
3504 
3505   Developer Note:
3506   The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their
3507   user API.
3508 
3509 .seealso: [](sec_block_matrices), `PC`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`,
3510           `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`,
3511           `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`,
3512           `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()`
3513 M*/
3514 
3515 PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc)
3516 {
3517   PC_FieldSplit *jac;
3518 
3519   PetscFunctionBegin;
3520   PetscCall(PetscNew(&jac));
3521 
3522   jac->bs                 = -1;
3523   jac->type               = PC_COMPOSITE_MULTIPLICATIVE;
3524   jac->schurpre           = PC_FIELDSPLIT_SCHUR_PRE_USER; /* Try user preconditioner first, fall back on diagonal */
3525   jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL;
3526   jac->schurscale         = -1.0;
3527   jac->dm_splits          = PETSC_TRUE;
3528   jac->gkbtol             = 1e-5;
3529   jac->gkbdelay           = 5;
3530   jac->gkbnu              = 1;
3531   jac->gkbmaxit           = 100;
3532 
3533   pc->data = (void *)jac;
3534 
3535   pc->ops->setup           = PCSetUp_FieldSplit;
3536   pc->ops->reset           = PCReset_FieldSplit;
3537   pc->ops->destroy         = PCDestroy_FieldSplit;
3538   pc->ops->setfromoptions  = PCSetFromOptions_FieldSplit;
3539   pc->ops->applyrichardson = NULL;
3540 
3541   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit));
3542   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit));
3543   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit));
3544   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit));
3545   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit));
3546   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit));
3547   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit));
3548 
3549   /* Initialize function pointers */
3550   PetscCall(PCFieldSplitSetType(pc, jac->type));
3551   PetscFunctionReturn(PETSC_SUCCESS);
3552 }
3553