#include <petsc/private/pcimpl.h> /*I "petscpc.h" I*/
#include <petsc/private/kspimpl.h> /* This is needed to provide the appropriate PETSC_EXTERN for KSP_Solve_FS ....*/
#include <petscdm.h>

/* Printable names for the Schur preconditioning/factorization enums; layout follows the PETSc enum-string convention
   (values, type name, prefix, NULL terminator) so they can be used with PetscOptionsEnum() and viewers. */
const char *const PCFieldSplitSchurPreTypes[] = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL};
const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL};

/* Logging events for the sub-solves performed inside the fieldsplit application routines */
PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4;

/* One node in the linked list of splits; each split carries its own KSP, work vectors, and the
   index sets (or field numbers) that define the split. */
typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
struct _PC_FieldSplitLink {
  KSP               ksp;        /* solver for this split's block */
  Vec               x, y, z;    /* work vectors for this split */
  char             *splitname;
  PetscInt          nfields;    /* number of fields making up this split (when defined by fields rather than an IS) */
  PetscInt         *fields, *fields_col;
  VecScatter        sctx;       /* scatter between the full vector and this split's work vectors */
  IS                is, is_col; /* row and column index sets selecting this split from the full system */
  PC_FieldSplitLink next, previous;
  PetscLogEvent     event;

  /* Used only when setting coordinates with PCSetCoordinates */
  PetscInt   dim;
  PetscInt   ndofs;
  PetscReal *coords;
};

typedef struct {
  PCCompositeType type;
  PetscBool       defaultsplit; /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */
  PetscBool       splitdefined; /* Flag is set after the splits have been defined, to prevent more splits from being added */
  PetscInt        bs;           /* Block size for IS and Mat structures */
  PetscInt        nsplits;      /* Number of field divisions defined */
  Vec            *x, *y, w1, w2;
  Mat            *mat;    /* The diagonal block for each split */
  Mat            *pmat;   /* The preconditioning diagonal block for each split */
  Mat            *Afield; /* The rows of the matrix associated with each split */
  PetscBool       issetup;

  /* Only used when Schur complement preconditioning is used */
  Mat                       B;          /* The (0,1) block */
  Mat                       C;          /* The (1,0) block */
  Mat                       schur;      /* The Schur complement S = A11 - A10 A00^{-1} A01, the KSP here, kspinner, is H_1 in [El08] */
  Mat                       schurp;     /* Assembled approximation to S built by MatSchurComplement to be used as a preconditioning matrix when solving with S */
  Mat                       schur_user; /* User-provided preconditioning matrix for the Schur complement */
  PCFieldSplitSchurPreType  schurpre;   /* Determines which preconditioning matrix is used for the Schur complement */
  PCFieldSplitSchurFactType schurfactorization;
  KSP                       kspschur;   /* The solver for S */
  KSP                       kspupper;   /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */
  PetscScalar               schurscale; /* Scaling factor for the Schur complement solution with DIAG factorization */

  /* Only used when Golub-Kahan bidiagonalization preconditioning is used */
  Mat          H;           /* The modified matrix H = A00 + nu*A01*A01' */
  PetscReal    gkbtol;      /* Stopping tolerance for lower bound estimate */
  PetscInt     gkbdelay;    /* The delay window for the stopping criterion */
  PetscReal    gkbnu;       /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */
  PetscInt     gkbmaxit;    /* Maximum number of iterations for outer loop */
  PetscBool    gkbmonitor;  /* Monitor for gkb iterations and the lower bound error */
  PetscViewer  gkbviewer;   /* Viewer context for gkbmonitor */
  Vec          u, v, d, Hu; /* Work vectors for the GKB algorithm */
  PetscScalar *vecz;        /* Contains intermediate values, eg for lower bound */

  PC_FieldSplitLink head;
  PetscBool         isrestrict;       /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */
  PetscBool         suboptionsset;    /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */
  PetscBool         dm_splits;        /* Whether to use DMCreateFieldDecomposition() whenever possible */
  PetscBool         diag_use_amat;    /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         detect;           /* Whether to form 2-way split by finding zero diagonal entries */
  PetscBool         coordinates_set;  /* Whether PCSetCoordinates has been called */
} PC_FieldSplit;

/*
  Note:
  there is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit instead of
  inside PC_FieldSplitLink, just historical. If you want to be able to add new fields after already using the
  PC you could change this.
*/

/* This helper is so that setting a user-provided preconditioning matrix is orthogonal to choosing to use it. This way the
 * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices. */
static Mat FieldSplitSchurPre(PC_FieldSplit *jac) {
  switch (jac->schurpre) {
  case PC_FIELDSPLIT_SCHUR_PRE_SELF: return jac->schur;
  case PC_FIELDSPLIT_SCHUR_PRE_SELFP: return jac->schurp;
  case PC_FIELDSPLIT_SCHUR_PRE_A11: return jac->pmat[1];
  case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */
  case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */
  default: return jac->schur_user ? jac->schur_user : jac->pmat[1];
  }
}

#include <petscdraw.h>
/* Viewer for the generic (non-Schur, non-GKB) fieldsplit: prints (ASCII) or draws each split's KSP. */
static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer) {
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for off-diagonal blocks\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Solver info for each split is in the following KSP objects:\n"));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); /* keep the comma-separated field list on one line */
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      PetscCall(KSPView(ilink->ksp, viewer));
      ilink = ilink->next;
    }
  }

  if (isdraw) {
    /* NOTE(review): ilink is not reset here; this relies on the viewer never being both ASCII and draw,
       so the ASCII loop above has not advanced ilink when this branch runs — confirm if that invariant changes. */
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    w  = 2 * PetscMin(1.0 - x, x); /* available width, centered on the current point */
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(0);
}

/* Viewer for the Schur-complement variant: reports the factorization type, which matrix preconditions S,
   the per-split field lists, and the A00/upper/Schur sub-KSPs. */
static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer) {
  PC_FieldSplit             *jac = (PC_FieldSplit *)pc->data;
  PetscBool                  iascii, isdraw;
  PetscInt                   i, j;
  PC_FieldSplitLink          ilink = jac->head;
  MatSchurComplementAinvType atype;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for blocks\n"));
    switch (jac->schurpre) {
    case PC_FIELDSPLIT_SCHUR_PRE_SELF: PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from S itself\n")); break;
    case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
      PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype));
      /* Describe which approximate inverse of A00 was used to assemble Sp (see MatSchurComplementAinvType) */
      PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's "))));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_A11: PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from A11\n")); break;
    case PC_FIELDSPLIT_SCHUR_PRE_FULL: PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from the exact Schur complement\n")); break;
    case PC_FIELDSPLIT_SCHUR_PRE_USER:
      /* USER falls back to A11 when no user matrix has been supplied (same logic as FieldSplitSchurPre()) */
      if (jac->schur_user) {
        PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from user provided matrix\n"));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from A11\n"));
      }
      break;
    default: SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre);
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, " Split info:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); /* keep the comma-separated field list on one line */
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      ilink = ilink->next;
    }
    /* Show the three sub-solvers; each may not exist yet if PCSetUp has not run */
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->head) {
      PetscCall(KSPView(jac->head->ksp, viewer));
    } else PetscCall(PetscViewerASCIIPrintf(viewer, " not yet available\n"));
    PetscCall(PetscViewerASCIIPopTab(viewer));
    if (jac->head && jac->kspupper != jac->head->ksp) {
      /* A separate upper solver only exists when it differs from the A00 solver */
      PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor \n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer));
      else PetscCall(PetscViewerASCIIPrintf(viewer, " not yet available\n"));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01 \n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->kspschur) {
      PetscCall(KSPView(jac->kspschur, viewer));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " not yet available\n"));
    }
    PetscCall(PetscViewerASCIIPopTab(viewer));
    PetscCall(PetscViewerASCIIPopTab(viewer));
  } else if (isdraw && jac->head) {
    PetscDraw draw;
    PetscReal x, y, w, wd, h;
    PetscInt  cnt = 2; /* number of boxes to draw: A00 solver + Schur solver (+ upper solver if distinct) */
    char      str[32];

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (jac->kspupper != jac->head->ksp) cnt++;
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (cnt + 1);

    PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) {
      /* USER without a user matrix falls back to A11; label accordingly */
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11]));
    } else {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre]));
    }
    PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    x = x - wd * (cnt - 1) / 2.0;

    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->head->ksp, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
    if (jac->kspupper != jac->head->ksp) {
      x += wd;
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(jac->kspupper, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
    }
    x += wd;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->kspschur, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
  }
  PetscFunctionReturn(0);
}

/* Viewer for the Golub-Kahan bidiagonalization variant: reports the GKB parameters and the solver for
   the augmented operator H = A00 + nu*A01*A01'. */
static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer) {
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for off-diagonal blocks\n"));

    PetscCall(PetscViewerASCIIPrintf(viewer, " Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Solver info for H = A00 + nu*A01*A01' matrix:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));

    /* Only the first split (the H block) is reported here */
    if (ilink->fields) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields "));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      for (j = 0; j < ilink->nfields; j++) {
        if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
        PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
      }
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n"));
    }
    PetscCall(KSPView(ilink->ksp, viewer));

    PetscCall(PetscViewerASCIIPopTab(viewer));
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(0);
}

/* Reads -pc_fieldsplit_<i>_fields / -pc_fieldsplit_<i>_fields_col options (for i = 0, 1, ... until one
   is missing) and defines the corresponding splits via PCFieldSplitSetFields().
   Precondition: jac->bs is set to a meaningful value */
static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc) {
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscInt       i, nfields, *ifields, nfields_col, *ifields_col;
  PetscBool      flg, flg_col;
  char           optionname[128], splitname[8], optionname_col[128];

  PetscFunctionBegin;
  /* jac->bs bounds the number of fields a single split can list */
  PetscCall(PetscMalloc1(jac->bs, &ifields));
  PetscCall(PetscMalloc1(jac->bs, &ifields_col));
  for (i = 0, flg = PETSC_TRUE;; i++) {
    PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
    PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
    PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i));
    nfields     = jac->bs;
    nfields_col = jac->bs;
    PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
    PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col));
    if (!flg) break; /* no option for split i: stop scanning */
    else if (flg && !flg_col) {
      /* no separate column fields given: use the row fields for both */
      PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
      PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields));
    } else {
      PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
      PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match");
      PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col));
    }
  }
  if (i > 0) {
    /* Makes command-line setting of splits take precedence over setting them in code.
       Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would
       create new splits, which would probably not be what the user wanted. */
    jac->splitdefined = PETSC_TRUE;
  }
  PetscCall(PetscFree(ifields));
  PetscCall(PetscFree(ifields_col));
  PetscFunctionReturn(0);
}

/* Defines the splits when the user has not done so explicitly: from a DM field decomposition, from
   detected zero diagonals/off-block-diagonal coupling, from runtime options, from a MATNEST layout,
   or one split per block-size component as the final default. */
static PetscErrorCode PCFieldSplitSetDefaults(PC pc) {
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscBool         fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE;
  PetscInt          i;

  PetscFunctionBegin;
  /*
   Kinda messy, but at least this now uses DMCreateFieldDecomposition().
   Should probably be rewritten.
   */
  if (!ilink) {
    PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL));
    if (pc->dm && jac->dm_splits && !jac->detect && !coupling) {
      /* Use the DM's field decomposition to define the splits */
      PetscInt  numFields, f, i, j;
      char    **fieldNames;
      IS       *fields;
      DM       *dms;
      DM        subdm[128];
      PetscBool flg;

      PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms));
      /* Allow the user to prescribe the splits */
      for (i = 0, flg = PETSC_TRUE;; i++) {
        PetscInt ifields[128];
        IS       compField;
        char     optionname[128], splitname[8];
        PetscInt nfields = numFields;

        PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
        PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
        if (!flg) break;
        PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields);
        PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i]));
        if (nfields == 1) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField));
        } else {
          PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
          PetscCall(PCFieldSplitSetIS(pc, splitname, compField));
        }
        PetscCall(ISDestroy(&compField));
        for (j = 0; j < nfields; ++j) {
          f = ifields[j];
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      }
      if (i == 0) {
        /* No runtime prescription: one split per DM field */
        for (f = 0; f < numFields; ++f) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f]));
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      } else {
        /* Replace the per-field DMs with the sub-DMs built from the user's field groupings */
        for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j));
        PetscCall(PetscFree(dms));
        PetscCall(PetscMalloc1(i, &dms));
        for (j = 0; j < i; ++j) dms[j] = subdm[j];
      }
      PetscCall(PetscFree(fieldNames));
      PetscCall(PetscFree(fields));
      if (dms) {
        PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n"));
        for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) {
          const char *prefix;
          PetscCall(PetscObjectGetOptionsPrefix((PetscObject)(ilink->ksp), &prefix));
          PetscCall(PetscObjectSetOptionsPrefix((PetscObject)(dms[i]), prefix));
          PetscCall(KSPSetDM(ilink->ksp, dms[i]));
          PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE));
          {
            /* Propagate the outer DM's operator-computation callback to the split's DM */
            PetscErrorCode (*func)(KSP, Mat, Mat, void *);
            void *ctx;

            PetscCall(DMKSPGetComputeOperators(pc->dm, &func, &ctx));
            PetscCall(DMKSPSetComputeOperators(dms[i], func, ctx));
          }
          PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0));
          PetscCall(DMDestroy(&dms[i]));
        }
        PetscCall(PetscFree(dms));
      }
    } else {
      if (jac->bs <= 0) {
        if (pc->pmat) {
          PetscCall(MatGetBlockSize(pc->pmat, &jac->bs));
        } else jac->bs = 1;
      }

      if (jac->detect) {
        /* 2-way split: rows with zero diagonal vs. the rest */
        IS       zerodiags, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->diag_use_amat) {
          PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags));
        } else {
          PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags));
        }
        PetscCall(ISComplement(zerodiags, nmin, nmax, &rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags));
        PetscCall(ISDestroy(&zerodiags));
        PetscCall(ISDestroy(&rest));
      } else if (coupling) {
        /* 2-way split: rows with off-block-diagonal coupling vs. the full identity range */
        IS       coupling, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->offdiag_use_amat) {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling));
        } else {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling));
        }
        PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest));
        PetscCall(ISSetIdentity(rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", coupling));
        PetscCall(ISDestroy(&coupling));
        PetscCall(ISDestroy(&rest));
      } else {
        PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL));
        if (!fieldsplit_default) {
          /* Allow user to set fields from command line, if bs was known at the time of PCSetFromOptions_FieldSplit()
             then it is set there. This is not ideal because we should only have options set in XXSetFromOptions(). */
          PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
          if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
        }
        if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) {
          Mat       M = pc->pmat;
          PetscBool isnest;

          PetscCall(PetscInfo(pc, "Using default splitting of fields\n"));
          /* Prefer a MATNEST Pmat, then a MATNEST Amat, for defining the default splits */
          PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest));
          if (!isnest) {
            M = pc->mat;
            PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest));
          }
          if (isnest) {
            /* One split per nest block, using the nest's own index sets */
            IS      *fields;
            PetscInt nf;

            PetscCall(MatNestGetSize(M, &nf, NULL));
            PetscCall(PetscMalloc1(nf, &fields));
            PetscCall(MatNestGetISs(M, fields, NULL));
            for (i = 0; i < nf; i++) PetscCall(PCFieldSplitSetIS(pc, NULL, fields[i]));
            PetscCall(PetscFree(fields));
          } else {
            /* One scalar split per component of the block size */
            for (i = 0; i < jac->bs; i++) {
              char splitname[8];
              PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
              PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i));
            }
            jac->defaultsplit = PETSC_TRUE;
          }
        }
      }
    }
  } else if (jac->nsplits == 1) {
    /* Exactly one split was given: synthesize the second as its complement */
    if (ilink->is) {
      IS       is2;
      PetscInt nmin, nmax;

      PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
      PetscCall(ISComplement(ilink->is, nmin, nmax, &is2));
      PetscCall(PCFieldSplitSetIS(pc, "1", is2));
      PetscCall(ISDestroy(&is2));
    } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()");
  }

  PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits);
  PetscFunctionReturn(0);
}

/* Assembles the augmented-Lagrangian operator H = A00 + nu*A01*A01' used by the GKB method, after
   verifying that C is (numerically) the Hermitian transpose of B. A = A00 block, B = A01 block,
   C = A10 block; *H is created (MAT_INITIAL_MATRIX) and owned by the caller. */
static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu) {
  Mat       BT, T;
  PetscReal nrmT, nrmB;

  PetscFunctionBegin;
PetscCall(MatHermitianTranspose(C, MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */ 543 PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN)); 544 PetscCall(MatNorm(T, NORM_1, &nrmT)); 545 PetscCall(MatNorm(B, NORM_1, &nrmB)); 546 PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/hermitian, GKB is not applicable."); 547 548 /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */ 549 /* setting N := 1/nu*I in [Ar13]. */ 550 PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT)); 551 PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_DEFAULT, H)); /* H = A01*A01' */ 552 PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN)); /* H = A00 + nu*A01*A01' */ 553 554 PetscCall(MatDestroy(&BT)); 555 PetscCall(MatDestroy(&T)); 556 PetscFunctionReturn(0); 557 } 558 559 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *value[], PetscBool *flg); 560 561 static PetscErrorCode PCSetUp_FieldSplit(PC pc) { 562 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 563 PC_FieldSplitLink ilink; 564 PetscInt i, nsplit; 565 PetscBool sorted, sorted_col; 566 567 PetscFunctionBegin; 568 pc->failedreason = PC_NOERROR; 569 PetscCall(PCFieldSplitSetDefaults(pc)); 570 nsplit = jac->nsplits; 571 ilink = jac->head; 572 573 /* get the matrices for each split */ 574 if (!jac->issetup) { 575 PetscInt rstart, rend, nslots, bs; 576 577 jac->issetup = PETSC_TRUE; 578 579 /* This is done here instead of in PCFieldSplitSetFields() because may not have matrix at that point */ 580 if (jac->defaultsplit || !ilink->is) { 581 if (jac->bs <= 0) jac->bs = nsplit; 582 } 583 bs = jac->bs; 584 PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend)); 585 nslots = (rend - rstart) / bs; 586 for (i = 0; i < nsplit; i++) { 587 if (jac->defaultsplit) { 588 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), 
nslots, rstart + i, nsplit, &ilink->is)); 589 PetscCall(ISDuplicate(ilink->is, &ilink->is_col)); 590 } else if (!ilink->is) { 591 if (ilink->nfields > 1) { 592 PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col; 593 PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii)); 594 PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj)); 595 for (j = 0; j < nslots; j++) { 596 for (k = 0; k < nfields; k++) { 597 ii[nfields * j + k] = rstart + bs * j + fields[k]; 598 jj[nfields * j + k] = rstart + bs * j + fields_col[k]; 599 } 600 } 601 PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is)); 602 PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col)); 603 PetscCall(ISSetBlockSize(ilink->is, nfields)); 604 PetscCall(ISSetBlockSize(ilink->is_col, nfields)); 605 } else { 606 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is)); 607 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col)); 608 } 609 } 610 PetscCall(ISSorted(ilink->is, &sorted)); 611 if (ilink->is_col) PetscCall(ISSorted(ilink->is_col, &sorted_col)); 612 PetscCheck(sorted && sorted_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Fields must be sorted when creating split"); 613 ilink = ilink->next; 614 } 615 } 616 617 ilink = jac->head; 618 if (!jac->pmat) { 619 Vec xtmp; 620 621 PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL)); 622 PetscCall(PetscMalloc1(nsplit, &jac->pmat)); 623 PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y)); 624 for (i = 0; i < nsplit; i++) { 625 MatNullSpace sp; 626 627 /* Check for preconditioning matrix attached to IS */ 628 PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i])); 629 if (jac->pmat[i]) { 630 PetscCall(PetscObjectReference((PetscObject)jac->pmat[i])); 
631 if (jac->type == PC_COMPOSITE_SCHUR) { 632 jac->schur_user = jac->pmat[i]; 633 634 PetscCall(PetscObjectReference((PetscObject)jac->schur_user)); 635 } 636 } else { 637 const char *prefix; 638 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i])); 639 PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix)); 640 PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix)); 641 PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view")); 642 } 643 /* create work vectors for each split */ 644 PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i])); 645 ilink->x = jac->x[i]; 646 ilink->y = jac->y[i]; 647 ilink->z = NULL; 648 /* compute scatter contexts needed by multiplicative versions and non-default splits */ 649 PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx)); 650 PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp)); 651 if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp)); 652 ilink = ilink->next; 653 } 654 PetscCall(VecDestroy(&xtmp)); 655 } else { 656 MatReuse scall; 657 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 658 for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i])); 659 scall = MAT_INITIAL_MATRIX; 660 } else scall = MAT_REUSE_MATRIX; 661 662 for (i = 0; i < nsplit; i++) { 663 Mat pmat; 664 665 /* Check for preconditioning matrix attached to IS */ 666 PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat)); 667 if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i])); 668 ilink = ilink->next; 669 } 670 } 671 if (jac->diag_use_amat) { 672 ilink = jac->head; 673 if (!jac->mat) { 674 PetscCall(PetscMalloc1(nsplit, &jac->mat)); 675 for (i = 0; i < nsplit; i++) { 676 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i])); 677 ilink = ilink->next; 678 } 679 } else { 680 MatReuse scall; 681 if (pc->flag == 
DIFFERENT_NONZERO_PATTERN) { 682 for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i])); 683 scall = MAT_INITIAL_MATRIX; 684 } else scall = MAT_REUSE_MATRIX; 685 686 for (i = 0; i < nsplit; i++) { 687 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i])); 688 ilink = ilink->next; 689 } 690 } 691 } else { 692 jac->mat = jac->pmat; 693 } 694 695 /* Check for null space attached to IS */ 696 ilink = jac->head; 697 for (i = 0; i < nsplit; i++) { 698 MatNullSpace sp; 699 700 PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp)); 701 if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp)); 702 ilink = ilink->next; 703 } 704 705 if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) { 706 /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */ 707 /* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? 
*/ 708 ilink = jac->head; 709 if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) { 710 /* special case need where Afield[0] is not needed and only certain columns of Afield[1] are needed since update is only on those rows of the solution */ 711 if (!jac->Afield) { 712 PetscCall(PetscCalloc1(nsplit, &jac->Afield)); 713 if (jac->offdiag_use_amat) { 714 PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1])); 715 } else { 716 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1])); 717 } 718 } else { 719 MatReuse scall; 720 721 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 722 PetscCall(MatDestroy(&jac->Afield[1])); 723 scall = MAT_INITIAL_MATRIX; 724 } else scall = MAT_REUSE_MATRIX; 725 726 if (jac->offdiag_use_amat) { 727 PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1])); 728 } else { 729 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, &jac->Afield[1])); 730 } 731 } 732 } else { 733 if (!jac->Afield) { 734 PetscCall(PetscMalloc1(nsplit, &jac->Afield)); 735 for (i = 0; i < nsplit; i++) { 736 if (jac->offdiag_use_amat) { 737 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i])); 738 } else { 739 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i])); 740 } 741 ilink = ilink->next; 742 } 743 } else { 744 MatReuse scall; 745 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 746 for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i])); 747 scall = MAT_INITIAL_MATRIX; 748 } else scall = MAT_REUSE_MATRIX; 749 750 for (i = 0; i < nsplit; i++) { 751 if (jac->offdiag_use_amat) { 752 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i])); 753 } else { 754 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, &jac->Afield[i])); 755 } 756 ilink = ilink->next; 757 } 758 } 759 } 760 } 761 762 if 
(jac->type == PC_COMPOSITE_SCHUR) { 763 IS ccis; 764 PetscBool isset, isspd; 765 PetscInt rstart, rend; 766 char lscname[256]; 767 PetscObject LSC_L; 768 769 PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields"); 770 771 /* If pc->mat is SPD, don't scale by -1 the Schur complement */ 772 if (jac->schurscale == (PetscScalar)-1.0) { 773 PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd)); 774 jac->schurscale = (isset && isspd) ? 1.0 : -1.0; 775 } 776 777 /* When extracting off-diagonal submatrices, we take complements from this range */ 778 PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend)); 779 780 if (jac->schur) { 781 KSP kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper; 782 MatReuse scall; 783 784 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 785 scall = MAT_INITIAL_MATRIX; 786 PetscCall(MatDestroy(&jac->B)); 787 PetscCall(MatDestroy(&jac->C)); 788 } else scall = MAT_REUSE_MATRIX; 789 790 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner)); 791 ilink = jac->head; 792 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 793 if (jac->offdiag_use_amat) { 794 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->B)); 795 } else { 796 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->B)); 797 } 798 PetscCall(ISDestroy(&ccis)); 799 ilink = ilink->next; 800 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 801 if (jac->offdiag_use_amat) { 802 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->C)); 803 } else { 804 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->C)); 805 } 806 PetscCall(ISDestroy(&ccis)); 807 PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1])); 808 if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) { 809 PetscCall(MatDestroy(&jac->schurp)); 810 
PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp)); 811 } 812 if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0])); 813 if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0])); 814 PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac))); 815 } else { 816 const char *Dprefix; 817 char schurprefix[256], schurmatprefix[256]; 818 char schurtestoption[256]; 819 MatNullSpace sp; 820 PetscBool flg; 821 KSP kspt; 822 823 /* extract the A01 and A10 matrices */ 824 ilink = jac->head; 825 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 826 if (jac->offdiag_use_amat) { 827 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 828 } else { 829 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 830 } 831 PetscCall(ISDestroy(&ccis)); 832 ilink = ilink->next; 833 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 834 if (jac->offdiag_use_amat) { 835 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 836 } else { 837 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 838 } 839 PetscCall(ISDestroy(&ccis)); 840 841 /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */ 842 PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur)); 843 PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT)); 844 PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1])); 845 PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? 
((PetscObject)pc)->prefix : "", ilink->splitname)); 846 PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix)); 847 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt)); 848 PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix)); 849 850 /* Note: this is not true in general */ 851 PetscCall(MatGetNullSpace(jac->mat[1], &sp)); 852 if (sp) PetscCall(MatSetNullSpace(jac->schur, sp)); 853 854 PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname)); 855 PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, &flg)); 856 if (flg) { 857 DM dmInner; 858 KSP kspInner; 859 PC pcInner; 860 861 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner)); 862 PetscCall(KSPReset(kspInner)); 863 PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0])); 864 PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname)); 865 /* Indent this deeper to emphasize the "inner" nature of this solver. */ 866 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2)); 867 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2)); 868 PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix)); 869 870 /* Set DM for new solver */ 871 PetscCall(KSPGetDM(jac->head->ksp, &dmInner)); 872 PetscCall(KSPSetDM(kspInner, dmInner)); 873 PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE)); 874 875 /* Defaults to PCKSP as preconditioner */ 876 PetscCall(KSPGetPC(kspInner, &pcInner)); 877 PetscCall(PCSetType(pcInner, PCKSP)); 878 PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp)); 879 } else { 880 /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or 881 * PCFieldSplitSetIS_FieldSplit. 
We don't want KSPPREONLY because it makes the Schur complement inexact, 882 * preventing Schur complement reduction to be an accurate solve. Usually when an iterative solver is used for 883 * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make 884 * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used 885 * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. */ 886 PetscCall(KSPSetType(jac->head->ksp, KSPGMRES)); 887 PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp)); 888 } 889 PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0])); 890 PetscCall(KSPSetFromOptions(jac->head->ksp)); 891 PetscCall(MatSetFromOptions(jac->schur)); 892 893 PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg)); 894 if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */ 895 KSP kspInner; 896 PC pcInner; 897 898 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner)); 899 PetscCall(KSPGetPC(kspInner, &pcInner)); 900 PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg)); 901 if (flg) { 902 KSP ksp; 903 904 PetscCall(PCKSPGetKSP(pcInner, &ksp)); 905 if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE)); 906 } 907 } 908 PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname)); 909 PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, &flg)); 910 if (flg) { 911 DM dmInner; 912 913 PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? 
((PetscObject)pc)->prefix : "", ilink->splitname)); 914 PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper)); 915 PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure)); 916 PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix)); 917 PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1)); 918 PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1)); 919 PetscCall(KSPGetDM(jac->head->ksp, &dmInner)); 920 PetscCall(KSPSetDM(jac->kspupper, dmInner)); 921 PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE)); 922 PetscCall(KSPSetFromOptions(jac->kspupper)); 923 PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0])); 924 PetscCall(VecDuplicate(jac->head->x, &jac->head->z)); 925 } else { 926 jac->kspupper = jac->head->ksp; 927 PetscCall(PetscObjectReference((PetscObject)jac->head->ksp)); 928 } 929 930 if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp)); 931 PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur)); 932 PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure)); 933 PetscCall(PetscLogObjectParent((PetscObject)pc, (PetscObject)jac->kspschur)); 934 PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1)); 935 if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) { 936 PC pcschur; 937 PetscCall(KSPGetPC(jac->kspschur, &pcschur)); 938 PetscCall(PCSetType(pcschur, PCNONE)); 939 /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */ 940 } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) { 941 PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user)); 942 } 943 PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac))); 944 PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix)); 945 PetscCall(KSPSetOptionsPrefix(jac->kspschur, 
Dprefix)); 946 /* propagate DM */ 947 { 948 DM sdm; 949 PetscCall(KSPGetDM(jac->head->next->ksp, &sdm)); 950 if (sdm) { 951 PetscCall(KSPSetDM(jac->kspschur, sdm)); 952 PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE)); 953 } 954 } 955 /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */ 956 /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */ 957 PetscCall(KSPSetFromOptions(jac->kspschur)); 958 } 959 PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY)); 960 PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY)); 961 962 /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */ 963 PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname)); 964 PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L)); 965 if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L)); 966 if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", (PetscObject)LSC_L)); 967 PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname)); 968 PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L)); 969 if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L)); 970 if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", (PetscObject)LSC_L)); 971 } else if (jac->type == PC_COMPOSITE_GKB) { 972 IS ccis; 973 PetscInt rstart, rend; 974 975 PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields"); 976 977 ilink = jac->head; 978 979 /* When extracting off-diagonal submatrices, we take complements from this range */ 980 PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend)); 981 982 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 983 if 
(jac->offdiag_use_amat) { 984 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 985 } else { 986 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 987 } 988 PetscCall(ISDestroy(&ccis)); 989 /* Create work vectors for GKB algorithm */ 990 PetscCall(VecDuplicate(ilink->x, &jac->u)); 991 PetscCall(VecDuplicate(ilink->x, &jac->Hu)); 992 PetscCall(VecDuplicate(ilink->x, &jac->w2)); 993 ilink = ilink->next; 994 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 995 if (jac->offdiag_use_amat) { 996 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 997 } else { 998 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 999 } 1000 PetscCall(ISDestroy(&ccis)); 1001 /* Create work vectors for GKB algorithm */ 1002 PetscCall(VecDuplicate(ilink->x, &jac->v)); 1003 PetscCall(VecDuplicate(ilink->x, &jac->d)); 1004 PetscCall(VecDuplicate(ilink->x, &jac->w1)); 1005 PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu)); 1006 PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz)); 1007 1008 ilink = jac->head; 1009 PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H)); 1010 if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp)); 1011 /* Create gkb_monitor context */ 1012 if (jac->gkbmonitor) { 1013 PetscInt tablevel; 1014 PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer)); 1015 PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII)); 1016 PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel)); 1017 PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel)); 1018 PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1)); 1019 } 1020 } else { 1021 /* set up the individual splits' PCs */ 1022 i = 0; 1023 ilink = jac->head; 1024 while (ilink) { 1025 PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i])); 
1026 /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */ 1027 if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp)); 1028 i++; 1029 ilink = ilink->next; 1030 } 1031 } 1032 1033 /* Set coordinates to the sub PC objects whenever these are set */ 1034 if (jac->coordinates_set) { 1035 PC pc_coords; 1036 if (jac->type == PC_COMPOSITE_SCHUR) { 1037 // Head is first block. 1038 PetscCall(KSPGetPC(jac->head->ksp, &pc_coords)); 1039 PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords)); 1040 // Second one is Schur block, but its KSP object is in kspschur. 1041 PetscCall(KSPGetPC(jac->kspschur, &pc_coords)); 1042 PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords)); 1043 } else if (jac->type == PC_COMPOSITE_GKB) { 1044 PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB Fieldpslit preconditioner")); 1045 } else { 1046 ilink = jac->head; 1047 while (ilink) { 1048 PetscCall(KSPGetPC(ilink->ksp, &pc_coords)); 1049 PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords)); 1050 ilink = ilink->next; 1051 } 1052 } 1053 } 1054 1055 jac->suboptionsset = PETSC_TRUE; 1056 PetscFunctionReturn(0); 1057 } 1058 1059 #define FieldSplitSplitSolveAdd(ilink, xx, yy) \ 1060 (VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \ 1061 KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \ 1062 VecScatterEnd(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE)) 1063 1064 static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y) { 1065 
PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1066 PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; 1067 KSP kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper; 1068 1069 PetscFunctionBegin; 1070 switch (jac->schurfactorization) { 1071 case PC_FIELDSPLIT_SCHUR_FACT_DIAG: 1072 /* [A00 0; 0 -S], positive definite, suitable for MINRES */ 1073 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1074 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1075 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1076 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1077 PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y)); 1078 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1079 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1080 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1081 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1082 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1083 PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y)); 1084 PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1085 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1086 PetscCall(VecScale(ilinkD->y, jac->schurscale)); 1087 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1088 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1089 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1090 break; 1091 case PC_FIELDSPLIT_SCHUR_FACT_LOWER: 1092 /* [A00 0; A10 S], suitable for left preconditioning */ 1093 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1094 PetscCall(VecScatterEnd(ilinkA->sctx, x, 
ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1095 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1096 PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y)); 1097 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1098 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1099 PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x)); 1100 PetscCall(VecScale(ilinkD->x, -1.)); 1101 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1102 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1103 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1104 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1105 PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y)); 1106 PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1107 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1108 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1109 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1110 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1111 break; 1112 case PC_FIELDSPLIT_SCHUR_FACT_UPPER: 1113 /* [A00 A01; 0 S], suitable for right preconditioning */ 1114 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1115 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1116 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1117 PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y)); 1118 PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1119 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1120 PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x)); 1121 PetscCall(VecScale(ilinkA->x, -1.)); 1122 
PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD)); 1123 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1124 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD)); 1125 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1126 PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y)); 1127 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1128 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1129 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1130 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1131 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1132 break; 1133 case PC_FIELDSPLIT_SCHUR_FACT_FULL: 1134 /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */ 1135 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1136 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1137 PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL)); 1138 PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y)); 1139 PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y)); 1140 PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL)); 1141 PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x)); 1142 PetscCall(VecScale(ilinkD->x, -1.0)); 1143 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1144 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1145 1146 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1147 PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y)); 1148 PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1149 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, 
NULL)); 1150 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1151 1152 if (kspUpper == kspA) { 1153 PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y)); 1154 PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y)); 1155 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1156 PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y)); 1157 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1158 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1159 } else { 1160 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1161 PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y)); 1162 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1163 PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x)); 1164 PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL)); 1165 PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z)); 1166 PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z)); 1167 PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL)); 1168 PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z)); 1169 } 1170 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1171 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1172 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1173 } 1174 PetscFunctionReturn(0); 1175 } 1176 1177 static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y) { 1178 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1179 PC_FieldSplitLink ilink = jac->head; 1180 PetscInt cnt, bs; 1181 1182 PetscFunctionBegin; 1183 if (jac->type == PC_COMPOSITE_ADDITIVE) { 1184 if (jac->defaultsplit) { 1185 PetscCall(VecGetBlockSize(x, &bs)); 1186 PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, 
jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      /* default (strided) splits: gather every field at once with stride bs,
         solve each split independently, then scatter all results back */
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* general IS-defined splits: each split gathers its part of x, solves,
         and scatter-adds its contribution into y */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) {
    /* special 2-split case: Afield[0] is never needed and only the rows of
       Afield[1] hitting the second split are used (see PCSetUp_FieldSplit) */
    PetscCall(VecSet(y, 0.0));
    /* solve on first block for first block variables */
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));

    /* compute the residual only onto second block variables using first block variables */
    PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x));
    ilink = ilink->next;
    PetscCall(VecScale(ilink->x, -1.0));
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));

    /* solve on second block variables */
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
    /* general forward sweep over the splits; each step updates the residual
       restricted to the current split via Afield before solving */
    if (!jac->w1) {
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
    cnt = 1;
    while (ilink->next) {
      ilink = ilink->next;
      /* compute the residual only over the part of the vector needed */
      PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x));
      PetscCall(VecScale(ilink->x, -1.0));
      PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
      PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
      PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    }
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* symmetric variant: repeat the sweep backwards, skipping the last split
         (already solved at the turnaround), hence cnt -= 2 */
      cnt -= 2;
      while (ilink->previous) {
        ilink = ilink->previous;
        /* compute the residual only over the part of the vector needed */
        PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x));
        PetscCall(VecScale(ilink->x, -1.0));
        PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      }
    }
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type);
  PetscFunctionReturn(0);
}

/*
   PCApply_FieldSplit_GKB - Apply the Golub-Kahan bidiagonalization preconditioner
   for a 2x2 block system using the regularized matrix H = A00 + nu*A01*A01'
   (built in PCSetUp_FieldSplit). Iterates until the lower-bound error estimate
   (accumulated over a window of jac->gkbdelay values in jac->vecz) drops below
   jac->gkbtol, or jac->gkbmaxit iterations are reached. See the [Ar13] reference
   cited in the comments below.
*/
static PetscErrorCode PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y) {
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
  KSP               ksp = ilinkA->ksp;
  Vec               u, v, Hu, d, work1, work2;
  PetscScalar       alpha, z, nrmz2, *vecz;
  PetscReal         lowbnd, nu, beta;
  PetscInt          j, iterGKB;

  PetscFunctionBegin;
  PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));

  /* local aliases for the work vectors created in PCSetUp_FieldSplit */
  u     = jac->u;
  v     = jac->v;
  Hu    = jac->Hu;
  d     = jac->d;
  work1 = jac->w1;
  work2 = jac->w2;
  vecz  = jac->vecz;

  /* Change RHS to comply with matrix regularization H = A + nu*B*B' */
  /* Add q = q + nu*B*b */
  if (jac->gkbnu) {
    nu =
jac->gkbnu; 1301 PetscCall(VecScale(ilinkD->x, jac->gkbnu)); 1302 PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */ 1303 } else { 1304 /* Situation when no augmented Lagrangian is used. Then we set inner */ 1305 /* matrix N = I in [Ar13], and thus nu = 1. */ 1306 nu = 1; 1307 } 1308 1309 /* Transform rhs from [q,tilde{b}] to [0,b] */ 1310 PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL)); 1311 PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y)); 1312 PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y)); 1313 PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL)); 1314 PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1)); 1315 PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x */ 1316 1317 /* First step of algorithm */ 1318 PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c)*/ 1319 KSPCheckDot(ksp, beta); 1320 beta = PetscSqrtReal(nu) * beta; 1321 PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c */ 1322 PetscCall(MatMult(jac->B, v, work2)); /* u = H^{-1}*B*v */ 1323 PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL)); 1324 PetscCall(KSPSolve(ksp, work2, u)); 1325 PetscCall(KSPCheckSolve(ksp, pc, u)); 1326 PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL)); 1327 PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u */ 1328 PetscCall(VecDot(Hu, u, &alpha)); 1329 KSPCheckDot(ksp, alpha); 1330 PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite"); 1331 alpha = PetscSqrtReal(PetscAbsScalar(alpha)); 1332 PetscCall(VecScale(u, 1.0 / alpha)); 1333 PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* v = nu/beta *c */ 1334 1335 z = beta / alpha; 1336 vecz[1] = z; 1337 1338 /* Computation of first iterate x(1) and p(1) */ 1339 PetscCall(VecAXPY(ilinkA->y, z, u)); 1340 PetscCall(VecCopy(d, ilinkD->y)); 1341 PetscCall(VecScale(ilinkD->y, -z)); 
1342 1343 iterGKB = 1; 1344 lowbnd = 2 * jac->gkbtol; 1345 if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd)); 1346 1347 while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) { 1348 iterGKB += 1; 1349 PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */ 1350 PetscCall(VecAXPBY(v, nu, -alpha, work1)); 1351 PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = sqrt(nu)*v'*v */ 1352 beta = beta / PetscSqrtReal(nu); 1353 PetscCall(VecScale(v, 1.0 / beta)); 1354 PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */ 1355 PetscCall(MatMult(jac->H, u, Hu)); 1356 PetscCall(VecAXPY(work2, -beta, Hu)); 1357 PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL)); 1358 PetscCall(KSPSolve(ksp, work2, u)); 1359 PetscCall(KSPCheckSolve(ksp, pc, u)); 1360 PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL)); 1361 PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u */ 1362 PetscCall(VecDot(Hu, u, &alpha)); 1363 KSPCheckDot(ksp, alpha); 1364 PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite"); 1365 alpha = PetscSqrtReal(PetscAbsScalar(alpha)); 1366 PetscCall(VecScale(u, 1.0 / alpha)); 1367 1368 z = -beta / alpha * z; /* z <- beta/alpha*z */ 1369 vecz[0] = z; 1370 1371 /* Computation of new iterate x(i+1) and p(i+1) */ 1372 PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */ 1373 PetscCall(VecAXPY(ilinkA->y, z, u)); /* r = r + z*u */ 1374 PetscCall(VecAXPY(ilinkD->y, -z, d)); /* p = p - z*d */ 1375 PetscCall(MatMult(jac->H, ilinkA->y, Hu)); /* ||u||_H = u'*H*u */ 1376 PetscCall(VecDot(Hu, ilinkA->y, &nrmz2)); 1377 1378 /* Compute Lower Bound estimate */ 1379 if (iterGKB > jac->gkbdelay) { 1380 lowbnd = 0.0; 1381 for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]); 1382 
      /* Lower bound estimate of the error in the H-norm (see [Ar13]); nrmz2 holds x'*H*x */
      lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2));
    }

    /* Shift the window of stored z values by one; vecz[0] receives the newest value */
    for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2];
    if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));
  }

  /* Scatter the split solutions back into the full solution vector y */
  PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

  PetscFunctionReturn(0);
}

/* Solve one split transposed and ADD the result into yy: scatter xx into the split's work
   vector, run KSPSolveTranspose() on the split solver, and scatter the result back with
   ADD_VALUES. Written as a ||-chain of PetscErrorCode values so the whole expression can be
   wrapped in a single PetscCall(); note it uses 'pc' from the caller's scope in KSPCheckSolve(). */
#define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \
  (VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \
   KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \
   VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE))

/* Apply the transpose of the fieldsplit preconditioner: y = P^T x.
   Additive: solve each split independently with KSPSolveTranspose() and combine.
   (Symmetric) multiplicative: sweep the splits in REVERSE order (the transpose of the
   forward sweep), updating the residual jac->w2 = x - A^T y between split solves. */
static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y) {
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    if (jac->defaultsplit) {
      /* All splits share one strided layout: gather all strides at once, solve, scatter back */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* General ISs: each split scatters, solves and accumulates into y on its own */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else {
    /* Multiplicative variants need residual work vectors */
    if (!jac->w1) {
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* Forward sweep followed by backward sweep; the symmetric operator is self-transpose */
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->next) {
        ilink = ilink->next;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); /* w2 = x - A^T y (current residual) */
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    } else {
      /* Transpose of plain multiplicative: sweep the splits in reverse order */
      while (ilink->next) { /* get to last entry in linked list */
        ilink = ilink->next;
      }
      PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
      while (ilink->previous) {
        ilink = ilink->previous;
        PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
        PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
        PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
      }
    }
  }
  PetscFunctionReturn(0);
}

/* Destroy all data created in PCSetUp_FieldSplit() and by the PCFieldSplitSet* functions,
   returning the PC to its pre-setup state. Called by PCReset() and from PCDestroy_FieldSplit(). */
static PetscErrorCode PCReset_FieldSplit(PC pc) {
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;

  PetscFunctionBegin;
  /* Walk the linked list of splits, destroying each link and everything it owns */
  while (ilink) {
    PetscCall(KSPDestroy(&ilink->ksp));
    PetscCall(VecDestroy(&ilink->x));
    PetscCall(VecDestroy(&ilink->y));
    PetscCall(VecDestroy(&ilink->z));
    PetscCall(VecScatterDestroy(&ilink->sctx));
    PetscCall(ISDestroy(&ilink->is));
    PetscCall(ISDestroy(&ilink->is_col));
    PetscCall(PetscFree(ilink->splitname));
    PetscCall(PetscFree(ilink->fields));
    PetscCall(PetscFree(ilink->fields_col));
    next = ilink->next;
    PetscCall(PetscFree(ilink));
    ilink = next;
  }
  jac->head = NULL;
  PetscCall(PetscFree2(jac->x, jac->y));
  /* mat may alias pmat; destroy the array only once */
  if (jac->mat && jac->mat != jac->pmat) {
    PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat));
  } else if (jac->mat) {
    jac->mat = NULL;
  }
  if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat));
  if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield));
  jac->nsplits = 0;
  PetscCall(VecDestroy(&jac->w1));
  PetscCall(VecDestroy(&jac->w2));
  /* Schur complement objects */
  PetscCall(MatDestroy(&jac->schur));
  PetscCall(MatDestroy(&jac->schurp));
  PetscCall(MatDestroy(&jac->schur_user));
  PetscCall(KSPDestroy(&jac->kspschur));
  PetscCall(KSPDestroy(&jac->kspupper));
  PetscCall(MatDestroy(&jac->B));
  PetscCall(MatDestroy(&jac->C));
  /* GKB objects */
  PetscCall(MatDestroy(&jac->H));
  PetscCall(VecDestroy(&jac->u));
  PetscCall(VecDestroy(&jac->v));
  PetscCall(VecDestroy(&jac->Hu));
  PetscCall(VecDestroy(&jac->d));
  PetscCall(PetscFree(jac->vecz));
  PetscCall(PetscViewerDestroy(&jac->gkbviewer));
  jac->isrestrict = PETSC_FALSE;
  PetscFunctionReturn(0);
}

static PetscErrorCode
PCDestroy_FieldSplit(PC pc) { 1517 PetscFunctionBegin; 1518 PetscCall(PCReset_FieldSplit(pc)); 1519 PetscCall(PetscFree(pc->data)); 1520 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL)); 1521 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL)); 1522 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL)); 1523 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL)); 1524 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL)); 1525 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL)); 1526 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL)); 1527 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL)); 1528 1529 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL)); 1530 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL)); 1531 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL)); 1532 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL)); 1533 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL)); 1534 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL)); 1535 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL)); 1536 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL)); 1537 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL)); 1538 PetscFunctionReturn(0); 1539 } 1540 1541 static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems *PetscOptionsObject) { 1542 PetscInt bs; 1543 PetscBool flg; 1544 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1545 
PCCompositeType ctype; 1546 1547 PetscFunctionBegin; 1548 PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options"); 1549 PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL)); 1550 PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg)); 1551 if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs)); 1552 jac->diag_use_amat = pc->useAmat; 1553 PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL)); 1554 jac->offdiag_use_amat = pc->useAmat; 1555 PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL)); 1556 PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL)); 1557 PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */ 1558 PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg)); 1559 if (flg) PetscCall(PCFieldSplitSetType(pc, ctype)); 1560 /* Only setup fields once */ 1561 if ((jac->bs > 0) && (jac->nsplits == 0)) { 1562 /* only allow user to set fields from command line if bs is already known. 
1563 otherwise user can set them in PCFieldSplitSetDefaults() */ 1564 PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc)); 1565 if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n")); 1566 } 1567 if (jac->type == PC_COMPOSITE_SCHUR) { 1568 PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg)); 1569 if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n")); 1570 PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL)); 1571 PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL)); 1572 PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL)); 1573 } else if (jac->type == PC_COMPOSITE_GKB) { 1574 PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitGKBTol", jac->gkbtol, &jac->gkbtol, NULL)); 1575 PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL)); 1576 PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitGKBNu", jac->gkbnu, &jac->gkbnu, NULL)); 1577 PetscCheck(jac->gkbnu >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "nu cannot be less than 0: value %g", (double)jac->gkbnu); 1578 PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", 
"Maximum allowed number of iterations", "PCFieldSplitGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL)); 1579 PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL)); 1580 } 1581 /* 1582 In the initial call to this routine the sub-solver data structures do not exist so we cannot call KSPSetFromOptions() on them yet. 1583 But after the initial setup of ALL the layers of sub-solvers is completed we do want to call KSPSetFromOptions() on the sub-solvers every time it 1584 is called on the outer solver in case changes were made in the options database 1585 1586 But even after PCSetUp_FieldSplit() is called all the options inside the inner levels of sub-solvers may still not have been set thus we only call the KSPSetFromOptions() 1587 if we know that the entire stack of sub-solvers below this have been complete instantiated, we check this by seeing if any solver iterations are complete. 1588 Without this extra check test p2p1fetidp_olof_full and others fail with incorrect matrix types. 1589 1590 There could be a negative side effect of calling the KSPSetFromOptions() below. 
1591 1592 If one captured the PetscObjectState of the options database one could skip these calls if the database has not changed from the previous call 1593 */ 1594 if (jac->issetup) { 1595 PC_FieldSplitLink ilink = jac->head; 1596 if (jac->type == PC_COMPOSITE_SCHUR) { 1597 if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper)); 1598 if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur)); 1599 } 1600 while (ilink) { 1601 if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp)); 1602 ilink = ilink->next; 1603 } 1604 } 1605 PetscOptionsHeadEnd(); 1606 PetscFunctionReturn(0); 1607 } 1608 1609 static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col) { 1610 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1611 PC_FieldSplitLink ilink, next = jac->head; 1612 char prefix[128]; 1613 PetscInt i; 1614 1615 PetscFunctionBegin; 1616 if (jac->splitdefined) { 1617 PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname)); 1618 PetscFunctionReturn(0); 1619 } 1620 for (i = 0; i < n; i++) { 1621 PetscCheck(fields[i] < jac->bs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", fields[i], jac->bs); 1622 PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]); 1623 } 1624 PetscCall(PetscNew(&ilink)); 1625 if (splitname) { 1626 PetscCall(PetscStrallocpy(splitname, &ilink->splitname)); 1627 } else { 1628 PetscCall(PetscMalloc1(3, &ilink->splitname)); 1629 PetscCall(PetscSNPrintf(ilink->splitname, 2, "%" PetscInt_FMT, jac->nsplits)); 1630 } 1631 ilink->event = jac->nsplits < 5 ? 
KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split greater than 4 gets logged in the 4th split */
  /* Keep private copies of the field index arrays */
  PetscCall(PetscMalloc1(n, &ilink->fields));
  PetscCall(PetscArraycpy(ilink->fields, fields, n));
  PetscCall(PetscMalloc1(n, &ilink->fields_col));
  PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n));

  ilink->nfields = n;
  ilink->next    = NULL;
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
  PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
  PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
  PetscCall(PetscLogObjectParent((PetscObject)pc, (PetscObject)ilink->ksp));

  /* Sub-solver options live under -<pc_prefix>fieldsplit_<splitname>_ */
  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
  PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));

  /* Append the new split at the tail of the linked list */
  if (!next) {
    jac->head       = ilink;
    ilink->previous = NULL;
  } else {
    while (next->next) next = next->next;
    next->next      = ilink;
    ilink->previous = next;
  }
  jac->nsplits++;
  PetscFunctionReturn(0);
}

/* Return the sub-KSPs of a Schur-complement fieldsplit: [0] the A00 solver, [1] the Schur
   solver, and, only when it differs from the A00 solver, [2] the upper-factor solver.
   The caller owns (and must PetscFree()) the returned array. Outside PC_COMPOSITE_SCHUR the
   array is returned NULL with *n = 0. */
static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp) {
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  *subksp = NULL;
  if (n) *n = 0;
  if (jac->type == PC_COMPOSITE_SCHUR) {
    PetscInt nn;

    PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()");
    PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits);
    /* A third entry only when the upper-factor solver is distinct from the A00 solver */
    nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 1 : 0);
    PetscCall(PetscMalloc1(nn, subksp));
    (*subksp)[0] = jac->head->ksp;
    (*subksp)[1] = jac->kspschur;
    if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper;
    if (n) *n = nn;
  }
  PetscFunctionReturn(0);
}

/* GetSubKSP variant used when the PC is in Schur mode: [0] is the inner A00 solver taken
   from the Schur complement matrix, [1] the Schur solver. Caller frees the array. */
static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp) {
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()");
  PetscCall(PetscMalloc1(jac->nsplits, subksp));
  PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp));

  (*subksp)[1] = jac->kspschur;
  if (n) *n = jac->nsplits;
  PetscFunctionReturn(0);
}

/* Default GetSubKSP: collect the solver of every split, in list order. Caller frees the array. */
static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp) {
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscInt          cnt = 0;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscMalloc1(jac->nsplits, subksp));
  while (ilink) {
    (*subksp)[cnt++] = ilink->ksp;
    ilink = ilink->next;
  }
  PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits);
  if (n) *n = jac->nsplits;
  PetscFunctionReturn(0);
}

/*@C
  PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`.

  Input Parameters:
+ pc - the preconditioner context
- is - the index set that defines the indices to which the fieldsplit is to be restricted

  Level: advanced

  Developer Note:
  It seems the resulting `IS`s will not cover the entire space, so
  how can they define a convergent preconditioner? Needs explaining.

.seealso: `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(isy, IS_CLASSID, 2);
  PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy));
  PetscFunctionReturn(0);
}

/* Implementation of PCFieldSplitRestrictIS(): replace each split's IS (and is_col) with its
   embedding into isy, shifted so the indices are numbered consecutively across ranks, and
   recurse into any sub-PC that is itself a PCFIELDSPLIT. */
static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy) {
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;
  PetscInt          localsize, size, sizez, i;
  const PetscInt   *ind, *indz;
  PetscInt         *indc, *indcz;
  PetscBool         flg;

  PetscFunctionBegin;
  /* size = global offset of this rank's portion of isy (exclusive prefix sum) */
  PetscCall(ISGetLocalSize(isy, &localsize));
  PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy)));
  size -= localsize;
  while (ilink) {
    IS isrl, isr;
    PC subpc;
    /* Positions of this split's entries within isy */
    PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl));
    PetscCall(ISGetLocalSize(isrl, &localsize));
    PetscCall(PetscMalloc1(localsize, &indc));
    PetscCall(ISGetIndices(isrl, &ind));
    PetscCall(PetscArraycpy(indc, ind, localsize));
    PetscCall(ISRestoreIndices(isrl, &ind));
    PetscCall(ISDestroy(&isrl));
    for (i = 0; i < localsize; i++) *(indc + i) += size; /* shift to global numbering */
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr));
    /* Hand one reference each to ilink->is and ilink->is_col, then drop our own */
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is));
    ilink->is = isr;
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is_col));
    ilink->is_col = isr;
    PetscCall(ISDestroy(&isr));
    PetscCall(KSPGetPC(ilink->ksp, &subpc));
    PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg));
    if (flg) {
      /* Nested fieldsplit: restrict the inner splits by the embedding of isy into this split */
      IS       iszl, isz;
      MPI_Comm comm;
      PetscCall(ISGetLocalSize(ilink->is, &localsize));
      comm = PetscObjectComm((PetscObject)ilink->is);
      PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl));
      PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm));
      sizez -= localsize;
      PetscCall(ISGetLocalSize(iszl, &localsize));
      PetscCall(PetscMalloc1(localsize, &indcz));
      PetscCall(ISGetIndices(iszl, &indz));
      PetscCall(PetscArraycpy(indcz, indz, localsize));
      PetscCall(ISRestoreIndices(iszl, &indz));
      PetscCall(ISDestroy(&iszl));
      for (i = 0; i < localsize; i++) *(indcz + i) += sizez;
      PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz));
      PetscCall(PCFieldSplitRestrictIS(subpc, isz));
      PetscCall(ISDestroy(&isz));
    }
    next  = ilink->next;
    ilink = next;
  }
  jac->isrestrict = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/* Implementation of PCFieldSplitSetIS(): append a new split, defined by an explicit IS,
   to the linked list of splits */
static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is) {
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink, next = jac->head;
  char              prefix[128];

  PetscFunctionBegin;
  if (jac->splitdefined) {
    PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
    PetscFunctionReturn(0);
  }
  PetscCall(PetscNew(&ilink));
  if (splitname) {
    PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
  } else {
    /* Name the split by its index (up to 7 digits) */
    PetscCall(PetscMalloc1(8, &ilink->splitname));
    PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits));
  }
  ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split greater than 4 gets logged in the 4th split */
  /* Take a reference for both the row and the column IS */
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is));
  ilink->is = is;
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is_col));
  ilink->is_col = is;
  ilink->next   = NULL;
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
  PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
  PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
  PetscCall(PetscLogObjectParent((PetscObject)pc, (PetscObject)ilink->ksp));

  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
  PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));

  /* Append the new split at the tail of the linked list */
  if (!next) {
    jac->head       = ilink;
    ilink->previous = NULL;
  } else {
    while (next->next) next = next->next;
    next->next      = ilink;
    ilink->previous = next;
  }
  jac->nsplits++;
  PetscFunctionReturn(0);
}

/*@C
  PCFieldSplitSetFields - Sets the fields that define one particular split in the field split preconditioner

  Logically Collective on pc

  Input Parameters:
+ pc - the preconditioner context
. splitname - name of this split, if NULL the number of the split is used
. n - the number of fields in this split
. fields - the fields in this split
- fields_col - the fields in the column direction of this split (generally the same as fields)

  Level: intermediate

  Notes:
  Use `PCFieldSplitSetIS()` to set a general set of indices as a split.

  `PCFieldSplitSetFields()` is for defining fields as strided blocks. For example, if the block
  size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean
  0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x...
0x1x3x5x7.. x12x45x78x.... 1857 where the numbered entries indicate what is in the split. 1858 1859 This function is called once per split (it creates a new split each time). Solve options 1860 for this split will be available under the prefix -fieldsplit_SPLITNAME_. 1861 1862 Developer Note: 1863 This routine does not actually create the `IS` representing the split, that is delayed 1864 until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be 1865 available when this routine is called. 1866 1867 .seealso: `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()` 1868 @*/ 1869 PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col) { 1870 PetscFunctionBegin; 1871 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1872 PetscValidCharPointer(splitname, 2); 1873 PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname); 1874 PetscValidIntPointer(fields, 4); 1875 PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col)); 1876 PetscFunctionReturn(0); 1877 } 1878 1879 /*@ 1880 PCFieldSplitSetDiagUseAmat - set flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build 1881 the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators. 1882 1883 Logically Collective on pc 1884 1885 Input Parameters: 1886 + pc - the preconditioner object 1887 - flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from 1888 1889 Options Database Keys: 1890 . 
-pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks 1891 1892 Level: intermediate 1893 1894 .seealso: `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT` 1895 @*/ 1896 PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg) { 1897 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1898 PetscBool isfs; 1899 1900 PetscFunctionBegin; 1901 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1902 PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs)); 1903 PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT); 1904 jac->diag_use_amat = flg; 1905 PetscFunctionReturn(0); 1906 } 1907 1908 /*@ 1909 PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build 1910 the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators. 1911 1912 Logically Collective on pc 1913 1914 Input Parameters: 1915 . pc - the preconditioner object 1916 1917 Output Parameters: 1918 . 
flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from 1919 1920 Level: intermediate 1921 1922 .seealso: `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT` 1923 @*/ 1924 PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg) { 1925 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1926 PetscBool isfs; 1927 1928 PetscFunctionBegin; 1929 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1930 PetscValidBoolPointer(flg, 2); 1931 PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs)); 1932 PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT); 1933 *flg = jac->diag_use_amat; 1934 PetscFunctionReturn(0); 1935 } 1936 1937 /*@ 1938 PCFieldSplitSetOffDiagUseAmat - set flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build 1939 the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators. 1940 1941 Logically Collective on pc 1942 1943 Input Parameters: 1944 + pc - the preconditioner object 1945 - flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from 1946 1947 Options Database Keys: 1948 . 
-pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks 1949 1950 Level: intermediate 1951 1952 .seealso: `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT` 1953 @*/ 1954 PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg) { 1955 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1956 PetscBool isfs; 1957 1958 PetscFunctionBegin; 1959 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1960 PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs)); 1961 PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT); 1962 jac->offdiag_use_amat = flg; 1963 PetscFunctionReturn(0); 1964 } 1965 1966 /*@ 1967 PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build 1968 the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators. 1969 1970 Logically Collective on pc 1971 1972 Input Parameters: 1973 . pc - the preconditioner object 1974 1975 Output Parameters: 1976 . 
 flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from

  Level: intermediate

.seealso: `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT`
@*/
PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg) {
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidBoolPointer(flg, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  *flg = jac->offdiag_use_amat;
  PetscFunctionReturn(0);
}

/*@C
  PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT`

  Logically Collective on pc

  Input Parameters:
+ pc - the preconditioner context
. splitname - name of this split, if NULL the number of the split is used
- is - the index set that defines the elements in this split

  Notes:
  Use `PCFieldSplitSetFields()`, for splits defined by strided types.

  This function is called once per split (it creates a new split each time). Solve options
  for this split will be available under the prefix -fieldsplit_SPLITNAME_.

  Level: intermediate

.seealso: `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`
@*/
PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (splitname) PetscValidCharPointer(splitname, 2);
  PetscValidHeaderSpecific(is, IS_CLASSID, 3);
  PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is));
  PetscFunctionReturn(0);
}

/*@C
  PCFieldSplitGetIS - Retrieves the elements for a split as an `IS`

  Logically Collective on pc

  Input Parameters:
+ pc - the preconditioner context
- splitname - name of this split

  Output Parameter:
. is - the index set that defines the elements in this split, or NULL if the split is not found

  Level: intermediate

.seealso: `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidCharPointer(splitname, 2);
  PetscValidPointer(is, 3);
  {
    PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
    PC_FieldSplitLink ilink = jac->head;
    PetscBool         found;

    /* Linear search of the split list by name; *is stays NULL when no name matches */
    *is = NULL;
    while (ilink) {
      PetscCall(PetscStrcmp(ilink->splitname, splitname, &found));
      if (found) {
        *is = ilink->is;
        break;
      }
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(0);
}

/*@C
  PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS`

  Logically Collective on pc

  Input Parameters:
+ pc - the preconditioner context
- index - index of this split

  Output Parameter:
. is - the index set that defines the elements in this split

  Level: intermediate

.seealso: `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is) {
  PetscFunctionBegin;
  PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index);
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(is, 3);
  {
    PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
    PC_FieldSplitLink ilink = jac->head;
    PetscInt          i     = 0;
    PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits);

    /* Advance to the index-th link, then look it up by name */
    while (i < index) {
      ilink = ilink->next;
      ++i;
    }
    PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is));
  }
  PetscFunctionReturn(0);
}

/*@
  PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the
  fieldsplit preconditioner when calling `PCFieldSplitSetIS()`. If not set the matrix block size is used.

  Logically Collective on pc

  Input Parameters:
+ pc - the preconditioner context
- bs - the block size

  Level: intermediate

.seealso: `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, bs, 2);
  PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs));
  PetscFunctionReturn(0);
}

/*@C
  PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits

  Collective on pc

  Input Parameter:
.
pc - the preconditioner context

   Output Parameters:
+  n - the number of splits
-  subksp - the array of `KSP` contexts

   Level: advanced

   Notes:
   After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
   (not the `KSP`, just the array that contains them).

   You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`.

   If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the
   Schur complement and the `KSP` object used to iterate over the Schur complement.
   To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`.

   If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the
   inner linear system defined by the matrix H in each loop.

   Fortran Note:
   You must pass in a `KSP` array that is large enough to contain all the `KSP`s.
   You can call `PCFieldSplitGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
   `KSP` array must be.

   Developer Notes:
   There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`

   The Fortran interface should be modernized to return directly the array of values.

.seealso: `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()`
@*/
PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscValidIntPointer(n, 2);
  /* PetscUseMethod (not Try): calling this on a non-fieldsplit PC is an error */
  PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(0);
}

/*@C
   PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT`

   Collective on pc

   Input Parameter:
.
pc - the preconditioner context

   Output Parameters:
+  n - the number of splits
-  subksp - the array of `KSP` contexts

   Level: advanced

   Notes:
   After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
   (not the `KSP` just the array that contains them).

   You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`.

   If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns (in order)
+  1 - the `KSP` used for the (1,1) block
.  2 - the `KSP` used for the Schur complement (not the one used for the interior Schur solver)
-  3 - the `KSP` used for the (1,1) block in the upper triangular factor (if different from that of the (1,1) block).

   It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`.

   Fortran Note:
   You must pass in a `KSP` array that is large enough to contain all the local `KSP`s.
   You can call `PCFieldSplitSchurGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
   `KSP` array must be.

   Developer Notes:
   There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`

   Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged?

   The Fortran interface should be modernized to return directly the array of values.
2206 2207 .seealso: `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()` 2208 @*/ 2209 PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[]) { 2210 PetscFunctionBegin; 2211 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2212 if (n) PetscValidIntPointer(n, 2); 2213 PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp)); 2214 PetscFunctionReturn(0); 2215 } 2216 2217 /*@ 2218 PCFieldSplitSetSchurPre - Indicates from what operator the preconditioner is constructucted for the Schur complement. 2219 The default is the A11 matrix. 2220 2221 Collective on pc 2222 2223 Input Parameters: 2224 + pc - the preconditioner context 2225 . ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default), 2226 `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`, 2227 `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL` 2228 - userpre - matrix to use for preconditioning, or NULL 2229 2230 Options Database Keys: 2231 + -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is a11. See notes for meaning of various arguments 2232 - -fieldsplit_1_pc_type <pctype> - the preconditioner algorithm that is used to construct the preconditioner from the operator 2233 2234 Notes: 2235 If ptype is 2236 + a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner 2237 matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix 2238 . self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix: 2239 The only preconditioner that currently works with this symbolic respresentation matrix object is the PCLSC 2240 preconditioner 2241 . user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument 2242 to this function). 2243 . 
selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation Sp = A11 - A10 inv(diag(A00)) A01 2244 This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be 2245 lumped before extracting the diagonal using the additional option -fieldsplit_1_mat_schur_complement_ainv_type lump 2246 - full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation 2247 computed internally by `PCFIELDSPLIT` (this is expensive) 2248 useful mostly as a test that the Schur complement approach can work for your problem 2249 2250 When solving a saddle point problem, where the A11 block is identically zero, using a11 as the ptype only makes sense 2251 with the additional option -fieldsplit_1_pc_type none. Usually for saddle point problems one would use a ptype of self and 2252 -fieldsplit_1_pc_type lsc which uses the least squares commutator to compute a preconditioner for the Schur complement. 2253 2254 Level: intermediate 2255 2256 .seealso: `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, 2257 `MatSchurComplementSetAinvType()`, `PCLSC`, 2258 `PCFieldSplitSchurPreType` 2259 @*/ 2260 PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre) { 2261 PetscFunctionBegin; 2262 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2263 PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre)); 2264 PetscFunctionReturn(0); 2265 } 2266 2267 PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre) { 2268 return PCFieldSplitSetSchurPre(pc, ptype, pre); 2269 } /* Deprecated name */ 2270 2271 /*@ 2272 PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be 2273 preconditioned. See `PCFieldSplitSetSchurPre()` for details. 
2274 2275 Logically Collective on pc 2276 2277 Input Parameter: 2278 . pc - the preconditioner context 2279 2280 Output Parameters: 2281 + ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_PRE_USER` 2282 - userpre - matrix to use for preconditioning (with `PC_FIELDSPLIT_PRE_USER`), or NULL 2283 2284 Level: intermediate 2285 2286 .seealso: `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC`, 2287 `PCFieldSplitSchurPreType` 2288 @*/ 2289 PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre) { 2290 PetscFunctionBegin; 2291 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2292 PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre)); 2293 PetscFunctionReturn(0); 2294 } 2295 2296 /*@ 2297 PCFieldSplitSchurGetS - extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately 2298 2299 Not collective 2300 2301 Input Parameter: 2302 . pc - the preconditioner context 2303 2304 Output Parameter: 2305 . S - the Schur complement matrix 2306 2307 Note: 2308 This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`. 
2309 2310 Level: advanced 2311 2312 .seealso: `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`, 2313 `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()` 2314 @*/ 2315 PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S) { 2316 const char *t; 2317 PetscBool isfs; 2318 PC_FieldSplit *jac; 2319 2320 PetscFunctionBegin; 2321 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2322 PetscCall(PetscObjectGetType((PetscObject)pc, &t)); 2323 PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs)); 2324 PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t); 2325 jac = (PC_FieldSplit *)pc->data; 2326 PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type); 2327 if (S) *S = jac->schur; 2328 PetscFunctionReturn(0); 2329 } 2330 2331 /*@ 2332 PCFieldSplitSchurRestoreS - returns the `MATSCHURCOMPLEMENT` matrix used by this `PC` 2333 2334 Not collective 2335 2336 Input Parameters: 2337 + pc - the preconditioner context 2338 - S - the Schur complement matrix 2339 2340 Level: advanced 2341 2342 .seealso: `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MatSchurComplement`, `PCFieldSplitSchurGetS()` 2343 @*/ 2344 PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S) { 2345 const char *t; 2346 PetscBool isfs; 2347 PC_FieldSplit *jac; 2348 2349 PetscFunctionBegin; 2350 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2351 PetscCall(PetscObjectGetType((PetscObject)pc, &t)); 2352 PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs)); 2353 PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t); 2354 jac = (PC_FieldSplit *)pc->data; 2355 PetscCheck(jac->type == PC_COMPOSITE_SCHUR, 
PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type); 2356 PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten"); 2357 PetscFunctionReturn(0); 2358 } 2359 2360 static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre) { 2361 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2362 2363 PetscFunctionBegin; 2364 jac->schurpre = ptype; 2365 if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) { 2366 PetscCall(MatDestroy(&jac->schur_user)); 2367 jac->schur_user = pre; 2368 PetscCall(PetscObjectReference((PetscObject)jac->schur_user)); 2369 } 2370 PetscFunctionReturn(0); 2371 } 2372 2373 static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre) { 2374 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2375 2376 PetscFunctionBegin; 2377 *ptype = jac->schurpre; 2378 *pre = jac->schur_user; 2379 PetscFunctionReturn(0); 2380 } 2381 2382 /*@ 2383 PCFieldSplitSetSchurFactType - sets which blocks of the approximate block factorization to retain in the preconditioner 2384 2385 Collective on pc 2386 2387 Input Parameters: 2388 + pc - the preconditioner context 2389 - ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default 2390 2391 Options Database Key: 2392 . -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is full 2393 2394 Level: intermediate 2395 2396 Notes: 2397 The FULL factorization is 2398 2399 .vb 2400 (A B) = (1 0) (A 0) (1 Ainv*B) = L D U 2401 (C E) (C*Ainv 1) (0 S) (0 1) 2402 .vb 2403 where S = E - C*Ainv*B. In practice, the full factorization is applied via block triangular solves with the grouping L*(D*U). 
UPPER uses D*U, LOWER uses L*D, 2404 and DIAG is the diagonal part with the sign of S flipped (because this makes the preconditioner positive definite for many formulations, thus allowing the use of KSPMINRES). Sign flipping of S can be turned off with PCFieldSplitSetSchurScale(). 2405 2406 If A and S are solved exactly 2407 .vb 2408 *) FULL factorization is a direct solver. 2409 *) The preconditioned operator with LOWER or UPPER has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations. 2410 *) With DIAG, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations. 2411 .ve 2412 2413 If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner 2414 application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice. 2415 2416 For symmetric problems in which A is positive definite and S is negative definite, DIAG can be used with `KSPMINRES`. 2417 2418 A flexible method like `KSPFGMRES` or `KSPGCR` must be used if the fieldsplit preconditioner is nonlinear (e.g. a few iterations of a Krylov method is used to solve with A or S). 2419 2420 References: 2421 + * - Murphy, Golub, and Wathen, A note on preconditioning indefinite linear systems, SIAM J. Sci. Comput., 21 (2000). 2422 - * - Ipsen, A note on preconditioning nonsymmetric matrices, SIAM J. Sci. Comput., 23 (2001). 
2423 2424 .seealso: `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()` 2425 @*/ 2426 PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype) { 2427 PetscFunctionBegin; 2428 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2429 PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype)); 2430 PetscFunctionReturn(0); 2431 } 2432 2433 static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype) { 2434 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2435 2436 PetscFunctionBegin; 2437 jac->schurfactorization = ftype; 2438 PetscFunctionReturn(0); 2439 } 2440 2441 /*@ 2442 PCFieldSplitSetSchurScale - Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`. 2443 2444 Collective on pc 2445 2446 Input Parameters: 2447 + pc - the preconditioner context 2448 - scale - scaling factor for the Schur complement 2449 2450 Options Database Key: 2451 . 
-pc_fieldsplit_schur_scale - default is -1.0 2452 2453 Level: intermediate 2454 2455 .seealso: `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetSchurFactType()` 2456 @*/ 2457 PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale) { 2458 PetscFunctionBegin; 2459 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2460 PetscValidLogicalCollectiveScalar(pc, scale, 2); 2461 PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale)); 2462 PetscFunctionReturn(0); 2463 } 2464 2465 static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale) { 2466 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2467 2468 PetscFunctionBegin; 2469 jac->schurscale = scale; 2470 PetscFunctionReturn(0); 2471 } 2472 2473 /*@C 2474 PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement 2475 2476 Collective on pc 2477 2478 Input Parameter: 2479 . pc - the preconditioner context 2480 2481 Output Parameters: 2482 + A00 - the (0,0) block 2483 . A01 - the (0,1) block 2484 . 
A10 - the (1,0) block 2485 - A11 - the (1,1) block 2486 2487 Level: advanced 2488 2489 .seealso: `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()` 2490 @*/ 2491 PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11) { 2492 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2493 2494 PetscFunctionBegin; 2495 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2496 PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach."); 2497 if (A00) *A00 = jac->pmat[0]; 2498 if (A01) *A01 = jac->B; 2499 if (A10) *A10 = jac->C; 2500 if (A11) *A11 = jac->pmat[1]; 2501 PetscFunctionReturn(0); 2502 } 2503 2504 /*@ 2505 PCFieldSplitSetGKBTol - Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner in `PCFIELDSPLIT` 2506 2507 Collective on pc 2508 2509 Input Parameters: 2510 + pc - the preconditioner context 2511 - tolerance - the solver tolerance 2512 2513 Options Database Key: 2514 . -pc_fieldsplit_gkb_tol - default is 1e-5 2515 2516 Level: intermediate 2517 2518 Note: 2519 The generalized GKB algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. 2520 It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than 2521 this estimate, the stopping criterion is satisfactory in practical cases [A13]. 2522 2523 [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013. 
2524 2525 .seealso: `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()` 2526 @*/ 2527 PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance) { 2528 PetscFunctionBegin; 2529 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2530 PetscValidLogicalCollectiveReal(pc, tolerance, 2); 2531 PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance)); 2532 PetscFunctionReturn(0); 2533 } 2534 2535 static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance) { 2536 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2537 2538 PetscFunctionBegin; 2539 jac->gkbtol = tolerance; 2540 PetscFunctionReturn(0); 2541 } 2542 2543 /*@ 2544 PCFieldSplitSetGKBMaxit - Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner in `PCFIELDSPLIT` 2545 2546 Collective on pc 2547 2548 Input Parameters: 2549 + pc - the preconditioner context 2550 - maxit - the maximum number of iterations 2551 2552 Options Database Key: 2553 . -pc_fieldsplit_gkb_maxit - default is 100 2554 2555 Level: intermediate 2556 2557 .seealso: `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()` 2558 @*/ 2559 PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit) { 2560 PetscFunctionBegin; 2561 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2562 PetscValidLogicalCollectiveInt(pc, maxit, 2); 2563 PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit)); 2564 PetscFunctionReturn(0); 2565 } 2566 2567 static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit) { 2568 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2569 2570 PetscFunctionBegin; 2571 jac->gkbmaxit = maxit; 2572 PetscFunctionReturn(0); 2573 } 2574 2575 /*@ 2576 PCFieldSplitSetGKBDelay - Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization in `PCFIELDSPLIT` 2577 preconditioner. 
2578 2579 Collective on pc 2580 2581 Input Parameters: 2582 + pc - the preconditioner context 2583 - delay - the delay window in the lower bound estimate 2584 2585 Options Database Key: 2586 . -pc_fieldsplit_gkb_delay - default is 5 2587 2588 Level: intermediate 2589 2590 Note: 2591 The algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. The lower bound of the error ||u-u^k||_H 2592 is expressed as a truncated sum. The error at iteration k can only be measured at iteration (k + delay), and thus the algorithm needs 2593 at least (delay + 1) iterations to stop. For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to 2594 2595 [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013. 2596 2597 .seealso: `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()` 2598 @*/ 2599 PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay) { 2600 PetscFunctionBegin; 2601 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2602 PetscValidLogicalCollectiveInt(pc, delay, 2); 2603 PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay)); 2604 PetscFunctionReturn(0); 2605 } 2606 2607 static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay) { 2608 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2609 2610 PetscFunctionBegin; 2611 jac->gkbdelay = delay; 2612 PetscFunctionReturn(0); 2613 } 2614 2615 /*@ 2616 PCFieldSplitSetGKBNu - Sets the scalar value nu >= 0 in the transformation H = A00 + nu*A01*A01' of the (1,1) block in the Golub-Kahan bidiagonalization preconditioner 2617 in `PCFIELDSPLIT` 2618 2619 Collective on pc 2620 2621 Input Parameters: 2622 + pc - the preconditioner context 2623 - nu - the shift parameter 2624 2625 Options Database Keys: 2626 . 
-pc_fieldsplit_gkb_nu - default is 1 2627 2628 Level: intermediate 2629 2630 Notes: 2631 This shift is in general done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by chosing nu sufficiently big. However, 2632 if nu is chosen too big, the matrix H might be badly conditioned and the solution of the linear system Hx = b in the inner loop gets difficult. It is therefore 2633 necessary to find a good balance in between the convergence of the inner and outer loop. 2634 2635 For nu = 0, no shift is done. In this case A00 has to be positive definite. The matrix N in [Ar13] is then chosen as identity. 2636 2637 [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013. 2638 2639 .seealso: `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()` 2640 @*/ 2641 PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu) { 2642 PetscFunctionBegin; 2643 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2644 PetscValidLogicalCollectiveReal(pc, nu, 2); 2645 PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu)); 2646 PetscFunctionReturn(0); 2647 } 2648 2649 static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu) { 2650 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2651 2652 PetscFunctionBegin; 2653 jac->gkbnu = nu; 2654 PetscFunctionReturn(0); 2655 } 2656 2657 static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type) { 2658 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2659 2660 PetscFunctionBegin; 2661 jac->type = type; 2662 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL)); 2663 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL)); 2664 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL)); 2665 
PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL)); 2666 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL)); 2667 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL)); 2668 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL)); 2669 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL)); 2670 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL)); 2671 2672 if (type == PC_COMPOSITE_SCHUR) { 2673 pc->ops->apply = PCApply_FieldSplit_Schur; 2674 pc->ops->view = PCView_FieldSplit_Schur; 2675 2676 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur)); 2677 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit)); 2678 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit)); 2679 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit)); 2680 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit)); 2681 } else if (type == PC_COMPOSITE_GKB) { 2682 pc->ops->apply = PCApply_FieldSplit_GKB; 2683 pc->ops->view = PCView_FieldSplit_GKB; 2684 2685 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit)); 2686 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit)); 2687 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit)); 2688 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit)); 2689 
PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit)); 2690 } else { 2691 pc->ops->apply = PCApply_FieldSplit; 2692 pc->ops->view = PCView_FieldSplit; 2693 2694 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit)); 2695 } 2696 PetscFunctionReturn(0); 2697 } 2698 2699 static PetscErrorCode PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs) { 2700 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2701 2702 PetscFunctionBegin; 2703 PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" PetscInt_FMT, bs); 2704 PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs); 2705 jac->bs = bs; 2706 PetscFunctionReturn(0); 2707 } 2708 2709 static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[]) { 2710 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2711 PC_FieldSplitLink ilink_current = jac->head; 2712 IS is_owned; 2713 2714 PetscFunctionBegin; 2715 jac->coordinates_set = PETSC_TRUE; // Internal flag 2716 PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, PETSC_NULL)); 2717 2718 while (ilink_current) { 2719 // For each IS, embed it to get local coords indces 2720 IS is_coords; 2721 PetscInt ndofs_block; 2722 const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block 2723 2724 // Setting drop to true for safety. It should make no difference. 
    PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords));
    PetscCall(ISGetLocalSize(is_coords, &ndofs_block));
    PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration));

    // Allocate coordinates vector and set it directly
    PetscCall(PetscMalloc1(ndofs_block * dim, &(ilink_current->coords)));
    for (PetscInt dof = 0; dof < ndofs_block; ++dof) {
      // Gather the dim coordinates of each dof belonging to this split from the full local array
      for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d];
    }
    ilink_current->dim   = dim;
    ilink_current->ndofs = ndofs_block;
    PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration));
    PetscCall(ISDestroy(&is_coords));
    ilink_current = ilink_current->next;
  }
  PetscCall(ISDestroy(&is_owned));
  PetscFunctionReturn(0);
}

/*@
  PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT`

  Collective on pc

  Input Parameters:
+ pc - the preconditioner context
- type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`

  Options Database Key:
. -pc_fieldsplit_type <type: one of multiplicative, additive, symmetric_multiplicative, special, schur> - Sets fieldsplit preconditioner type

  Level: Intermediate

.seealso: `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
          `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
@*/
PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscTryMethod: silently does nothing if pc is not a PCFIELDSPLIT */
  PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(0);
}

/*@
  PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT`

  Not collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`

  Level: Intermediate

.seealso: `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
          `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
@*/
PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type) {
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidIntPointer(type, 2);
  *type = jac->type;
  PetscFunctionReturn(0);
}

/*@
  PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
2796 2797 Logically Collective on pc 2798 2799 Input Parameters: 2800 + pc - the preconditioner context 2801 - flg - boolean indicating whether to use field splits defined by the `DM` 2802 2803 Options Database Key: 2804 . -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM` 2805 2806 Level: Intermediate 2807 2808 .seealso: `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `PCFieldSplitSetFields()`, `PCFieldsplitSetIS()` 2809 @*/ 2810 PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg) { 2811 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2812 PetscBool isfs; 2813 2814 PetscFunctionBegin; 2815 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2816 PetscValidLogicalCollectiveBool(pc, flg, 2); 2817 PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs)); 2818 if (isfs) jac->dm_splits = flg; 2819 PetscFunctionReturn(0); 2820 } 2821 2822 /*@ 2823 PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible. 2824 2825 Logically Collective 2826 2827 Input Parameter: 2828 . pc - the preconditioner context 2829 2830 Output Parameter: 2831 . 
flg - boolean indicating whether to use field splits defined by the `DM` 2832 2833 Level: Intermediate 2834 2835 .seealso: `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `PCFieldSplitSetFields()`, `PCFieldsplitSetIS()` 2836 @*/ 2837 PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg) { 2838 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2839 PetscBool isfs; 2840 2841 PetscFunctionBegin; 2842 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2843 PetscValidBoolPointer(flg, 2); 2844 PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs)); 2845 if (isfs) { 2846 if (flg) *flg = jac->dm_splits; 2847 } 2848 PetscFunctionReturn(0); 2849 } 2850 2851 /*@ 2852 PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries. 2853 2854 Logically Collective 2855 2856 Input Parameter: 2857 . pc - the preconditioner context 2858 2859 Output Parameter: 2860 . flg - boolean indicating whether to detect fields or not 2861 2862 Level: Intermediate 2863 2864 .seealso: `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()` 2865 @*/ 2866 PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg) { 2867 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2868 2869 PetscFunctionBegin; 2870 *flg = jac->detect; 2871 PetscFunctionReturn(0); 2872 } 2873 2874 /*@ 2875 PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries. 2876 2877 Logically Collective 2878 2879 Input Parameter: 2880 . pc - the preconditioner context 2881 2882 Output Parameter: 2883 . flg - boolean indicating whether to detect fields or not 2884 2885 Options Database Key: 2886 . 
-pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point 2887 2888 Note: 2889 Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`). 2890 2891 Level: Intermediate 2892 2893 .seealso: `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF` 2894 @*/ 2895 PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg) { 2896 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2897 2898 PetscFunctionBegin; 2899 jac->detect = flg; 2900 if (jac->detect) { 2901 PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR)); 2902 PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL)); 2903 } 2904 PetscFunctionReturn(0); 2905 } 2906 2907 /*MC 2908 PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual 2909 collections of variables (that may overlap) called splits. See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details. 2910 2911 To set options on the solvers for each block append `-fieldsplit_` to all the `PC` 2912 options database keys. For example, `-fieldsplit_pc_type ilu -fieldsplit_pc_factor_levels 1` 2913 2914 To set the options on the solvers separate for each block call `PCFieldSplitGetSubKSP()` 2915 and set the options directly on the resulting `KSP` object 2916 2917 Level: intermediate 2918 2919 Options Database Keys: 2920 + -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split 2921 . -pc_fieldsplit_default - automatically add any fields to additional splits that have not 2922 been supplied explicitly by `-pc_fieldsplit_%d_fields` 2923 . -pc_fieldsplit_block_size <bs> - size of block that defines fields (i.e. there are bs fields) 2924 . 
-pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting 2925 . -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is a11; see `PCFieldSplitSetSchurPre()` 2926 . -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - set factorization type when using `-pc_fieldsplit_type schur`; see `PCFieldSplitSetSchurFactType()` 2927 - -pc_fieldsplit_detect_saddle_point - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver 2928 2929 Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` . 2930 For all other solvers they are `-fieldsplit_%d_` for the `d`th field; use `-fieldsplit_` for all fields. 2931 The options prefix for the inner solver when using the Golub-Kahan biadiagonalization preconditioner is `-fieldsplit_0_` 2932 2933 Notes: 2934 Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries and `PCFieldSplitSetIS()` 2935 to define a split by an arbitrary collection of entries. 2936 2937 If no splits are set the default is used. The splits are defined by entries strided by bs, 2938 beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`, 2939 if this is not called the block size defaults to the blocksize of the second matrix passed 2940 to `KSPSetOperators()`/`PCSetOperators()`. 

  For the Schur complement preconditioner if

  ```{math}
    J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right]
  ```

  the preconditioner using `full` factorization is logically
  ```{math}
    \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right]
  ```
  where the action of $\text{inv}(A_{00})$ is applied using the KSP solver with prefix `-fieldsplit_0_`. $S$ is the Schur complement
  ```{math}
    S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01}
  ```
  which is usually dense and not stored explicitly. The action of $\text{ksp}(S)$ is computed using the KSP solver with prefix `-fieldsplit_splitname_` (where `splitname` was given
  in providing the SECOND split or 1 if not given). For `PCFieldSplitGetSubKSP()` when field number is 0,
  it returns the KSP associated with `-fieldsplit_0_` while field number 1 gives `-fieldsplit_1_` KSP. By default
  $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$.

  The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above,
  `diag` gives
  ```{math}
    \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & -\text{ksp}(S) \end{array}\right]
  ```
  Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$ so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip
  can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of
  ```{math}
    \left[\begin{array}{cc} A_{00} & 0 \\ A_{10} & S \end{array}\right]
  ```
  where the inverses of $A_{00}$ and $S$ are applied using KSPs. The upper factorization is the inverse of
  ```{math}
    \left[\begin{array}{cc} A_{00} & A_{01} \\ 0 & S \end{array}\right]
  ```
  where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s.

  If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS`
  is used automatically for a second block.

  The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1.
  Generally it should be used with the `MATAIJ` format.

  The forms of these preconditioners are closely related if not identical to forms derived as "Distributive Iterations", see,
  for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`Wesseling2009`.
  One can also use `PCFIELDSPLIT`
  inside a smoother resulting in "Distributive Smoothers".

  See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`.

  The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the
  residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables.

  The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape
  ```{math}
    \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right]
  ```
  with $A_{00}$ positive semi-definite. The implementation follows {cite}`Arioli2013`. Therein, we choose $N := 1/\nu * I$ and the $(1,1)$-block of the matrix is modified to $H = A_{00} + \nu A_{01} A_{01}'$.
  A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`.

  References:

  ```{bibliography}
  :filter: docname in docnames
  ```

  Developer Note:
  The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their
  user API.

.seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`,
          `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`,
          `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`,
          `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()`
M*/

PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc) {
  PC_FieldSplit *jac;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(pc, &jac));

  /* Default parameter values; -1 means "not yet set" for bs and schurscale */
  jac->bs                 = -1;
  jac->nsplits            = 0;
  jac->type               = PC_COMPOSITE_MULTIPLICATIVE;
  jac->schurpre           = PC_FIELDSPLIT_SCHUR_PRE_USER; /* Try user preconditioner first, fall back on diagonal */
  jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL;
  jac->schurscale         = -1.0;
  jac->dm_splits          = PETSC_TRUE;
  jac->detect             = PETSC_FALSE;
  jac->gkbtol             = 1e-5;
  jac->gkbdelay           = 5;
  jac->gkbnu              = 1;
  jac->gkbmaxit           = 100;
  jac->gkbmonitor         = PETSC_FALSE;
  jac->coordinates_set    = PETSC_FALSE;

  pc->data = (void *)jac;

  /* Generic function table; PCFieldSplitSetType_FieldSplit() swaps apply/view for Schur and GKB */
  pc->ops->apply           = PCApply_FieldSplit;
  pc->ops->applytranspose  = PCApplyTranspose_FieldSplit;
  pc->ops->setup           = PCSetUp_FieldSplit;
  pc->ops->reset           = PCReset_FieldSplit;
  pc->ops->destroy         = PCDestroy_FieldSplit;
  pc->ops->setfromoptions  = PCSetFromOptions_FieldSplit;
  pc->ops->view            = PCView_FieldSplit;
  pc->ops->applyrichardson = NULL;

  /* Register the type-specific methods reached through the public PCFieldSplitXXX() wrappers */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit));
  PetscFunctionReturn(0);
}