1 #include <petsc/private/pcimpl.h> /*I "petscpc.h" I*/ 2 #include <petsc/private/kspimpl.h> /* This is needed to provide the appropriate PETSC_EXTERN for KSP_Solve_FS ....*/ 3 #include <petscdm.h> 4 5 const char *const PCFieldSplitSchurPreTypes[] = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL}; 6 const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL}; 7 8 PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4; 9 10 typedef struct _PC_FieldSplitLink *PC_FieldSplitLink; 11 struct _PC_FieldSplitLink { 12 KSP ksp; 13 Vec x, y, z; 14 char *splitname; 15 PetscInt nfields; 16 PetscInt *fields, *fields_col; 17 VecScatter sctx; 18 IS is, is_col; 19 PC_FieldSplitLink next, previous; 20 PetscLogEvent event; 21 22 /* Used only when setting coordinates with PCSetCoordinates */ 23 PetscInt dim; 24 PetscInt ndofs; 25 PetscReal *coords; 26 }; 27 28 typedef struct { 29 PCCompositeType type; 30 PetscBool defaultsplit; /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */ 31 PetscBool splitdefined; /* Flag is set after the splits have been defined, to prevent more splits from being added */ 32 PetscInt bs; /* Block size for IS and Mat structures */ 33 PetscInt nsplits; /* Number of field divisions defined */ 34 Vec *x, *y, w1, w2; 35 Mat *mat; /* The diagonal block for each split */ 36 Mat *pmat; /* The preconditioning diagonal block for each split */ 37 Mat *Afield; /* The rows of the matrix associated with each split */ 38 PetscBool issetup; 39 40 /* Only used when Schur complement preconditioning is used */ 41 Mat B; /* The (0,1) block */ 42 Mat C; /* The (1,0) block */ 43 Mat schur; /* The Schur complement S = A11 - A10 A00^{-1} A01, the KSP here, kspinner, is H_1 in [El08] */ 44 Mat schurp; /* Assembled 
approximation to S built by MatSchurComplement to be used as a preconditioning matrix when solving with S */ 45 Mat schur_user; /* User-provided preconditioning matrix for the Schur complement */ 46 PCFieldSplitSchurPreType schurpre; /* Determines which preconditioning matrix is used for the Schur complement */ 47 PCFieldSplitSchurFactType schurfactorization; 48 KSP kspschur; /* The solver for S */ 49 KSP kspupper; /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */ 50 PetscScalar schurscale; /* Scaling factor for the Schur complement solution with DIAG factorization */ 51 52 /* Only used when Golub-Kahan bidiagonalization preconditioning is used */ 53 Mat H; /* The modified matrix H = A00 + nu*A01*A01' */ 54 PetscReal gkbtol; /* Stopping tolerance for lower bound estimate */ 55 PetscInt gkbdelay; /* The delay window for the stopping criterion */ 56 PetscReal gkbnu; /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */ 57 PetscInt gkbmaxit; /* Maximum number of iterations for outer loop */ 58 PetscBool gkbmonitor; /* Monitor for gkb iterations and the lower bound error */ 59 PetscViewer gkbviewer; /* Viewer context for gkbmonitor */ 60 Vec u, v, d, Hu; /* Work vectors for the GKB algorithm */ 61 PetscScalar *vecz; /* Contains intermediate values, eg for lower bound */ 62 63 PC_FieldSplitLink head; 64 PetscBool isrestrict; /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */ 65 PetscBool suboptionsset; /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */ 66 PetscBool dm_splits; /* Whether to use DMCreateFieldDecomposition() whenever possible */ 67 PetscBool diag_use_amat; /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */ 68 PetscBool offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */ 69 PetscBool detect; /* Whether to form 2-way split by finding 
zero diagonal entries */
  PetscBool coordinates_set; /* Whether PCSetCoordinates has been called */
} PC_FieldSplit;

/*
  Note:
  there is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit instead of
  inside PC_FieldSplitLink, just historical. If you want to be able to add new fields after already using the
  PC you could change this.
*/

/* This helper is so that setting a user-provided preconditioning matrix is orthogonal to choosing to use it. This way the
 * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices. */
/* Returns a borrowed reference (no new reference is taken) to the matrix that should be used to
   build the preconditioner for the Schur complement, selected by jac->schurpre; the caller must
   not destroy the returned Mat. */
static Mat FieldSplitSchurPre(PC_FieldSplit *jac)
{
  switch (jac->schurpre) {
  case PC_FIELDSPLIT_SCHUR_PRE_SELF:
    /* precondition with the (unassembled) Schur complement operator itself */
    return jac->schur;
  case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
    /* precondition with the assembled approximation to S (see jac->schurp) */
    return jac->schurp;
  case PC_FIELDSPLIT_SCHUR_PRE_A11:
    /* precondition with the (1,1) diagonal block of the preconditioning matrix */
    return jac->pmat[1];
  case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */
  case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */
  default:
    return jac->schur_user ?
jac->schur_user : jac->pmat[1]; 95 } 96 } 97 98 #include <petscdraw.h> 99 static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer) 100 { 101 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 102 PetscBool iascii, isdraw; 103 PetscInt i, j; 104 PC_FieldSplitLink ilink = jac->head; 105 106 PetscFunctionBegin; 107 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 108 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw)); 109 if (iascii) { 110 if (jac->bs > 0) { 111 PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs)); 112 } else { 113 PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits)); 114 } 115 if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for blocks\n")); 116 if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for diagonal blocks\n")); 117 if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for off-diagonal blocks\n")); 118 PetscCall(PetscViewerASCIIPrintf(viewer, " Solver info for each split is in the following KSP objects:\n")); 119 for (i = 0; i < jac->nsplits; i++) { 120 if (ilink->fields) { 121 PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i)); 122 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); 123 for (j = 0; j < ilink->nfields; j++) { 124 if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ",")); 125 PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j])); 126 } 127 PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); 128 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE)); 129 } else { 130 PetscCall(PetscViewerASCIIPrintf(viewer, 
"Split number %" PetscInt_FMT " Defined by IS\n", i)); 131 } 132 PetscCall(KSPView(ilink->ksp, viewer)); 133 ilink = ilink->next; 134 } 135 } 136 137 if (isdraw) { 138 PetscDraw draw; 139 PetscReal x, y, w, wd; 140 141 PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw)); 142 PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y)); 143 w = 2 * PetscMin(1.0 - x, x); 144 wd = w / (jac->nsplits + 1); 145 x = x - wd * (jac->nsplits - 1) / 2.0; 146 for (i = 0; i < jac->nsplits; i++) { 147 PetscCall(PetscDrawPushCurrentPoint(draw, x, y)); 148 PetscCall(KSPView(ilink->ksp, viewer)); 149 PetscCall(PetscDrawPopCurrentPoint(draw)); 150 x += wd; 151 ilink = ilink->next; 152 } 153 } 154 PetscFunctionReturn(0); 155 } 156 157 static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer) 158 { 159 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 160 PetscBool iascii, isdraw; 161 PetscInt i, j; 162 PC_FieldSplitLink ilink = jac->head; 163 MatSchurComplementAinvType atype; 164 165 PetscFunctionBegin; 166 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 167 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw)); 168 if (iascii) { 169 if (jac->bs > 0) { 170 PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization])); 171 } else { 172 PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization])); 173 } 174 if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for blocks\n")); 175 switch (jac->schurpre) { 176 case PC_FIELDSPLIT_SCHUR_PRE_SELF: 177 PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from S itself\n")); 178 break; 179 case PC_FIELDSPLIT_SCHUR_PRE_SELFP: 180 
PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype)); 181 PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's ")))); 182 break; 183 case PC_FIELDSPLIT_SCHUR_PRE_A11: 184 PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from A11\n")); 185 break; 186 case PC_FIELDSPLIT_SCHUR_PRE_FULL: 187 PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from the exact Schur complement\n")); 188 break; 189 case PC_FIELDSPLIT_SCHUR_PRE_USER: 190 if (jac->schur_user) { 191 PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from user provided matrix\n")); 192 } else { 193 PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from A11\n")); 194 } 195 break; 196 default: 197 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre); 198 } 199 PetscCall(PetscViewerASCIIPrintf(viewer, " Split info:\n")); 200 PetscCall(PetscViewerASCIIPushTab(viewer)); 201 for (i = 0; i < jac->nsplits; i++) { 202 if (ilink->fields) { 203 PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i)); 204 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); 205 for (j = 0; j < ilink->nfields; j++) { 206 if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ",")); 207 PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j])); 208 } 209 PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); 210 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE)); 211 } else { 212 PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " 
Defined by IS\n", i)); 213 } 214 ilink = ilink->next; 215 } 216 PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n")); 217 PetscCall(PetscViewerASCIIPushTab(viewer)); 218 if (jac->head) { 219 PetscCall(KSPView(jac->head->ksp, viewer)); 220 } else PetscCall(PetscViewerASCIIPrintf(viewer, " not yet available\n")); 221 PetscCall(PetscViewerASCIIPopTab(viewer)); 222 if (jac->head && jac->kspupper != jac->head->ksp) { 223 PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor \n")); 224 PetscCall(PetscViewerASCIIPushTab(viewer)); 225 if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer)); 226 else PetscCall(PetscViewerASCIIPrintf(viewer, " not yet available\n")); 227 PetscCall(PetscViewerASCIIPopTab(viewer)); 228 } 229 PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01 \n")); 230 PetscCall(PetscViewerASCIIPushTab(viewer)); 231 if (jac->kspschur) { 232 PetscCall(KSPView(jac->kspschur, viewer)); 233 } else { 234 PetscCall(PetscViewerASCIIPrintf(viewer, " not yet available\n")); 235 } 236 PetscCall(PetscViewerASCIIPopTab(viewer)); 237 PetscCall(PetscViewerASCIIPopTab(viewer)); 238 } else if (isdraw && jac->head) { 239 PetscDraw draw; 240 PetscReal x, y, w, wd, h; 241 PetscInt cnt = 2; 242 char str[32]; 243 244 PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw)); 245 PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y)); 246 if (jac->kspupper != jac->head->ksp) cnt++; 247 w = 2 * PetscMin(1.0 - x, x); 248 wd = w / (cnt + 1); 249 250 PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization])); 251 PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h)); 252 y -= h; 253 if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) { 254 PetscCall(PetscSNPrintf(str, 32, "Prec. 
for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11])); 255 } else { 256 PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre])); 257 } 258 PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h)); 259 y -= h; 260 x = x - wd * (cnt - 1) / 2.0; 261 262 PetscCall(PetscDrawPushCurrentPoint(draw, x, y)); 263 PetscCall(KSPView(jac->head->ksp, viewer)); 264 PetscCall(PetscDrawPopCurrentPoint(draw)); 265 if (jac->kspupper != jac->head->ksp) { 266 x += wd; 267 PetscCall(PetscDrawPushCurrentPoint(draw, x, y)); 268 PetscCall(KSPView(jac->kspupper, viewer)); 269 PetscCall(PetscDrawPopCurrentPoint(draw)); 270 } 271 x += wd; 272 PetscCall(PetscDrawPushCurrentPoint(draw, x, y)); 273 PetscCall(KSPView(jac->kspschur, viewer)); 274 PetscCall(PetscDrawPopCurrentPoint(draw)); 275 } 276 PetscFunctionReturn(0); 277 } 278 279 static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer) 280 { 281 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 282 PetscBool iascii, isdraw; 283 PetscInt i, j; 284 PC_FieldSplitLink ilink = jac->head; 285 286 PetscFunctionBegin; 287 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 288 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw)); 289 if (iascii) { 290 if (jac->bs > 0) { 291 PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs)); 292 } else { 293 PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits)); 294 } 295 if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for blocks\n")); 296 if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as 
operator for diagonal blocks\n")); 297 if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for off-diagonal blocks\n")); 298 299 PetscCall(PetscViewerASCIIPrintf(viewer, " Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit)); 300 PetscCall(PetscViewerASCIIPrintf(viewer, " Solver info for H = A00 + nu*A01*A01' matrix:\n")); 301 PetscCall(PetscViewerASCIIPushTab(viewer)); 302 303 if (ilink->fields) { 304 PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields ")); 305 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); 306 for (j = 0; j < ilink->nfields; j++) { 307 if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ",")); 308 PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j])); 309 } 310 PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); 311 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE)); 312 } else { 313 PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n")); 314 } 315 PetscCall(KSPView(ilink->ksp, viewer)); 316 317 PetscCall(PetscViewerASCIIPopTab(viewer)); 318 } 319 320 if (isdraw) { 321 PetscDraw draw; 322 PetscReal x, y, w, wd; 323 324 PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw)); 325 PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y)); 326 w = 2 * PetscMin(1.0 - x, x); 327 wd = w / (jac->nsplits + 1); 328 x = x - wd * (jac->nsplits - 1) / 2.0; 329 for (i = 0; i < jac->nsplits; i++) { 330 PetscCall(PetscDrawPushCurrentPoint(draw, x, y)); 331 PetscCall(KSPView(ilink->ksp, viewer)); 332 PetscCall(PetscDrawPopCurrentPoint(draw)); 333 x += wd; 334 ilink = ilink->next; 335 } 336 } 337 PetscFunctionReturn(0); 338 } 339 340 /* Precondition: jac->bs is set to a meaningful value */ 341 static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc) 342 { 343 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 344 PetscInt i, nfields, *ifields, 
nfields_col, *ifields_col; /* continuation of the declaration list started on the previous line */
  PetscBool flg, flg_col;
  char      optionname[128], splitname[8], optionname_col[128];

  PetscFunctionBegin;
  /* Scratch arrays sized for the worst case: a split can select at most jac->bs fields */
  PetscCall(PetscMalloc1(jac->bs, &ifields));
  PetscCall(PetscMalloc1(jac->bs, &ifields_col));
  /* Scan the options database for -pc_fieldsplit_<i>_fields (and the optional matching
     -pc_fieldsplit_<i>_fields_col) for i = 0, 1, 2, ... until the first index with no option set */
  for (i = 0, flg = PETSC_TRUE;; i++) {
    PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
    PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
    PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i));
    /* on input: capacity of the arrays; on output: number of values actually read */
    nfields     = jac->bs;
    nfields_col = jac->bs;
    PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
    PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col));
    if (!flg) break;
    else if (flg && !flg_col) {
      /* no separate column fields given: use the same field list for rows and columns */
      PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
      PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields));
    } else {
      PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
      PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match");
      PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col));
    }
  }
  if (i > 0) {
    /* Makes command-line setting of splits take precedence over setting them in code.
       Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would
       create new splits, which would probably not be what the user wanted.
*/ 373 jac->splitdefined = PETSC_TRUE; 374 } 375 PetscCall(PetscFree(ifields)); 376 PetscCall(PetscFree(ifields_col)); 377 PetscFunctionReturn(0); 378 } 379 380 static PetscErrorCode PCFieldSplitSetDefaults(PC pc) 381 { 382 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 383 PC_FieldSplitLink ilink = jac->head; 384 PetscBool fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE; 385 PetscInt i; 386 387 PetscFunctionBegin; 388 /* 389 Kinda messy, but at least this now uses DMCreateFieldDecomposition(). 390 Should probably be rewritten. 391 */ 392 if (!ilink) { 393 PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL)); 394 if (pc->dm && jac->dm_splits && !jac->detect && !coupling) { 395 PetscInt numFields, f, i, j; 396 char **fieldNames; 397 IS *fields; 398 DM *dms; 399 DM subdm[128]; 400 PetscBool flg; 401 402 PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms)); 403 /* Allow the user to prescribe the splits */ 404 for (i = 0, flg = PETSC_TRUE;; i++) { 405 PetscInt ifields[128]; 406 IS compField; 407 char optionname[128], splitname[8]; 408 PetscInt nfields = numFields; 409 410 PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i)); 411 PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg)); 412 if (!flg) break; 413 PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields); 414 PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i])); 415 if (nfields == 1) { 416 PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField)); 417 } else { 418 PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i)); 419 PetscCall(PCFieldSplitSetIS(pc, splitname, compField)); 420 } 421 PetscCall(ISDestroy(&compField)); 422 
for (j = 0; j < nfields; ++j) { 423 f = ifields[j]; 424 PetscCall(PetscFree(fieldNames[f])); 425 PetscCall(ISDestroy(&fields[f])); 426 } 427 } 428 if (i == 0) { 429 for (f = 0; f < numFields; ++f) { 430 PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f])); 431 PetscCall(PetscFree(fieldNames[f])); 432 PetscCall(ISDestroy(&fields[f])); 433 } 434 } else { 435 for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j)); 436 PetscCall(PetscFree(dms)); 437 PetscCall(PetscMalloc1(i, &dms)); 438 for (j = 0; j < i; ++j) dms[j] = subdm[j]; 439 } 440 PetscCall(PetscFree(fieldNames)); 441 PetscCall(PetscFree(fields)); 442 if (dms) { 443 PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n")); 444 for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) { 445 const char *prefix; 446 PetscCall(PetscObjectGetOptionsPrefix((PetscObject)(ilink->ksp), &prefix)); 447 PetscCall(PetscObjectSetOptionsPrefix((PetscObject)(dms[i]), prefix)); 448 PetscCall(KSPSetDM(ilink->ksp, dms[i])); 449 PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE)); 450 { 451 PetscErrorCode (*func)(KSP, Mat, Mat, void *); 452 void *ctx; 453 454 PetscCall(DMKSPGetComputeOperators(pc->dm, &func, &ctx)); 455 PetscCall(DMKSPSetComputeOperators(dms[i], func, ctx)); 456 } 457 PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0)); 458 PetscCall(DMDestroy(&dms[i])); 459 } 460 PetscCall(PetscFree(dms)); 461 } 462 } else { 463 if (jac->bs <= 0) { 464 if (pc->pmat) { 465 PetscCall(MatGetBlockSize(pc->pmat, &jac->bs)); 466 } else jac->bs = 1; 467 } 468 469 if (jac->detect) { 470 IS zerodiags, rest; 471 PetscInt nmin, nmax; 472 473 PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax)); 474 if (jac->diag_use_amat) { 475 PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags)); 476 } else { 477 PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags)); 478 } 479 PetscCall(ISComplement(zerodiags, nmin, nmax, &rest)); 480 
PetscCall(PCFieldSplitSetIS(pc, "0", rest));
      PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags));
      PetscCall(ISDestroy(&zerodiags));
      PetscCall(ISDestroy(&rest));
    } else if (coupling) {
      /* Two-way split by block off-diagonal coupling: split "1" holds the rows coupled to other blocks */
      IS       coupling, rest; /* NOTE(review): this IS shadows the outer PetscBool 'coupling' tested in the condition above -- confirm intentional */
      PetscInt nmin, nmax;

      PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
      if (jac->offdiag_use_amat) {
        PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling));
      } else {
        PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling));
      }
      /* split "0" is the full local row range (an identity stride over the ownership range) */
      PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest));
      PetscCall(ISSetIdentity(rest));
      PetscCall(PCFieldSplitSetIS(pc, "0", rest));
      PetscCall(PCFieldSplitSetIS(pc, "1", coupling));
      PetscCall(ISDestroy(&coupling));
      PetscCall(ISDestroy(&rest));
    } else {
      PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL));
      if (!fieldsplit_default) {
        /* Allow user to set fields from command line, if bs was known at the time of PCSetFromOptions_FieldSplit()
           then it is set there. This is not ideal because we should only have options set in XXSetFromOptions().
*/ 505 PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc)); 506 if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n")); 507 } 508 if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) { 509 Mat M = pc->pmat; 510 PetscBool isnest; 511 512 PetscCall(PetscInfo(pc, "Using default splitting of fields\n")); 513 PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest)); 514 if (!isnest) { 515 M = pc->mat; 516 PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest)); 517 } 518 if (isnest) { 519 IS *fields; 520 PetscInt nf; 521 522 PetscCall(MatNestGetSize(M, &nf, NULL)); 523 PetscCall(PetscMalloc1(nf, &fields)); 524 PetscCall(MatNestGetISs(M, fields, NULL)); 525 for (i = 0; i < nf; i++) PetscCall(PCFieldSplitSetIS(pc, NULL, fields[i])); 526 PetscCall(PetscFree(fields)); 527 } else { 528 for (i = 0; i < jac->bs; i++) { 529 char splitname[8]; 530 PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i)); 531 PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i)); 532 } 533 jac->defaultsplit = PETSC_TRUE; 534 } 535 } 536 } 537 } 538 } else if (jac->nsplits == 1) { 539 IS is2; 540 PetscInt nmin, nmax; 541 542 PetscCheck(ilink->is, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()"); 543 PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax)); 544 PetscCall(ISComplement(ilink->is, nmin, nmax, &is2)); 545 PetscCall(PCFieldSplitSetIS(pc, "1", is2)); 546 PetscCall(ISDestroy(&is2)); 547 } 548 549 PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits); 550 PetscFunctionReturn(0); 551 } 552 553 static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu) 554 { 555 Mat BT, T; 556 PetscReal nrmT, nrmB; 557 558 PetscFunctionBegin; 559 PetscCall(MatHermitianTranspose(C, 
MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */
  PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN));
  PetscCall(MatNorm(T, NORM_1, &nrmT));
  PetscCall(MatNorm(B, NORM_1, &nrmB));
  /* ||C^H - B|| must be relatively tiny, otherwise the system is not symmetric/Hermitian and GKB does not apply */
  PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/hermitian, GKB is not applicable.");

  /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */
  /* setting N := 1/nu*I in [Ar13]. */
  PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT));
  PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_DEFAULT, H)); /* H = A01*A01' */
  PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN));        /* H = A00 + nu*A01*A01' */

  PetscCall(MatDestroy(&BT));
  PetscCall(MatDestroy(&T));
  PetscFunctionReturn(0);
}

PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *value[], PetscBool *flg);

/* Set up the field split preconditioner: builds the ISs for each split (if not given),
   extracts the diagonal (and, depending on composition type, off-diagonal) submatrices,
   and creates the work vectors and scatters for each split */
static PetscErrorCode PCSetUp_FieldSplit(PC pc)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink;
  PetscInt          i, nsplit;
  PetscBool         sorted, sorted_col; /* NOTE(review): sorted_col is only assigned below when ilink->is_col is non-NULL yet is always read by a PetscCheck -- confirm it cannot be read uninitialized, or initialize it here */

  PetscFunctionBegin;
  pc->failedreason = PC_NOERROR;
  PetscCall(PCFieldSplitSetDefaults(pc)); /* guarantees at least two splits are defined */
  nsplit = jac->nsplits;
  ilink  = jac->head;

  /* get the matrices for each split */
  if (!jac->issetup) {
    PetscInt rstart, rend, nslots, bs;

    jac->issetup = PETSC_TRUE;

    /* This is done here instead of in PCFieldSplitSetFields() because may not have matrix at that point */
    if (jac->defaultsplit || !ilink->is) {
      if (jac->bs <= 0) jac->bs = nsplit; /* default block size: one scalar field per split */
    }
    bs = jac->bs;
    PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend));
    nslots = (rend - rstart) / bs; /* number of size-bs blocks owned by this process */
    for (i = 0; i < nsplit; i++) {
      if (jac->defaultsplit) {
        /* default split i takes every nsplit-th entry starting at rstart + i */
        PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit,
&ilink->is));
        PetscCall(ISDuplicate(ilink->is, &ilink->is_col));
      } else if (!ilink->is) {
        if (ilink->nfields > 1) {
          /* multiple fields per split: build explicit index lists interleaving the selected fields of every block */
          PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col;
          PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii));
          PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj));
          for (j = 0; j < nslots; j++) {
            for (k = 0; k < nfields; k++) {
              ii[nfields * j + k] = rstart + bs * j + fields[k];
              jj[nfields * j + k] = rstart + bs * j + fields_col[k];
            }
          }
          /* the ISs take ownership of ii/jj (PETSC_OWN_POINTER); they must not be freed here */
          PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is));
          PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col));
          PetscCall(ISSetBlockSize(ilink->is, nfields));
          PetscCall(ISSetBlockSize(ilink->is_col, nfields));
        } else {
          /* single field: a strided IS over the ownership range suffices */
          PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is));
          PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col));
        }
      }
      PetscCall(ISSorted(ilink->is, &sorted));
      /* NOTE(review): when ilink->is_col is NULL, sorted_col keeps its previous (possibly
         uninitialized) value yet is read by the PetscCheck below -- confirm is_col is always
         created on every path above, or initialize sorted_col before this loop */
      if (ilink->is_col) PetscCall(ISSorted(ilink->is_col, &sorted_col));
      PetscCheck(sorted && sorted_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Fields must be sorted when creating split");
      ilink = ilink->next;
    }
  }

  ilink = jac->head;
  if (!jac->pmat) {
    /* first setup: extract the diagonal preconditioning block for every split */
    Vec xtmp;

    PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL));
    PetscCall(PetscMalloc1(nsplit, &jac->pmat));
    PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y));
    for (i = 0; i < nsplit; i++) {
      MatNullSpace sp;

      /* Check for preconditioning matrix attached to IS */
      PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i]));
      if (jac->pmat[i]) {
        /* user attached a matrix: take a reference rather than extracting a submatrix */
        PetscCall(PetscObjectReference((PetscObject)jac->pmat[i]));
        if (jac->type ==
PC_COMPOSITE_SCHUR) { 650 jac->schur_user = jac->pmat[i]; 651 652 PetscCall(PetscObjectReference((PetscObject)jac->schur_user)); 653 } 654 } else { 655 const char *prefix; 656 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i])); 657 PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix)); 658 PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix)); 659 PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view")); 660 } 661 /* create work vectors for each split */ 662 PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i])); 663 ilink->x = jac->x[i]; 664 ilink->y = jac->y[i]; 665 ilink->z = NULL; 666 /* compute scatter contexts needed by multiplicative versions and non-default splits */ 667 PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx)); 668 PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp)); 669 if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp)); 670 ilink = ilink->next; 671 } 672 PetscCall(VecDestroy(&xtmp)); 673 } else { 674 MatReuse scall; 675 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 676 for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i])); 677 scall = MAT_INITIAL_MATRIX; 678 } else scall = MAT_REUSE_MATRIX; 679 680 for (i = 0; i < nsplit; i++) { 681 Mat pmat; 682 683 /* Check for preconditioning matrix attached to IS */ 684 PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat)); 685 if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i])); 686 ilink = ilink->next; 687 } 688 } 689 if (jac->diag_use_amat) { 690 ilink = jac->head; 691 if (!jac->mat) { 692 PetscCall(PetscMalloc1(nsplit, &jac->mat)); 693 for (i = 0; i < nsplit; i++) { 694 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i])); 695 ilink = ilink->next; 696 } 697 } else { 698 MatReuse scall; 699 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 700 for (i = 
0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i])); 701 scall = MAT_INITIAL_MATRIX; 702 } else scall = MAT_REUSE_MATRIX; 703 704 for (i = 0; i < nsplit; i++) { 705 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i])); 706 ilink = ilink->next; 707 } 708 } 709 } else { 710 jac->mat = jac->pmat; 711 } 712 713 /* Check for null space attached to IS */ 714 ilink = jac->head; 715 for (i = 0; i < nsplit; i++) { 716 MatNullSpace sp; 717 718 PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp)); 719 if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp)); 720 ilink = ilink->next; 721 } 722 723 if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) { 724 /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */ 725 /* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? */ 726 ilink = jac->head; 727 if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) { 728 /* special case need where Afield[0] is not needed and only certain columns of Afield[1] are needed since update is only on those rows of the solution */ 729 if (!jac->Afield) { 730 PetscCall(PetscCalloc1(nsplit, &jac->Afield)); 731 if (jac->offdiag_use_amat) { 732 PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1])); 733 } else { 734 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1])); 735 } 736 } else { 737 MatReuse scall; 738 739 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 740 PetscCall(MatDestroy(&jac->Afield[1])); 741 scall = MAT_INITIAL_MATRIX; 742 } else scall = MAT_REUSE_MATRIX; 743 744 if (jac->offdiag_use_amat) { 745 PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1])); 746 } else { 747 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, 
&jac->Afield[1])); 748 } 749 } 750 } else { 751 if (!jac->Afield) { 752 PetscCall(PetscMalloc1(nsplit, &jac->Afield)); 753 for (i = 0; i < nsplit; i++) { 754 if (jac->offdiag_use_amat) { 755 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i])); 756 } else { 757 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i])); 758 } 759 ilink = ilink->next; 760 } 761 } else { 762 MatReuse scall; 763 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 764 for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i])); 765 scall = MAT_INITIAL_MATRIX; 766 } else scall = MAT_REUSE_MATRIX; 767 768 for (i = 0; i < nsplit; i++) { 769 if (jac->offdiag_use_amat) { 770 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i])); 771 } else { 772 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, &jac->Afield[i])); 773 } 774 ilink = ilink->next; 775 } 776 } 777 } 778 } 779 780 if (jac->type == PC_COMPOSITE_SCHUR) { 781 IS ccis; 782 PetscBool isset, isspd; 783 PetscInt rstart, rend; 784 char lscname[256]; 785 PetscObject LSC_L; 786 787 PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields"); 788 789 /* If pc->mat is SPD, don't scale by -1 the Schur complement */ 790 if (jac->schurscale == (PetscScalar)-1.0) { 791 PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd)); 792 jac->schurscale = (isset && isspd) ? 
1.0 : -1.0; 793 } 794 795 /* When extracting off-diagonal submatrices, we take complements from this range */ 796 PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend)); 797 798 if (jac->schur) { 799 KSP kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper; 800 MatReuse scall; 801 802 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 803 scall = MAT_INITIAL_MATRIX; 804 PetscCall(MatDestroy(&jac->B)); 805 PetscCall(MatDestroy(&jac->C)); 806 } else scall = MAT_REUSE_MATRIX; 807 808 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner)); 809 ilink = jac->head; 810 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 811 if (jac->offdiag_use_amat) { 812 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->B)); 813 } else { 814 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->B)); 815 } 816 PetscCall(ISDestroy(&ccis)); 817 ilink = ilink->next; 818 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 819 if (jac->offdiag_use_amat) { 820 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->C)); 821 } else { 822 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->C)); 823 } 824 PetscCall(ISDestroy(&ccis)); 825 PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1])); 826 if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) { 827 PetscCall(MatDestroy(&jac->schurp)); 828 PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp)); 829 } 830 if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0])); 831 if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0])); 832 PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac))); 833 } else { 834 const char *Dprefix; 835 char schurprefix[256], schurmatprefix[256]; 836 char schurtestoption[256]; 837 MatNullSpace sp; 838 PetscBool flg; 839 KSP kspt; 840 841 /* extract the A01 and A10 
matrices */ 842 ilink = jac->head; 843 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 844 if (jac->offdiag_use_amat) { 845 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 846 } else { 847 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 848 } 849 PetscCall(ISDestroy(&ccis)); 850 ilink = ilink->next; 851 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 852 if (jac->offdiag_use_amat) { 853 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 854 } else { 855 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 856 } 857 PetscCall(ISDestroy(&ccis)); 858 859 /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */ 860 PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur)); 861 PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT)); 862 PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1])); 863 PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? 
((PetscObject)pc)->prefix : "", ilink->splitname)); 864 PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix)); 865 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt)); 866 PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix)); 867 868 /* Note: this is not true in general */ 869 PetscCall(MatGetNullSpace(jac->mat[1], &sp)); 870 if (sp) PetscCall(MatSetNullSpace(jac->schur, sp)); 871 872 PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname)); 873 PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, &flg)); 874 if (flg) { 875 DM dmInner; 876 KSP kspInner; 877 PC pcInner; 878 879 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner)); 880 PetscCall(KSPReset(kspInner)); 881 PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0])); 882 PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname)); 883 /* Indent this deeper to emphasize the "inner" nature of this solver. */ 884 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2)); 885 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2)); 886 PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix)); 887 888 /* Set DM for new solver */ 889 PetscCall(KSPGetDM(jac->head->ksp, &dmInner)); 890 PetscCall(KSPSetDM(kspInner, dmInner)); 891 PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE)); 892 893 /* Defaults to PCKSP as preconditioner */ 894 PetscCall(KSPGetPC(kspInner, &pcInner)); 895 PetscCall(PCSetType(pcInner, PCKSP)); 896 PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp)); 897 } else { 898 /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or 899 * PCFieldSplitSetIS_FieldSplit. 
We don't want KSPPREONLY because it makes the Schur complement inexact, 900 * preventing Schur complement reduction to be an accurate solve. Usually when an iterative solver is used for 901 * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make 902 * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used 903 * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. */ 904 PetscCall(KSPSetType(jac->head->ksp, KSPGMRES)); 905 PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp)); 906 } 907 PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0])); 908 PetscCall(KSPSetFromOptions(jac->head->ksp)); 909 PetscCall(MatSetFromOptions(jac->schur)); 910 911 PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg)); 912 if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */ 913 KSP kspInner; 914 PC pcInner; 915 916 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner)); 917 PetscCall(KSPGetPC(kspInner, &pcInner)); 918 PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg)); 919 if (flg) { 920 KSP ksp; 921 922 PetscCall(PCKSPGetKSP(pcInner, &ksp)); 923 if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE)); 924 } 925 } 926 PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname)); 927 PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, &flg)); 928 if (flg) { 929 DM dmInner; 930 931 PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? 
((PetscObject)pc)->prefix : "", ilink->splitname)); 932 PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper)); 933 PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure)); 934 PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix)); 935 PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1)); 936 PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1)); 937 PetscCall(KSPGetDM(jac->head->ksp, &dmInner)); 938 PetscCall(KSPSetDM(jac->kspupper, dmInner)); 939 PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE)); 940 PetscCall(KSPSetFromOptions(jac->kspupper)); 941 PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0])); 942 PetscCall(VecDuplicate(jac->head->x, &jac->head->z)); 943 } else { 944 jac->kspupper = jac->head->ksp; 945 PetscCall(PetscObjectReference((PetscObject)jac->head->ksp)); 946 } 947 948 if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp)); 949 PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur)); 950 PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure)); 951 PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1)); 952 if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) { 953 PC pcschur; 954 PetscCall(KSPGetPC(jac->kspschur, &pcschur)); 955 PetscCall(PCSetType(pcschur, PCNONE)); 956 /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */ 957 } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) { 958 PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user)); 959 } 960 PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac))); 961 PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix)); 962 PetscCall(KSPSetOptionsPrefix(jac->kspschur, Dprefix)); 963 /* propagate DM */ 964 { 965 DM sdm; 966 
PetscCall(KSPGetDM(jac->head->next->ksp, &sdm)); 967 if (sdm) { 968 PetscCall(KSPSetDM(jac->kspschur, sdm)); 969 PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE)); 970 } 971 } 972 /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */ 973 /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */ 974 PetscCall(KSPSetFromOptions(jac->kspschur)); 975 } 976 PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY)); 977 PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY)); 978 979 /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */ 980 PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname)); 981 PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L)); 982 if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L)); 983 if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", (PetscObject)LSC_L)); 984 PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname)); 985 PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L)); 986 if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L)); 987 if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", (PetscObject)LSC_L)); 988 } else if (jac->type == PC_COMPOSITE_GKB) { 989 IS ccis; 990 PetscInt rstart, rend; 991 992 PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields"); 993 994 ilink = jac->head; 995 996 /* When extracting off-diagonal submatrices, we take complements from this range */ 997 PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend)); 998 999 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 1000 if (jac->offdiag_use_amat) { 1001 PetscCall(MatCreateSubMatrix(pc->mat, 
ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 1002 } else { 1003 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 1004 } 1005 PetscCall(ISDestroy(&ccis)); 1006 /* Create work vectors for GKB algorithm */ 1007 PetscCall(VecDuplicate(ilink->x, &jac->u)); 1008 PetscCall(VecDuplicate(ilink->x, &jac->Hu)); 1009 PetscCall(VecDuplicate(ilink->x, &jac->w2)); 1010 ilink = ilink->next; 1011 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 1012 if (jac->offdiag_use_amat) { 1013 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 1014 } else { 1015 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 1016 } 1017 PetscCall(ISDestroy(&ccis)); 1018 /* Create work vectors for GKB algorithm */ 1019 PetscCall(VecDuplicate(ilink->x, &jac->v)); 1020 PetscCall(VecDuplicate(ilink->x, &jac->d)); 1021 PetscCall(VecDuplicate(ilink->x, &jac->w1)); 1022 PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu)); 1023 PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz)); 1024 1025 ilink = jac->head; 1026 PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H)); 1027 if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp)); 1028 /* Create gkb_monitor context */ 1029 if (jac->gkbmonitor) { 1030 PetscInt tablevel; 1031 PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer)); 1032 PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII)); 1033 PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel)); 1034 PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel)); 1035 PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1)); 1036 } 1037 } else { 1038 /* set up the individual splits' PCs */ 1039 i = 0; 1040 ilink = jac->head; 1041 while (ilink) { 1042 PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i])); 1043 /* really want setfromoptions called in 
PCSetFromOptions_FieldSplit(), but it is not ready yet */ 1044 if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp)); 1045 i++; 1046 ilink = ilink->next; 1047 } 1048 } 1049 1050 /* Set coordinates to the sub PC objects whenever these are set */ 1051 if (jac->coordinates_set) { 1052 PC pc_coords; 1053 if (jac->type == PC_COMPOSITE_SCHUR) { 1054 // Head is first block. 1055 PetscCall(KSPGetPC(jac->head->ksp, &pc_coords)); 1056 PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords)); 1057 // Second one is Schur block, but its KSP object is in kspschur. 1058 PetscCall(KSPGetPC(jac->kspschur, &pc_coords)); 1059 PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords)); 1060 } else if (jac->type == PC_COMPOSITE_GKB) { 1061 PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB Fieldpslit preconditioner")); 1062 } else { 1063 ilink = jac->head; 1064 while (ilink) { 1065 PetscCall(KSPGetPC(ilink->ksp, &pc_coords)); 1066 PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords)); 1067 ilink = ilink->next; 1068 } 1069 } 1070 } 1071 1072 jac->suboptionsset = PETSC_TRUE; 1073 PetscFunctionReturn(0); 1074 } 1075 1076 #define FieldSplitSplitSolveAdd(ilink, xx, yy) \ 1077 (VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \ 1078 KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \ 1079 VecScatterEnd(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE)) 1080 1081 static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y) 1082 { 1083 PC_FieldSplit *jac = (PC_FieldSplit 
*)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; /* ilinkA: first (A00) split, ilinkD: second (Schur) split */
  KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;

  PetscFunctionBegin;
  /* Applies the selected block factorization of the 2x2 system [A00 A01; A10 A11],
     where S denotes the Schur complement solved by jac->kspschur.
     Note the deliberate interleaving of VecScatterBegin/VecScatterEnd pairs with the
     solves: communication is started early so it can overlap with computation.
     Do not reorder these calls. */
  switch (jac->schurfactorization) {
  case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
    /* [A00 0; 0 -S], positive definite, suitable for MINRES */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* schurscale is -1 by default (the -S in the factorization); it may have been
       flipped to +1 at setup when pc->pmat is known SPD, or set by the user */
    PetscCall(VecScale(ilinkD->y, jac->schurscale));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* [A00 0; A10 S], suitable for left preconditioning */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    /* Second-block rhs becomes x_D - A10 * y_A (jac->C holds the A10 block) */
    PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* [A00 A01; 0 S], suitable for right preconditioning */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* First-block rhs becomes x_A - A01 * y_D (jac->B holds the A01 block) */
    PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
    PetscCall(VecScale(ilinkA->x, -1.));
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.0));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

    if (kspUpper == kspA) {
      /* No separate "upper" solver configured: reuse the A00 solve, updating the
         rhs in place before the single solve */
      PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y));
      PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    } else {
      /* Distinct upper solver: solve with kspA, then correct with kspUpper applied
         to A01*y_D, accumulating into ilinkA->y via the extra work vector z */
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
      PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
      PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z));
      PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z));
      PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
      PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
    }
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(0);
}

/* Applies the fieldsplit preconditioner y = M^{-1} x for the ADDITIVE and
   MULTIPLICATIVE composite types (Schur and GKB have dedicated apply routines above/below). */
static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head; /* walk the linked list of splits */
  PetscInt          cnt, bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    if (jac->defaultsplit) {
      /* default (strided) splits: gather/scatter by block stride, no VecScatter needed */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs,
jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* general (IS-based) splits: each split scatters, solves, and adds back into y */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) {
    /* special-cased 2-split multiplicative: only Afield[1] (rows of the second
       split) is needed to update the residual, see the matching setup code */
    PetscCall(VecSet(y, 0.0));
    /* solve on first block for first block variables */
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));

    /* compute the residual only onto second block variables using first block variables */
    PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x));
    ilink = ilink->next;
    PetscCall(VecScale(ilink->x, -1.0));
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));

    /* solve on second block variables */
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
    if (!jac->w1) {
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    /* forward sweep over the splits, Gauss-Seidel style */
    PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
    cnt = 1;
    while (ilink->next) {
      ilink = ilink->next;
      /* compute the residual only over the part of the vector needed */
      PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x));
      PetscCall(VecScale(ilink->x, -1.0));
      PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
      PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
      PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    }
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* backward sweep, walking the list via the previous pointers */
      cnt -= 2;
      while (ilink->previous) {
        ilink = ilink->previous;
        /* compute the residual only over the part of the vector needed */
        PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x));
        PetscCall(VecScale(ilink->x, -1.0));
        PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      }
    }
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type);
  PetscFunctionReturn(0);
}

/* Applies the Golub-Kahan bidiagonalization (GKB) preconditioner for a 2x2 block
   system; iterates until the lower-bound error estimate drops below jac->gkbtol
   or jac->gkbmaxit is reached. Work vectors were created in PCSetUp_FieldSplit. */
static PetscErrorCode PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
  KSP               ksp = ilinkA->ksp;
  Vec               u, v, Hu, d, work1, work2;
  PetscScalar       alpha, z, nrmz2, *vecz;
  PetscReal         lowbnd, nu, beta;
  PetscInt          j, iterGKB;

  PetscFunctionBegin;
  PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));

  /* local aliases for the work objects allocated at setup */
  u     = jac->u;
  v     = jac->v;
  Hu    = jac->Hu;
  d     = jac->d;
  work1 = jac->w1;
  work2 = jac->w2;
  vecz  = jac->vecz;

  /* Change RHS to comply with matrix regularization H = A + nu*B*B' */
  /* Add q = q + nu*B*b */
  if (jac->gkbnu) {
    nu
= jac->gkbnu;
    PetscCall(VecScale(ilinkD->x, jac->gkbnu));
    PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */
  } else {
    /* Situation when no augmented Lagrangian is used. Then we set inner */
    /* matrix N = I in [Ar13], and thus nu = 1. */
    nu = 1;
  }

  /* Transform rhs from [q,tilde{b}] to [0,b] */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y));
  PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1));
  PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x */

  /* First step of algorithm */
  PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c)*/
  KSPCheckDot(ksp, beta);
  beta = PetscSqrtReal(nu) * beta;
  PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c */
  PetscCall(MatMult(jac->B, v, work2));          /* u = H^{-1}*B*v */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(KSPSolve(ksp, work2, u));
  PetscCall(KSPCheckSolve(ksp, pc, u));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u */
  PetscCall(VecDot(Hu, u, &alpha));
  KSPCheckDot(ksp, alpha);
  /* H must be positive definite for the bidiagonalization recurrence to be valid */
  PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
  alpha = PetscSqrtReal(PetscAbsScalar(alpha));
  PetscCall(VecScale(u, 1.0 / alpha));
  PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* v = nu/beta *c */

  z       = beta / alpha;
  vecz[1] = z;

  /* Computation of first iterate x(1) and p(1) */
  PetscCall(VecAXPY(ilinkA->y, z, u));
  PetscCall(VecCopy(d, ilinkD->y));
  PetscCall(VecScale(ilinkD->y, -z));

  iterGKB = 1;
  lowbnd  = 2 * jac->gkbtol; /* force at least one pass through the loop below */
  if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));

  while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) {
    iterGKB += 1;
    PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */
    PetscCall(VecAXPBY(v, nu, -alpha, work1));
    PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = sqrt(nu)*v'*v */
    beta = beta / PetscSqrtReal(nu);
    PetscCall(VecScale(v, 1.0 / beta));
    PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */
    PetscCall(MatMult(jac->H, u, Hu));
    PetscCall(VecAXPY(work2, -beta, Hu));
    PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(KSPSolve(ksp, work2, u));
    PetscCall(KSPCheckSolve(ksp, pc, u));
    PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u */
    PetscCall(VecDot(Hu, u, &alpha));
    KSPCheckDot(ksp, alpha);
    PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
    alpha = PetscSqrtReal(PetscAbsScalar(alpha));
    PetscCall(VecScale(u, 1.0 / alpha));

    z       = -beta / alpha * z; /* z <- beta/alpha*z */
    vecz[0] = z;

    /* Computation of new iterate x(i+1) and p(i+1) */
    PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */
    PetscCall(VecAXPY(ilinkA->y, z, u));                   /* r = r + z*u */
    PetscCall(VecAXPY(ilinkD->y, -z, d));                  /* p = p - z*d */
    PetscCall(MatMult(jac->H, ilinkA->y, Hu));             /* ||u||_H = u'*H*u */
    PetscCall(VecDot(Hu, ilinkA->y, &nrmz2));

    /* Compute Lower Bound estimate over the last gkbdelay values of z */
    if (iterGKB > jac->gkbdelay) {
      lowbnd = 0.0;
      for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]);
      lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2));
    }

    /* shift the delay window of stored z values by one */
    for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2];
    if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));
  }

  /* gather both split solutions back into the global output vector y */
  PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

  PetscFunctionReturn(0);
}

/* Transpose analogue of FieldSplitSplitSolveAdd: scatter xx into the split,
   KSPSolveTranspose, and add the result back into yy. Chained with || so the
   first nonzero PetscErrorCode short-circuits (relies on `pc` being in scope). */
#define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \
  (VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \
   KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \
   VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE))

/* Applies the transpose of the fieldsplit preconditioner (continues past this chunk) */
static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    if (jac->defaultsplit) {
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs); 1435 PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES)); 1436 while (ilink) { 1437 PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1438 PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y)); 1439 PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y)); 1440 PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1441 ilink = ilink->next; 1442 } 1443 PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES)); 1444 } else { 1445 PetscCall(VecSet(y, 0.0)); 1446 while (ilink) { 1447 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y)); 1448 ilink = ilink->next; 1449 } 1450 } 1451 } else { 1452 if (!jac->w1) { 1453 PetscCall(VecDuplicate(x, &jac->w1)); 1454 PetscCall(VecDuplicate(x, &jac->w2)); 1455 } 1456 PetscCall(VecSet(y, 0.0)); 1457 if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) { 1458 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y)); 1459 while (ilink->next) { 1460 ilink = ilink->next; 1461 PetscCall(MatMultTranspose(pc->mat, y, jac->w1)); 1462 PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); 1463 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y)); 1464 } 1465 while (ilink->previous) { 1466 ilink = ilink->previous; 1467 PetscCall(MatMultTranspose(pc->mat, y, jac->w1)); 1468 PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); 1469 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y)); 1470 } 1471 } else { 1472 while (ilink->next) { /* get to last entry in linked list */ 1473 ilink = ilink->next; 1474 } 1475 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y)); 1476 while (ilink->previous) { 1477 ilink = ilink->previous; 1478 PetscCall(MatMultTranspose(pc->mat, y, jac->w1)); 1479 PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); 1480 
PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y)); 1481 } 1482 } 1483 } 1484 PetscFunctionReturn(0); 1485 } 1486 1487 static PetscErrorCode PCReset_FieldSplit(PC pc) 1488 { 1489 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1490 PC_FieldSplitLink ilink = jac->head, next; 1491 1492 PetscFunctionBegin; 1493 while (ilink) { 1494 PetscCall(KSPDestroy(&ilink->ksp)); 1495 PetscCall(VecDestroy(&ilink->x)); 1496 PetscCall(VecDestroy(&ilink->y)); 1497 PetscCall(VecDestroy(&ilink->z)); 1498 PetscCall(VecScatterDestroy(&ilink->sctx)); 1499 PetscCall(ISDestroy(&ilink->is)); 1500 PetscCall(ISDestroy(&ilink->is_col)); 1501 PetscCall(PetscFree(ilink->splitname)); 1502 PetscCall(PetscFree(ilink->fields)); 1503 PetscCall(PetscFree(ilink->fields_col)); 1504 next = ilink->next; 1505 PetscCall(PetscFree(ilink)); 1506 ilink = next; 1507 } 1508 jac->head = NULL; 1509 PetscCall(PetscFree2(jac->x, jac->y)); 1510 if (jac->mat && jac->mat != jac->pmat) { 1511 PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat)); 1512 } else if (jac->mat) { 1513 jac->mat = NULL; 1514 } 1515 if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat)); 1516 if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield)); 1517 jac->nsplits = 0; 1518 PetscCall(VecDestroy(&jac->w1)); 1519 PetscCall(VecDestroy(&jac->w2)); 1520 PetscCall(MatDestroy(&jac->schur)); 1521 PetscCall(MatDestroy(&jac->schurp)); 1522 PetscCall(MatDestroy(&jac->schur_user)); 1523 PetscCall(KSPDestroy(&jac->kspschur)); 1524 PetscCall(KSPDestroy(&jac->kspupper)); 1525 PetscCall(MatDestroy(&jac->B)); 1526 PetscCall(MatDestroy(&jac->C)); 1527 PetscCall(MatDestroy(&jac->H)); 1528 PetscCall(VecDestroy(&jac->u)); 1529 PetscCall(VecDestroy(&jac->v)); 1530 PetscCall(VecDestroy(&jac->Hu)); 1531 PetscCall(VecDestroy(&jac->d)); 1532 PetscCall(PetscFree(jac->vecz)); 1533 PetscCall(PetscViewerDestroy(&jac->gkbviewer)); 1534 jac->isrestrict = PETSC_FALSE; 1535 PetscFunctionReturn(0); 1536 } 1537 1538 static 
PetscErrorCode PCDestroy_FieldSplit(PC pc) 1539 { 1540 PetscFunctionBegin; 1541 PetscCall(PCReset_FieldSplit(pc)); 1542 PetscCall(PetscFree(pc->data)); 1543 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL)); 1544 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL)); 1545 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL)); 1546 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL)); 1547 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL)); 1548 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL)); 1549 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL)); 1550 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL)); 1551 1552 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL)); 1553 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL)); 1554 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL)); 1555 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL)); 1556 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL)); 1557 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL)); 1558 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL)); 1559 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL)); 1560 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL)); 1561 PetscFunctionReturn(0); 1562 } 1563 1564 static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems *PetscOptionsObject) 1565 { 1566 PetscInt bs; 1567 PetscBool flg; 1568 PC_FieldSplit *jac = (PC_FieldSplit 
*)pc->data; 1569 PCCompositeType ctype; 1570 1571 PetscFunctionBegin; 1572 PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options"); 1573 PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL)); 1574 PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg)); 1575 if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs)); 1576 jac->diag_use_amat = pc->useAmat; 1577 PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL)); 1578 jac->offdiag_use_amat = pc->useAmat; 1579 PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL)); 1580 PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL)); 1581 PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */ 1582 PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg)); 1583 if (flg) PetscCall(PCFieldSplitSetType(pc, ctype)); 1584 /* Only setup fields once */ 1585 if ((jac->bs > 0) && (jac->nsplits == 0)) { 1586 /* only allow user to set fields from command line if bs is already known. 
1587 otherwise user can set them in PCFieldSplitSetDefaults() */ 1588 PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc)); 1589 if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n")); 1590 } 1591 if (jac->type == PC_COMPOSITE_SCHUR) { 1592 PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg)); 1593 if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n")); 1594 PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL)); 1595 PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL)); 1596 PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL)); 1597 } else if (jac->type == PC_COMPOSITE_GKB) { 1598 PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitGKBTol", jac->gkbtol, &jac->gkbtol, NULL)); 1599 PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL)); 1600 PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitGKBNu", jac->gkbnu, &jac->gkbnu, NULL)); 1601 PetscCheck(jac->gkbnu >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "nu cannot be less than 0: value %g", (double)jac->gkbnu); 1602 PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", 
"Maximum allowed number of iterations", "PCFieldSplitGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL)); 1603 PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL)); 1604 } 1605 /* 1606 In the initial call to this routine the sub-solver data structures do not exist so we cannot call KSPSetFromOptions() on them yet. 1607 But after the initial setup of ALL the layers of sub-solvers is completed we do want to call KSPSetFromOptions() on the sub-solvers every time it 1608 is called on the outer solver in case changes were made in the options database 1609 1610 But even after PCSetUp_FieldSplit() is called all the options inside the inner levels of sub-solvers may still not have been set thus we only call the KSPSetFromOptions() 1611 if we know that the entire stack of sub-solvers below this have been complete instantiated, we check this by seeing if any solver iterations are complete. 1612 Without this extra check test p2p1fetidp_olof_full and others fail with incorrect matrix types. 1613 1614 There could be a negative side effect of calling the KSPSetFromOptions() below. 
1615 1616 If one captured the PetscObjectState of the options database one could skip these calls if the database has not changed from the previous call 1617 */ 1618 if (jac->issetup) { 1619 PC_FieldSplitLink ilink = jac->head; 1620 if (jac->type == PC_COMPOSITE_SCHUR) { 1621 if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper)); 1622 if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur)); 1623 } 1624 while (ilink) { 1625 if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp)); 1626 ilink = ilink->next; 1627 } 1628 } 1629 PetscOptionsHeadEnd(); 1630 PetscFunctionReturn(0); 1631 } 1632 1633 static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col) 1634 { 1635 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1636 PC_FieldSplitLink ilink, next = jac->head; 1637 char prefix[128]; 1638 PetscInt i; 1639 1640 PetscFunctionBegin; 1641 if (jac->splitdefined) { 1642 PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname)); 1643 PetscFunctionReturn(0); 1644 } 1645 for (i = 0; i < n; i++) { 1646 PetscCheck(fields[i] < jac->bs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", fields[i], jac->bs); 1647 PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]); 1648 } 1649 PetscCall(PetscNew(&ilink)); 1650 if (splitname) { 1651 PetscCall(PetscStrallocpy(splitname, &ilink->splitname)); 1652 } else { 1653 PetscCall(PetscMalloc1(3, &ilink->splitname)); 1654 PetscCall(PetscSNPrintf(ilink->splitname, 2, "%" PetscInt_FMT, jac->nsplits)); 1655 } 1656 ilink->event = jac->nsplits < 5 ? 
                 KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split greater than 4 gets logged in the 4th split */
  PetscCall(PetscMalloc1(n, &ilink->fields));
  PetscCall(PetscArraycpy(ilink->fields, fields, n));
  PetscCall(PetscMalloc1(n, &ilink->fields_col));
  PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n));

  ilink->nfields = n;
  ilink->next    = NULL;
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
  PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
  PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));

  /* Sub-solver options live under -<pcprefix>fieldsplit_<splitname>_ */
  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
  PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));

  /* Append the new split at the tail of the doubly-linked list */
  if (!next) {
    jac->head       = ilink;
    ilink->previous = NULL;
  } else {
    while (next->next) next = next->next;
    next->next      = ilink;
    ilink->previous = next;
  }
  jac->nsplits++;
  PetscFunctionReturn(0);
}

/*
   PCFieldSplitSchurGetSubKSP_FieldSplit - Returns the sub-KSPs of a Schur fieldsplit:
   [0] the A00 solver, [1] the Schur complement solver, and optionally [2] the distinct
   upper-factor solver. Caller frees the array.
*/
static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  *subksp = NULL;
  if (n) *n = 0;
  if (jac->type == PC_COMPOSITE_SCHUR) {
    PetscInt nn;

    PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()");
    PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits);
    /* a third entry only when the upper-factor solver differs from the A00 solver */
    nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 1 : 0);
    PetscCall(PetscMalloc1(nn, subksp));
    (*subksp)[0] = jac->head->ksp;
    (*subksp)[1] = jac->kspschur;
    if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper;
    if (n) *n = nn;
  }
  PetscFunctionReturn(0);
}

/*
   PCFieldSplitGetSubKSP_FieldSplit_Schur - GetSubKSP variant used when the PC is of
   Schur type: entry [0] comes from the MatSchurComplement, entry [1] is the Schur solver.
*/
static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()");
  PetscCall(PetscMalloc1(jac->nsplits, subksp));
  PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp));

  (*subksp)[1] = jac->kspschur;
  if (n) *n = jac->nsplits;
  PetscFunctionReturn(0);
}

/* PCFieldSplitGetSubKSP_FieldSplit - Collects each split's KSP into a caller-freed array. */
static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscInt          cnt = 0;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscMalloc1(jac->nsplits, subksp));
  while (ilink) {
    (*subksp)[cnt++] = ilink->ksp;
    ilink = ilink->next;
  }
  PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits);
  if (n) *n = jac->nsplits;
  PetscFunctionReturn(0);
}

/*@C
  PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`.

  Input Parameters:
+ pc - the preconditioner context
- is - the index set that defines the indices to which the fieldsplit is to be restricted

  Level: advanced

  Developer Note:
  It seems the resulting `IS`s will not cover the entire space, so
  how can they define a convergent preconditioner? Needs explaining.

.seealso: `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(isy, IS_CLASSID, 2);
  PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy));
  PetscFunctionReturn(0);
}

/*
   PCFieldSplitRestrictIS_FieldSplit - Replaces each split's IS with its embedding into
   isy (shifted by this rank's offset in the restricted numbering, computed via MPI_Scan)
   and recurses into nested fieldsplit sub-PCs.
*/
static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;
  PetscInt          localsize, size, sizez, i;
  const PetscInt   *ind, *indz;
  PetscInt         *indc, *indcz;
  PetscBool         flg;

  PetscFunctionBegin;
  PetscCall(ISGetLocalSize(isy, &localsize));
  /* exclusive prefix sum: 'size' becomes this rank's starting offset in isy's numbering */
  PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy)));
  size -= localsize;
  while (ilink) {
    IS isrl, isr;
    PC subpc;
    PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl));
    PetscCall(ISGetLocalSize(isrl, &localsize));
    PetscCall(PetscMalloc1(localsize, &indc));
    PetscCall(ISGetIndices(isrl, &ind));
    PetscCall(PetscArraycpy(indc, ind, localsize));
    PetscCall(ISRestoreIndices(isrl, &ind));
    PetscCall(ISDestroy(&isrl));
    for (i = 0; i < localsize; i++) *(indc + i) += size;
    /* isr takes ownership of indc; referenced twice since both is and is_col hold it */
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr));
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is));
    ilink->is = isr;
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is_col));
    ilink->is_col = isr;
    PetscCall(ISDestroy(&isr));
    PetscCall(KSPGetPC(ilink->ksp, &subpc));
    PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg));
    if (flg) {
      /* nested fieldsplit: restrict its splits to this split's (new) index set */
      IS iszl, isz;
      MPI_Comm comm;
      PetscCall(ISGetLocalSize(ilink->is, &localsize));
      comm = PetscObjectComm((PetscObject)ilink->is);
      PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl));
      PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm));
      sizez -= localsize;
      PetscCall(ISGetLocalSize(iszl, &localsize));
      PetscCall(PetscMalloc1(localsize, &indcz));
      PetscCall(ISGetIndices(iszl, &indz));
      PetscCall(PetscArraycpy(indcz, indz, localsize));
      PetscCall(ISRestoreIndices(iszl, &indz));
      PetscCall(ISDestroy(&iszl));
      for (i = 0; i < localsize; i++) *(indcz + i) += sizez;
      PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz));
      PetscCall(PCFieldSplitRestrictIS(subpc, isz));
      PetscCall(ISDestroy(&isz));
    }
    next = ilink->next;
    ilink = next;
  }
  jac->isrestrict = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*
   PCFieldSplitSetIS_FieldSplit - Implementation behind PCFieldSplitSetIS();
   appends a new split defined by an explicit index set.
*/
static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink, next = jac->head;
  char              prefix[128];

  PetscFunctionBegin;
  if (jac->splitdefined) {
    PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
    PetscFunctionReturn(0);
  }
  PetscCall(PetscNew(&ilink));
  if (splitname) {
    PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
  } else {
    PetscCall(PetscMalloc1(8, &ilink->splitname));
    PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits));
  }
  ilink->event = jac->nsplits < 5 ?
                 KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split greater than 4 gets logged in the 4th split */
  /* take references since both is and is_col hold the user's IS */
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is));
  ilink->is = is;
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is_col));
  ilink->is_col = is;
  ilink->next   = NULL;
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
  PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
  PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));

  /* Sub-solver options live under -<pcprefix>fieldsplit_<splitname>_ */
  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
  PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));

  /* Append the new split at the tail of the doubly-linked list */
  if (!next) {
    jac->head       = ilink;
    ilink->previous = NULL;
  } else {
    while (next->next) next = next->next;
    next->next      = ilink;
    ilink->previous = next;
  }
  jac->nsplits++;
  PetscFunctionReturn(0);
}

/*@C
  PCFieldSplitSetFields - Sets the fields that define one particular split in the field split preconditioner

  Logically Collective on pc

  Input Parameters:
+ pc - the preconditioner context
. splitname - name of this split, if NULL the number of the split is used
. n - the number of fields in this split
- fields - the fields in this split

  Level: intermediate

  Notes:
  Use `PCFieldSplitSetIS()` to set a general set of indices as a split.

  `PCFieldSplitSetFields()` is for defining fields as strided blocks. For example, if the block
  size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean
  0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x1x3x5x7.. x12x45x78x....
  where the numbered entries indicate what is in the split.

  This function is called once per split (it creates a new split each time). Solve options
  for this split will be available under the prefix -fieldsplit_SPLITNAME_.

  Developer Note:
  This routine does not actually create the `IS` representing the split, that is delayed
  until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be
  available when this routine is called.

.seealso: `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()`
@*/
PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidCharPointer(splitname, 2);
  PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname);
  PetscValidIntPointer(fields, 4);
  PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col));
  PetscFunctionReturn(0);
}

/*@
  PCFieldSplitSetDiagUseAmat - set flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
  the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.

  Logically Collective on pc

  Input Parameters:
+ pc - the preconditioner object
- flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from

  Options Database Keys:
. -pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks

  Level: intermediate

.seealso: `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT`
@*/
PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* unlike most PC setters this requires the PC to already be of fieldsplit type */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  jac->diag_use_amat = flg;
  PetscFunctionReturn(0);
}

/*@
  PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
  the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.

  Logically Collective on pc

  Input Parameters:
. pc - the preconditioner object

  Output Parameters:
. flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from

  Level: intermediate

.seealso: `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT`
@*/
PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidBoolPointer(flg, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  *flg = jac->diag_use_amat;
  PetscFunctionReturn(0);
}

/*@
  PCFieldSplitSetOffDiagUseAmat - set flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
  the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.

  Logically Collective on pc

  Input Parameters:
+ pc - the preconditioner object
- flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from

  Options Database Keys:
.
 -pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks

  Level: intermediate

.seealso: `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT`
@*/
PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* requires the PC to already be of fieldsplit type */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  jac->offdiag_use_amat = flg;
  PetscFunctionReturn(0);
}

/*@
  PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
  the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators.

  Logically Collective on pc

  Input Parameters:
. pc - the preconditioner object

  Output Parameters:
. flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from

  Level: intermediate

.seealso: `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT`
@*/
PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidBoolPointer(flg, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  *flg = jac->offdiag_use_amat;
  PetscFunctionReturn(0);
}

/*@C
  PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT`

  Logically Collective on pc

  Input Parameters:
+ pc - the preconditioner context
. splitname - name of this split, if NULL the number of the split is used
- is - the index set that defines the elements in this split

  Notes:
  Use `PCFieldSplitSetFields()`, for splits defined by strided types.

  This function is called once per split (it creates a new split each time). Solve options
  for this split will be available under the prefix -fieldsplit_SPLITNAME_.

  Level: intermediate

.seealso: `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`
@*/
PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (splitname) PetscValidCharPointer(splitname, 2);
  PetscValidHeaderSpecific(is, IS_CLASSID, 3);
  PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is));
  PetscFunctionReturn(0);
}

/*@C
  PCFieldSplitGetIS - Retrieves the elements for a split as an `IS`

  Logically Collective on pc

  Input Parameters:
+ pc - the preconditioner context
- splitname - name of this split

  Output Parameter:
- is - the index set that defines the elements in this split, or NULL if the split is not found

  Level: intermediate

.seealso: `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidCharPointer(splitname, 2);
  PetscValidPointer(is, 3);
  {
    PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
    PC_FieldSplitLink ilink = jac->head;
    PetscBool         found;

    /* linear search of the split list by name; *is stays NULL when not found */
    *is = NULL;
    while (ilink) {
      PetscCall(PetscStrcmp(ilink->splitname, splitname, &found));
      if (found) {
        *is = ilink->is;
        break;
      }
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(0);
}

/*@C
  PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS`

  Logically Collective on pc

  Input Parameters:
+ pc - the preconditioner context
- index - index of this split

  Output Parameter:
- is - the index set that defines the elements in this split

  Level: intermediate

.seealso: `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is)
{
  PetscFunctionBegin;
  PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index);
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(is, 3);
  {
    PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
    PC_FieldSplitLink ilink = jac->head;
    PetscInt          i     = 0;
    PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits);

    /* walk the list to the index-th split, then delegate the lookup by name */
    while (i < index) {
      ilink = ilink->next;
      ++i;
    }
    PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is));
  }
  PetscFunctionReturn(0);
}

/*@
  PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the
  fieldsplit preconditioner when calling `PCFieldSplitSetIS()`. If not set the matrix block size is used.

  Logically Collective on pc

  Input Parameters:
+ pc - the preconditioner context
- bs - the block size

  Level: intermediate

.seealso: `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, bs, 2);
  PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs));
  PetscFunctionReturn(0);
}

/*@C
  PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits

  Collective on pc

  Input Parameter:
.
pc - the preconditioner context 2166 2167 Output Parameters: 2168 + n - the number of splits 2169 - subksp - the array of `KSP` contexts 2170 2171 Level: advanced 2172 2173 Notes: 2174 After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()` 2175 (not the `KSP`, just the array that contains them). 2176 2177 You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`. 2178 2179 If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the 2180 Schur complement and the `KSP` object used to iterate over the Schur complement. 2181 To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`. 2182 2183 If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the 2184 inner linear system defined by the matrix H in each loop. 2185 2186 Fortran Usage: 2187 You must pass in a `KSP` array that is large enough to contain all the `KSP`s. 2188 You can call `PCFieldSplitGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the 2189 `KSP` array must be. 2190 2191 Developer Note: 2192 There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()` 2193 2194 The Fortran interface should be modernized to return directly the array of values. 2195 2196 .seealso: `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()` 2197 @*/ 2198 PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[]) 2199 { 2200 PetscFunctionBegin; 2201 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2202 if (n) PetscValidIntPointer(n, 2); 2203 PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp)); 2204 PetscFunctionReturn(0); 2205 } 2206 2207 /*@C 2208 PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT` 2209 2210 Collective on `KSP` 2211 2212 Input Parameter: 2213 . 
pc - the preconditioner context 2214 2215 Output Parameters: 2216 + n - the number of splits 2217 - subksp - the array of `KSP` contexts 2218 2219 Level: advanced 2220 2221 Notes: 2222 After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()` 2223 (not the `KSP` just the array that contains them). 2224 2225 You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`. 2226 2227 If the fieldsplit type is of type `PC_COMPOSITE_SCHUR`, it returns (in order) 2228 + 1 - the `KSP` used for the (1,1) block 2229 . 2 - the `KSP` used for the Schur complement (not the one used for the interior Schur solver) 2230 - 3 - the `KSP` used for the (1,1) block in the upper triangular factor (if different from that of the (1,1) block). 2231 2232 It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`. 2233 2234 Fortran Note: 2235 You must pass in a `KSP` array that is large enough to contain all the local `KSP`s. 2236 You can call `PCFieldSplitSchurGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the 2237 `KSP` array must be. 2238 2239 Developer Notes: 2240 There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()` 2241 2242 Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged? 2243 2244 The Fortran interface should be modernized to return directly the array of values. 

.seealso: `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()`
@*/
PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscValidIntPointer(n, 2);
  PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(0);
}

/*@
  PCFieldSplitSetSchurPre - Indicates from what operator the preconditioner is constructed for the Schur complement.
  The default is the A11 matrix.

  Collective on pc

  Input Parameters:
+ pc - the preconditioner context
. ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default),
          `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`,
          `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL`
- pre - matrix to use for preconditioning, or NULL

  Options Database Keys:
+ -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is a11. See notes for meaning of various arguments
- -fieldsplit_1_pc_type <pctype> - the preconditioner algorithm that is used to construct the preconditioner from the operator

  Notes:
  If ptype is
+ a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner
  matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix
. self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix:
  The only preconditioner that currently works with this symbolic representation matrix object is the PCLSC
  preconditioner
. user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument
  to this function).
. selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation Sp = A11 - A10 inv(diag(A00)) A01
  This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be
  lumped before extracting the diagonal using the additional option -fieldsplit_1_mat_schur_complement_ainv_type lump
- full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation
  computed internally by `PCFIELDSPLIT` (this is expensive)
  useful mostly as a test that the Schur complement approach can work for your problem

  When solving a saddle point problem, where the A11 block is identically zero, using a11 as the ptype only makes sense
  with the additional option -fieldsplit_1_pc_type none. Usually for saddle point problems one would use a ptype of self and
  -fieldsplit_1_pc_type lsc which uses the least squares commutator to compute a preconditioner for the Schur complement.

  Level: intermediate

.seealso: `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`,
          `MatSchurComplementSetAinvType()`, `PCLSC`,
          `PCFieldSplitSchurPreType`
@*/
PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre));
  PetscFunctionReturn(0);
}

/* Deprecated name for PCFieldSplitSetSchurPre() */
PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  return PCFieldSplitSetSchurPre(pc, ptype, pre);
}

/*@
  PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be
  preconditioned. See `PCFieldSplitSetSchurPre()` for details.

  Logically Collective on pc

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`
- pre - matrix to use for preconditioning (with `PC_FIELDSPLIT_SCHUR_PRE_USER`), or NULL

  Level: intermediate

.seealso: `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC`,
          `PCFieldSplitSchurPreType`
@*/
PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre));
  PetscFunctionReturn(0);
}

/*@
  PCFieldSplitSchurGetS - extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately

  Not collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. S - the Schur complement matrix

  Note:
  This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`.

  Level: advanced

.seealso: `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`,
          `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()`
@*/
PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S)
{
  const char    *t;
  PetscBool      isfs;
  PC_FieldSplit *jac;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCall(PetscObjectGetType((PetscObject)pc, &t));
  PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
  jac = (PC_FieldSplit *)pc->data;
  PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
  /* No new reference is taken; the matching Restore call checks the same matrix is handed back */
  if (S) *S = jac->schur;
  PetscFunctionReturn(0);
}

/*@
  PCFieldSplitSchurRestoreS - returns the `MATSCHURCOMPLEMENT` matrix used by this `PC`

  Not collective

  Input Parameters:
+ pc - the preconditioner context
- S - the Schur complement matrix

  Level: advanced

.seealso: `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MatSchurComplement`, `PCFieldSplitSchurGetS()`
@*/
PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S)
{
  const char    *t;
  PetscBool      isfs;
  PC_FieldSplit *jac;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCall(PetscObjectGetType((PetscObject)pc, &t));
  PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
  jac = (PC_FieldSplit *)pc->data;
  PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
  PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten");
  PetscFunctionReturn(0);
}

static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurpre = ptype;
  if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) {
    /* Drop any previously supplied matrix, then take a reference so the caller may destroy its handle */
    PetscCall(MatDestroy(&jac->schur_user));
    jac->schur_user = pre;
    PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  *ptype = jac->schurpre;
  *pre   = jac->schur_user;
  PetscFunctionReturn(0);
}

/*@
  PCFieldSplitSetSchurFactType - sets which blocks of the approximate block factorization to retain in the preconditioner

  Collective on pc

  Input Parameters:
+ pc - the preconditioner context
- ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default

  Options Database Key:
. -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is full

  Level: intermediate

  Notes:
  The FULL factorization is

.vb
  (A   B)  = (1       0) (A   0) (1  Ainv*B)  = L D U
  (C   E)    (C*Ainv   1) (0   S) (0       1)
.ve
  where S = E - C*Ainv*B. In practice, the full factorization is applied via block triangular solves with the grouping L*(D*U). UPPER uses D*U, LOWER uses L*D,
  and DIAG is the diagonal part with the sign of S flipped (because this makes the preconditioner positive definite for many formulations, thus allowing the use of KSPMINRES). Sign flipping of S can be turned off with PCFieldSplitSetSchurScale().

  If A and S are solved exactly
.vb
  *) FULL factorization is a direct solver.
  *) The preconditioned operator with LOWER or UPPER has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations.
  *) With DIAG, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations.
.ve

  If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner
  application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice.

  For symmetric problems in which A is positive definite and S is negative definite, DIAG can be used with `KSPMINRES`.

  A flexible method like `KSPFGMRES` or `KSPGCR` must be used if the fieldsplit preconditioner is nonlinear (e.g. a few iterations of a Krylov method is used to solve with A or S).

  References:
+ * - Murphy, Golub, and Wathen, A note on preconditioning indefinite linear systems, SIAM J. Sci. Comput., 21 (2000).
- * - Ipsen, A note on preconditioning nonsymmetric matrices, SIAM J. Sci. Comput., 23 (2001).

.seealso: `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`
@*/
PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurfactorization = ftype;
  PetscFunctionReturn(0);
}

/*@
  PCFieldSplitSetSchurScale - Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`.

  Collective on pc

  Input Parameters:
+ pc - the preconditioner context
- scale - scaling factor for the Schur complement

  Options Database Key:
. -pc_fieldsplit_schur_scale - default is -1.0

  Level: intermediate

.seealso: `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetSchurFactType()`
@*/
PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveScalar(pc, scale, 2);
  PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurscale = scale;
  PetscFunctionReturn(0);
}

/*@C
  PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement

  Collective on pc

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ A00 - the (0,0) block
. A01 - the (0,1) block
. A10 - the (1,0) block
- A11 - the (1,1) block

  Level: advanced

.seealso: `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()`
@*/
PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach.");
  /* Any output argument may be NULL if that block is not wanted */
  if (A00) *A00 = jac->pmat[0];
  if (A01) *A01 = jac->B;
  if (A10) *A10 = jac->C;
  if (A11) *A11 = jac->pmat[1];
  PetscFunctionReturn(0);
}

/*@
  PCFieldSplitSetGKBTol - Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner in `PCFIELDSPLIT`

  Collective on pc

  Input Parameters:
+ pc - the preconditioner context
- tolerance - the solver tolerance

  Options Database Key:
. -pc_fieldsplit_gkb_tol - default is 1e-5

  Level: intermediate

  Note:
  The generalized GKB algorithm uses a lower bound estimate of the error in energy norm as stopping criterion.
  It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than
  this estimate, the stopping criterion is satisfactory in practical cases [Ar13].

  [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013.

.seealso: `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()`
@*/
PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, tolerance, 2);
  PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance));
  PetscFunctionReturn(0);
}

/* Implementation for PCFieldSplitSetGKBTol(); stores the tolerance in the fieldsplit context */
static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbtol = tolerance;
  PetscFunctionReturn(0);
}

/*@
  PCFieldSplitSetGKBMaxit - Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner in `PCFIELDSPLIT`

  Collective on pc

  Input Parameters:
+ pc - the preconditioner context
- maxit - the maximum number of iterations

  Options Database Key:
. -pc_fieldsplit_gkb_maxit - default is 100

  Level: intermediate

.seealso: `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()`
@*/
PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, maxit, 2);
  PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit));
  PetscFunctionReturn(0);
}

/* Implementation for PCFieldSplitSetGKBMaxit(); stores the iteration cap in the fieldsplit context */
static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbmaxit = maxit;
  PetscFunctionReturn(0);
}

/*@
  PCFieldSplitSetGKBDelay - Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization in `PCFIELDSPLIT`
  preconditioner.
2634 2635 Collective on pc 2636 2637 Input Parameters: 2638 + pc - the preconditioner context 2639 - delay - the delay window in the lower bound estimate 2640 2641 Options Database Key: 2642 . -pc_fieldsplit_gkb_delay - default is 5 2643 2644 Level: intermediate 2645 2646 Note: 2647 The algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. The lower bound of the error ||u-u^k||_H 2648 is expressed as a truncated sum. The error at iteration k can only be measured at iteration (k + delay), and thus the algorithm needs 2649 at least (delay + 1) iterations to stop. For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to 2650 2651 [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013. 2652 2653 .seealso: `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()` 2654 @*/ 2655 PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay) 2656 { 2657 PetscFunctionBegin; 2658 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2659 PetscValidLogicalCollectiveInt(pc, delay, 2); 2660 PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay)); 2661 PetscFunctionReturn(0); 2662 } 2663 2664 static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay) 2665 { 2666 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2667 2668 PetscFunctionBegin; 2669 jac->gkbdelay = delay; 2670 PetscFunctionReturn(0); 2671 } 2672 2673 /*@ 2674 PCFieldSplitSetGKBNu - Sets the scalar value nu >= 0 in the transformation H = A00 + nu*A01*A01' of the (1,1) block in the Golub-Kahan bidiagonalization preconditioner 2675 in `PCFIELDSPLIT` 2676 2677 Collective on pc 2678 2679 Input Parameters: 2680 + pc - the preconditioner context 2681 - nu - the shift parameter 2682 2683 Options Database Keys: 2684 . 
-pc_fieldsplit_gkb_nu - default is 1 2685 2686 Level: intermediate 2687 2688 Notes: 2689 This shift is in general done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by chosing nu sufficiently big. However, 2690 if nu is chosen too big, the matrix H might be badly conditioned and the solution of the linear system Hx = b in the inner loop gets difficult. It is therefore 2691 necessary to find a good balance in between the convergence of the inner and outer loop. 2692 2693 For nu = 0, no shift is done. In this case A00 has to be positive definite. The matrix N in [Ar13] is then chosen as identity. 2694 2695 [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013. 2696 2697 .seealso: `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()` 2698 @*/ 2699 PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu) 2700 { 2701 PetscFunctionBegin; 2702 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2703 PetscValidLogicalCollectiveReal(pc, nu, 2); 2704 PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu)); 2705 PetscFunctionReturn(0); 2706 } 2707 2708 static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu) 2709 { 2710 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2711 2712 PetscFunctionBegin; 2713 jac->gkbnu = nu; 2714 PetscFunctionReturn(0); 2715 } 2716 2717 static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type) 2718 { 2719 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2720 2721 PetscFunctionBegin; 2722 jac->type = type; 2723 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL)); 2724 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL)); 2725 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL)); 2726 
PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL)); 2727 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL)); 2728 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL)); 2729 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL)); 2730 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL)); 2731 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL)); 2732 2733 if (type == PC_COMPOSITE_SCHUR) { 2734 pc->ops->apply = PCApply_FieldSplit_Schur; 2735 pc->ops->view = PCView_FieldSplit_Schur; 2736 2737 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur)); 2738 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit)); 2739 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit)); 2740 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit)); 2741 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit)); 2742 } else if (type == PC_COMPOSITE_GKB) { 2743 pc->ops->apply = PCApply_FieldSplit_GKB; 2744 pc->ops->view = PCView_FieldSplit_GKB; 2745 2746 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit)); 2747 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit)); 2748 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit)); 2749 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit)); 2750 
PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit)); 2751 } else { 2752 pc->ops->apply = PCApply_FieldSplit; 2753 pc->ops->view = PCView_FieldSplit; 2754 2755 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit)); 2756 } 2757 PetscFunctionReturn(0); 2758 } 2759 2760 static PetscErrorCode PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs) 2761 { 2762 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2763 2764 PetscFunctionBegin; 2765 PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" PetscInt_FMT, bs); 2766 PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs); 2767 jac->bs = bs; 2768 PetscFunctionReturn(0); 2769 } 2770 2771 static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[]) 2772 { 2773 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2774 PC_FieldSplitLink ilink_current = jac->head; 2775 IS is_owned; 2776 2777 PetscFunctionBegin; 2778 jac->coordinates_set = PETSC_TRUE; // Internal flag 2779 PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, PETSC_NULL)); 2780 2781 while (ilink_current) { 2782 // For each IS, embed it to get local coords indces 2783 IS is_coords; 2784 PetscInt ndofs_block; 2785 const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block 2786 2787 // Setting drop to true for safety. It should make no difference. 
2788 PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords)); 2789 PetscCall(ISGetLocalSize(is_coords, &ndofs_block)); 2790 PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration)); 2791 2792 // Allocate coordinates vector and set it directly 2793 PetscCall(PetscMalloc1(ndofs_block * dim, &(ilink_current->coords))); 2794 for (PetscInt dof = 0; dof < ndofs_block; ++dof) { 2795 for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d]; 2796 } 2797 ilink_current->dim = dim; 2798 ilink_current->ndofs = ndofs_block; 2799 PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration)); 2800 PetscCall(ISDestroy(&is_coords)); 2801 ilink_current = ilink_current->next; 2802 } 2803 PetscCall(ISDestroy(&is_owned)); 2804 PetscFunctionReturn(0); 2805 } 2806 2807 /*@ 2808 PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT` 2809 2810 Collective on pc 2811 2812 Input Parameters: 2813 + pc - the preconditioner context 2814 - type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR` 2815 2816 Options Database Key: 2817 . 
-pc_fieldsplit_type <type: one of multiplicative, additive, symmetric_multiplicative, special, schur> - Sets fieldsplit preconditioner type

  Level: intermediate

.seealso: `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
          `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
@*/
PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Dispatches to PCFieldSplitSetType_FieldSplit() only when pc is a PCFIELDSPLIT; no-op otherwise */
  PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(0);
}

/*@
  PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT`

  Not collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`

  Level: intermediate

.seealso: `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
          `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
@*/
PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidIntPointer(type, 2);
  *type = jac->type;
  PetscFunctionReturn(0);
}

/*@
  PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.

  Logically Collective on pc

  Input Parameters:
+ pc - the preconditioner context
- flg - boolean indicating whether to use field splits defined by the `DM`

  Options Database Key:
. -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM`

  Level: intermediate

.seealso: `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flg, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  /* Silently ignored when pc is not a PCFIELDSPLIT */
  if (isfs) jac->dm_splits = flg;
  PetscFunctionReturn(0);
}

/*@
  PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.

  Logically Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
.
flg - boolean indicating whether to use field splits defined by the `DM` 2898 2899 Level: Intermediate 2900 2901 .seealso: `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `PCFieldSplitSetFields()`, `PCFieldsplitSetIS()` 2902 @*/ 2903 PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg) 2904 { 2905 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2906 PetscBool isfs; 2907 2908 PetscFunctionBegin; 2909 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2910 PetscValidBoolPointer(flg, 2); 2911 PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs)); 2912 if (isfs) { 2913 if (flg) *flg = jac->dm_splits; 2914 } 2915 PetscFunctionReturn(0); 2916 } 2917 2918 /*@ 2919 PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries. 2920 2921 Logically Collective 2922 2923 Input Parameter: 2924 . pc - the preconditioner context 2925 2926 Output Parameter: 2927 . flg - boolean indicating whether to detect fields or not 2928 2929 Level: Intermediate 2930 2931 .seealso: `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()` 2932 @*/ 2933 PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg) 2934 { 2935 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2936 2937 PetscFunctionBegin; 2938 *flg = jac->detect; 2939 PetscFunctionReturn(0); 2940 } 2941 2942 /*@ 2943 PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries. 2944 2945 Logically Collective 2946 2947 Input Parameter: 2948 . pc - the preconditioner context 2949 2950 Output Parameter: 2951 . flg - boolean indicating whether to detect fields or not 2952 2953 Options Database Key: 2954 . 
-pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point

  Note:
  Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`).

  Level: intermediate

.seealso: `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`
@*/
PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->detect = flg;
  if (jac->detect) {
    /* Saddle-point detection implies a Schur-complement split preconditioned by the Schur complement itself */
    PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
    PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
  }
  PetscFunctionReturn(0);
}

/*MC
   PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual
   collections of variables (that may overlap) called splits. See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details.

   To set options on the solvers for each block append `-fieldsplit_` to all the `PC`
   options database keys. For example, `-fieldsplit_pc_type ilu -fieldsplit_pc_factor_levels 1`

   To set the options on the solvers separate for each block call `PCFieldSplitGetSubKSP()`
   and set the options directly on the resulting `KSP` object

   Level: intermediate

   Options Database Keys:
+  -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split
.  -pc_fieldsplit_default - automatically add any fields to additional splits that have not
                            been supplied explicitly by `-pc_fieldsplit_%d_fields`
.  -pc_fieldsplit_block_size <bs> - size of block that defines fields (i.e. there are bs fields)
.
-pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting
.  -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is a11; see `PCFieldSplitSetSchurPre()`
.  -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - set factorization type when using `-pc_fieldsplit_type schur`; see `PCFieldSplitSetSchurFactType()`
-  -pc_fieldsplit_detect_saddle_point - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver

   Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` .
   For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields.
   The options prefix for the inner solver when using the Golub-Kahan bidiagonalization preconditioner is `-fieldsplit_0_`

   Notes:
   Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries and `PCFieldSplitSetIS()`
   to define a split by an arbitrary collection of entries.

   If no splits are set the default is used. The splits are defined by entries strided by bs,
   beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`,
   if this is not called the block size defaults to the blocksize of the second matrix passed
   to `KSPSetOperators()`/`PCSetOperators()`.
   For the Schur complement preconditioner if

   ```{math}
     J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right]
   ```

   the preconditioner using `full` factorization is logically
   ```{math}
     \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right]
   ```
   where the action of $\text{inv}(A_{00})$ is applied using the KSP solver with prefix `-fieldsplit_0_`. $S$ is the Schur complement
   ```{math}
     S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01}
   ```
   which is usually dense and not stored explicitly. The action of $\text{ksp}(S)$ is computed using the KSP solver with prefix `-fieldsplit_splitname_` (where `splitname` was given
   in providing the SECOND split or 1 if not given). For `PCFieldSplitGetSubKSP()` when field number is 0,
   it returns the KSP associated with `-fieldsplit_0_` while field number 1 gives `-fieldsplit_1_` KSP. By default
   $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$.

   The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above,
   `diag` gives
   ```{math}
     \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & -\text{ksp}(S) \end{array}\right]
   ```
   Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$ so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip
   can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`.
The `lower` factorization is the inverse of
   ```{math}
     \left[\begin{array}{cc} A_{00} & 0 \\ A_{10} & S \end{array}\right]
   ```
   where the inverses of $A_{00}$ and $S$ are applied using `KSP`s. The `upper` factorization is the inverse of
   ```{math}
     \left[\begin{array}{cc} A_{00} & A_{01} \\ 0 & S \end{array}\right]
   ```
   where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s.

   If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS`
   is used automatically for a second block.

   The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1.
   Generally it should be used with the `MATAIJ` format.

   The forms of these preconditioners are closely related if not identical to forms derived as "Distributive Iterations", see,
   for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`Wesseling2009`.
   One can also use `PCFIELDSPLIT`
   inside a smoother resulting in "Distributive Smoothers".

   See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`.

   The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the
   residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables.

   The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape
   ```{math}
     \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right]
   ```
   with $A_{00}$ positive semi-definite. The implementation follows {cite}`Arioli2013`.
Therein, we choose $N := 1/\nu * I$ and the $(1,1)$-block of the matrix is modified to $H = A_{00} + \nu A_{01} A_{01}'$.
   A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`.

   References:

   ```{bibliography}
   :filter: docname in docnames
   ```

   Developer Note:
   The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplify the implementation of the preconditioners and their
   user API.

.seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`,
          `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`,
          `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`,
          `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()`
M*/

/* Creates a PCFIELDSPLIT context: sets default parameters, installs the PC operations table,
   and composes the PCFieldSplitXXX_C methods that the public PCFieldSplitXXX() wrappers dispatch to */
PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc)
{
  PC_FieldSplit *jac;

  PetscFunctionBegin;
  PetscCall(PetscNew(&jac));

  jac->bs                 = -1; /* unset; determined from the operator's block size at setup */
  jac->nsplits            = 0;
  jac->type               = PC_COMPOSITE_MULTIPLICATIVE;
  jac->schurpre           = PC_FIELDSPLIT_SCHUR_PRE_USER; /* Try user preconditioner first, fall back on diagonal */
  jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL;
  jac->schurscale         = -1.0;
  jac->dm_splits          = PETSC_TRUE;
  jac->detect             = PETSC_FALSE;
  jac->gkbtol             = 1e-5;
  jac->gkbdelay           = 5;
  jac->gkbnu              = 1;
  jac->gkbmaxit           = 100;
  jac->gkbmonitor         = PETSC_FALSE;
  jac->coordinates_set    = PETSC_FALSE;

  pc->data = (void *)jac;

  pc->ops->apply           = PCApply_FieldSplit;
  pc->ops->applytranspose  = PCApplyTranspose_FieldSplit;
  pc->ops->setup           = PCSetUp_FieldSplit;
  pc->ops->reset           = PCReset_FieldSplit;
  pc->ops->destroy         = PCDestroy_FieldSplit;
  pc->ops->setfromoptions  = PCSetFromOptions_FieldSplit;
  pc->ops->view            = PCView_FieldSplit;
  pc->ops->applyrichardson = NULL;

  /* Method registration; PCFieldSplitSetType() may later swap apply/view for Schur or GKB variants */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit));
  PetscFunctionReturn(0);
}