/*
   Data structures for the PCFIELDSPLIT preconditioner: the per-split linked-list entry
   (PC_FieldSplitLink) and the PC context (PC_FieldSplit) shared by the additive,
   multiplicative, Schur-complement, and Golub-Kahan variants.
*/
#include <petsc/private/pcimpl.h>  /*I "petscpc.h" I*/
#include <petsc/private/kspimpl.h> /* This is needed to provide the appropriate PETSC_EXTERN for KSP_Solve_FS ....*/
#include <petscdm.h>

/* String tables for the Schur options; the trailing entries are the enum type name, the option prefix, and a NULL sentinel */
const char *const PCFieldSplitSchurPreTypes[]  = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL};
const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL};

/* Logging events for the sub-solves performed on the pieces of the split/factored system */
PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4;

/* One node in the linked list of splits; each split carries its own KSP, index sets, and work vectors */
typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
struct _PC_FieldSplitLink {
  KSP               ksp;                 /* solver for this split's block */
  Vec               x, y, z;             /* work vectors for this split */
  char             *splitname;           /* name used in options prefixes and viewing */
  PetscInt          nfields;             /* number of fields making up this split */
  PetscInt         *fields, *fields_col; /* row and column field indices (used when the split is defined by fields, not by an IS) */
  VecScatter        sctx;                /* scatter between the full vector and this split's sub-vector */
  IS                is, is_col;          /* row and column index sets selecting this split */
  PC_FieldSplitLink next, previous;
  PetscLogEvent     event;

  /* Used only when setting coordinates with PCSetCoordinates */
  PetscInt   dim;
  PetscInt   ndofs;
  PetscReal *coords;
};

typedef struct {
  PCCompositeType type;         /* how the splits are combined (additive, multiplicative, Schur, GKB, ...) */
  PetscBool       defaultsplit; /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */
  PetscBool       splitdefined; /* Flag is set after the splits have been defined, to prevent more splits from being added */
  PetscInt        bs;           /* Block size for IS and Mat structures */
  PetscInt        nsplits;      /* Number of field divisions defined */
  Vec            *x, *y, w1, w2;
  Mat            *mat;    /* The diagonal block for each split */
  Mat            *pmat;   /* The preconditioning diagonal block for each split */
  Mat            *Afield; /* The rows of the matrix associated with each split */
  PetscBool       issetup;

  /* Only used when Schur complement preconditioning is used */
  Mat                       B;          /* The (0,1) block */
  Mat                       C;          /* The (1,0) block */
  Mat                       schur;      /* The Schur complement S = A11 - A10 A00^{-1} A01, the KSP here, kspinner, is H_1 in [El08] */
  Mat                       schurp;     /* Assembled approximation to S built by MatSchurComplement to be used as a preconditioning matrix when solving with S */
  Mat                       schur_user; /* User-provided preconditioning matrix for the Schur complement */
  PCFieldSplitSchurPreType  schurpre;   /* Determines which preconditioning matrix is used for the Schur complement */
  PCFieldSplitSchurFactType schurfactorization;
  KSP                       kspschur;   /* The solver for S */
  KSP                       kspupper;   /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */
  PetscScalar               schurscale; /* Scaling factor for the Schur complement solution with DIAG factorization */

  /* Only used when Golub-Kahan bidiagonalization preconditioning is used */
  Mat          H;          /* The modified matrix H = A00 + nu*A01*A01' */
  PetscReal    gkbtol;     /* Stopping tolerance for lower bound estimate */
  PetscInt     gkbdelay;   /* The delay window for the stopping criterion */
  PetscReal    gkbnu;      /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */
  PetscInt     gkbmaxit;   /* Maximum number of iterations for outer loop */
  PetscBool    gkbmonitor; /* Monitor for gkb iterations and the lower bound error */
  PetscViewer  gkbviewer;  /* Viewer context for gkbmonitor */
  Vec          u, v, d, Hu; /* Work vectors for the GKB algorithm */
  PetscScalar *vecz;        /* Contains intermediate values, eg for lower bound */

  PC_FieldSplitLink head;             /* first entry of the linked list of splits */
  PetscBool         isrestrict;       /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */
  PetscBool         suboptionsset;    /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */
  PetscBool         dm_splits;        /* Whether to use DMCreateFieldDecomposition() whenever possible */
  PetscBool         diag_use_amat;    /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         detect;           /* Whether to form 2-way split by finding
zero diagonal entries */
  PetscBool coordinates_set; /* Whether PCSetCoordinates has been called */
} PC_FieldSplit;

/*
   Note:
   there is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit instead of
   inside PC_FieldSplitLink, just historical. If you want to be able to add new fields after already using the
   PC you could change this.
*/

/* This helper is so that setting a user-provided preconditioning matrix is orthogonal to choosing to use it. This way the
 * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices.
 * Returns the matrix that should precondition the Schur complement, per jac->schurpre; never takes a new reference. */
static Mat FieldSplitSchurPre(PC_FieldSplit *jac)
{
  switch (jac->schurpre) {
  case PC_FIELDSPLIT_SCHUR_PRE_SELF:
    return jac->schur;
  case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
    return jac->schurp;
  case PC_FIELDSPLIT_SCHUR_PRE_A11:
    /* pmat[1] is the preconditioning block of the second split; assumes the standard 2-split Schur setup */
    return jac->pmat[1];
  case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */
  case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */
  default:
    return jac->schur_user ? jac->schur_user : jac->pmat[1];
  }
}

#include <petscdraw.h>
/* Viewer for the non-Schur variants: ASCII output lists the composition type, each split's
   fields (or "Defined by IS"), and each split's KSP; draw output lays the sub-KSPs out side by side. */
static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for off-diagonal blocks\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Solver info for each split is in the following KSP objects:\n"));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        /* tabs disabled so the comma-separated field list stays on one line */
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      PetscCall(KSPView(ilink->ksp, viewer));
      ilink = ilink->next;
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* center the row of sub-KSP drawings around the current point */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Viewer for the Schur-complement variant: reports the factorization type, which matrix is used
   to precondition S, the split layout, and the A00/upper/Schur sub-solvers. */
static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer)
{
  PC_FieldSplit             *jac = (PC_FieldSplit *)pc->data;
  PetscBool                  iascii, isdraw;
  PetscInt                   i, j;
  PC_FieldSplitLink          ilink = jac->head;
  MatSchurComplementAinvType atype;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for blocks\n"));
    switch (jac->schurpre) {
    case PC_FIELDSPLIT_SCHUR_PRE_SELF:
      PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from S itself\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
      if (jac->schur) {
        PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype));
        PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's "))));
      }
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_A11:
      PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from A11\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_FULL:
      PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from the exact Schur complement\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_USER:
      /* when no user matrix has been supplied, the code falls back to A11 (see FieldSplitSchurPre) */
      if (jac->schur_user) {
        PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from user provided matrix\n"));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from A11\n"));
      }
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre);
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, " Split info:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      ilink = ilink->next;
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->head) {
      PetscCall(KSPView(jac->head->ksp, viewer));
    } else PetscCall(PetscViewerASCIIPrintf(viewer, " not yet available\n"));
    PetscCall(PetscViewerASCIIPopTab(viewer));
    /* a distinct upper solver exists only for the UPPER/FULL factorizations when the user configured one */
    if (jac->head && jac->kspupper != jac->head->ksp) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor \n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer));
      else PetscCall(PetscViewerASCIIPrintf(viewer, " not yet available\n"));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01 \n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->kspschur) {
      PetscCall(KSPView(jac->kspschur, viewer));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " not yet available\n"));
    }
    PetscCall(PetscViewerASCIIPopTab(viewer));
    PetscCall(PetscViewerASCIIPopTab(viewer));
  } else if (isdraw && jac->head) {
    PetscDraw draw;
    PetscReal x, y, w, wd, h;
    PetscInt  cnt = 2; /* number of sub-KSP drawings: A00 and Schur, plus the upper solver if distinct */
    char      str[32];

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (jac->kspupper != jac->head->ksp) cnt++;
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (cnt + 1);

    PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) {
      /* report the A11 fallback actually used instead of the requested USER choice */
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11]));
    } else {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre]));
    }
    PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    x = x - wd * (cnt - 1) / 2.0;

    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->head->ksp, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
    if (jac->kspupper != jac->head->ksp) {
      x += wd;
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(jac->kspupper, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
    }
    x += wd;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->kspschur, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Viewer for the Golub-Kahan bidiagonalization variant: prints the GKB stopping parameters and
   the solver for the augmented matrix H = A00 + nu*A01*A01' (only the first split's KSP is shown). */
static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for off-diagonal blocks\n"));

    PetscCall(PetscViewerASCIIPrintf(viewer, " Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Solver info for H = A00 + nu*A01*A01' matrix:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));

    if (ilink->fields) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields "));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      for (j = 0; j < ilink->nfields; j++) {
        if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
        PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
      }
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n"));
    }
    PetscCall(KSPView(ilink->ksp, viewer));

    PetscCall(PetscViewerASCIIPopTab(viewer));
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Precondition: jac->bs is set to a meaningful value.
   Scans the options database for -pc_fieldsplit_<i>_fields (and the matching _fields_col)
   for i = 0, 1, ... until a split index is missing, creating one split per option found. */
static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscInt       i, nfields, *ifields, nfields_col, *ifields_col;
  PetscBool      flg, flg_col;
  char           optionname[128], splitname[8], optionname_col[128];

  PetscFunctionBegin;
  /* at most jac->bs fields can be listed for any one split */
  PetscCall(PetscMalloc1(jac->bs, &ifields));
  PetscCall(PetscMalloc1(jac->bs, &ifields_col));
  for (i = 0, flg = PETSC_TRUE;; i++) {
    PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
    PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
    PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i));
    nfields     = jac->bs;
    nfields_col = jac->bs;
    PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
    PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col));
    if (!flg) break;
    else if (flg && !flg_col) {
      /* no separate column fields given: use the row fields for the columns too */
      PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
      PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields));
    } else {
      PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
      PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match");
      PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col));
    }
  }
  if (i > 0) {
    /* Makes command-line setting of splits take precedence over setting them in code.
       Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would
       create new splits, which would probably not be what the user wanted.
    */
    jac->splitdefined = PETSC_TRUE;
  }
  PetscCall(PetscFree(ifields));
  PetscCall(PetscFree(ifields_col));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Establishes the splits when the user has not fully defined them: tries, in order, a DM field
   decomposition, zero-diagonal detection (-pc_fieldsplit_detect_coupling / jac->detect), runtime
   options, MATNEST index sets, and finally one split per block-size component. Errors unless at
   least two splits result. */
static PetscErrorCode PCFieldSplitSetDefaults(PC pc)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscBool         fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE;
  PetscInt          i;

  PetscFunctionBegin;
  /*
   Kinda messy, but at least this now uses DMCreateFieldDecomposition().
   Should probably be rewritten.
  */
  if (!ilink) {
    PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL));
    if (pc->dm && jac->dm_splits && !jac->detect && !coupling) {
      PetscInt  numFields, f, i, j;
      char    **fieldNames;
      IS       *fields;
      DM       *dms;
      DM        subdm[128];
      PetscBool flg;

      PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms));
      /* Allow the user to prescribe the splits */
      for (i = 0, flg = PETSC_TRUE;; i++) {
        PetscInt ifields[128];
        IS       compField;
        char     optionname[128], splitname[8];
        PetscInt nfields = numFields;

        PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
        PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
        if (!flg) break;
        PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields);
        PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i]));
        if (nfields == 1) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField));
        } else {
          /* multi-field split: use the split index as its name */
          PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
          PetscCall(PCFieldSplitSetIS(pc, splitname, compField));
        }
        PetscCall(ISDestroy(&compField));
        for (j = 0; j < nfields; ++j) {
          f = ifields[j];
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      }
      if (i == 0) {
        /* no runtime prescription: one split per DM field */
        for (f = 0; f < numFields; ++f) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f]));
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      } else {
        /* replace the per-field DMs with the sub-DMs built above */
        for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j));
        PetscCall(PetscFree(dms));
        PetscCall(PetscMalloc1(i, &dms));
        for (j = 0; j < i; ++j) dms[j] = subdm[j];
      }
      PetscCall(PetscFree(fieldNames));
      PetscCall(PetscFree(fields));
      if (dms) {
        PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n"));
        for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) {
          const char *prefix;
          PetscCall(PetscObjectGetOptionsPrefix((PetscObject)(ilink->ksp), &prefix));
          PetscCall(PetscObjectSetOptionsPrefix((PetscObject)(dms[i]), prefix));
          PetscCall(KSPSetDM(ilink->ksp, dms[i]));
          PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE));
          {
            /* propagate the operator-assembly callback from the outer DM to each sub-DM */
            PetscErrorCode (*func)(KSP, Mat, Mat, void *);
            void *ctx;

            PetscCall(DMKSPGetComputeOperators(pc->dm, &func, &ctx));
            PetscCall(DMKSPSetComputeOperators(dms[i], func, ctx));
          }
          PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0));
          PetscCall(DMDestroy(&dms[i]));
        }
        PetscCall(PetscFree(dms));
      }
    } else {
      if (jac->bs <= 0) {
        if (pc->pmat) {
          PetscCall(MatGetBlockSize(pc->pmat, &jac->bs));
        } else jac->bs = 1;
      }

      if (jac->detect) {
        /* 2-way split: rows with nonzero diagonal vs. rows with zero diagonal */
        IS       zerodiags, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->diag_use_amat) {
          PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags));
        } else {
          PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags));
        }
        PetscCall(ISComplement(zerodiags, nmin, nmax, &rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags));
        PetscCall(ISDestroy(&zerodiags));
        PetscCall(ISDestroy(&rest));
      } else if (coupling) {
        /* 2-way split: all local rows vs. rows with off-block-diagonal entries */
        IS       coupling, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->offdiag_use_amat) {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling));
        } else {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling));
        }
        PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest));
        PetscCall(ISSetIdentity(rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", coupling));
        PetscCall(ISDestroy(&coupling));
        PetscCall(ISDestroy(&rest));
      } else {
        PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL));
        if (!fieldsplit_default) {
          /* Allow user to set fields from command line, if bs was known at the time of PCSetFromOptions_FieldSplit()
             then it is set there. This is not ideal because we should only have options set in XXSetFromOptions().
          */
          PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
          if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
        }
        if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) {
          Mat       M = pc->pmat;
          PetscBool isnest;

          PetscCall(PetscInfo(pc, "Using default splitting of fields\n"));
          /* prefer Pmat if it is MATNEST, otherwise check Amat */
          PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest));
          if (!isnest) {
            M = pc->mat;
            PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest));
          }
          if (isnest) {
            /* one split per nest block, taken from the nest row index sets */
            IS      *fields;
            PetscInt nf;

            PetscCall(MatNestGetSize(M, &nf, NULL));
            PetscCall(PetscMalloc1(nf, &fields));
            PetscCall(MatNestGetISs(M, fields, NULL));
            for (i = 0; i < nf; i++) PetscCall(PCFieldSplitSetIS(pc, NULL, fields[i]));
            PetscCall(PetscFree(fields));
          } else {
            /* one split per component of the block size */
            for (i = 0; i < jac->bs; i++) {
              char splitname[8];
              PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
              PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i));
            }
            jac->defaultsplit = PETSC_TRUE;
          }
        }
      }
    }
  } else if (jac->nsplits == 1) {
    /* a single user-supplied split: synthesize the second one as its complement */
    IS       is2;
    PetscInt nmin, nmax;

    PetscCheck(ilink->is, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()");
    PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
    PetscCall(ISComplement(ilink->is, nmin, nmax, &is2));
    PetscCall(PCFieldSplitSetIS(pc, "1", is2));
    PetscCall(ISDestroy(&is2));
  }

  PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Forms the augmented Lagrangian operator H = A00 + nu*A01*A01' used by the GKB variant.
   A is the (0,0) block, B the (0,1) block, C the (1,0) block; *H is created (MAT_INITIAL_MATRIX).
   Errors if B and C' differ in 1-norm by more than PETSC_SMALL (relative), i.e. the system is not symmetric/Hermitian. */
static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu)
{
  Mat       BT, T;
  PetscReal nrmT, nrmB;

  PetscFunctionBegin;
  PetscCall(MatHermitianTranspose(C, MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */
  PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN));
  PetscCall(MatNorm(T, NORM_1, &nrmT));
  PetscCall(MatNorm(B, NORM_1, &nrmB));
  PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/hermitian, GKB is not applicable.");

  /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */
  /* setting N := 1/nu*I in [Ar13]. */
  PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT));
  PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_DEFAULT, H)); /* H = A01*A01' */
  PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN));        /* H = A00 + nu*A01*A01' */

  PetscCall(MatDestroy(&BT));
  PetscCall(MatDestroy(&T));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *value[], PetscBool *flg);

/* Sets up the field split preconditioner: finalizes the splits, then extracts the per-split
   sub-matrices, index sets, work vectors and scatters. (Continues beyond this excerpt.) */
static PetscErrorCode PCSetUp_FieldSplit(PC pc)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink;
  PetscInt          i, nsplit;
  PetscBool         sorted, sorted_col;

  PetscFunctionBegin;
  pc->failedreason = PC_NOERROR;
  PetscCall(PCFieldSplitSetDefaults(pc));
  nsplit = jac->nsplits;
  ilink  = jac->head;

  /* get the matrices for each split */
  if (!jac->issetup) {
    PetscInt rstart, rend, nslots, bs;

    jac->issetup = PETSC_TRUE;

    /* This is done here instead of in PCFieldSplitSetFields() because may not have matrix at that point */
    if (jac->defaultsplit || !ilink->is) {
      if (jac->bs <= 0) jac->bs = nsplit;
    }

    /* MatCreateSubMatrix() for [S]BAIJ matrices can only work if the indices include entire blocks of the matrix */
    PetscCall(MatGetBlockSize(pc->pmat, &bs));
    if (bs > 1 && (jac->bs <= bs || jac->bs % bs)) {
PetscBool blk; 608 609 PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &blk, MATBAIJ, MATSBAIJ, MATSEQBAIJ, MATSEQSBAIJ, MATMPIBAIJ, MATMPISBAIJ, NULL)); 610 PetscCheck(!blk, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Cannot use MATBAIJ with PCFIELDSPLIT and currently set matrix and PC blocksizes"); 611 } 612 613 bs = jac->bs; 614 PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend)); 615 nslots = (rend - rstart) / bs; 616 for (i = 0; i < nsplit; i++) { 617 if (jac->defaultsplit) { 618 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is)); 619 PetscCall(ISDuplicate(ilink->is, &ilink->is_col)); 620 } else if (!ilink->is) { 621 if (ilink->nfields > 1) { 622 PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col; 623 PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii)); 624 PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj)); 625 for (j = 0; j < nslots; j++) { 626 for (k = 0; k < nfields; k++) { 627 ii[nfields * j + k] = rstart + bs * j + fields[k]; 628 jj[nfields * j + k] = rstart + bs * j + fields_col[k]; 629 } 630 } 631 PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is)); 632 PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col)); 633 PetscCall(ISSetBlockSize(ilink->is, nfields)); 634 PetscCall(ISSetBlockSize(ilink->is_col, nfields)); 635 } else { 636 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is)); 637 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col)); 638 } 639 } 640 PetscCall(ISSorted(ilink->is, &sorted)); 641 if (ilink->is_col) PetscCall(ISSorted(ilink->is_col, &sorted_col)); 642 PetscCheck(sorted && sorted_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Fields must be sorted when creating 
split"); 643 ilink = ilink->next; 644 } 645 } 646 647 ilink = jac->head; 648 if (!jac->pmat) { 649 Vec xtmp; 650 651 PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL)); 652 PetscCall(PetscMalloc1(nsplit, &jac->pmat)); 653 PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y)); 654 for (i = 0; i < nsplit; i++) { 655 MatNullSpace sp; 656 657 /* Check for preconditioning matrix attached to IS */ 658 PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i])); 659 if (jac->pmat[i]) { 660 PetscCall(PetscObjectReference((PetscObject)jac->pmat[i])); 661 if (jac->type == PC_COMPOSITE_SCHUR) { 662 jac->schur_user = jac->pmat[i]; 663 664 PetscCall(PetscObjectReference((PetscObject)jac->schur_user)); 665 } 666 } else { 667 const char *prefix; 668 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i])); 669 PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix)); 670 PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix)); 671 PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view")); 672 } 673 /* create work vectors for each split */ 674 PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i])); 675 ilink->x = jac->x[i]; 676 ilink->y = jac->y[i]; 677 ilink->z = NULL; 678 /* compute scatter contexts needed by multiplicative versions and non-default splits */ 679 PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx)); 680 PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp)); 681 if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp)); 682 ilink = ilink->next; 683 } 684 PetscCall(VecDestroy(&xtmp)); 685 } else { 686 MatReuse scall; 687 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 688 for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i])); 689 scall = MAT_INITIAL_MATRIX; 690 } else scall = MAT_REUSE_MATRIX; 691 692 for (i = 0; i < nsplit; i++) { 693 Mat pmat; 694 695 /* Check for preconditioning matrix attached to IS */ 696 
PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat)); 697 if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i])); 698 ilink = ilink->next; 699 } 700 } 701 if (jac->diag_use_amat) { 702 ilink = jac->head; 703 if (!jac->mat) { 704 PetscCall(PetscMalloc1(nsplit, &jac->mat)); 705 for (i = 0; i < nsplit; i++) { 706 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i])); 707 ilink = ilink->next; 708 } 709 } else { 710 MatReuse scall; 711 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 712 for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i])); 713 scall = MAT_INITIAL_MATRIX; 714 } else scall = MAT_REUSE_MATRIX; 715 716 for (i = 0; i < nsplit; i++) { 717 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i])); 718 ilink = ilink->next; 719 } 720 } 721 } else { 722 jac->mat = jac->pmat; 723 } 724 725 /* Check for null space attached to IS */ 726 ilink = jac->head; 727 for (i = 0; i < nsplit; i++) { 728 MatNullSpace sp; 729 730 PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp)); 731 if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp)); 732 ilink = ilink->next; 733 } 734 735 if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) { 736 /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */ 737 /* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? 
*/ 738 ilink = jac->head; 739 if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) { 740 /* special case need where Afield[0] is not needed and only certain columns of Afield[1] are needed since update is only on those rows of the solution */ 741 if (!jac->Afield) { 742 PetscCall(PetscCalloc1(nsplit, &jac->Afield)); 743 if (jac->offdiag_use_amat) { 744 PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1])); 745 } else { 746 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1])); 747 } 748 } else { 749 MatReuse scall; 750 751 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 752 PetscCall(MatDestroy(&jac->Afield[1])); 753 scall = MAT_INITIAL_MATRIX; 754 } else scall = MAT_REUSE_MATRIX; 755 756 if (jac->offdiag_use_amat) { 757 PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1])); 758 } else { 759 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, &jac->Afield[1])); 760 } 761 } 762 } else { 763 if (!jac->Afield) { 764 PetscCall(PetscMalloc1(nsplit, &jac->Afield)); 765 for (i = 0; i < nsplit; i++) { 766 if (jac->offdiag_use_amat) { 767 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i])); 768 } else { 769 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i])); 770 } 771 ilink = ilink->next; 772 } 773 } else { 774 MatReuse scall; 775 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 776 for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i])); 777 scall = MAT_INITIAL_MATRIX; 778 } else scall = MAT_REUSE_MATRIX; 779 780 for (i = 0; i < nsplit; i++) { 781 if (jac->offdiag_use_amat) { 782 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i])); 783 } else { 784 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, &jac->Afield[i])); 785 } 786 ilink = ilink->next; 787 } 788 } 789 } 790 } 791 792 if 
(jac->type == PC_COMPOSITE_SCHUR) { 793 IS ccis; 794 PetscBool isset, isspd; 795 PetscInt rstart, rend; 796 char lscname[256]; 797 PetscObject LSC_L; 798 799 PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields"); 800 801 /* If pc->mat is SPD, don't scale by -1 the Schur complement */ 802 if (jac->schurscale == (PetscScalar)-1.0) { 803 PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd)); 804 jac->schurscale = (isset && isspd) ? 1.0 : -1.0; 805 } 806 807 /* When extracting off-diagonal submatrices, we take complements from this range */ 808 PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend)); 809 810 if (jac->schur) { 811 KSP kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper; 812 MatReuse scall; 813 814 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 815 scall = MAT_INITIAL_MATRIX; 816 PetscCall(MatDestroy(&jac->B)); 817 PetscCall(MatDestroy(&jac->C)); 818 } else scall = MAT_REUSE_MATRIX; 819 820 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner)); 821 ilink = jac->head; 822 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 823 if (jac->offdiag_use_amat) { 824 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->B)); 825 } else { 826 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->B)); 827 } 828 PetscCall(ISDestroy(&ccis)); 829 ilink = ilink->next; 830 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 831 if (jac->offdiag_use_amat) { 832 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->C)); 833 } else { 834 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->C)); 835 } 836 PetscCall(ISDestroy(&ccis)); 837 PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1])); 838 if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) { 839 PetscCall(MatDestroy(&jac->schurp)); 840 
PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp)); 841 } 842 if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0])); 843 if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0])); 844 PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac))); 845 } else { 846 const char *Dprefix; 847 char schurprefix[256], schurmatprefix[256]; 848 char schurtestoption[256]; 849 MatNullSpace sp; 850 PetscBool flg; 851 KSP kspt; 852 853 /* extract the A01 and A10 matrices */ 854 ilink = jac->head; 855 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 856 if (jac->offdiag_use_amat) { 857 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 858 } else { 859 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 860 } 861 PetscCall(ISDestroy(&ccis)); 862 ilink = ilink->next; 863 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 864 if (jac->offdiag_use_amat) { 865 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 866 } else { 867 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 868 } 869 PetscCall(ISDestroy(&ccis)); 870 871 /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */ 872 PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur)); 873 PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT)); 874 PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1])); 875 PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? 
((PetscObject)pc)->prefix : "", ilink->splitname)); 876 PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix)); 877 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt)); 878 PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix)); 879 880 /* Note: this is not true in general */ 881 PetscCall(MatGetNullSpace(jac->mat[1], &sp)); 882 if (sp) PetscCall(MatSetNullSpace(jac->schur, sp)); 883 884 PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname)); 885 PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, &flg)); 886 if (flg) { 887 DM dmInner; 888 KSP kspInner; 889 PC pcInner; 890 891 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner)); 892 PetscCall(KSPReset(kspInner)); 893 PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0])); 894 PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname)); 895 /* Indent this deeper to emphasize the "inner" nature of this solver. */ 896 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2)); 897 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2)); 898 PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix)); 899 900 /* Set DM for new solver */ 901 PetscCall(KSPGetDM(jac->head->ksp, &dmInner)); 902 PetscCall(KSPSetDM(kspInner, dmInner)); 903 PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE)); 904 905 /* Defaults to PCKSP as preconditioner */ 906 PetscCall(KSPGetPC(kspInner, &pcInner)); 907 PetscCall(PCSetType(pcInner, PCKSP)); 908 PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp)); 909 } else { 910 /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or 911 * PCFieldSplitSetIS_FieldSplit. 
We don't want KSPPREONLY because it makes the Schur complement inexact, 912 * preventing Schur complement reduction to be an accurate solve. Usually when an iterative solver is used for 913 * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make 914 * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used 915 * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. */ 916 PetscCall(KSPSetType(jac->head->ksp, KSPGMRES)); 917 PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp)); 918 } 919 PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0])); 920 PetscCall(KSPSetFromOptions(jac->head->ksp)); 921 PetscCall(MatSetFromOptions(jac->schur)); 922 923 PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg)); 924 if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */ 925 KSP kspInner; 926 PC pcInner; 927 928 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner)); 929 PetscCall(KSPGetPC(kspInner, &pcInner)); 930 PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg)); 931 if (flg) { 932 KSP ksp; 933 934 PetscCall(PCKSPGetKSP(pcInner, &ksp)); 935 if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE)); 936 } 937 } 938 PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname)); 939 PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, &flg)); 940 if (flg) { 941 DM dmInner; 942 943 PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? 
((PetscObject)pc)->prefix : "", ilink->splitname)); 944 PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper)); 945 PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure)); 946 PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix)); 947 PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1)); 948 PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1)); 949 PetscCall(KSPGetDM(jac->head->ksp, &dmInner)); 950 PetscCall(KSPSetDM(jac->kspupper, dmInner)); 951 PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE)); 952 PetscCall(KSPSetFromOptions(jac->kspupper)); 953 PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0])); 954 PetscCall(VecDuplicate(jac->head->x, &jac->head->z)); 955 } else { 956 jac->kspupper = jac->head->ksp; 957 PetscCall(PetscObjectReference((PetscObject)jac->head->ksp)); 958 } 959 960 if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp)); 961 PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur)); 962 PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure)); 963 PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1)); 964 if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) { 965 PC pcschur; 966 PetscCall(KSPGetPC(jac->kspschur, &pcschur)); 967 PetscCall(PCSetType(pcschur, PCNONE)); 968 /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */ 969 } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) { 970 PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user)); 971 } 972 PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac))); 973 PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix)); 974 PetscCall(KSPSetOptionsPrefix(jac->kspschur, Dprefix)); 975 /* propagate DM */ 976 { 977 DM sdm; 978 
PetscCall(KSPGetDM(jac->head->next->ksp, &sdm)); 979 if (sdm) { 980 PetscCall(KSPSetDM(jac->kspschur, sdm)); 981 PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE)); 982 } 983 } 984 /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */ 985 /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */ 986 PetscCall(KSPSetFromOptions(jac->kspschur)); 987 } 988 PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY)); 989 PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY)); 990 991 /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */ 992 PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname)); 993 PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L)); 994 if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L)); 995 if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", (PetscObject)LSC_L)); 996 PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname)); 997 PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L)); 998 if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L)); 999 if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", (PetscObject)LSC_L)); 1000 } else if (jac->type == PC_COMPOSITE_GKB) { 1001 IS ccis; 1002 PetscInt rstart, rend; 1003 1004 PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields"); 1005 1006 ilink = jac->head; 1007 1008 /* When extracting off-diagonal submatrices, we take complements from this range */ 1009 PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend)); 1010 1011 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 1012 if (jac->offdiag_use_amat) { 1013 
PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 1014 } else { 1015 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 1016 } 1017 PetscCall(ISDestroy(&ccis)); 1018 /* Create work vectors for GKB algorithm */ 1019 PetscCall(VecDuplicate(ilink->x, &jac->u)); 1020 PetscCall(VecDuplicate(ilink->x, &jac->Hu)); 1021 PetscCall(VecDuplicate(ilink->x, &jac->w2)); 1022 ilink = ilink->next; 1023 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 1024 if (jac->offdiag_use_amat) { 1025 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 1026 } else { 1027 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 1028 } 1029 PetscCall(ISDestroy(&ccis)); 1030 /* Create work vectors for GKB algorithm */ 1031 PetscCall(VecDuplicate(ilink->x, &jac->v)); 1032 PetscCall(VecDuplicate(ilink->x, &jac->d)); 1033 PetscCall(VecDuplicate(ilink->x, &jac->w1)); 1034 PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu)); 1035 PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz)); 1036 1037 ilink = jac->head; 1038 PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H)); 1039 if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp)); 1040 /* Create gkb_monitor context */ 1041 if (jac->gkbmonitor) { 1042 PetscInt tablevel; 1043 PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer)); 1044 PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII)); 1045 PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel)); 1046 PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel)); 1047 PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1)); 1048 } 1049 } else { 1050 /* set up the individual splits' PCs */ 1051 i = 0; 1052 ilink = jac->head; 1053 while (ilink) { 1054 PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i])); 1055 /* really 
want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
      if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
      i++;
      ilink = ilink->next;
    }
  }

  /* Set coordinates to the sub PC objects whenever these are set */
  if (jac->coordinates_set) {
    PC pc_coords;
    if (jac->type == PC_COMPOSITE_SCHUR) {
      // Head is first block.
      PetscCall(KSPGetPC(jac->head->ksp, &pc_coords));
      PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords));
      // Second one is Schur block, but its KSP object is in kspschur.
      PetscCall(KSPGetPC(jac->kspschur, &pc_coords));
      PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords));
    } else if (jac->type == PC_COMPOSITE_GKB) {
      PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB Fieldpslit preconditioner"));
    } else {
      /* all other composite types: forward each split's coordinates to its sub-PC */
      ilink = jac->head;
      while (ilink) {
        PetscCall(KSPGetPC(ilink->ksp, &pc_coords));
        PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords));
        ilink = ilink->next;
      }
    }
  }

  jac->suboptionsset = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Scatter the split's piece of xx into ilink->x, solve with the split's KSP, and
   accumulate (ADD_VALUES) the result back into the global vector yy.
   Written as one short-circuiting expression so the first failing call stops the
   chain and its error code is returned; note it references a variable `pc` that
   must be in scope at the point of use (for KSPCheckSolve()). */
#define FieldSplitSplitSolveAdd(ilink, xx, yy) \
  ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \
    KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \
    VecScatterEnd(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE)))

/* Apply the Schur-complement variant of PCFieldSplit (exactly 2 splits). */
static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x,
Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;            /* split A: the (0,0) block; split D: the (1,1)/Schur block */
  KSP               kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper; /* kspLower currently aliases kspA; kspUpper may be a distinct solver */

  PetscFunctionBegin;
  /* Throughout, VecScatterBegin/End pairs are deliberately interleaved with
     independent work so communication can overlap computation. */
  switch (jac->schurfactorization) {
  case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
    /* [A00 0; 0 -S], positive definite, suitable for MINRES */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    /* y0 = A00^{-1} x0 */
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    /* y1 = schurscale * S^{-1} x1 (schurscale set in PCSetUp, -1 unless pmat is known SPD) */
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScale(ilinkD->y, jac->schurscale));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* [A00 0; A10 S], suitable for left preconditioning */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    /* y0 = A00^{-1} x0 */
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    /* form x1 - A10 y0 in ilinkD->x (C holds the (1,0) block A10) */
    PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    /* y1 = S^{-1} (x1 - A10 y0) */
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* [A00 A01; 0 S], suitable for right preconditioning */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    /* y1 = S^{-1} x1 */
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* form x0 - A01 y1 in ilinkA->x (B holds the (0,1) block A01) */
    PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
    PetscCall(VecScale(ilinkA->x, -1.));
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    /* y0 = A00^{-1} (x0 - A01 y1) */
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    /* lower-triangular solve: y0 = A00^{-1} x0 (kspLower is currently kspA) */
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
    /* form x1 - A10 y0 */
    PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.0));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));

    /* y1 = S^{-1} (x1 - A10 y0) */
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1180 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1181 1182 if (kspUpper == kspA) { 1183 PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y)); 1184 PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y)); 1185 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1186 PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y)); 1187 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1188 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1189 } else { 1190 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1191 PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y)); 1192 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1193 PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x)); 1194 PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL)); 1195 PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z)); 1196 PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z)); 1197 PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL)); 1198 PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z)); 1199 } 1200 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1201 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1202 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1203 } 1204 PetscFunctionReturn(PETSC_SUCCESS); 1205 } 1206 1207 static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y) 1208 { 1209 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1210 PC_FieldSplitLink ilink = jac->head; 1211 PetscInt cnt, bs; 1212 1213 PetscFunctionBegin; 1214 if (jac->type == PC_COMPOSITE_ADDITIVE) { 1215 if (jac->defaultsplit) { 1216 PetscCall(VecGetBlockSize(x, &bs)); 1217 PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, 
"Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs); 1218 PetscCall(VecGetBlockSize(y, &bs)); 1219 PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs); 1220 PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES)); 1221 while (ilink) { 1222 PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1223 PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y)); 1224 PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y)); 1225 PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1226 ilink = ilink->next; 1227 } 1228 PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES)); 1229 } else { 1230 PetscCall(VecSet(y, 0.0)); 1231 while (ilink) { 1232 PetscCall(FieldSplitSplitSolveAdd(ilink, x, y)); 1233 ilink = ilink->next; 1234 } 1235 } 1236 } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) { 1237 PetscCall(VecSet(y, 0.0)); 1238 /* solve on first block for first block variables */ 1239 PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD)); 1240 PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD)); 1241 PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1242 PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y)); 1243 PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y)); 1244 PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1245 PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE)); 1246 PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE)); 1247 1248 /* compute the residual only onto second block variables using first block variables */ 1249 PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x)); 1250 ilink = ilink->next; 
1251 PetscCall(VecScale(ilink->x, -1.0)); 1252 PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD)); 1253 PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD)); 1254 1255 /* solve on second block variables */ 1256 PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1257 PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y)); 1258 PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y)); 1259 PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1260 PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE)); 1261 PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE)); 1262 } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) { 1263 if (!jac->w1) { 1264 PetscCall(VecDuplicate(x, &jac->w1)); 1265 PetscCall(VecDuplicate(x, &jac->w2)); 1266 } 1267 PetscCall(VecSet(y, 0.0)); 1268 PetscCall(FieldSplitSplitSolveAdd(ilink, x, y)); 1269 cnt = 1; 1270 while (ilink->next) { 1271 ilink = ilink->next; 1272 /* compute the residual only over the part of the vector needed */ 1273 PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x)); 1274 PetscCall(VecScale(ilink->x, -1.0)); 1275 PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD)); 1276 PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD)); 1277 PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1278 PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y)); 1279 PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y)); 1280 PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1281 PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE)); 1282 PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE)); 1283 } 1284 if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) { 1285 cnt -= 
2;
      while (ilink->previous) {
        ilink = ilink->previous;
        /* compute the residual only over the part of the vector needed */
        PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x));
        PetscCall(VecScale(ilink->x, -1.0));
        PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      }
    }
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  PCApply_FieldSplit_GKB - Applies the Golub-Kahan bidiagonalization preconditioner
  (2 splits): an inner bidiagonalization iteration on the 2x2 block system using the
  augmented matrix H = A00 + nu*A01*A01', stopped either by the lower-bound error
  estimate gkbtol or after gkbmaxit iterations.  [Ar13] is cited in the comments below.
*/
static PetscErrorCode PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
  KSP               ksp = ilinkA->ksp;
  Vec               u, v, Hu, d, work1, work2;
  PetscScalar       alpha, z, nrmz2, *vecz;
  PetscReal         lowbnd, nu, beta;
  PetscInt          j, iterGKB;

  PetscFunctionBegin;
  PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));

  /* work vectors were allocated during PCSetUp */
  u     = jac->u;
  v     = jac->v;
  Hu    = jac->Hu;
  d     = jac->d;
  work1 = jac->w1;
  work2 = jac->w2;
  vecz  = jac->vecz;

  /* Change RHS to comply with matrix regularization H = A + nu*B*B' */
  /* Add q = q + nu*B*b */
  if (jac->gkbnu) {
    nu = jac->gkbnu;
    PetscCall(VecScale(ilinkD->x, jac->gkbnu));
    PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */
  } else {
    /* Situation when no augmented Lagrangian is used. Then we set inner */
    /* matrix N = I in [Ar13], and thus nu = 1. */
    nu = 1;
  }

  /* Transform rhs from [q,tilde{b}] to [0,b] */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y));
  PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1));
  PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x */

  /* First step of algorithm */
  PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c)*/
  KSPCheckDot(ksp, beta);
  beta = PetscSqrtReal(nu) * beta;
  PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c */
  PetscCall(MatMult(jac->B, v, work2));          /* u = H^{-1}*B*v */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(KSPSolve(ksp, work2, u));
  PetscCall(KSPCheckSolve(ksp, pc, u));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u */
  PetscCall(VecDot(Hu, u, &alpha));
  KSPCheckDot(ksp, alpha);
  PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
  alpha = PetscSqrtReal(PetscAbsScalar(alpha));
  PetscCall(VecScale(u, 1.0 / alpha));
  PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* d = v/alpha */

  z       = beta / alpha;
  vecz[1] = z;

  /* Computation of first iterate x(1) and p(1) */
  PetscCall(VecAXPY(ilinkA->y, z, u));
  PetscCall(VecCopy(d, ilinkD->y));
  PetscCall(VecScale(ilinkD->y, -z));

  iterGKB = 1;
  lowbnd  = 2 * jac->gkbtol; /* start above the tolerance so the loop is entered */
  if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));

  while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) {
    iterGKB += 1;
    PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */
    PetscCall(VecAXPBY(v, nu, -alpha, work1));
    PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = sqrt(nu)*v'*v */
    beta = beta / PetscSqrtReal(nu);
    PetscCall(VecScale(v, 1.0 / beta));
    PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */
    PetscCall(MatMult(jac->H, u, Hu));
    PetscCall(VecAXPY(work2, -beta, Hu));
    PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(KSPSolve(ksp, work2, u));
    PetscCall(KSPCheckSolve(ksp, pc, u));
    PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
    PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u */
    PetscCall(VecDot(Hu, u, &alpha));
    KSPCheckDot(ksp, alpha);
    PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
    alpha = PetscSqrtReal(PetscAbsScalar(alpha));
    PetscCall(VecScale(u, 1.0 / alpha));

    z       = -beta / alpha * z; /* z <- -beta/alpha*z */
    vecz[0] = z;

    /* Computation of new iterate x(i+1) and p(i+1) */
    PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */
    PetscCall(VecAXPY(ilinkA->y, z, u));                   /* r = r + z*u */
    PetscCall(VecAXPY(ilinkD->y, -z, d));                  /* p = p - z*d */
    PetscCall(MatMult(jac->H, ilinkA->y, Hu));             /* nrmz2 = r'*H*r, the squared H-norm of the iterate */
    PetscCall(VecDot(Hu, ilinkA->y, &nrmz2));

    /* Compute Lower Bound estimate */
    if (iterGKB >
jac->gkbdelay) { 1412 lowbnd = 0.0; 1413 for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]); 1414 lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2)); 1415 } 1416 1417 for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2]; 1418 if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd)); 1419 } 1420 1421 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1422 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1423 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1424 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1425 1426 PetscFunctionReturn(PETSC_SUCCESS); 1427 } 1428 1429 #define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \ 1430 ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \ 1431 KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \ 1432 VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE))) 1433 1434 static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y) 1435 { 1436 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1437 PC_FieldSplitLink ilink = jac->head; 1438 PetscInt bs; 1439 1440 PetscFunctionBegin; 1441 if (jac->type == PC_COMPOSITE_ADDITIVE) { 1442 if (jac->defaultsplit) { 1443 PetscCall(VecGetBlockSize(x, &bs)); 1444 PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, 
"Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs); 1445 PetscCall(VecGetBlockSize(y, &bs)); 1446 PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs); 1447 PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES)); 1448 while (ilink) { 1449 PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1450 PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y)); 1451 PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y)); 1452 PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1453 ilink = ilink->next; 1454 } 1455 PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES)); 1456 } else { 1457 PetscCall(VecSet(y, 0.0)); 1458 while (ilink) { 1459 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y)); 1460 ilink = ilink->next; 1461 } 1462 } 1463 } else { 1464 if (!jac->w1) { 1465 PetscCall(VecDuplicate(x, &jac->w1)); 1466 PetscCall(VecDuplicate(x, &jac->w2)); 1467 } 1468 PetscCall(VecSet(y, 0.0)); 1469 if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) { 1470 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y)); 1471 while (ilink->next) { 1472 ilink = ilink->next; 1473 PetscCall(MatMultTranspose(pc->mat, y, jac->w1)); 1474 PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); 1475 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y)); 1476 } 1477 while (ilink->previous) { 1478 ilink = ilink->previous; 1479 PetscCall(MatMultTranspose(pc->mat, y, jac->w1)); 1480 PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); 1481 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y)); 1482 } 1483 } else { 1484 while (ilink->next) { /* get to last entry in linked list */ 1485 ilink = ilink->next; 1486 } 1487 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y)); 1488 while (ilink->previous) { 1489 ilink = 
ilink->previous; 1490 PetscCall(MatMultTranspose(pc->mat, y, jac->w1)); 1491 PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); 1492 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y)); 1493 } 1494 } 1495 } 1496 PetscFunctionReturn(PETSC_SUCCESS); 1497 } 1498 1499 static PetscErrorCode PCReset_FieldSplit(PC pc) 1500 { 1501 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1502 PC_FieldSplitLink ilink = jac->head, next; 1503 1504 PetscFunctionBegin; 1505 while (ilink) { 1506 PetscCall(KSPDestroy(&ilink->ksp)); 1507 PetscCall(VecDestroy(&ilink->x)); 1508 PetscCall(VecDestroy(&ilink->y)); 1509 PetscCall(VecDestroy(&ilink->z)); 1510 PetscCall(VecScatterDestroy(&ilink->sctx)); 1511 PetscCall(ISDestroy(&ilink->is)); 1512 PetscCall(ISDestroy(&ilink->is_col)); 1513 PetscCall(PetscFree(ilink->splitname)); 1514 PetscCall(PetscFree(ilink->fields)); 1515 PetscCall(PetscFree(ilink->fields_col)); 1516 next = ilink->next; 1517 PetscCall(PetscFree(ilink)); 1518 ilink = next; 1519 } 1520 jac->head = NULL; 1521 PetscCall(PetscFree2(jac->x, jac->y)); 1522 if (jac->mat && jac->mat != jac->pmat) { 1523 PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat)); 1524 } else if (jac->mat) { 1525 jac->mat = NULL; 1526 } 1527 if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat)); 1528 if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield)); 1529 jac->nsplits = 0; 1530 PetscCall(VecDestroy(&jac->w1)); 1531 PetscCall(VecDestroy(&jac->w2)); 1532 PetscCall(MatDestroy(&jac->schur)); 1533 PetscCall(MatDestroy(&jac->schurp)); 1534 PetscCall(MatDestroy(&jac->schur_user)); 1535 PetscCall(KSPDestroy(&jac->kspschur)); 1536 PetscCall(KSPDestroy(&jac->kspupper)); 1537 PetscCall(MatDestroy(&jac->B)); 1538 PetscCall(MatDestroy(&jac->C)); 1539 PetscCall(MatDestroy(&jac->H)); 1540 PetscCall(VecDestroy(&jac->u)); 1541 PetscCall(VecDestroy(&jac->v)); 1542 PetscCall(VecDestroy(&jac->Hu)); 1543 PetscCall(VecDestroy(&jac->d)); 1544 PetscCall(PetscFree(jac->vecz)); 1545 
PetscCall(PetscViewerDestroy(&jac->gkbviewer)); 1546 jac->isrestrict = PETSC_FALSE; 1547 PetscFunctionReturn(PETSC_SUCCESS); 1548 } 1549 1550 static PetscErrorCode PCDestroy_FieldSplit(PC pc) 1551 { 1552 PetscFunctionBegin; 1553 PetscCall(PCReset_FieldSplit(pc)); 1554 PetscCall(PetscFree(pc->data)); 1555 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL)); 1556 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL)); 1557 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL)); 1558 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL)); 1559 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL)); 1560 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL)); 1561 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL)); 1562 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL)); 1563 1564 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL)); 1565 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL)); 1566 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL)); 1567 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL)); 1568 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL)); 1569 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL)); 1570 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL)); 1571 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL)); 1572 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL)); 1573 PetscFunctionReturn(PETSC_SUCCESS); 1574 } 1575 1576 static PetscErrorCode 
PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems *PetscOptionsObject) 1577 { 1578 PetscInt bs; 1579 PetscBool flg; 1580 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1581 PCCompositeType ctype; 1582 1583 PetscFunctionBegin; 1584 PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options"); 1585 PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL)); 1586 PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg)); 1587 if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs)); 1588 jac->diag_use_amat = pc->useAmat; 1589 PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL)); 1590 jac->offdiag_use_amat = pc->useAmat; 1591 PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL)); 1592 PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL)); 1593 PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */ 1594 PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg)); 1595 if (flg) PetscCall(PCFieldSplitSetType(pc, ctype)); 1596 /* Only setup fields once */ 1597 if ((jac->bs > 0) && (jac->nsplits == 0)) { 1598 /* only allow user to set fields from command line if bs is already known. 
1599 otherwise user can set them in PCFieldSplitSetDefaults() */ 1600 PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc)); 1601 if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n")); 1602 } 1603 if (jac->type == PC_COMPOSITE_SCHUR) { 1604 PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg)); 1605 if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n")); 1606 PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL)); 1607 PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL)); 1608 PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL)); 1609 } else if (jac->type == PC_COMPOSITE_GKB) { 1610 PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitGKBTol", jac->gkbtol, &jac->gkbtol, NULL)); 1611 PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL)); 1612 PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitGKBNu", jac->gkbnu, &jac->gkbnu, NULL)); 1613 PetscCheck(jac->gkbnu >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "nu cannot be less than 0: value %g", (double)jac->gkbnu); 1614 PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", 
"Maximum allowed number of iterations", "PCFieldSplitGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL)); 1615 PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL)); 1616 } 1617 /* 1618 In the initial call to this routine the sub-solver data structures do not exist so we cannot call KSPSetFromOptions() on them yet. 1619 But after the initial setup of ALL the layers of sub-solvers is completed we do want to call KSPSetFromOptions() on the sub-solvers every time it 1620 is called on the outer solver in case changes were made in the options database 1621 1622 But even after PCSetUp_FieldSplit() is called all the options inside the inner levels of sub-solvers may still not have been set thus we only call the KSPSetFromOptions() 1623 if we know that the entire stack of sub-solvers below this have been complete instantiated, we check this by seeing if any solver iterations are complete. 1624 Without this extra check test p2p1fetidp_olof_full and others fail with incorrect matrix types. 1625 1626 There could be a negative side effect of calling the KSPSetFromOptions() below. 
1627 1628 If one captured the PetscObjectState of the options database one could skip these calls if the database has not changed from the previous call 1629 */ 1630 if (jac->issetup) { 1631 PC_FieldSplitLink ilink = jac->head; 1632 if (jac->type == PC_COMPOSITE_SCHUR) { 1633 if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper)); 1634 if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur)); 1635 } 1636 while (ilink) { 1637 if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp)); 1638 ilink = ilink->next; 1639 } 1640 } 1641 PetscOptionsHeadEnd(); 1642 PetscFunctionReturn(PETSC_SUCCESS); 1643 } 1644 1645 static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col) 1646 { 1647 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1648 PC_FieldSplitLink ilink, next = jac->head; 1649 char prefix[128]; 1650 PetscInt i; 1651 1652 PetscFunctionBegin; 1653 if (jac->splitdefined) { 1654 PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname)); 1655 PetscFunctionReturn(PETSC_SUCCESS); 1656 } 1657 for (i = 0; i < n; i++) { 1658 PetscCheck(fields[i] < jac->bs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", fields[i], jac->bs); 1659 PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]); 1660 } 1661 PetscCall(PetscNew(&ilink)); 1662 if (splitname) { 1663 PetscCall(PetscStrallocpy(splitname, &ilink->splitname)); 1664 } else { 1665 PetscCall(PetscMalloc1(3, &ilink->splitname)); 1666 PetscCall(PetscSNPrintf(ilink->splitname, 2, "%" PetscInt_FMT, jac->nsplits)); 1667 } 1668 ilink->event = jac->nsplits < 5 ? 
KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split great than 4 gets logged in the 4th split */ 1669 PetscCall(PetscMalloc1(n, &ilink->fields)); 1670 PetscCall(PetscArraycpy(ilink->fields, fields, n)); 1671 PetscCall(PetscMalloc1(n, &ilink->fields_col)); 1672 PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n)); 1673 1674 ilink->nfields = n; 1675 ilink->next = NULL; 1676 PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp)); 1677 PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure)); 1678 PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1)); 1679 PetscCall(KSPSetType(ilink->ksp, KSPPREONLY)); 1680 1681 PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname)); 1682 PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix)); 1683 1684 if (!next) { 1685 jac->head = ilink; 1686 ilink->previous = NULL; 1687 } else { 1688 while (next->next) next = next->next; 1689 next->next = ilink; 1690 ilink->previous = next; 1691 } 1692 jac->nsplits++; 1693 PetscFunctionReturn(PETSC_SUCCESS); 1694 } 1695 1696 static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp) 1697 { 1698 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1699 1700 PetscFunctionBegin; 1701 *subksp = NULL; 1702 if (n) *n = 0; 1703 if (jac->type == PC_COMPOSITE_SCHUR) { 1704 PetscInt nn; 1705 1706 PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()"); 1707 PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits); 1708 nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 
1 : 0); 1709 PetscCall(PetscMalloc1(nn, subksp)); 1710 (*subksp)[0] = jac->head->ksp; 1711 (*subksp)[1] = jac->kspschur; 1712 if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper; 1713 if (n) *n = nn; 1714 } 1715 PetscFunctionReturn(PETSC_SUCCESS); 1716 } 1717 1718 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp) 1719 { 1720 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1721 1722 PetscFunctionBegin; 1723 PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()"); 1724 PetscCall(PetscMalloc1(jac->nsplits, subksp)); 1725 PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp)); 1726 1727 (*subksp)[1] = jac->kspschur; 1728 if (n) *n = jac->nsplits; 1729 PetscFunctionReturn(PETSC_SUCCESS); 1730 } 1731 1732 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp) 1733 { 1734 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1735 PetscInt cnt = 0; 1736 PC_FieldSplitLink ilink = jac->head; 1737 1738 PetscFunctionBegin; 1739 PetscCall(PetscMalloc1(jac->nsplits, subksp)); 1740 while (ilink) { 1741 (*subksp)[cnt++] = ilink->ksp; 1742 ilink = ilink->next; 1743 } 1744 PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits); 1745 if (n) *n = jac->nsplits; 1746 PetscFunctionReturn(PETSC_SUCCESS); 1747 } 1748 1749 /*@C 1750 PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`. 1751 1752 Input Parameters: 1753 + pc - the preconditioner context 1754 - isy - the index set that defines the indices to which the fieldsplit is to be restricted 1755 1756 Level: advanced 1757 1758 Developer Notes: 1759 It seems the resulting `IS`s will not cover the entire space, so 1760 how can they define a convergent preconditioner? 
Needs explaining. 1761 1762 .seealso: [](sec_block_matrices), `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()` 1763 @*/ 1764 PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy) 1765 { 1766 PetscFunctionBegin; 1767 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1768 PetscValidHeaderSpecific(isy, IS_CLASSID, 2); 1769 PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy)); 1770 PetscFunctionReturn(PETSC_SUCCESS); 1771 } 1772 1773 static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy) 1774 { 1775 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1776 PC_FieldSplitLink ilink = jac->head, next; 1777 PetscInt localsize, size, sizez, i; 1778 const PetscInt *ind, *indz; 1779 PetscInt *indc, *indcz; 1780 PetscBool flg; 1781 1782 PetscFunctionBegin; 1783 PetscCall(ISGetLocalSize(isy, &localsize)); 1784 PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy))); 1785 size -= localsize; 1786 while (ilink) { 1787 IS isrl, isr; 1788 PC subpc; 1789 PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl)); 1790 PetscCall(ISGetLocalSize(isrl, &localsize)); 1791 PetscCall(PetscMalloc1(localsize, &indc)); 1792 PetscCall(ISGetIndices(isrl, &ind)); 1793 PetscCall(PetscArraycpy(indc, ind, localsize)); 1794 PetscCall(ISRestoreIndices(isrl, &ind)); 1795 PetscCall(ISDestroy(&isrl)); 1796 for (i = 0; i < localsize; i++) *(indc + i) += size; 1797 PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr)); 1798 PetscCall(PetscObjectReference((PetscObject)isr)); 1799 PetscCall(ISDestroy(&ilink->is)); 1800 ilink->is = isr; 1801 PetscCall(PetscObjectReference((PetscObject)isr)); 1802 PetscCall(ISDestroy(&ilink->is_col)); 1803 ilink->is_col = isr; 1804 PetscCall(ISDestroy(&isr)); 1805 PetscCall(KSPGetPC(ilink->ksp, &subpc)); 1806 PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg)); 1807 if (flg) { 1808 IS iszl, isz; 1809 MPI_Comm comm; 1810 
PetscCall(ISGetLocalSize(ilink->is, &localsize)); 1811 comm = PetscObjectComm((PetscObject)ilink->is); 1812 PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl)); 1813 PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm)); 1814 sizez -= localsize; 1815 PetscCall(ISGetLocalSize(iszl, &localsize)); 1816 PetscCall(PetscMalloc1(localsize, &indcz)); 1817 PetscCall(ISGetIndices(iszl, &indz)); 1818 PetscCall(PetscArraycpy(indcz, indz, localsize)); 1819 PetscCall(ISRestoreIndices(iszl, &indz)); 1820 PetscCall(ISDestroy(&iszl)); 1821 for (i = 0; i < localsize; i++) *(indcz + i) += sizez; 1822 PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz)); 1823 PetscCall(PCFieldSplitRestrictIS(subpc, isz)); 1824 PetscCall(ISDestroy(&isz)); 1825 } 1826 next = ilink->next; 1827 ilink = next; 1828 } 1829 jac->isrestrict = PETSC_TRUE; 1830 PetscFunctionReturn(PETSC_SUCCESS); 1831 } 1832 1833 static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is) 1834 { 1835 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1836 PC_FieldSplitLink ilink, next = jac->head; 1837 char prefix[128]; 1838 1839 PetscFunctionBegin; 1840 if (jac->splitdefined) { 1841 PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname)); 1842 PetscFunctionReturn(PETSC_SUCCESS); 1843 } 1844 PetscCall(PetscNew(&ilink)); 1845 if (splitname) { 1846 PetscCall(PetscStrallocpy(splitname, &ilink->splitname)); 1847 } else { 1848 PetscCall(PetscMalloc1(8, &ilink->splitname)); 1849 PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits)); 1850 } 1851 ilink->event = jac->nsplits < 5 ? 
KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split great than 4 gets logged in the 4th split */ 1852 PetscCall(PetscObjectReference((PetscObject)is)); 1853 PetscCall(ISDestroy(&ilink->is)); 1854 ilink->is = is; 1855 PetscCall(PetscObjectReference((PetscObject)is)); 1856 PetscCall(ISDestroy(&ilink->is_col)); 1857 ilink->is_col = is; 1858 ilink->next = NULL; 1859 PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp)); 1860 PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure)); 1861 PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1)); 1862 PetscCall(KSPSetType(ilink->ksp, KSPPREONLY)); 1863 1864 PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname)); 1865 PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix)); 1866 1867 if (!next) { 1868 jac->head = ilink; 1869 ilink->previous = NULL; 1870 } else { 1871 while (next->next) next = next->next; 1872 next->next = ilink; 1873 ilink->previous = next; 1874 } 1875 jac->nsplits++; 1876 PetscFunctionReturn(PETSC_SUCCESS); 1877 } 1878 1879 /*@C 1880 PCFieldSplitSetFields - Sets the fields that define one particular split in `PCFIELDSPLIT` 1881 1882 Logically Collective 1883 1884 Input Parameters: 1885 + pc - the preconditioner context 1886 . splitname - name of this split, if `NULL` the number of the split is used 1887 . n - the number of fields in this split 1888 . fields - the fields in this split 1889 - fields_col - generally the same as fields, if it does not match fields then the matrix block that is solved for this set of fields comes from an off-diagonal block 1890 of the matrix and fields_col provides the column indices for that block 1891 1892 Level: intermediate 1893 1894 Notes: 1895 Use `PCFieldSplitSetIS()` to set a general set of indices as a split. 1896 1897 `PCFieldSplitSetFields()` is for defining fields as strided blocks. 
For example, if the block 1898 size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean 1899 0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x1x3x5x7.. x12x45x78x.... 1900 where the numbered entries indicate what is in the split. 1901 1902 This function is called once per split (it creates a new split each time). Solve options 1903 for this split will be available under the prefix `-fieldsplit_SPLITNAME_`. 1904 1905 `PCFieldSplitSetIS()` does not support having a fields_col different from fields 1906 1907 Developer Notes: 1908 This routine does not actually create the `IS` representing the split, that is delayed 1909 until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be 1910 available when this routine is called. 1911 1912 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()` 1913 @*/ 1914 PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col) 1915 { 1916 PetscFunctionBegin; 1917 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1918 PetscValidCharPointer(splitname, 2); 1919 PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname); 1920 PetscValidIntPointer(fields, 4); 1921 PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col)); 1922 PetscFunctionReturn(PETSC_SUCCESS); 1923 } 1924 1925 /*@ 1926 PCFieldSplitSetDiagUseAmat - set flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build 1927 the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators. 
1928 1929 Logically Collective 1930 1931 Input Parameters: 1932 + pc - the preconditioner object 1933 - flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from 1934 1935 Options Database Key: 1936 . -pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks 1937 1938 Level: intermediate 1939 1940 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT` 1941 @*/ 1942 PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg) 1943 { 1944 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1945 PetscBool isfs; 1946 1947 PetscFunctionBegin; 1948 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1949 PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs)); 1950 PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT); 1951 jac->diag_use_amat = flg; 1952 PetscFunctionReturn(PETSC_SUCCESS); 1953 } 1954 1955 /*@ 1956 PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build 1957 the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat)) was used to supply the operators. 1958 1959 Logically Collective 1960 1961 Input Parameter: 1962 . pc - the preconditioner object 1963 1964 Output Parameter: 1965 . 
  flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT`
@*/
PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidBoolPointer(flg, 2);
  /* guard against a PC of another type, whose pc->data would not be a PC_FieldSplit */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  *flg = jac->diag_use_amat;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetOffDiagUseAmat - set flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
  the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.

  Logically Collective

  Input Parameters:
+ pc  - the preconditioner object
- flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from

  Options Database Key:
. -pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT`
@*/
PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  jac->offdiag_use_amat = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
  the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.

  Logically Collective

  Input Parameter:
. pc - the preconditioner object

  Output Parameter:
. flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT`
@*/
PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidBoolPointer(flg, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  *flg = jac->offdiag_use_amat;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT`

  Logically Collective

  Input Parameters:
+ pc        - the preconditioner context
. splitname - name of this split, if `NULL` the number of the split is used
- is        - the index set that defines the elements in this split

  Level: intermediate

  Notes:
  Use `PCFieldSplitSetFields()`, for splits defined by strided types.

  This function is called once per split (it creates a new split each time). Solve options
  for this split will be available under the prefix -fieldsplit_SPLITNAME_.

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`
@*/
PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (splitname) PetscValidCharPointer(splitname, 2);
  PetscValidHeaderSpecific(is, IS_CLASSID, 3);
  PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCFieldSplitGetIS - Retrieves the elements for a split as an `IS`

  Logically Collective

  Input Parameters:
+ pc        - the preconditioner context
- splitname - name of this split

  Output Parameter:
. is - the index set that defines the elements in this split, or `NULL` if the split is not found

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidCharPointer(splitname, 2);
  PetscValidPointer(is, 3);
  {
    PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
    PC_FieldSplitLink ilink = jac->head;
    PetscBool         found;

    /* walk the linked list of splits looking for a matching name; *is stays NULL if none matches */
    *is = NULL;
    while (ilink) {
      PetscCall(PetscStrcmp(ilink->splitname, splitname, &found));
      if (found) {
        *is = ilink->is;
        break;
      }
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS`

  Logically Collective

  Input Parameters:
+ pc    - the preconditioner context
- index - index of this split

  Output Parameter:
. is - the index set that defines the elements in this split

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is)
{
  PetscFunctionBegin;
  PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index);
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(is, 3);
  {
    PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
    PC_FieldSplitLink ilink = jac->head;
    PetscInt          i     = 0;
    PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits);

    /* advance to the index-th link, then look it up by name through PCFieldSplitGetIS() */
    while (i < index) {
      ilink = ilink->next;
      ++i;
    }
    PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the
  fieldsplit preconditioner when calling `PCFieldSplitSetIS()`. If not set the matrix block size is used.

  Logically Collective

  Input Parameters:
+ pc - the preconditioner context
- bs - the block size

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, bs, 2);
  PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits

  Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ n      - the number of splits
- subksp - the array of `KSP` contexts

  Level: advanced

  Notes:
  After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
  (not the `KSP`, just the array that contains them).

  You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`.

  If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the
  Schur complement and the `KSP` object used to iterate over the Schur complement.
  To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`.

  If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the
  inner linear system defined by the matrix H in each loop.

  Fortran Notes:
  You must pass in a `KSP` array that is large enough to contain all the `KSP`s.
  You can call `PCFieldSplitGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
  `KSP` array must be.

  Developer Notes:
  There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`

  The Fortran interface should be modernized to return directly the array of values.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()`
@*/
PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscValidIntPointer(n, 2);
  PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT`

  Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ n      - the number of splits
- subksp - the array of `KSP` contexts

  Level: advanced

  Notes:
  After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
  (not the `KSP` just the array that contains them).

  You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`.

  If the fieldsplit type is of type `PC_COMPOSITE_SCHUR`, it returns (in order)
+ 1 - the `KSP` used for the (1,1) block
. 2 - the `KSP` used for the Schur complement (not the one used for the interior Schur solver)
- 3 - the `KSP` used for the (1,1) block in the upper triangular factor (if different from that of the (1,1) block).

  It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`.

  Fortran Notes:
  You must pass in a `KSP` array that is large enough to contain all the local `KSP`s.
  You can call `PCFieldSplitSchurGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
  `KSP` array must be.

  Developer Notes:
  There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`

  Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged?

  The Fortran interface should be modernized to return directly the array of values.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()`
@*/
PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscValidIntPointer(n, 2);
  PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetSchurPre - Indicates from what operator the preconditioner is constructed for the Schur complement.
  The default is the A11 matrix.

  Collective

  Input Parameters:
+ pc    - the preconditioner context
. ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default),
          `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`,
          `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL`
- pre   - matrix to use for preconditioning, or `NULL`

  Options Database Keys:
+ -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is a11. See notes for meaning of various arguments
- -fieldsplit_1_pc_type <pctype> - the preconditioner algorithm that is used to construct the preconditioner from the operator

  Level: intermediate

  Notes:
  If ptype is
+ a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner
  matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix
. self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix:
  The only preconditioner that currently works with this symbolic representation matrix object is the `PCLSC`
  preconditioner
. user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument
  to this function).
. selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation Sp = A11 - A10 inv(diag(A00)) A01
  This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be
  lumped before extracting the diagonal using the additional option -fieldsplit_1_mat_schur_complement_ainv_type lump
- full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation
  computed internally by `PCFIELDSPLIT` (this is expensive)
  useful mostly as a test that the Schur complement approach can work for your problem

  When solving a saddle point problem, where the A11 block is identically zero, using a11 as the ptype only makes sense
  with the additional option -fieldsplit_1_pc_type none. Usually for saddle point problems one would use a ptype of self and
  -fieldsplit_1_pc_type lsc which uses the least squares commutator to compute a preconditioner for the Schur complement.

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`,
          `MatSchurComplementSetAinvType()`, `PCLSC`,

@*/
PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Deprecated name: retained for backward compatibility, use PCFieldSplitSetSchurPre() */
PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  return PCFieldSplitSetSchurPre(pc, ptype, pre);
}

/*@
  PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be
  preconditioned. See `PCFieldSplitSetSchurPre()` for details.

  Logically Collective

  Input Parameter:
  pc - the preconditioner context

  Output Parameters:
+ ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`
- pre   - matrix to use for preconditioning (with `PC_FIELDSPLIT_SCHUR_PRE_USER`), or NULL

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC`,

@*/
PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSchurGetS - extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. S - the Schur complement matrix

  Level: advanced

  Note:
  This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`.

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`,
          `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()`
@*/
PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S)
{
  const char    *t;
  PetscBool      isfs;
  PC_FieldSplit *jac;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCall(PetscObjectGetType((PetscObject)pc, &t));
  PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
  jac = (PC_FieldSplit *)pc->data;
  PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
  if (S) *S = jac->schur;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSchurRestoreS - returns the `MATSCHURCOMPLEMENT` matrix used by this `PC`

  Not Collective

  Input Parameters:
+ pc - the preconditioner context
- S  - the Schur complement matrix

  Level: advanced

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MatSchurComplement`, `PCFieldSplitSchurGetS()`
@*/
PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S)
{
  const char    *t;
  PetscBool      isfs;
  PC_FieldSplit *jac;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCall(PetscObjectGetType((PetscObject)pc, &t));
  PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
  jac = (PC_FieldSplit *)pc->data;
  PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
  /* the user must hand back exactly the matrix obtained from PCFieldSplitSchurGetS() */
  PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten");
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurpre = ptype;
  if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) {
    /* take our own reference to the user matrix, dropping any previously supplied one */
    PetscCall(MatDestroy(&jac->schur_user));
    jac->schur_user = pre;
    PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  *ptype = jac->schurpre;
  *pre   = jac->schur_user;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetSchurFactType - sets which blocks of the approximate block factorization to retain in the preconditioner

  Collective

  Input Parameters:
+ pc    - the preconditioner context
- ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default

  Options Database Key:
. -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is full

  Level: intermediate

  Notes:
  The FULL factorization is

.vb
  (A B) = (1       0) (A 0) (1 Ainv*B) = L D U
  (C E)   (C*Ainv  1) (0 S) (0      1)
.ve
  where S = E - C*Ainv*B. In practice, the full factorization is applied via block triangular solves with the grouping L*(D*U).
  UPPER uses D*U, LOWER uses L*D,
  and DIAG is the diagonal part with the sign of S flipped (because this makes the preconditioner positive definite for many formulations, thus allowing the use of `KSPMINRES`).
  Sign flipping of S can be turned off with `PCFieldSplitSetSchurScale()`.

  If A and S are solved exactly
.vb
  *) FULL factorization is a direct solver.
  *) The preconditioned operator with LOWER or UPPER has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations.
  *) With DIAG, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations.
.ve

  If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner
  application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice.

  For symmetric problems in which A is positive definite and S is negative definite, DIAG can be used with `KSPMINRES`.

  A flexible method like `KSPFGMRES` or `KSPGCR` must be used if the fieldsplit preconditioner is nonlinear (e.g. a few iterations of a Krylov method is used to solve with A or S).

  References:
+ * - Murphy, Golub, and Wathen, A note on preconditioning indefinite linear systems, SIAM J. Sci. Comput., 21 (2000).
- * - Ipsen, A note on preconditioning nonsymmetric matrices, SIAM J. Sci. Comput., 23 (2001).

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`
@*/
PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurfactorization = ftype;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetSchurScale - Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`.

  Collective

  Input Parameters:
+ pc    - the preconditioner context
- scale - scaling factor for the Schur complement

  Options Database Key:
  -pc_fieldsplit_schur_scale - default is -1.0

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetSchurFactType()`
@*/
PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveScalar(pc, scale, 2);
  PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurscale = scale;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement

  Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ A00 - the (0,0) block
. A01 - the (0,1) block
. A10 - the (1,0) block
- A11 - the (1,1) block

  Level: advanced

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()`
@*/
PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach.");
  if (A00) *A00 = jac->pmat[0];
  if (A01) *A01 = jac->B;
  if (A10) *A10 = jac->C;
  if (A11) *A11 = jac->pmat[1];
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetGKBTol - Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner in `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc        - the preconditioner context
- tolerance - the solver tolerance

  Options Database Key:
. -pc_fieldsplit_gkb_tol - default is 1e-5

  Level: intermediate

  Note:
  The generalized GKB algorithm uses a lower bound estimate of the error in energy norm as stopping criterion.
  It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than
  this estimate, the stopping criterion is satisfactory in practical cases [Ar13].

  References:
  [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()`
@*/
PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, tolerance, 2);
  PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbtol = tolerance;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetGKBMaxit - Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner in `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc    - the preconditioner context
- maxit - the maximum number of iterations

  Options Database Key:
. -pc_fieldsplit_gkb_maxit - default is 100

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()`
@*/
PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, maxit, 2);
  PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbmaxit = maxit;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetGKBDelay - Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization in `PCFIELDSPLIT`
  preconditioner.

  Collective

  Input Parameters:
+ pc    - the preconditioner context
- delay - the delay window in the lower bound estimate

  Options Database Key:
. -pc_fieldsplit_gkb_delay - default is 5

  Level: intermediate

  Note:
  The algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. The lower bound of the error ||u-u^k||_H
  is expressed as a truncated sum. The error at iteration k can only be measured at iteration (k + delay), and thus the algorithm needs
  at least (delay + 1) iterations to stop. For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to

  References:
  [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
@*/
PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, delay, 2);
  PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbdelay = delay;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetGKBNu - Sets the scalar value nu >= 0 in the transformation H = A00 + nu*A01*A01' of the (1,1) block in the Golub-Kahan bidiagonalization preconditioner
  in `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc - the preconditioner context
- nu - the shift parameter

  Options Database Key:
. -pc_fieldsplit_gkb_nu - default is 1

  Level: intermediate

  Notes:
  This shift is in general done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by choosing nu sufficiently big. However,
  if nu is chosen too big, the matrix H might be badly conditioned and the solution of the linear system Hx = b in the inner loop becomes difficult. It is therefore
  necessary to find a good balance in between the convergence of the inner and outer loop.

  For nu = 0, no shift is done. In this case A00 has to be positive definite. The matrix N in [Ar13] is then chosen as identity.

  References:
  [Ar13] Generalized Golub-Kahan bidiagonalization and stopping criteria, SIAM J. Matrix Anal. Appl., Vol. 34, No. 2, pp. 571-592, 2013.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
@*/
PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, nu, 2);
  PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbnu = nu;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->type = type;
  /* clear all type-specific composed methods; the ones appropriate for the new type are re-composed below */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));

  if (type == PC_COMPOSITE_SCHUR) {
    pc->ops->apply = PCApply_FieldSplit_Schur;
    pc->ops->view  = PCView_FieldSplit_Schur;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit));
  } else if (type == PC_COMPOSITE_GKB) {
    pc->ops->apply = PCApply_FieldSplit_GKB;
    pc->ops->view  = PCView_FieldSplit_GKB;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit));
  } else {
    pc->ops->apply = PCApply_FieldSplit;
    pc->ops->view  = PCView_FieldSplit;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" PetscInt_FMT, bs);
  PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs);
  jac->bs = bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
{
  PC_FieldSplit    *jac           = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink_current = jac->head;
  IS                is_owned;

  PetscFunctionBegin;
  jac->coordinates_set = PETSC_TRUE; // Internal flag
  PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, NULL));

  while (ilink_current) {
    // For each IS, embed it to get local coords indices
    IS              is_coords;
    PetscInt        ndofs_block;
    const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block

    // Setting drop to true for safety. It should make no difference.
    PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords));
    PetscCall(ISGetLocalSize(is_coords, &ndofs_block));
    PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration));

    // Allocate coordinates vector and set it directly
    PetscCall(PetscMalloc1(ndofs_block * dim, &(ilink_current->coords)));
    for (PetscInt dof = 0; dof < ndofs_block; ++dof) {
      for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d];
    }
    ilink_current->dim   = dim;
    ilink_current->ndofs = ndofs_block;
    PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration));
    PetscCall(ISDestroy(&is_coords));
    ilink_current = ilink_current->next;
  }
  PetscCall(ISDestroy(&is_owned));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc   - the preconditioner context
- type -
  `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`

  Options Database Key:
. -pc_fieldsplit_type <type: one of multiplicative, additive, symmetric_multiplicative, special, schur> - Sets fieldsplit preconditioner type

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
          `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
@*/
PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT`

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
          `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
@*/
PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidIntPointer(type, 2);
  *type = jac->type;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.

  Logically Collective

  Input Parameters:
+ pc  - the preconditioner context
- flg - boolean indicating whether to use field splits defined by the `DM`

  Options Database Key:
. -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM`

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flg, 2);
  /* silently ignored when the PC is not PCFIELDSPLIT */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  if (isfs) jac->dm_splits = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.

  Logically Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. flg - boolean indicating whether to use field splits defined by the `DM`

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidBoolPointer(flg, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  if (isfs) {
    if (flg) *flg = jac->dm_splits;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.

  Logically Collective

  Input Parameter:
.
pc - the preconditioner context 2945 2946 Output Parameter: 2947 . flg - boolean indicating whether to detect fields or not 2948 2949 Level: intermediate 2950 2951 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()` 2952 @*/ 2953 PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg) 2954 { 2955 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2956 2957 PetscFunctionBegin; 2958 *flg = jac->detect; 2959 PetscFunctionReturn(PETSC_SUCCESS); 2960 } 2961 2962 /*@ 2963 PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries. 2964 2965 Logically Collective 2966 2967 Input Parameter: 2968 . pc - the preconditioner context 2969 2970 Output Parameter: 2971 . flg - boolean indicating whether to detect fields or not 2972 2973 Options Database Key: 2974 . -pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point 2975 2976 Level: intermediate 2977 2978 Note: 2979 Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`). 
2980 2981 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF` 2982 @*/ 2983 PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg) 2984 { 2985 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2986 2987 PetscFunctionBegin; 2988 jac->detect = flg; 2989 if (jac->detect) { 2990 PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR)); 2991 PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL)); 2992 } 2993 PetscFunctionReturn(PETSC_SUCCESS); 2994 } 2995 2996 /*MC 2997 PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual 2998 collections of variables (that may overlap) called splits. See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details. 2999 3000 Options Database Keys: 3001 + -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split 3002 . -pc_fieldsplit_default - automatically add any fields to additional splits that have not 3003 been supplied explicitly by `-pc_fieldsplit_%d_fields` 3004 . -pc_fieldsplit_block_size <bs> - size of block that defines fields (i.e. there are bs fields) 3005 . -pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting 3006 . -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is a11; see `PCFieldSplitSetSchurPre()` 3007 . -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - set factorization type when using `-pc_fieldsplit_type schur`; see `PCFieldSplitSetSchurFactType()` 3008 - -pc_fieldsplit_detect_saddle_point - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver 3009 3010 Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` . 
3011 The options prefix for the inner solver when using the Golub-Kahan biadiagonalization preconditioner is `-fieldsplit_0_` 3012 For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields. 3013 3014 To set options on the solvers for each block append `-fieldsplit_` to all the `PC` 3015 options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1` 3016 3017 To set the options on the solvers separate for each block call `PCFieldSplitGetSubKSP()` 3018 and set the options directly on the resulting `KSP` object 3019 3020 Level: intermediate 3021 3022 Notes: 3023 Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries and `PCFieldSplitSetIS()` 3024 to define a split by an arbitrary collection of entries. 3025 3026 If no splits are set the default is used. The splits are defined by entries strided by bs, 3027 beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`, 3028 if this is not called the block size defaults to the blocksize of the second matrix passed 3029 to `KSPSetOperators()`/`PCSetOperators()`. 3030 3031 For the Schur complement preconditioner if 3032 3033 ```{math} 3034 J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right] 3035 ``` 3036 3037 the preconditioner using `full` factorization is logically 3038 ```{math} 3039 \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right] 3040 ``` 3041 where the action of $\text{inv}(A_{00})$ is applied using the KSP solver with prefix `-fieldsplit_0_`. $S$ is the Schur complement 3042 ```{math} 3043 S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01} 3044 ``` 3045 which is usually dense and not stored explicitly. 
The action of $\text{ksp}(S)$ is computed using the KSP solver with prefix `-fieldsplit_splitname_` (where `splitname` was given 3046 in providing the SECOND split or 1 if not given). For `PCFieldSplitGetSubKSP()` when field number is 0, 3047 it returns the KSP associated with `-fieldsplit_0_` while field number 1 gives `-fieldsplit_1_` KSP. By default 3048 $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$. 3049 3050 The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above, 3051 `diag` gives 3052 ```{math} 3053 \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & -\text{ksp}(S) \end{array}\right] 3054 ``` 3055 Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$ so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip 3056 can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of 3057 ```{math} 3058 \left[\begin{array}{cc} A_{00} & 0 \\ A_{10} & S \end{array}\right] 3059 ``` 3060 where the inverses of A_{00} and S are applied using KSPs. The upper factorization is the inverse of 3061 ```{math} 3062 \left[\begin{array}{cc} A_{00} & A_{01} \\ 0 & S \end{array}\right] 3063 ``` 3064 where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s. 3065 3066 If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS` 3067 is used automatically for a second block. 3068 3069 The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1. 3070 Generally it should be used with the `MATAIJ` format. 
3071 3072 The forms of these preconditioners are closely related if not identical to forms derived as "Distributive Iterations", see, 3073 for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`Wesseling2009`. 3074 One can also use `PCFIELDSPLIT` 3075 inside a smoother resulting in "Distributive Smoothers". 3076 3077 See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`. 3078 3079 The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the 3080 residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables. 3081 3082 The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape 3083 ```{math} 3084 \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right] 3085 ``` 3086 with $A_{00}$ positive semi-definite. The implementation follows {cite}`Arioli2013`. Therein, we choose $N := 1/\nu * I$ and the $(1,1)$-block of the matrix is modified to $H = _{A00} + \nu*A_{01}*A_{01}'$. 3087 A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`. 3088 3089 Developer Note: 3090 The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their 3091 user API. 
3092 3093 References: 3094 ```{bibliography} 3095 :filter: docname in docnames 3096 ``` 3097 3098 .seealso: [](sec_block_matrices), `PC`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`, 3099 `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`, 3100 `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`, 3101 `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()` 3102 M*/ 3103 3104 PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc) 3105 { 3106 PC_FieldSplit *jac; 3107 3108 PetscFunctionBegin; 3109 PetscCall(PetscNew(&jac)); 3110 3111 jac->bs = -1; 3112 jac->nsplits = 0; 3113 jac->type = PC_COMPOSITE_MULTIPLICATIVE; 3114 jac->schurpre = PC_FIELDSPLIT_SCHUR_PRE_USER; /* Try user preconditioner first, fall back on diagonal */ 3115 jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL; 3116 jac->schurscale = -1.0; 3117 jac->dm_splits = PETSC_TRUE; 3118 jac->detect = PETSC_FALSE; 3119 jac->gkbtol = 1e-5; 3120 jac->gkbdelay = 5; 3121 jac->gkbnu = 1; 3122 jac->gkbmaxit = 100; 3123 jac->gkbmonitor = PETSC_FALSE; 3124 jac->coordinates_set = PETSC_FALSE; 3125 3126 pc->data = (void *)jac; 3127 3128 pc->ops->apply = PCApply_FieldSplit; 3129 pc->ops->applytranspose = PCApplyTranspose_FieldSplit; 3130 pc->ops->setup = PCSetUp_FieldSplit; 3131 pc->ops->reset = PCReset_FieldSplit; 3132 pc->ops->destroy = PCDestroy_FieldSplit; 3133 pc->ops->setfromoptions = PCSetFromOptions_FieldSplit; 3134 pc->ops->view = PCView_FieldSplit; 3135 pc->ops->applyrichardson = NULL; 3136 3137 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit)); 3138 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit)); 3139 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit)); 
3140 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit)); 3141 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit)); 3142 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit)); 3143 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit)); 3144 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit)); 3145 PetscFunctionReturn(PETSC_SUCCESS); 3146 } 3147