1 #include <petsc/private/pcimpl.h> /*I "petscpc.h" I*/ 2 #include <petsc/private/kspimpl.h> /* This is needed to provide the appropriate PETSC_EXTERN for KSP_Solve_FS ....*/ 3 #include <petscdm.h> 4 #include <petscdevice.h> 5 #if PetscDefined(HAVE_CUDA) 6 #include <petscdevice_cuda.h> 7 #endif 8 #if PetscDefined(HAVE_HIP) 9 #include <petscdevice_hip.h> 10 #endif 11 12 const char *const PCFieldSplitSchurPreTypes[] = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL}; 13 const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL}; 14 15 PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4; 16 17 typedef struct _PC_FieldSplitLink *PC_FieldSplitLink; 18 struct _PC_FieldSplitLink { 19 KSP ksp; 20 Vec x, y, z; 21 char *splitname; 22 PetscInt nfields; 23 PetscInt *fields, *fields_col; 24 VecScatter sctx; 25 IS is, is_col; 26 PC_FieldSplitLink next, previous; 27 PetscLogEvent event; 28 29 /* Used only when setting coordinates with PCSetCoordinates */ 30 PetscInt dim; 31 PetscInt ndofs; 32 PetscReal *coords; 33 }; 34 35 typedef struct { 36 PCCompositeType type; 37 PetscBool defaultsplit; /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */ 38 PetscBool splitdefined; /* Flag is set after the splits have been defined, to prevent more splits from being added */ 39 PetscInt bs; /* Block size for IS and Mat structures */ 40 PetscInt nsplits; /* Number of field divisions defined */ 41 Vec *x, *y, w1, w2; 42 Mat *mat; /* The diagonal block for each split */ 43 Mat *pmat; /* The preconditioning diagonal block for each split */ 44 Mat *Afield; /* The rows of the matrix associated with each split */ 45 PetscBool issetup; 46 47 /* Only used when Schur complement preconditioning is used */ 48 Mat B; /* The (0,1) block */ 49 Mat C; /* The (1,0) block */ 50 Mat schur; /* The Schur complement S = A11 - A10 A00^{-1} A01, the KSP here, kspinner, is H_1 in [El08] */ 51 Mat schurp; /* Assembled approximation to S built by MatSchurComplement to be used as a matrix for constructing the preconditioner when solving with S */ 52 Mat schur_user; /* User-provided matrix for constructing the preconditioner for the Schur complement */ 53 PCFieldSplitSchurPreType schurpre; /* Determines which matrix is used for the Schur complement */ 54 PCFieldSplitSchurFactType schurfactorization; 55 KSP kspschur; /* The solver for S */ 56 KSP kspupper; /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */ 57 PetscScalar schurscale; /* Scaling factor for the Schur complement solution with DIAG factorization */ 58 59 /* Only used when Golub-Kahan bidiagonalization preconditioning is used */ 60 Mat H; /* The modified matrix H = A00 + nu*A01*A01' */ 61 PetscReal gkbtol; /* Stopping tolerance for lower bound estimate */ 62 PetscInt gkbdelay; /* The delay window for the stopping criterion */ 63 PetscReal gkbnu; /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */ 64 PetscInt gkbmaxit; /* Maximum number of iterations for outer loop */ 65 PetscBool gkbmonitor; /* Monitor for gkb iterations and the lower bound error */ 66 PetscViewer gkbviewer; /* Viewer context for gkbmonitor */ 67 Vec u, v, d, Hu; /* Work vectors for the GKB algorithm */ 68 PetscScalar *vecz; /* Contains intermediate values, eg for lower bound */ 69 70 PC_FieldSplitLink head; 71 PetscBool isrestrict; /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */ 72 PetscBool suboptionsset; /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */ 73 PetscBool dm_splits; /* Whether to use DMCreateFieldDecomposition() whenever possible */ 74 PetscBool diag_use_amat; /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */ 75 PetscBool offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */ 76 PetscBool detect; /* Whether to form 2-way split by finding zero diagonal entries */ 77 PetscBool coordinates_set; /* Whether PCSetCoordinates has been called */ 78 } PC_FieldSplit; 79 80 /* 81 Note: 82 there is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit instead of 83 inside PC_FieldSplitLink, just historical. If you want to be able to add new fields after already using the 84 PC you could change this. 85 */ 86 87 /* This helper is so that setting a user-provided matrix is orthogonal to choosing to use it. This way the 88 * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices. */ 89 static Mat FieldSplitSchurPre(PC_FieldSplit *jac) 90 { 91 switch (jac->schurpre) { 92 case PC_FIELDSPLIT_SCHUR_PRE_SELF: 93 return jac->schur; 94 case PC_FIELDSPLIT_SCHUR_PRE_SELFP: 95 return jac->schurp; 96 case PC_FIELDSPLIT_SCHUR_PRE_A11: 97 return jac->pmat[1]; 98 case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */ 99 case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */ 100 default: 101 return jac->schur_user ? jac->schur_user : jac->pmat[1]; 102 } 103 } 104 105 #include <petscdraw.h> 106 static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer) 107 { 108 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 109 PetscBool isascii, isdraw; 110 PetscInt i, j; 111 PC_FieldSplitLink ilink = jac->head; 112 113 PetscFunctionBegin; 114 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii)); 115 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw)); 116 if (isascii) { 117 if (jac->bs > 0) { 118 PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs)); 119 } else { 120 PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits)); 121 } 122 if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for blocks\n")); 123 if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for diagonal blocks\n")); 124 if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for off-diagonal blocks\n")); 125 PetscCall(PetscViewerASCIIPrintf(viewer, " Solver info for each split is in the following KSP objects:\n")); 126 for (i = 0; i < jac->nsplits; i++) { 127 if (ilink->fields) { 128 PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i)); 129 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); 130 for (j = 0; j < ilink->nfields; j++) { 131 if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ",")); 132 PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j])); 133 } 134 PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); 135 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE)); 136 } else { 137 PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i)); 138 } 139 PetscCall(KSPView(ilink->ksp, viewer)); 140 ilink = ilink->next; 141 } 142 } 143 144 if (isdraw) { 145 PetscDraw draw; 146 PetscReal x, y, w, wd; 147 148 PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw)); 149 PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y)); 150 w = 2 * PetscMin(1.0 - x, x); 151 wd = w / (jac->nsplits + 1); 152 x = x - wd * (jac->nsplits - 1) / 2.0; 153 for (i = 0; i < jac->nsplits; i++) { 154 PetscCall(PetscDrawPushCurrentPoint(draw, x, y)); 155 PetscCall(KSPView(ilink->ksp, viewer)); 156 PetscCall(PetscDrawPopCurrentPoint(draw)); 157 x += wd; 158 ilink = ilink->next; 159 } 160 } 161 PetscFunctionReturn(PETSC_SUCCESS); 162 } 163 164 static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer) 165 { 166 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 167 PetscBool isascii, isdraw; 168 PetscInt i, j; 169 PC_FieldSplitLink ilink = jac->head; 170 MatSchurComplementAinvType atype; 171 172 PetscFunctionBegin; 173 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii)); 174 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw)); 175 if (isascii) { 176 if (jac->bs > 0) { 177 PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization])); 178 } else { 179 PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization])); 180 } 181 if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for blocks\n")); 182 switch (jac->schurpre) { 183 case PC_FIELDSPLIT_SCHUR_PRE_SELF: 184 PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from S itself\n")); 185 break; 186 case PC_FIELDSPLIT_SCHUR_PRE_SELFP: 187 if (jac->schur) { 188 PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype)); 189 PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's ")))); 190 } 191 break; 192 case PC_FIELDSPLIT_SCHUR_PRE_A11: 193 PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from A11\n")); 194 break; 195 case PC_FIELDSPLIT_SCHUR_PRE_FULL: 196 PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from the exact Schur complement\n")); 197 break; 198 case PC_FIELDSPLIT_SCHUR_PRE_USER: 199 if (jac->schur_user) { 200 PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from user provided matrix\n")); 201 } else { 202 PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from A11\n")); 203 } 204 break; 205 default: 206 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre); 207 } 208 PetscCall(PetscViewerASCIIPrintf(viewer, " Split info:\n")); 209 PetscCall(PetscViewerASCIIPushTab(viewer)); 210 for (i = 0; i < jac->nsplits; i++) { 211 if (ilink->fields) { 212 PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i)); 213 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); 214 for (j = 0; j < ilink->nfields; j++) { 215 if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ",")); 216 PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j])); 217 } 218 PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); 219 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE)); 220 } else { 221 PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i)); 222 } 223 ilink = ilink->next; 224 } 225 PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n")); 226 PetscCall(PetscViewerASCIIPushTab(viewer)); 227 if (jac->head) PetscCall(KSPView(jac->head->ksp, viewer)); 228 else PetscCall(PetscViewerASCIIPrintf(viewer, " not yet available\n")); 229 PetscCall(PetscViewerASCIIPopTab(viewer)); 230 if (jac->head && jac->kspupper != jac->head->ksp) { 231 PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor\n")); 232 PetscCall(PetscViewerASCIIPushTab(viewer)); 233 if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer)); 234 else PetscCall(PetscViewerASCIIPrintf(viewer, " not yet available\n")); 235 PetscCall(PetscViewerASCIIPopTab(viewer)); 236 } 237 PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01\n")); 238 PetscCall(PetscViewerASCIIPushTab(viewer)); 239 if (jac->kspschur) { 240 PetscCall(KSPView(jac->kspschur, viewer)); 241 } else { 242 PetscCall(PetscViewerASCIIPrintf(viewer, " not yet available\n")); 243 } 244 PetscCall(PetscViewerASCIIPopTab(viewer)); 245 PetscCall(PetscViewerASCIIPopTab(viewer)); 246 } else if (isdraw && jac->head) { 247 PetscDraw draw; 248 PetscReal x, y, w, wd, h; 249 PetscInt cnt = 2; 250 char str[32]; 251 252 PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw)); 253 PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y)); 254 if (jac->kspupper != jac->head->ksp) cnt++; 255 w = 2 * PetscMin(1.0 - x, x); 256 wd = w / (cnt + 1); 257 258 PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization])); 259 PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h)); 260 y -= h; 261 if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) { 262 PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11])); 263 } else { 264 PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre])); 265 } 266 PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h)); 267 y -= h; 268 x = x - wd * (cnt - 1) / 2.0; 269 270 PetscCall(PetscDrawPushCurrentPoint(draw, x, y)); 271 PetscCall(KSPView(jac->head->ksp, viewer)); 272 PetscCall(PetscDrawPopCurrentPoint(draw)); 273 if (jac->kspupper != jac->head->ksp) { 274 x += wd; 275 PetscCall(PetscDrawPushCurrentPoint(draw, x, y)); 276 PetscCall(KSPView(jac->kspupper, viewer)); 277 PetscCall(PetscDrawPopCurrentPoint(draw)); 278 } 279 x += wd; 280 PetscCall(PetscDrawPushCurrentPoint(draw, x, y)); 281 PetscCall(KSPView(jac->kspschur, viewer)); 282 PetscCall(PetscDrawPopCurrentPoint(draw)); 283 } 284 PetscFunctionReturn(PETSC_SUCCESS); 285 } 286 287 static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer) 288 { 289 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 290 PetscBool isascii, isdraw; 291 PetscInt i, j; 292 PC_FieldSplitLink ilink = jac->head; 293 294 PetscFunctionBegin; 295 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii)); 296 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw)); 297 if (isascii) { 298 if (jac->bs > 0) { 299 PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs)); 300 } else { 301 PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits)); 302 } 303 if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for blocks\n")); 304 if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for diagonal blocks\n")); 305 if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for off-diagonal blocks\n")); 306 307 PetscCall(PetscViewerASCIIPrintf(viewer, " Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit)); 308 PetscCall(PetscViewerASCIIPrintf(viewer, " Solver info for H = A00 + nu*A01*A01' matrix:\n")); 309 PetscCall(PetscViewerASCIIPushTab(viewer)); 310 311 if (ilink->fields) { 312 PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields ")); 313 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); 314 for (j = 0; j < ilink->nfields; j++) { 315 if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ",")); 316 PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j])); 317 } 318 PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); 319 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE)); 320 } else { 321 PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n")); 322 } 323 PetscCall(KSPView(ilink->ksp, viewer)); 324 325 PetscCall(PetscViewerASCIIPopTab(viewer)); 326 } 327 328 if (isdraw) { 329 PetscDraw draw; 330 PetscReal x, y, w, wd; 331 332 PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw)); 333 PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y)); 334 w = 2 * PetscMin(1.0 - x, x); 335 wd = w / (jac->nsplits + 1); 336 x = x - wd * (jac->nsplits - 1) / 2.0; 337 for (i = 0; i < jac->nsplits; i++) { 338 PetscCall(PetscDrawPushCurrentPoint(draw, x, y)); 339 PetscCall(KSPView(ilink->ksp, viewer)); 340 PetscCall(PetscDrawPopCurrentPoint(draw)); 341 x += wd; 342 ilink = ilink->next; 343 } 344 } 345 PetscFunctionReturn(PETSC_SUCCESS); 346 } 347 348 /* Precondition: jac->bs is set to a meaningful value or MATNEST */ 349 static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc) 350 { 351 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 352 PetscInt bs, i, nfields, *ifields, nfields_col, *ifields_col; 353 PetscBool flg, flg_col, mnest; 354 char optionname[128], splitname[8], optionname_col[128]; 355 356 PetscFunctionBegin; 357 PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &mnest)); 358 if (mnest) { 359 PetscCall(MatNestGetSize(pc->pmat, &bs, NULL)); 360 } else { 361 bs = jac->bs; 362 } 363 PetscCall(PetscMalloc2(bs, &ifields, bs, &ifields_col)); 364 for (i = 0, flg = PETSC_TRUE;; i++) { 365 PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i)); 366 PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i)); 367 PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i)); 368 nfields = bs; 369 nfields_col = bs; 370 PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg)); 371 PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col)); 372 if (!flg) break; 373 else if (flg && !flg_col) { 374 PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields"); 375 PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields)); 376 } else { 377 PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields"); 378 PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match"); 379 PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col)); 380 } 381 } 382 if (i > 0) { 383 /* Makes command-line setting of splits take precedence over setting them in code. 384 Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would 385 create new splits, which would probably not be what the user wanted. */ 386 jac->splitdefined = PETSC_TRUE; 387 } 388 PetscCall(PetscFree2(ifields, ifields_col)); 389 PetscFunctionReturn(PETSC_SUCCESS); 390 } 391 392 static PetscErrorCode PCFieldSplitSetDefaults(PC pc) 393 { 394 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 395 PC_FieldSplitLink ilink = jac->head; 396 PetscBool fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE; 397 PetscInt i; 398 399 PetscFunctionBegin; 400 /* 401 Kinda messy, but at least this now uses DMCreateFieldDecomposition(). 402 Should probably be rewritten. 403 */ 404 if (!ilink) { 405 PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL)); 406 if (pc->dm && jac->dm_splits && !jac->detect && !coupling) { 407 PetscInt numFields, f, i, j; 408 char **fieldNames; 409 IS *fields; 410 DM *dms; 411 DM subdm[128]; 412 PetscBool flg; 413 414 PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms)); 415 /* Allow the user to prescribe the splits */ 416 for (i = 0, flg = PETSC_TRUE;; i++) { 417 PetscInt ifields[128]; 418 IS compField; 419 char optionname[128], splitname[8]; 420 PetscInt nfields = numFields; 421 422 PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i)); 423 PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg)); 424 if (!flg) break; 425 PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields); 426 PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i])); 427 if (nfields == 1) { 428 PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField)); 429 } else { 430 PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i)); 431 PetscCall(PCFieldSplitSetIS(pc, splitname, compField)); 432 } 433 PetscCall(ISDestroy(&compField)); 434 for (j = 0; j < nfields; ++j) { 435 f = ifields[j]; 436 PetscCall(PetscFree(fieldNames[f])); 437 PetscCall(ISDestroy(&fields[f])); 438 } 439 } 440 if (i == 0) { 441 for (f = 0; f < numFields; ++f) { 442 PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f])); 443 PetscCall(PetscFree(fieldNames[f])); 444 PetscCall(ISDestroy(&fields[f])); 445 } 446 } else { 447 for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j)); 448 PetscCall(PetscFree(dms)); 449 PetscCall(PetscMalloc1(i, &dms)); 450 for (j = 0; j < i; ++j) dms[j] = subdm[j]; 451 } 452 PetscCall(PetscFree(fieldNames)); 453 PetscCall(PetscFree(fields)); 454 if (dms) { 455 PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n")); 456 for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) { 457 const char *prefix; 458 PetscCall(PetscObjectGetOptionsPrefix((PetscObject)ilink->ksp, &prefix)); 459 PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dms[i], prefix)); 460 PetscCall(KSPSetDM(ilink->ksp, dms[i])); 461 PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE)); 462 PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0)); 463 PetscCall(DMDestroy(&dms[i])); 464 } 465 PetscCall(PetscFree(dms)); 466 } 467 } else { 468 if (jac->bs <= 0) { 469 if (pc->pmat) PetscCall(MatGetBlockSize(pc->pmat, &jac->bs)); 470 else jac->bs = 1; 471 } 472 473 if (jac->detect) { 474 IS zerodiags, rest; 475 PetscInt nmin, nmax; 476 477 PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax)); 478 if (jac->diag_use_amat) { 479 PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags)); 480 } else { 481 PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags)); 482 } 483 PetscCall(ISComplement(zerodiags, nmin, nmax, &rest)); 484 PetscCall(PCFieldSplitSetIS(pc, "0", rest)); 485 PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags)); 486 PetscCall(ISDestroy(&zerodiags)); 487 PetscCall(ISDestroy(&rest)); 488 } else if (coupling) { 489 IS coupling, rest; 490 PetscInt nmin, nmax; 491 492 PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax)); 493 if (jac->offdiag_use_amat) { 494 PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling)); 495 } else { 496 PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling)); 497 } 498 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest)); 499 PetscCall(ISSetIdentity(rest)); 500 PetscCall(PCFieldSplitSetIS(pc, "0", rest)); 501 PetscCall(PCFieldSplitSetIS(pc, "1", coupling)); 502 PetscCall(ISDestroy(&coupling)); 503 PetscCall(ISDestroy(&rest)); 504 } else { 505 PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL)); 506 if (!fieldsplit_default) { 507 /* Allow user to set fields from command line, if bs was known at the time of PCSetFromOptions_FieldSplit() 508 then it is set there. This is not ideal because we should only have options set in XXSetFromOptions(). */ 509 PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc)); 510 if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n")); 511 } 512 if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) { 513 Mat M = pc->pmat; 514 PetscBool isnest; 515 PetscInt nf; 516 517 PetscCall(PetscInfo(pc, "Using default splitting of fields\n")); 518 PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest)); 519 if (!isnest) { 520 M = pc->mat; 521 PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest)); 522 } 523 if (!isnest) nf = jac->bs; 524 else PetscCall(MatNestGetSize(M, &nf, NULL)); 525 for (i = 0; i < nf; i++) { 526 char splitname[8]; 527 528 PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i)); 529 PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i)); 530 } 531 jac->defaultsplit = PETSC_TRUE; 532 } 533 } 534 } 535 } else if (jac->nsplits == 1) { 536 IS is2; 537 PetscInt nmin, nmax; 538 539 PetscCheck(ilink->is, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()"); 540 PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax)); 541 PetscCall(ISComplement(ilink->is, nmin, nmax, &is2)); 542 PetscCall(PCFieldSplitSetIS(pc, "1", is2)); 543 PetscCall(ISDestroy(&is2)); 544 } 545 546 PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits); 547 PetscFunctionReturn(PETSC_SUCCESS); 548 } 549 550 static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu) 551 { 552 Mat BT, T; 553 PetscReal nrmT, nrmB; 554 555 PetscFunctionBegin; 556 PetscCall(MatHermitianTranspose(C, MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */ 557 PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN)); 558 PetscCall(MatNorm(T, NORM_1, &nrmT)); 559 PetscCall(MatNorm(B, NORM_1, &nrmB)); 560 PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/hermitian, GKB is not applicable."); 561 562 /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */ 563 /* setting N := 1/nu*I in [Ar13]. */ 564 PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT)); 565 PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_CURRENT, H)); /* H = A01*A01' */ 566 PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN)); /* H = A00 + nu*A01*A01' */ 567 568 PetscCall(MatDestroy(&BT)); 569 PetscCall(MatDestroy(&T)); 570 PetscFunctionReturn(PETSC_SUCCESS); 571 } 572 573 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *option[], const char *value[], PetscBool *flg); 574 575 static PetscErrorCode PCSetUp_FieldSplit(PC pc) 576 { 577 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 578 PC_FieldSplitLink ilink; 579 PetscInt i, nsplit; 580 PetscBool matnest = PETSC_FALSE; 581 582 PetscFunctionBegin; 583 pc->failedreason = PC_NOERROR; 584 PetscCall(PCFieldSplitSetDefaults(pc)); 585 nsplit = jac->nsplits; 586 ilink = jac->head; 587 if (pc->pmat) PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest)); 588 589 /* get the matrices for each split */ 590 if (!jac->issetup) { 591 PetscInt rstart, rend, nslots, bs; 592 593 jac->issetup = PETSC_TRUE; 594 595 /* This is done here instead of in PCFieldSplitSetFields() because may not have matrix at that point */ 596 if (jac->defaultsplit || !ilink->is) { 597 if (jac->bs <= 0) jac->bs = nsplit; 598 } 599 600 /* MatCreateSubMatrix() for [S]BAIJ matrices can only work if the indices include entire blocks of the matrix */ 601 PetscCall(MatGetBlockSize(pc->pmat, &bs)); 602 if (bs > 1 && (jac->bs <= bs || jac->bs % bs)) { 603 PetscBool blk; 604 605 PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &blk, MATBAIJ, MATSBAIJ, MATSEQBAIJ, MATSEQSBAIJ, MATMPIBAIJ, MATMPISBAIJ, NULL)); 606 PetscCheck(!blk, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Cannot use MATBAIJ with PCFIELDSPLIT and currently set matrix and PC blocksizes"); 607 } 608 609 if (!matnest) { /* use the matrix blocksize and stride IS to determine the index sets that define the submatrices */ 610 bs = jac->bs; 611 PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend)); 612 nslots = (rend - rstart) / bs; 613 for (i = 0; i < nsplit; i++) { 614 if (jac->defaultsplit) { 615 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is)); 616 PetscCall(PetscObjectReference((PetscObject)ilink->is)); 617 ilink->is_col = ilink->is; 618 } else if (!ilink->is) { 619 PetscBool same_fields = PETSC_TRUE; 620 621 for (PetscInt k = 0; k < ilink->nfields; k++) { 622 if (ilink->fields[k] != ilink->fields_col[k]) same_fields = PETSC_FALSE; 623 } 624 625 if (ilink->nfields > 1) { 626 PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col; 627 628 PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii)); 629 if (!same_fields) PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj)); 630 for (j = 0; j < nslots; j++) { 631 for (k = 0; k < nfields; k++) { 632 ii[nfields * j + k] = rstart + bs * j + fields[k]; 633 if (!same_fields) jj[nfields * j + k] = rstart + bs * j + fields_col[k]; 634 } 635 } 636 PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is)); 637 if (!same_fields) PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col)); 638 else { 639 PetscCall(PetscObjectReference((PetscObject)ilink->is)); 640 ilink->is_col = ilink->is; 641 } 642 PetscCall(ISSetBlockSize(ilink->is, nfields)); 643 PetscCall(ISSetBlockSize(ilink->is_col, nfields)); 644 } else { 645 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is)); 646 if (!same_fields) PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col)); 647 else { 648 PetscCall(PetscObjectReference((PetscObject)ilink->is)); 649 ilink->is_col = ilink->is; 650 } 651 } 652 } 653 ilink = ilink->next; 654 } 655 } else { /* use the IS that define the MATNEST to determine the index sets that define the submatrices */ 656 IS *rowis, *colis, *ises = NULL; 657 PetscInt mis, nis; 658 659 PetscCall(MatNestGetSize(pc->pmat, &mis, &nis)); 660 PetscCall(PetscMalloc2(mis, &rowis, nis, &colis)); 661 PetscCall(MatNestGetISs(pc->pmat, rowis, colis)); 662 if (!jac->defaultsplit) PetscCall(PetscMalloc1(mis, &ises)); 663 664 for (i = 0; i < nsplit; i++) { 665 if (jac->defaultsplit) { 666 PetscCall(ISDuplicate(rowis[i], &ilink->is)); 667 PetscCall(PetscObjectReference((PetscObject)ilink->is)); 668 ilink->is_col = ilink->is; 669 } else if (!ilink->is) { 670 if (ilink->nfields > 1) { 671 for (PetscInt j = 0; j < ilink->nfields; j++) ises[j] = rowis[ilink->fields[j]]; 672 PetscCall(ISConcatenate(PetscObjectComm((PetscObject)pc), ilink->nfields, ises, &ilink->is)); 673 } else { 674 PetscCall(ISDuplicate(rowis[ilink->fields[0]], &ilink->is)); 675 } 676 PetscCall(PetscObjectReference((PetscObject)ilink->is)); 677 ilink->is_col = ilink->is; 678 } 679 ilink = ilink->next; 680 } 681 PetscCall(PetscFree2(rowis, colis)); 682 PetscCall(PetscFree(ises)); 683 } 684 } 685 686 ilink = jac->head; 687 if (!jac->pmat) { 688 Vec xtmp; 689 690 PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL)); 691 PetscCall(PetscMalloc1(nsplit, &jac->pmat)); 692 PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y)); 693 for (i = 0; i < nsplit; i++) { 694 MatNullSpace sp; 695 696 /* Check for matrix attached to IS */ 697 PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i])); 698 if (jac->pmat[i]) { 699 PetscCall(PetscObjectReference((PetscObject)jac->pmat[i])); 700 if (jac->type == PC_COMPOSITE_SCHUR) { 701 jac->schur_user = jac->pmat[i]; 702 703 PetscCall(PetscObjectReference((PetscObject)jac->schur_user)); 704 } 705 } else { 706 const char *prefix; 707 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i])); 708 PetscCall(MatGetOptionsPrefix(jac->pmat[i], &prefix)); 709 if (!prefix) { 710 PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix)); 711 PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix)); 712 } 713 PetscCall(MatSetFromOptions(jac->pmat[i])); 714 PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view")); 715 } 716 /* create work vectors for each split */ 717 PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i])); 718 ilink->x = jac->x[i]; 719 ilink->y = jac->y[i]; 720 ilink->z = NULL; 721 /* compute scatter contexts needed by multiplicative versions and non-default splits */ 722 PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx)); 723 PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp)); 724 if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp)); 725 ilink = ilink->next; 726 } 727 PetscCall(VecDestroy(&xtmp)); 728 } else { 729 MatReuse scall; 730 MatNullSpace *nullsp = NULL; 731 732 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 733 PetscCall(MatGetNullSpaces(nsplit, jac->pmat, &nullsp)); 734 for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i])); 735 scall = MAT_INITIAL_MATRIX; 736 } else scall = MAT_REUSE_MATRIX; 737 738 for (i = 0; i < nsplit; i++) { 739 Mat pmat; 740 741 /* Check for matrix attached to IS */ 742 PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat)); 743 if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i])); 744 ilink = ilink->next; 745 } 746 if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->pmat, &nullsp)); 747 } 748 if (jac->diag_use_amat) { 749 ilink = jac->head; 750 if (!jac->mat) { 751 PetscCall(PetscMalloc1(nsplit, &jac->mat)); 752 for (i = 0; i < nsplit; i++) { 753 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i])); 754 ilink = ilink->next; 755 } 756 } else { 757 MatReuse scall; 758 MatNullSpace *nullsp = NULL; 759 760 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 761 PetscCall(MatGetNullSpaces(nsplit, jac->mat, &nullsp)); 762 for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i])); 763 scall = MAT_INITIAL_MATRIX; 764 } else scall = MAT_REUSE_MATRIX; 765 766 for (i = 0; i < nsplit; i++) { 767 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i])); 768 ilink = ilink->next; 769 } 770 if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->mat, &nullsp)); 771 } 772 } else { 773 jac->mat = jac->pmat; 774 } 775 776 /* Check for null space attached to IS */ 777 ilink = jac->head; 778 for (i = 0; i < nsplit; i++) { 779 MatNullSpace sp; 780 781 PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp)); 782 if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp)); 783 ilink = ilink->next; 784 } 785 786 if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) { 787 /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */ 788 /* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? */ 789 ilink = jac->head; 790 if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) { 791 /* special case need where Afield[0] is not needed and only certain columns of Afield[1] are needed since update is only on those rows of the solution */ 792 if (!jac->Afield) { 793 PetscCall(PetscCalloc1(nsplit, &jac->Afield)); 794 if (jac->offdiag_use_amat) { 795 PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1])); 796 } else { 797 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1])); 798 } 799 } else { 800 MatReuse scall; 801 802 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 803 PetscCall(MatDestroy(&jac->Afield[1])); 804 scall = MAT_INITIAL_MATRIX; 805 } else scall = MAT_REUSE_MATRIX; 806 807 if (jac->offdiag_use_amat) { 808 PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1])); 809 } else { 810 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, &jac->Afield[1])); 811 } 812 } 813 } else { 814 if (!jac->Afield) { 815 PetscCall(PetscMalloc1(nsplit, &jac->Afield)); 816 for (i = 0; i < nsplit; i++) { 817 if (jac->offdiag_use_amat) { 818 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i])); 819 } else { 820 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i])); 821 } 822 ilink = ilink->next; 823 } 824 } else { 825 MatReuse scall; 826 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 827 for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i])); 828 scall = MAT_INITIAL_MATRIX; 829 } else scall = MAT_REUSE_MATRIX; 830 831 for (i = 0; i < nsplit; i++) { 832 if (jac->offdiag_use_amat) { 833 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i])); 834 } else { 835 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, &jac->Afield[i])); 836 } 837 ilink = ilink->next; 838 } 839 } 840 } 841 } 842 843 if (jac->type == PC_COMPOSITE_SCHUR) { 844 IS ccis; 845 PetscBool isset, isspd = PETSC_FALSE, issym = PETSC_FALSE, flg; 846 PetscInt rstart, rend; 847 char lscname[256]; 848 PetscObject LSC_L; 849 850 PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields"); 851 852 /* If pc->mat is SPD, don't scale by -1 the Schur complement */ 853 PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd)); 854 if (jac->schurscale == (PetscScalar)-1.0) jac->schurscale = (isset && isspd) ? 1.0 : -1.0; 855 PetscCall(MatIsSymmetricKnown(pc->pmat, &isset, &issym)); 856 857 /* When extracting off-diagonal submatrices, we take complements from this range */ 858 PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend)); 859 PetscCall(PetscObjectTypeCompareAny(jac->offdiag_use_amat ? (PetscObject)pc->mat : (PetscObject)pc->pmat, &flg, MATSEQSBAIJ, MATMPISBAIJ, "")); 860 861 if (jac->schur) { 862 KSP kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper; 863 MatReuse scall; 864 865 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 866 scall = MAT_INITIAL_MATRIX; 867 PetscCall(MatDestroy(&jac->B)); 868 PetscCall(MatDestroy(&jac->C)); 869 } else scall = MAT_REUSE_MATRIX; 870 871 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner)); 872 ilink = jac->head; 873 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 874 if (jac->offdiag_use_amat) { 875 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->B)); 876 } else { 877 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->B)); 878 } 879 PetscCall(ISDestroy(&ccis)); 880 if (!flg) { 881 ilink = ilink->next; 882 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 883 if (jac->offdiag_use_amat) { 884 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->C)); 885 } else { 886 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->C)); 887 } 888 PetscCall(ISDestroy(&ccis)); 889 } else { 890 PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &isset, &flg)); 891 if (isset && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C)); 892 else PetscCall(MatCreateTranspose(jac->B, &jac->C)); 893 } 894 PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1])); 895 if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) { 896 PetscCall(MatDestroy(&jac->schurp)); 897 PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp)); 898 } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL && jac->kspupper != jac->head->ksp) { 899 PetscCall(MatDestroy(&jac->schur_user)); 900 PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user)); 901 } 902 if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0])); 903 if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0])); 904 PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac))); 905 } else { 906 const char *Dprefix; 907 char schurprefix[256], schurmatprefix[256]; 908 char schurtestoption[256]; 909 MatNullSpace sp; 910 KSP kspt; 911 912 /* extract the A01 and A10 matrices */ 913 ilink = jac->head; 914 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 915 if (jac->offdiag_use_amat) { 916 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 917 } else { 918 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 919 } 920 PetscCall(ISDestroy(&ccis)); 921 ilink = ilink->next; 922 if (!flg) { 923 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 924 if (jac->offdiag_use_amat) { 925 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 926 } else { 927 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 928 } 929 PetscCall(ISDestroy(&ccis)); 930 } else { 931 PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &isset, &flg)); 932 if (isset && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C)); 933 else PetscCall(MatCreateTranspose(jac->B, &jac->C)); 934 } 935 /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */ 936 PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur)); 937 PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT)); 938 PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1])); 939 PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname)); 940 PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix)); 941 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt)); 942 PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix)); 943 944 /* Note: this is not true in general */ 945 PetscCall(MatGetNullSpace(jac->mat[1], &sp)); 946 if (sp) PetscCall(MatSetNullSpace(jac->schur, sp)); 947 948 PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname)); 949 PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg)); 950 if (flg) { 951 DM dmInner; 952 KSP kspInner; 953 PC pcInner; 954 955 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner)); 956 PetscCall(KSPReset(kspInner)); 957 PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0])); 958 PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname)); 959 /* Indent this deeper to emphasize the "inner" nature of this solver. */ 960 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2)); 961 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2)); 962 PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix)); 963 964 /* Set DM for new solver */ 965 PetscCall(KSPGetDM(jac->head->ksp, &dmInner)); 966 PetscCall(KSPSetDM(kspInner, dmInner)); 967 PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE)); 968 969 /* Defaults to PCKSP as preconditioner */ 970 PetscCall(KSPGetPC(kspInner, &pcInner)); 971 PetscCall(PCSetType(pcInner, PCKSP)); 972 PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp)); 973 } else { 974 /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or 975 * PCFieldSplitSetIS_FieldSplit. We don't want KSPPREONLY because it makes the Schur complement inexact, 976 * preventing Schur complement reduction to be an accurate solve. Usually when an iterative solver is used for 977 * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make 978 * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used 979 * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. */ 980 PetscCall(KSPSetType(jac->head->ksp, KSPGMRES)); 981 PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp)); 982 } 983 PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0])); 984 PetscCall(KSPSetFromOptions(jac->head->ksp)); 985 PetscCall(MatSetFromOptions(jac->schur)); 986 987 PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg)); 988 if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */ 989 KSP kspInner; 990 PC pcInner; 991 992 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner)); 993 PetscCall(KSPGetPC(kspInner, &pcInner)); 994 PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg)); 995 if (flg) { 996 KSP ksp; 997 998 PetscCall(PCKSPGetKSP(pcInner, &ksp)); 999 if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE)); 1000 } 1001 } 1002 PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname)); 1003 PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg)); 1004 if (flg) { 1005 DM dmInner; 1006 1007 PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname)); 1008 PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper)); 1009 PetscCall(KSPSetNestLevel(jac->kspupper, pc->kspnestlevel)); 1010 PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure)); 1011 PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix)); 1012 PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1)); 1013 PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1)); 1014 PetscCall(KSPGetDM(jac->head->ksp, &dmInner)); 1015 PetscCall(KSPSetDM(jac->kspupper, dmInner)); 1016 PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE)); 1017 PetscCall(KSPSetFromOptions(jac->kspupper)); 1018 PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0])); 1019 PetscCall(VecDuplicate(jac->head->x, &jac->head->z)); 1020 } else { 1021 jac->kspupper = jac->head->ksp; 1022 PetscCall(PetscObjectReference((PetscObject)jac->head->ksp)); 1023 } 1024 1025 if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp)); 1026 PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur)); 1027 PetscCall(KSPSetNestLevel(jac->kspschur, pc->kspnestlevel)); 1028 PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure)); 1029 PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1)); 1030 if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) { 1031 PC pcschur; 1032 PetscCall(KSPGetPC(jac->kspschur, &pcschur)); 1033 PetscCall(PCSetType(pcschur, PCNONE)); 1034 /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */ 1035 } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) { 1036 if (jac->schurfactorization != PC_FIELDSPLIT_SCHUR_FACT_FULL || jac->kspupper != jac->head->ksp) PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user)); 1037 } 1038 PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac))); 1039 PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix)); 1040 PetscCall(KSPSetOptionsPrefix(jac->kspschur, Dprefix)); 1041 /* propagate DM */ 1042 { 1043 DM sdm; 1044 PetscCall(KSPGetDM(jac->head->next->ksp, &sdm)); 1045 if (sdm) { 1046 PetscCall(KSPSetDM(jac->kspschur, sdm)); 1047 PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE)); 1048 } 1049 } 1050 /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */ 1051 /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */ 1052 PetscCall(KSPSetFromOptions(jac->kspschur)); 1053 } 1054 PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY)); 1055 PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY)); 1056 if (issym) PetscCall(MatSetOption(jac->schur, MAT_SYMMETRIC, PETSC_TRUE)); 1057 if (isspd) PetscCall(MatSetOption(jac->schur, MAT_SPD, PETSC_TRUE)); 1058 1059 /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */ 1060 PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname)); 1061 PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, &LSC_L)); 1062 if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, &LSC_L)); 1063 if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", LSC_L)); 1064 PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname)); 1065 PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, &LSC_L)); 1066 if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, &LSC_L)); 1067 if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", LSC_L)); 1068 } else if (jac->type == PC_COMPOSITE_GKB) { 1069 IS ccis; 1070 PetscInt rstart, rend; 1071 1072 PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields"); 1073 1074 ilink = jac->head; 1075 1076 /* When extracting off-diagonal submatrices, we take complements from this range */ 1077 PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend)); 1078 1079 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 1080 if (jac->offdiag_use_amat) { 1081 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 1082 } else { 1083 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 1084 } 1085 PetscCall(ISDestroy(&ccis)); 1086 /* Create work vectors for GKB algorithm */ 1087 PetscCall(VecDuplicate(ilink->x, &jac->u)); 1088 PetscCall(VecDuplicate(ilink->x, &jac->Hu)); 1089 PetscCall(VecDuplicate(ilink->x, &jac->w2)); 1090 ilink = ilink->next; 1091 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 1092 if (jac->offdiag_use_amat) { 1093 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 1094 } else { 1095 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 1096 } 1097 PetscCall(ISDestroy(&ccis)); 1098 /* Create work vectors for GKB algorithm */ 1099 PetscCall(VecDuplicate(ilink->x, &jac->v)); 1100 PetscCall(VecDuplicate(ilink->x, &jac->d)); 1101 PetscCall(VecDuplicate(ilink->x, &jac->w1)); 1102 PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu)); 1103 PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz)); 1104 1105 ilink = jac->head; 1106 PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H)); 1107 if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp)); 1108 /* Create gkb_monitor context */ 1109 if (jac->gkbmonitor) { 1110 PetscInt tablevel; 1111 PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer)); 1112 PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII)); 1113 PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel)); 1114 PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel)); 1115 PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1)); 1116 } 1117 } else { 1118 /* set up the individual splits' PCs */ 1119 i = 0; 1120 ilink = jac->head; 1121 while (ilink) { 1122 PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i])); 1123 /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */ 1124 if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp)); 1125 i++; 1126 ilink = ilink->next; 1127 } 1128 } 1129 1130 /* Set coordinates to the sub PC objects whenever these are set */ 1131 if (jac->coordinates_set) { 1132 PC pc_coords; 1133 if (jac->type == PC_COMPOSITE_SCHUR) { 1134 // Head is first block. 1135 PetscCall(KSPGetPC(jac->head->ksp, &pc_coords)); 1136 PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords)); 1137 // Second one is Schur block, but its KSP object is in kspschur. 1138 PetscCall(KSPGetPC(jac->kspschur, &pc_coords)); 1139 PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords)); 1140 } else if (jac->type == PC_COMPOSITE_GKB) { 1141 PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB Fieldpslit preconditioner\n")); 1142 } else { 1143 ilink = jac->head; 1144 while (ilink) { 1145 PetscCall(KSPGetPC(ilink->ksp, &pc_coords)); 1146 PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords)); 1147 ilink = ilink->next; 1148 } 1149 } 1150 } 1151 1152 jac->suboptionsset = PETSC_TRUE; 1153 PetscFunctionReturn(PETSC_SUCCESS); 1154 } 1155 1156 static PetscErrorCode PCSetUpOnBlocks_FieldSplit_Schur(PC pc) 1157 { 1158 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1159 PC_FieldSplitLink ilinkA = jac->head; 1160 KSP kspA = ilinkA->ksp, kspUpper = jac->kspupper; 1161 1162 PetscFunctionBegin; 1163 if (jac->schurfactorization == PC_FIELDSPLIT_SCHUR_FACT_FULL && kspUpper != kspA) { 1164 PetscCall(KSPSetUp(kspUpper)); 1165 PetscCall(KSPSetUpOnBlocks(kspUpper)); 1166 } 1167 PetscCall(KSPSetUp(kspA)); 1168 PetscCall(KSPSetUpOnBlocks(kspA)); 1169 if (jac->schurpre != PC_FIELDSPLIT_SCHUR_PRE_FULL) { 1170 PetscCall(KSPSetUp(jac->kspschur)); 1171 PetscCall(KSPSetUpOnBlocks(jac->kspschur)); 1172 } else if (kspUpper == kspA) { 1173 Mat A; 1174 PetscInt m, M, N; 1175 VecType vtype; 1176 PetscMemType mtype; 1177 PetscScalar *array; 1178 1179 PetscCall(MatGetSize(jac->B, &M, &N)); 1180 PetscCall(MatGetLocalSize(jac->B, &m, NULL)); 1181 PetscCall(MatGetVecType(jac->B, &vtype)); 1182 PetscCall(VecGetArrayAndMemType(ilinkA->x, &array, &mtype)); 1183 PetscCall(VecRestoreArrayAndMemType(ilinkA->x, &array)); 1184 if (PetscMemTypeHost(mtype) || (!PetscDefined(HAVE_CUDA) && !PetscDefined(HAVE_HIP))) PetscCall(PetscMalloc1(m * (N + 1), &array)); 1185 #if PetscDefined(HAVE_CUDA) 1186 else if (PetscMemTypeCUDA(mtype)) PetscCallCUDA(cudaMalloc((void **)&array, sizeof(PetscScalar) * m * (N + 1))); 1187 #endif 1188 #if PetscDefined(HAVE_HIP) 1189 else if (PetscMemTypeHIP(mtype)) PetscCallHIP(hipMalloc((void **)&array, sizeof(PetscScalar) * m * (N + 1))); 1190 #endif 1191 PetscCall(MatCreateDenseFromVecType(PetscObjectComm((PetscObject)jac->schur), vtype, m, PETSC_DECIDE, M, N + 1, -1, array, &A)); // number of columns of the Schur complement plus one 1192 PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", (PetscObject)A)); 1193 PetscCall(MatDestroy(&A)); 1194 } 1195 PetscFunctionReturn(PETSC_SUCCESS); 1196 } 1197 1198 static PetscErrorCode PCSetUpOnBlocks_FieldSplit(PC pc) 1199 { 1200 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1201 PC_FieldSplitLink ilink = jac->head; 1202 1203 PetscFunctionBegin; 1204 while (ilink) { 1205 PetscCall(KSPSetUp(ilink->ksp)); 1206 PetscCall(KSPSetUpOnBlocks(ilink->ksp)); 1207 ilink = ilink->next; 1208 } 1209 PetscFunctionReturn(PETSC_SUCCESS); 1210 } 1211 1212 static PetscErrorCode PCSetUpOnBlocks_FieldSplit_GKB(PC pc) 1213 { 1214 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1215 PC_FieldSplitLink ilinkA = jac->head; 1216 KSP ksp = ilinkA->ksp; 1217 1218 PetscFunctionBegin; 1219 PetscCall(KSPSetUp(ksp)); 1220 PetscCall(KSPSetUpOnBlocks(ksp)); 1221 PetscFunctionReturn(PETSC_SUCCESS); 1222 } 1223 1224 static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y) 1225 { 1226 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1227 PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; 1228 KSP kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper; 1229 Mat AinvB = NULL; 1230 PetscInt N, P; 1231 1232 PetscFunctionBegin; 1233 switch (jac->schurfactorization) { 1234 case PC_FIELDSPLIT_SCHUR_FACT_DIAG: 1235 /* [A00 0; 0 -S], positive definite, suitable for MINRES */ 1236 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1237 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1238 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1239 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1240 PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y)); 1241 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1242 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1243 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1244 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1245 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1246 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); 1247 PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y)); 1248 PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1249 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); 1250 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1251 PetscCall(VecScale(ilinkD->y, jac->schurscale)); 1252 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1253 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1254 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1255 break; 1256 case PC_FIELDSPLIT_SCHUR_FACT_LOWER: 1257 /* [A00 0; A10 S], suitable for left preconditioning */ 1258 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1259 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1260 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1261 PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y)); 1262 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1263 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1264 PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x)); 1265 PetscCall(VecScale(ilinkD->x, -1.)); 1266 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1267 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1268 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1269 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1270 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); 1271 PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y)); 1272 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); 1273 PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1274 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1275 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1276 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1277 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1278 break; 1279 case PC_FIELDSPLIT_SCHUR_FACT_UPPER: 1280 /* [A00 A01; 0 S], suitable for right preconditioning */ 1281 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1282 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1283 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1284 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); 1285 PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y)); 1286 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); 1287 PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1288 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1289 PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x)); 1290 PetscCall(VecScale(ilinkA->x, -1.)); 1291 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD)); 1292 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1293 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD)); 1294 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1295 PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y)); 1296 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1297 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1298 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1299 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1300 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1301 break; 1302 case PC_FIELDSPLIT_SCHUR_FACT_FULL: 1303 /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */ 1304 PetscCall(MatGetSize(jac->B, NULL, &P)); 1305 N = P; 1306 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1307 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1308 PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL)); 1309 if (kspUpper == kspA) { 1310 PetscCall(PetscObjectQuery((PetscObject)jac->schur, "AinvB", (PetscObject *)&AinvB)); 1311 if (AinvB) { 1312 PetscCall(MatGetSize(AinvB, NULL, &N)); 1313 if (N > P) { // first time PCApply_FieldSplit_Schur() is called 1314 PetscMemType mtype; 1315 Vec c = NULL; 1316 PetscScalar *array; 1317 PetscInt m, M; 1318 1319 PetscCall(MatGetSize(jac->B, &M, NULL)); 1320 PetscCall(MatGetLocalSize(jac->B, &m, NULL)); 1321 PetscCall(MatDenseGetArrayAndMemType(AinvB, &array, &mtype)); 1322 if (PetscMemTypeHost(mtype) || (!PetscDefined(HAVE_CUDA) && !PetscDefined(HAVE_HIP))) PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c)); 1323 #if PetscDefined(HAVE_CUDA) 1324 else if (PetscMemTypeCUDA(mtype)) PetscCall(VecCreateMPICUDAWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c)); 1325 #endif 1326 #if PetscDefined(HAVE_HIP) 1327 else if (PetscMemTypeHIP(mtype)) PetscCall(VecCreateMPIHIPWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c)); 1328 #endif 1329 PetscCall(MatDenseRestoreArrayAndMemType(AinvB, &array)); 1330 PetscCall(VecCopy(ilinkA->x, c)); 1331 PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user)); 1332 PetscCall(KSPSetOperators(jac->kspschur, jac->schur, jac->schur_user)); 1333 PetscCall(VecCopy(c, ilinkA->y)); // retrieve the solution as the last column of the composed Mat 1334 PetscCall(VecDestroy(&c)); 1335 } 1336 } 1337 } 1338 if (N == P) PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y)); 1339 PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y)); 1340 PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL)); 1341 PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x)); 1342 PetscCall(VecScale(ilinkD->x, -1.0)); 1343 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1344 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1345 1346 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1347 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); 1348 PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y)); 1349 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); 1350 PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1351 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1352 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1353 1354 if (kspUpper == kspA) { 1355 if (!AinvB) { 1356 PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y)); 1357 PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y)); 1358 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1359 PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y)); 1360 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1361 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1362 } else PetscCall(MatMultAdd(AinvB, ilinkD->y, ilinkA->y, ilinkA->y)); 1363 } else { 1364 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1365 PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y)); 1366 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1367 PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x)); 1368 PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL)); 1369 PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z)); 1370 PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z)); 1371 PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL)); 1372 PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z)); 1373 } 1374 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1375 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1376 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1377 } 1378 PetscFunctionReturn(PETSC_SUCCESS); 1379 } 1380 1381 static PetscErrorCode PCApplyTranspose_FieldSplit_Schur(PC pc, Vec x, Vec y) 1382 { 1383 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1384 PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; 1385 KSP kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper; 1386 1387 PetscFunctionBegin; 1388 switch (jac->schurfactorization) { 1389 case PC_FIELDSPLIT_SCHUR_FACT_DIAG: 1390 /* [A00 0; 0 -S], positive definite, suitable for MINRES */ 1391 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1392 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1393 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1394 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1395 PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y)); 1396 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1397 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1398 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1399 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1400 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1401 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); 1402 PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y)); 1403 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); 1404 PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1405 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1406 PetscCall(VecScale(ilinkD->y, jac->schurscale)); 1407 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1408 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1409 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1410 break; 1411 case PC_FIELDSPLIT_SCHUR_FACT_UPPER: 1412 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1413 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1414 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1415 PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y)); 1416 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1417 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1418 PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x)); 1419 PetscCall(VecScale(ilinkD->x, -1.)); 1420 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1421 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1422 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1423 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1424 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); 1425 PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y)); 1426 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); 1427 PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1428 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1429 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1430 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1431 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1432 break; 1433 case PC_FIELDSPLIT_SCHUR_FACT_LOWER: 1434 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1435 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1436 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1437 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); 1438 PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y)); 1439 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); 1440 PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1441 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1442 PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x)); 1443 PetscCall(VecScale(ilinkA->x, -1.)); 1444 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD)); 1445 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1446 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD)); 1447 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1448 PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y)); 1449 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1450 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1451 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1452 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1453 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1454 break; 1455 case PC_FIELDSPLIT_SCHUR_FACT_FULL: 1456 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1457 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1458 PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL)); 1459 PetscCall(KSPSolveTranspose(kspUpper, ilinkA->x, ilinkA->y)); 1460 PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->y)); 1461 PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL)); 1462 PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x)); 1463 PetscCall(VecScale(ilinkD->x, -1.0)); 1464 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1465 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1466 1467 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1468 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); 1469 PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y)); 1470 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); 1471 PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1472 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1473 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1474 1475 if (kspLower == kspA) { 1476 PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->y)); 1477 PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y)); 1478 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1479 PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y)); 1480 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1481 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1482 } else { 1483 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1484 PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y)); 1485 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1486 PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x)); 1487 PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL)); 1488 PetscCall(KSPSolveTranspose(kspLower, ilinkA->x, ilinkA->z)); 1489 PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->z)); 1490 PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL)); 1491 PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z)); 1492 } 1493 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1494 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1495 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1496 } 1497 PetscFunctionReturn(PETSC_SUCCESS); 1498 } 1499 1500 #define FieldSplitSplitSolveAdd(ilink, xx, yy) \ 1501 ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \ 1502 KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \ 1503 VecScatterEnd(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE))) 1504 1505 static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y) 1506 { 1507 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1508 PC_FieldSplitLink ilink = jac->head; 1509 PetscInt cnt, bs; 1510 1511 PetscFunctionBegin; 1512 if (jac->type == PC_COMPOSITE_ADDITIVE) { 1513 PetscBool matnest; 1514 1515 PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest)); 1516 if (jac->defaultsplit && !matnest) { 1517 PetscCall(VecGetBlockSize(x, &bs)); 1518 PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs); 1519 PetscCall(VecGetBlockSize(y, &bs)); 1520 PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs); 1521 PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES)); 1522 while (ilink) { 1523 PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1524 PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y)); 1525 PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y)); 1526 PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1527 ilink = ilink->next; 1528 } 1529 PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES)); 1530 } else { 1531 PetscCall(VecSet(y, 0.0)); 1532 while (ilink) { 1533 PetscCall(FieldSplitSplitSolveAdd(ilink, x, y)); 1534 ilink = ilink->next; 1535 } 1536 } 1537 } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) { 1538 PetscCall(VecSet(y, 0.0)); 1539 /* solve on first block for first block variables */ 1540 PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD)); 1541 PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD)); 1542 PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1543 PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y)); 1544 PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y)); 1545 PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1546 PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE)); 1547 PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE)); 1548 1549 /* compute the residual only onto second block variables using first block variables */ 1550 PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x)); 1551 ilink = ilink->next; 1552 PetscCall(VecScale(ilink->x, -1.0)); 1553 PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD)); 1554 PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD)); 1555 1556 /* solve on second block variables */ 1557 PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1558 PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y)); 1559 PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y)); 1560 PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1561 PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE)); 1562 PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE)); 1563 } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) { 1564 if (!jac->w1) { 1565 PetscCall(VecDuplicate(x, &jac->w1)); 1566 PetscCall(VecDuplicate(x, &jac->w2)); 1567 } 1568 PetscCall(VecSet(y, 0.0)); 1569 PetscCall(FieldSplitSplitSolveAdd(ilink, x, y)); 1570 cnt = 1; 1571 while (ilink->next) { 1572 ilink = ilink->next; 1573 /* compute the residual only over the part of the vector needed */ 1574 PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x)); 1575 PetscCall(VecScale(ilink->x, -1.0)); 1576 PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD)); 1577 PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD)); 1578 PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1579 PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y)); 1580 PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y)); 1581 PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1582 PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE)); 1583 PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE)); 1584 } 1585 if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) { 1586 cnt -= 2; 1587 while (ilink->previous) { 1588 ilink = ilink->previous; 1589 /* compute the residual only over the part of the vector needed */ 1590 PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x)); 1591 PetscCall(VecScale(ilink->x, -1.0)); 1592 PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD)); 1593 PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD)); 1594 PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1595 PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y)); 1596 PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y)); 1597 PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1598 PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE)); 1599 PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE)); 1600 } 1601 } 1602 } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type); 1603 PetscFunctionReturn(PETSC_SUCCESS); 1604 } 1605 1606 static PetscErrorCode PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y) 1607 { 1608 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1609 PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; 1610 KSP ksp = ilinkA->ksp; 1611 Vec u, v, Hu, d, work1, work2; 1612 PetscScalar alpha, z, nrmz2, *vecz; 1613 PetscReal lowbnd, nu, beta; 1614 PetscInt j, iterGKB; 1615 1616 PetscFunctionBegin; 1617 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1618 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1619 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1620 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1621 1622 u = jac->u; 1623 v = jac->v; 1624 Hu = jac->Hu; 1625 d = jac->d; 1626 work1 = jac->w1; 1627 work2 = jac->w2; 1628 vecz = jac->vecz; 1629 1630 /* Change RHS to comply with matrix regularization H = A + nu*B*B' */ 1631 /* Add q = q + nu*B*b */ 1632 if (jac->gkbnu) { 1633 nu = jac->gkbnu; 1634 PetscCall(VecScale(ilinkD->x, jac->gkbnu)); 1635 PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */ 1636 } else { 1637 /* Situation when no augmented Lagrangian is used. Then we set inner */ 1638 /* matrix N = I in [Ar13], and thus nu = 1. */ 1639 nu = 1; 1640 } 1641 1642 /* Transform rhs from [q,tilde{b}] to [0,b] */ 1643 PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL)); 1644 PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y)); 1645 PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y)); 1646 PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL)); 1647 PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1)); 1648 PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x */ 1649 1650 /* First step of algorithm */ 1651 PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c)*/ 1652 KSPCheckDot(ksp, beta); 1653 beta = PetscSqrtReal(nu) * beta; 1654 PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c */ 1655 PetscCall(MatMult(jac->B, v, work2)); /* u = H^{-1}*B*v */ 1656 PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL)); 1657 PetscCall(KSPSolve(ksp, work2, u)); 1658 PetscCall(KSPCheckSolve(ksp, pc, u)); 1659 PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL)); 1660 PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u */ 1661 PetscCall(VecDot(Hu, u, &alpha)); 1662 KSPCheckDot(ksp, alpha); 1663 PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite"); 1664 alpha = PetscSqrtReal(PetscAbsScalar(alpha)); 1665 PetscCall(VecScale(u, 1.0 / alpha)); 1666 PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* v = nu/beta *c */ 1667 1668 z = beta / alpha; 1669 vecz[1] = z; 1670 1671 /* Computation of first iterate x(1) and p(1) */ 1672 PetscCall(VecAXPY(ilinkA->y, z, u)); 1673 PetscCall(VecCopy(d, ilinkD->y)); 1674 PetscCall(VecScale(ilinkD->y, -z)); 1675 1676 iterGKB = 1; 1677 lowbnd = 2 * jac->gkbtol; 1678 if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd)); 1679 1680 while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) { 1681 iterGKB += 1; 1682 PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */ 1683 PetscCall(VecAXPBY(v, nu, -alpha, work1)); 1684 PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = sqrt(nu)*v'*v */ 1685 beta = beta / PetscSqrtReal(nu); 1686 PetscCall(VecScale(v, 1.0 / beta)); 1687 PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */ 1688 PetscCall(MatMult(jac->H, u, Hu)); 1689 PetscCall(VecAXPY(work2, -beta, Hu)); 1690 PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL)); 1691 PetscCall(KSPSolve(ksp, work2, u)); 1692 PetscCall(KSPCheckSolve(ksp, pc, u)); 1693 PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL)); 1694 PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u */ 1695 PetscCall(VecDot(Hu, u, &alpha)); 1696 KSPCheckDot(ksp, alpha); 1697 PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite"); 1698 alpha = PetscSqrtReal(PetscAbsScalar(alpha)); 1699 PetscCall(VecScale(u, 1.0 / alpha)); 1700 1701 z = -beta / alpha * z; /* z <- beta/alpha*z */ 1702 vecz[0] = z; 1703 1704 /* Computation of new iterate x(i+1) and p(i+1) */ 1705 PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */ 1706 PetscCall(VecAXPY(ilinkA->y, z, u)); /* r = r + z*u */ 1707 PetscCall(VecAXPY(ilinkD->y, -z, d)); /* p = p - z*d */ 1708 PetscCall(MatMult(jac->H, ilinkA->y, Hu)); /* ||u||_H = u'*H*u */ 1709 PetscCall(VecDot(Hu, ilinkA->y, &nrmz2)); 1710 1711 /* Compute Lower Bound estimate */ 1712 if (iterGKB > jac->gkbdelay) { 1713 lowbnd = 0.0; 1714 for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]); 1715 lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2)); 1716 } 1717 1718 for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2]; 1719 if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd)); 1720 } 1721 1722 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1723 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1724 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1725 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1726 PetscFunctionReturn(PETSC_SUCCESS); 1727 } 1728 1729 #define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \ 1730 ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \ 1731 KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \ 1732 VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE))) 1733 1734 static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y) 1735 { 1736 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1737 PC_FieldSplitLink ilink = jac->head; 1738 PetscInt bs; 1739 1740 PetscFunctionBegin; 1741 if (jac->type == PC_COMPOSITE_ADDITIVE) { 1742 PetscBool matnest; 1743 1744 PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest)); 1745 if (jac->defaultsplit && !matnest) { 1746 PetscCall(VecGetBlockSize(x, &bs)); 1747 PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs); 1748 PetscCall(VecGetBlockSize(y, &bs)); 1749 PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs); 1750 PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES)); 1751 while (ilink) { 1752 PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1753 PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y)); 1754 PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y)); 1755 PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1756 ilink = ilink->next; 1757 } 1758 PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES)); 1759 } else { 1760 PetscCall(VecSet(y, 0.0)); 1761 while (ilink) { 1762 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y)); 1763 ilink = ilink->next; 1764 } 1765 } 1766 } else { 1767 if (!jac->w1) { 1768 PetscCall(VecDuplicate(x, &jac->w1)); 1769 PetscCall(VecDuplicate(x, &jac->w2)); 1770 } 1771 PetscCall(VecSet(y, 0.0)); 1772 if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) { 1773 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y)); 1774 while (ilink->next) { 1775 ilink = ilink->next; 1776 PetscCall(MatMultTranspose(pc->mat, y, jac->w1)); 1777 PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); 1778 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y)); 1779 } 1780 while (ilink->previous) { 1781 ilink = ilink->previous; 1782 PetscCall(MatMultTranspose(pc->mat, y, jac->w1)); 1783 PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); 1784 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y)); 1785 } 1786 } else { 1787 while (ilink->next) { /* get to last entry in linked list */ 1788 ilink = ilink->next; 1789 } 1790 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y)); 1791 while (ilink->previous) { 1792 ilink = ilink->previous; 1793 PetscCall(MatMultTranspose(pc->mat, y, jac->w1)); 1794 PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); 1795 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y)); 1796 } 1797 } 1798 } 1799 PetscFunctionReturn(PETSC_SUCCESS); 1800 } 1801 1802 static PetscErrorCode PCReset_FieldSplit(PC pc) 1803 { 1804 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1805 PC_FieldSplitLink ilink = jac->head, next; 1806 1807 PetscFunctionBegin; 1808 while (ilink) { 1809 PetscCall(KSPDestroy(&ilink->ksp)); 1810 PetscCall(VecDestroy(&ilink->x)); 1811 PetscCall(VecDestroy(&ilink->y)); 1812 PetscCall(VecDestroy(&ilink->z)); 1813 PetscCall(VecScatterDestroy(&ilink->sctx)); 1814 PetscCall(ISDestroy(&ilink->is)); 1815 PetscCall(ISDestroy(&ilink->is_col)); 1816 PetscCall(PetscFree(ilink->splitname)); 1817 PetscCall(PetscFree(ilink->fields)); 1818 PetscCall(PetscFree(ilink->fields_col)); 1819 next = ilink->next; 1820 PetscCall(PetscFree(ilink)); 1821 ilink = next; 1822 } 1823 jac->head = NULL; 1824 PetscCall(PetscFree2(jac->x, jac->y)); 1825 if (jac->mat && jac->mat != jac->pmat) { 1826 PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat)); 1827 } else if (jac->mat) { 1828 jac->mat = NULL; 1829 } 1830 if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat)); 1831 if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield)); 1832 jac->nsplits = 0; 1833 PetscCall(VecDestroy(&jac->w1)); 1834 PetscCall(VecDestroy(&jac->w2)); 1835 if (jac->schur) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", NULL)); 1836 PetscCall(MatDestroy(&jac->schur)); 1837 PetscCall(MatDestroy(&jac->schurp)); 1838 PetscCall(MatDestroy(&jac->schur_user)); 1839 PetscCall(KSPDestroy(&jac->kspschur)); 1840 PetscCall(KSPDestroy(&jac->kspupper)); 1841 PetscCall(MatDestroy(&jac->B)); 1842 PetscCall(MatDestroy(&jac->C)); 1843 PetscCall(MatDestroy(&jac->H)); 1844 PetscCall(VecDestroy(&jac->u)); 1845 PetscCall(VecDestroy(&jac->v)); 1846 PetscCall(VecDestroy(&jac->Hu)); 1847 PetscCall(VecDestroy(&jac->d)); 1848 PetscCall(PetscFree(jac->vecz)); 1849 PetscCall(PetscViewerDestroy(&jac->gkbviewer)); 1850 jac->isrestrict = PETSC_FALSE; 1851 PetscFunctionReturn(PETSC_SUCCESS); 1852 } 1853 1854 static PetscErrorCode PCDestroy_FieldSplit(PC pc) 1855 { 1856 PetscFunctionBegin; 1857 PetscCall(PCReset_FieldSplit(pc)); 1858 PetscCall(PetscFree(pc->data)); 1859 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL)); 1860 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL)); 1861 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL)); 1862 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL)); 1863 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL)); 1864 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL)); 1865 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL)); 1866 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL)); 1867 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL)); 1868 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL)); 1869 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL)); 1870 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL)); 1871 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL)); 1872 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL)); 1873 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL)); 1874 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL)); 1875 PetscFunctionReturn(PETSC_SUCCESS); 1876 } 1877 1878 static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems PetscOptionsObject) 1879 { 1880 PetscInt bs; 1881 PetscBool flg; 1882 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1883 PCCompositeType ctype; 1884 1885 PetscFunctionBegin; 1886 PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options"); 1887 PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL)); 1888 PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg)); 1889 if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs)); 1890 jac->diag_use_amat = pc->useAmat; 1891 PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL)); 1892 jac->offdiag_use_amat = pc->useAmat; 1893 PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL)); 1894 PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL)); 1895 PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */ 1896 PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg)); 1897 if (flg) PetscCall(PCFieldSplitSetType(pc, ctype)); 1898 /* Only setup fields once */ 1899 if (jac->bs > 0 && jac->nsplits == 0) { 1900 /* only allow user to set fields from command line. 1901 otherwise user can set them in PCFieldSplitSetDefaults() */ 1902 PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc)); 1903 if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n")); 1904 } 1905 if (jac->type == PC_COMPOSITE_SCHUR) { 1906 PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg)); 1907 if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n")); 1908 PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL)); 1909 PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL)); 1910 PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL)); 1911 } else if (jac->type == PC_COMPOSITE_GKB) { 1912 PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitSetGKBTol", jac->gkbtol, &jac->gkbtol, NULL)); 1913 PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitSetGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL)); 1914 PetscCall(PetscOptionsBoundedReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitSetGKBNu", jac->gkbnu, &jac->gkbnu, NULL, 0.0)); 1915 PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", "Maximum allowed number of iterations", "PCFieldSplitSetGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL)); 1916 PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL)); 1917 } 1918 /* 1919 In the initial call to this routine the sub-solver data structures do not exist so we cannot call KSPSetFromOptions() on them yet. 1920 But after the initial setup of ALL the layers of sub-solvers is completed we do want to call KSPSetFromOptions() on the sub-solvers every time it 1921 is called on the outer solver in case changes were made in the options database 1922 1923 But even after PCSetUp_FieldSplit() is called all the options inside the inner levels of sub-solvers may still not have been set thus we only call the KSPSetFromOptions() 1924 if we know that the entire stack of sub-solvers below this have been complete instantiated, we check this by seeing if any solver iterations are complete. 1925 Without this extra check test p2p1fetidp_olof_full and others fail with incorrect matrix types. 1926 1927 There could be a negative side effect of calling the KSPSetFromOptions() below. 1928 1929 If one captured the PetscObjectState of the options database one could skip these calls if the database has not changed from the previous call 1930 */ 1931 if (jac->issetup) { 1932 PC_FieldSplitLink ilink = jac->head; 1933 if (jac->type == PC_COMPOSITE_SCHUR) { 1934 if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper)); 1935 if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur)); 1936 } 1937 while (ilink) { 1938 if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp)); 1939 ilink = ilink->next; 1940 } 1941 } 1942 PetscOptionsHeadEnd(); 1943 PetscFunctionReturn(PETSC_SUCCESS); 1944 } 1945 1946 static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col) 1947 { 1948 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1949 PC_FieldSplitLink ilink, next = jac->head; 1950 char prefix[128]; 1951 PetscInt i; 1952 PetscLogEvent nse; 1953 1954 PetscFunctionBegin; 1955 if (jac->splitdefined) { 1956 PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname)); 1957 PetscFunctionReturn(PETSC_SUCCESS); 1958 } 1959 for (i = 0; i < n; i++) PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]); 1960 PetscCall(PetscNew(&ilink)); 1961 if (splitname) { 1962 PetscCall(PetscStrallocpy(splitname, &ilink->splitname)); 1963 } else { 1964 PetscCall(PetscMalloc1(3, &ilink->splitname)); 1965 PetscCall(PetscSNPrintf(ilink->splitname, 2, "%" PetscInt_FMT, jac->nsplits)); 1966 } 1967 PetscCall(PetscMPIIntCast(jac->nsplits, &nse)); 1968 ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + nse : KSP_Solve_FS_0 + 4; /* Splits greater than 4 logged in 4th split */ 1969 PetscCall(PetscMalloc1(n, &ilink->fields)); 1970 PetscCall(PetscArraycpy(ilink->fields, fields, n)); 1971 PetscCall(PetscMalloc1(n, &ilink->fields_col)); 1972 PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n)); 1973 1974 ilink->nfields = n; 1975 ilink->next = NULL; 1976 PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp)); 1977 PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel)); 1978 PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure)); 1979 PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1)); 1980 PetscCall(KSPSetType(ilink->ksp, KSPPREONLY)); 1981 1982 PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname)); 1983 PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix)); 1984 1985 if (!next) { 1986 jac->head = ilink; 1987 ilink->previous = NULL; 1988 } else { 1989 while (next->next) next = next->next; 1990 next->next = ilink; 1991 ilink->previous = next; 1992 } 1993 jac->nsplits++; 1994 PetscFunctionReturn(PETSC_SUCCESS); 1995 } 1996 1997 static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp) 1998 { 1999 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2000 2001 PetscFunctionBegin; 2002 *subksp = NULL; 2003 if (n) *n = 0; 2004 if (jac->type == PC_COMPOSITE_SCHUR) { 2005 PetscInt nn; 2006 2007 PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()"); 2008 PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits); 2009 nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 1 : 0); 2010 PetscCall(PetscMalloc1(nn, subksp)); 2011 (*subksp)[0] = jac->head->ksp; 2012 (*subksp)[1] = jac->kspschur; 2013 if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper; 2014 if (n) *n = nn; 2015 } 2016 PetscFunctionReturn(PETSC_SUCCESS); 2017 } 2018 2019 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp) 2020 { 2021 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2022 2023 PetscFunctionBegin; 2024 PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()"); 2025 PetscCall(PetscMalloc1(jac->nsplits, subksp)); 2026 PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp)); 2027 2028 (*subksp)[1] = jac->kspschur; 2029 if (n) *n = jac->nsplits; 2030 PetscFunctionReturn(PETSC_SUCCESS); 2031 } 2032 2033 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp) 2034 { 2035 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2036 PetscInt cnt = 0; 2037 PC_FieldSplitLink ilink = jac->head; 2038 2039 PetscFunctionBegin; 2040 PetscCall(PetscMalloc1(jac->nsplits, subksp)); 2041 while (ilink) { 2042 (*subksp)[cnt++] = ilink->ksp; 2043 ilink = ilink->next; 2044 } 2045 PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits); 2046 if (n) *n = jac->nsplits; 2047 PetscFunctionReturn(PETSC_SUCCESS); 2048 } 2049 2050 /*@ 2051 PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`. 2052 2053 Input Parameters: 2054 + pc - the preconditioner context 2055 - isy - the index set that defines the indices to which the fieldsplit is to be restricted 2056 2057 Level: advanced 2058 2059 Developer Notes: 2060 It seems the resulting `IS`s will not cover the entire space, so 2061 how can they define a convergent preconditioner? Needs explaining. 2062 2063 .seealso: [](sec_block_matrices), `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()` 2064 @*/ 2065 PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy) 2066 { 2067 PetscFunctionBegin; 2068 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2069 PetscValidHeaderSpecific(isy, IS_CLASSID, 2); 2070 PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy)); 2071 PetscFunctionReturn(PETSC_SUCCESS); 2072 } 2073 2074 static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy) 2075 { 2076 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2077 PC_FieldSplitLink ilink = jac->head, next; 2078 PetscInt localsize, size, sizez, i; 2079 const PetscInt *ind, *indz; 2080 PetscInt *indc, *indcz; 2081 PetscBool flg; 2082 2083 PetscFunctionBegin; 2084 PetscCall(ISGetLocalSize(isy, &localsize)); 2085 PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy))); 2086 size -= localsize; 2087 while (ilink) { 2088 IS isrl, isr; 2089 PC subpc; 2090 PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl)); 2091 PetscCall(ISGetLocalSize(isrl, &localsize)); 2092 PetscCall(PetscMalloc1(localsize, &indc)); 2093 PetscCall(ISGetIndices(isrl, &ind)); 2094 PetscCall(PetscArraycpy(indc, ind, localsize)); 2095 PetscCall(ISRestoreIndices(isrl, &ind)); 2096 PetscCall(ISDestroy(&isrl)); 2097 for (i = 0; i < localsize; i++) *(indc + i) += size; 2098 PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr)); 2099 PetscCall(PetscObjectReference((PetscObject)isr)); 2100 PetscCall(ISDestroy(&ilink->is)); 2101 ilink->is = isr; 2102 PetscCall(PetscObjectReference((PetscObject)isr)); 2103 PetscCall(ISDestroy(&ilink->is_col)); 2104 ilink->is_col = isr; 2105 PetscCall(ISDestroy(&isr)); 2106 PetscCall(KSPGetPC(ilink->ksp, &subpc)); 2107 PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg)); 2108 if (flg) { 2109 IS iszl, isz; 2110 MPI_Comm comm; 2111 PetscCall(ISGetLocalSize(ilink->is, &localsize)); 2112 comm = PetscObjectComm((PetscObject)ilink->is); 2113 PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl)); 2114 PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm)); 2115 sizez -= localsize; 2116 PetscCall(ISGetLocalSize(iszl, &localsize)); 2117 PetscCall(PetscMalloc1(localsize, &indcz)); 2118 PetscCall(ISGetIndices(iszl, &indz)); 2119 PetscCall(PetscArraycpy(indcz, indz, localsize)); 2120 PetscCall(ISRestoreIndices(iszl, &indz)); 2121 PetscCall(ISDestroy(&iszl)); 2122 for (i = 0; i < localsize; i++) *(indcz + i) += sizez; 2123 PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz)); 2124 PetscCall(PCFieldSplitRestrictIS(subpc, isz)); 2125 PetscCall(ISDestroy(&isz)); 2126 } 2127 next = ilink->next; 2128 ilink = next; 2129 } 2130 jac->isrestrict = PETSC_TRUE; 2131 PetscFunctionReturn(PETSC_SUCCESS); 2132 } 2133 2134 static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is) 2135 { 2136 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2137 PC_FieldSplitLink ilink, next = jac->head; 2138 char prefix[128]; 2139 PetscLogEvent nse; 2140 2141 PetscFunctionBegin; 2142 if (jac->splitdefined) { 2143 PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname)); 2144 PetscFunctionReturn(PETSC_SUCCESS); 2145 } 2146 PetscCall(PetscNew(&ilink)); 2147 if (splitname) { 2148 PetscCall(PetscStrallocpy(splitname, &ilink->splitname)); 2149 } else { 2150 PetscCall(PetscMalloc1(8, &ilink->splitname)); 2151 PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits)); 2152 } 2153 PetscCall(PetscMPIIntCast(jac->nsplits, &nse)); 2154 ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + nse : KSP_Solve_FS_0 + 4; /* Splits greater than 4 logged in 4th split */ 2155 PetscCall(PetscObjectReference((PetscObject)is)); 2156 PetscCall(ISDestroy(&ilink->is)); 2157 ilink->is = is; 2158 PetscCall(PetscObjectReference((PetscObject)is)); 2159 PetscCall(ISDestroy(&ilink->is_col)); 2160 ilink->is_col = is; 2161 ilink->next = NULL; 2162 PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp)); 2163 PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel)); 2164 PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure)); 2165 PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1)); 2166 PetscCall(KSPSetType(ilink->ksp, KSPPREONLY)); 2167 2168 PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname)); 2169 PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix)); 2170 2171 if (!next) { 2172 jac->head = ilink; 2173 ilink->previous = NULL; 2174 } else { 2175 while (next->next) next = next->next; 2176 next->next = ilink; 2177 ilink->previous = next; 2178 } 2179 jac->nsplits++; 2180 PetscFunctionReturn(PETSC_SUCCESS); 2181 } 2182 2183 /*@ 2184 PCFieldSplitSetFields - Sets the fields that define one particular split in `PCFIELDSPLIT` 2185 2186 Logically Collective 2187 2188 Input Parameters: 2189 + pc - the preconditioner context 2190 . splitname - name of this split, if `NULL` the number of the split is used 2191 . n - the number of fields in this split 2192 . fields - the fields in this split 2193 - fields_col - generally the same as `fields`, if it does not match `fields` then the submatrix that is solved for this set of fields comes from an off-diagonal block 2194 of the matrix and `fields_col` provides the column indices for that block 2195 2196 Options Database Key: 2197 . -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split 2198 2199 Level: intermediate 2200 2201 Notes: 2202 Use `PCFieldSplitSetIS()` to set a general set of indices as a split. 2203 2204 If the matrix used to construct the preconditioner is `MATNEST` then field i refers to the `is_row[i]` `IS` passed to `MatCreateNest()`. 2205 2206 If the matrix used to construct the preconditioner is not `MATNEST` then 2207 `PCFieldSplitSetFields()` is for defining fields as strided blocks (based on the block size provided to the matrix with `MatSetBlockSize()` or 2208 to the `PC` with `PCFieldSplitSetBlockSize()`). For example, if the block 2209 size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean 2210 0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x23x56x8.. x12x45x78x.... 2211 where the numbered entries indicate what is in the split. 2212 2213 This function is called once per split (it creates a new split each time). Solve options 2214 for this split will be available under the prefix `-fieldsplit_SPLITNAME_`. 2215 2216 `PCFieldSplitSetIS()` does not support having a `fields_col` different from `fields` 2217 2218 Developer Notes: 2219 This routine does not actually create the `IS` representing the split, that is delayed 2220 until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be 2221 available when this routine is called. 2222 2223 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()`, 2224 `MatSetBlockSize()`, `MatCreateNest()` 2225 @*/ 2226 PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt fields[], const PetscInt fields_col[]) 2227 { 2228 PetscFunctionBegin; 2229 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2230 PetscAssertPointer(splitname, 2); 2231 PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname); 2232 PetscAssertPointer(fields, 4); 2233 PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col)); 2234 PetscFunctionReturn(PETSC_SUCCESS); 2235 } 2236 2237 /*@ 2238 PCFieldSplitSetDiagUseAmat - set flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build 2239 the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators. 2240 2241 Logically Collective 2242 2243 Input Parameters: 2244 + pc - the preconditioner object 2245 - flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from 2246 2247 Options Database Key: 2248 . -pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks 2249 2250 Level: intermediate 2251 2252 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT` 2253 @*/ 2254 PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg) 2255 { 2256 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2257 PetscBool isfs; 2258 2259 PetscFunctionBegin; 2260 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2261 PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs)); 2262 PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT); 2263 jac->diag_use_amat = flg; 2264 PetscFunctionReturn(PETSC_SUCCESS); 2265 } 2266 2267 /*@ 2268 PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build 2269 the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators. 2270 2271 Logically Collective 2272 2273 Input Parameter: 2274 . pc - the preconditioner object 2275 2276 Output Parameter: 2277 . flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from 2278 2279 Level: intermediate 2280 2281 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT` 2282 @*/ 2283 PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg) 2284 { 2285 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2286 PetscBool isfs; 2287 2288 PetscFunctionBegin; 2289 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2290 PetscAssertPointer(flg, 2); 2291 PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs)); 2292 PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT); 2293 *flg = jac->diag_use_amat; 2294 PetscFunctionReturn(PETSC_SUCCESS); 2295 } 2296 2297 /*@ 2298 PCFieldSplitSetOffDiagUseAmat - set flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build 2299 the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators. 2300 2301 Logically Collective 2302 2303 Input Parameters: 2304 + pc - the preconditioner object 2305 - flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from 2306 2307 Options Database Key: 2308 . -pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks 2309 2310 Level: intermediate 2311 2312 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT` 2313 @*/ 2314 PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg) 2315 { 2316 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2317 PetscBool isfs; 2318 2319 PetscFunctionBegin; 2320 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2321 PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs)); 2322 PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT); 2323 jac->offdiag_use_amat = flg; 2324 PetscFunctionReturn(PETSC_SUCCESS); 2325 } 2326 2327 /*@ 2328 PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build 2329 the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators. 2330 2331 Logically Collective 2332 2333 Input Parameter: 2334 . pc - the preconditioner object 2335 2336 Output Parameter: 2337 . flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from 2338 2339 Level: intermediate 2340 2341 .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT` 2342 @*/ 2343 PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg) 2344 { 2345 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2346 PetscBool isfs; 2347 2348 PetscFunctionBegin; 2349 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2350 PetscAssertPointer(flg, 2); 2351 PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs)); 2352 PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT); 2353 *flg = jac->offdiag_use_amat; 2354 PetscFunctionReturn(PETSC_SUCCESS); 2355 } 2356 2357 /*@ 2358 PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT` 2359 2360 Logically Collective 2361 2362 Input Parameters: 2363 + pc - the preconditioner context 2364 . splitname - name of this split, if `NULL` the number of the split is used 2365 - is - the index set that defines the elements in this split 2366 2367 Level: intermediate 2368 2369 Notes: 2370 Use `PCFieldSplitSetFields()`, for splits defined by strided `IS` based on the matrix block size or the `is_rows[]` passed into `MATNEST` 2371 2372 This function is called once per split (it creates a new split each time). Solve options 2373 for this split will be available under the prefix -fieldsplit_SPLITNAME_. 2374 2375 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetFields()` 2376 @*/ 2377 PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is) 2378 { 2379 PetscFunctionBegin; 2380 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2381 if (splitname) PetscAssertPointer(splitname, 2); 2382 PetscValidHeaderSpecific(is, IS_CLASSID, 3); 2383 PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is)); 2384 PetscFunctionReturn(PETSC_SUCCESS); 2385 } 2386 2387 /*@ 2388 PCFieldSplitGetIS - Retrieves the elements for a split as an `IS` 2389 2390 Logically Collective 2391 2392 Input Parameters: 2393 + pc - the preconditioner context 2394 - splitname - name of this split 2395 2396 Output Parameter: 2397 . is - the index set that defines the elements in this split, or `NULL` if the split is not found 2398 2399 Level: intermediate 2400 2401 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`, `PCFieldSplitGetISByIndex()` 2402 @*/ 2403 PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is) 2404 { 2405 PetscFunctionBegin; 2406 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2407 PetscAssertPointer(splitname, 2); 2408 PetscAssertPointer(is, 3); 2409 { 2410 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2411 PC_FieldSplitLink ilink = jac->head; 2412 PetscBool found; 2413 2414 *is = NULL; 2415 while (ilink) { 2416 PetscCall(PetscStrcmp(ilink->splitname, splitname, &found)); 2417 if (found) { 2418 *is = ilink->is; 2419 break; 2420 } 2421 ilink = ilink->next; 2422 } 2423 } 2424 PetscFunctionReturn(PETSC_SUCCESS); 2425 } 2426 2427 /*@ 2428 PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS` 2429 2430 Logically Collective 2431 2432 Input Parameters: 2433 + pc - the preconditioner context 2434 - index - index of this split 2435 2436 Output Parameter: 2437 . is - the index set that defines the elements in this split 2438 2439 Level: intermediate 2440 2441 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`, 2442 2443 @*/ 2444 PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is) 2445 { 2446 PetscFunctionBegin; 2447 PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index); 2448 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2449 PetscAssertPointer(is, 3); 2450 { 2451 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2452 PC_FieldSplitLink ilink = jac->head; 2453 PetscInt i = 0; 2454 PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits); 2455 2456 while (i < index) { 2457 ilink = ilink->next; 2458 ++i; 2459 } 2460 PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is)); 2461 } 2462 PetscFunctionReturn(PETSC_SUCCESS); 2463 } 2464 2465 /*@ 2466 PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the 2467 fieldsplit preconditioner when calling `PCFieldSplitSetFields()`. If not set the matrix block size is used. 2468 2469 Logically Collective 2470 2471 Input Parameters: 2472 + pc - the preconditioner context 2473 - bs - the block size 2474 2475 Level: intermediate 2476 2477 Note: 2478 If the matrix is a `MATNEST` then the `is_rows[]` passed to `MatCreateNest()` determines the fields. 2479 2480 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()` 2481 @*/ 2482 PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs) 2483 { 2484 PetscFunctionBegin; 2485 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2486 PetscValidLogicalCollectiveInt(pc, bs, 2); 2487 PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs)); 2488 PetscFunctionReturn(PETSC_SUCCESS); 2489 } 2490 2491 /*@C 2492 PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits 2493 2494 Collective 2495 2496 Input Parameter: 2497 . pc - the preconditioner context 2498 2499 Output Parameters: 2500 + n - the number of splits 2501 - subksp - the array of `KSP` contexts 2502 2503 Level: advanced 2504 2505 Notes: 2506 After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()` 2507 (not the `KSP`, just the array that contains them). 2508 2509 You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`. 2510 2511 If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the 2512 Schur complement and the `KSP` object used to iterate over the Schur complement. 2513 To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`. 2514 2515 If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the 2516 inner linear system defined by the matrix H in each loop. 2517 2518 Fortran Note: 2519 Call `PCFieldSplitRestoreSubKSP()` when the array of `KSP` is no longer needed 2520 2521 Developer Notes: 2522 There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()` 2523 2524 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()` 2525 @*/ 2526 PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[]) 2527 { 2528 PetscFunctionBegin; 2529 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2530 if (n) PetscAssertPointer(n, 2); 2531 PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp)); 2532 PetscFunctionReturn(PETSC_SUCCESS); 2533 } 2534 2535 /*@C 2536 PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT` 2537 2538 Collective 2539 2540 Input Parameter: 2541 . pc - the preconditioner context 2542 2543 Output Parameters: 2544 + n - the number of splits 2545 - subksp - the array of `KSP` contexts 2546 2547 Level: advanced 2548 2549 Notes: 2550 After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()` 2551 (not the `KSP` just the array that contains them). 2552 2553 You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`. 2554 2555 If the fieldsplit type is of type `PC_COMPOSITE_SCHUR`, it returns (in order) 2556 + 1 - the `KSP` used for the (1,1) block 2557 . 2 - the `KSP` used for the Schur complement (not the one used for the interior Schur solver) 2558 - 3 - the `KSP` used for the (1,1) block in the upper triangular factor (if different from that of the (1,1) block). 2559 2560 It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`. 2561 2562 Fortran Note: 2563 Call `PCFieldSplitSchurRestoreSubKSP()` when the array of `KSP` is no longer needed 2564 2565 Developer Notes: 2566 There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()` 2567 2568 Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged? 2569 2570 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()` 2571 @*/ 2572 PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[]) 2573 { 2574 PetscFunctionBegin; 2575 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2576 if (n) PetscAssertPointer(n, 2); 2577 PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp)); 2578 PetscFunctionReturn(PETSC_SUCCESS); 2579 } 2580 2581 /*@ 2582 PCFieldSplitSetSchurPre - Indicates from what operator the preconditioner is constructed for the Schur complement. 2583 The default is the A11 matrix. 2584 2585 Collective 2586 2587 Input Parameters: 2588 + pc - the preconditioner context 2589 . ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default), 2590 `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`, 2591 `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL` 2592 - pre - matrix to use for preconditioning, or `NULL` 2593 2594 Options Database Keys: 2595 + -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`. See notes for meaning of various arguments 2596 - -fieldsplit_1_pc_type <pctype> - the preconditioner algorithm that is used to construct the preconditioner from the operator 2597 2598 Level: intermediate 2599 2600 Notes: 2601 If ptype is 2602 + a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner 2603 matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix 2604 . self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix: 2605 The only preconditioners that currently work with this symbolic representation matrix object are `PCLSC` and `PCHPDDM` 2606 . user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument 2607 to this function). 2608 . selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation $ Sp = A11 - A10 inv(diag(A00)) A01 $ 2609 This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be 2610 lumped before extracting the diagonal using the additional option `-fieldsplit_1_mat_schur_complement_ainv_type lump` 2611 - full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation 2612 computed internally by `PCFIELDSPLIT` (this is expensive) 2613 useful mostly as a test that the Schur complement approach can work for your problem 2614 2615 When solving a saddle point problem, where the A11 block is identically zero, using `a11` as the ptype only makes sense 2616 with the additional option `-fieldsplit_1_pc_type none`. Usually for saddle point problems one would use a `ptype` of `self` and 2617 `-fieldsplit_1_pc_type lsc` which uses the least squares commutator to compute a preconditioner for the Schur complement. 2618 2619 Developer Note: 2620 The name of this function and the option `-pc_fieldsplit_schur_precondition` are inconsistent; precondition should be used everywhere. 2621 2622 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, 2623 `MatSchurComplementSetAinvType()`, `PCLSC`, `PCFieldSplitSetSchurFactType()` 2624 @*/ 2625 PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre) 2626 { 2627 PetscFunctionBegin; 2628 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2629 PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre)); 2630 PetscFunctionReturn(PETSC_SUCCESS); 2631 } 2632 2633 PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre) 2634 { 2635 return PCFieldSplitSetSchurPre(pc, ptype, pre); 2636 } /* Deprecated name */ 2637 2638 /*@ 2639 PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be 2640 preconditioned. See `PCFieldSplitSetSchurPre()` for details. 2641 2642 Logically Collective 2643 2644 Input Parameter: 2645 . pc - the preconditioner context 2646 2647 Output Parameters: 2648 + ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER` 2649 - pre - matrix to use for preconditioning (with `PC_FIELDSPLIT_SCHUR_PRE_USER`), or `NULL` 2650 2651 Level: intermediate 2652 2653 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC` 2654 @*/ 2655 PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre) 2656 { 2657 PetscFunctionBegin; 2658 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2659 PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre)); 2660 PetscFunctionReturn(PETSC_SUCCESS); 2661 } 2662 2663 /*@ 2664 PCFieldSplitSchurGetS - extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately 2665 2666 Not Collective 2667 2668 Input Parameter: 2669 . pc - the preconditioner context 2670 2671 Output Parameter: 2672 . S - the Schur complement matrix 2673 2674 Level: advanced 2675 2676 Note: 2677 This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`. 2678 2679 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`, 2680 `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()` 2681 @*/ 2682 PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S) 2683 { 2684 const char *t; 2685 PetscBool isfs; 2686 PC_FieldSplit *jac; 2687 2688 PetscFunctionBegin; 2689 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2690 PetscCall(PetscObjectGetType((PetscObject)pc, &t)); 2691 PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs)); 2692 PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t); 2693 jac = (PC_FieldSplit *)pc->data; 2694 PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type); 2695 if (S) *S = jac->schur; 2696 PetscFunctionReturn(PETSC_SUCCESS); 2697 } 2698 2699 /*@ 2700 PCFieldSplitSchurRestoreS - returns the `MATSCHURCOMPLEMENT` matrix used by this `PC` 2701 2702 Not Collective 2703 2704 Input Parameters: 2705 + pc - the preconditioner context 2706 - S - the Schur complement matrix 2707 2708 Level: advanced 2709 2710 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MatSchurComplement`, `PCFieldSplitSchurGetS()` 2711 @*/ 2712 PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S) 2713 { 2714 const char *t; 2715 PetscBool isfs; 2716 PC_FieldSplit *jac; 2717 2718 PetscFunctionBegin; 2719 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2720 PetscCall(PetscObjectGetType((PetscObject)pc, &t)); 2721 PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs)); 2722 PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t); 2723 jac = (PC_FieldSplit *)pc->data; 2724 PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type); 2725 PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten"); 2726 PetscFunctionReturn(PETSC_SUCCESS); 2727 } 2728 2729 static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre) 2730 { 2731 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2732 2733 PetscFunctionBegin; 2734 jac->schurpre = ptype; 2735 if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) { 2736 PetscCall(MatDestroy(&jac->schur_user)); 2737 jac->schur_user = pre; 2738 PetscCall(PetscObjectReference((PetscObject)jac->schur_user)); 2739 } 2740 PetscFunctionReturn(PETSC_SUCCESS); 2741 } 2742 2743 static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre) 2744 { 2745 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2746 2747 PetscFunctionBegin; 2748 if (ptype) *ptype = jac->schurpre; 2749 if (pre) *pre = jac->schur_user; 2750 PetscFunctionReturn(PETSC_SUCCESS); 2751 } 2752 2753 /*@ 2754 PCFieldSplitSetSchurFactType - sets which blocks of the approximate block factorization to retain in the preconditioner {cite}`murphy2000note` and {cite}`ipsen2001note` 2755 2756 Collective 2757 2758 Input Parameters: 2759 + pc - the preconditioner context 2760 - ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default 2761 2762 Options Database Key: 2763 . -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is `full` 2764 2765 Level: intermediate 2766 2767 Notes: 2768 The `full` factorization is 2769 2770 ```{math} 2771 \left(\begin{array}{cc} A & B \\ 2772 C & E \\ 2773 \end{array}\right) = 2774 \left(\begin{array}{cc} I & 0 \\ 2775 C A^{-1} & I \\ 2776 \end{array}\right) 2777 \left(\begin{array}{cc} A & 0 \\ 2778 0 & S \\ 2779 \end{array}\right) 2780 \left(\begin{array}{cc} I & A^{-1}B \\ 2781 0 & I \\ 2782 \end{array}\right) = L D U, 2783 ``` 2784 2785 where $ S = E - C A^{-1} B $. In practice, the full factorization is applied via block triangular solves with the grouping $L(DU)$. `upper` uses $DU$, `lower` uses $LD$, 2786 and `diag` is the diagonal part with the sign of $S$ flipped (because this makes the preconditioner positive definite for many formulations, 2787 thus allowing the use of `KSPMINRES)`. Sign flipping of $S$ can be turned off with `PCFieldSplitSetSchurScale()`. 2788 2789 If $A$ and $S$ are solved exactly 2790 + 1 - `full` factorization is a direct solver. 2791 . 2 - The preconditioned operator with `lower` or `upper` has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations. 2792 - 3 - With `diag`, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations. 2793 2794 If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner 2795 application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice. 2796 2797 For symmetric problems in which $A$ is positive definite and $S$ is negative definite, `diag` can be used with `KSPMINRES`. 2798 2799 A flexible method like `KSPFGMRES` or `KSPGCR`, [](sec_flexibleksp), must be used if the fieldsplit preconditioner is nonlinear (e.g., a few iterations of a Krylov method is used to solve with $A$ or $S$). 2800 2801 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`, 2802 [](sec_flexibleksp), `PCFieldSplitSetSchurPre()` 2803 @*/ 2804 PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype) 2805 { 2806 PetscFunctionBegin; 2807 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2808 PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype)); 2809 PetscFunctionReturn(PETSC_SUCCESS); 2810 } 2811 2812 static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype) 2813 { 2814 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2815 2816 PetscFunctionBegin; 2817 jac->schurfactorization = ftype; 2818 PetscFunctionReturn(PETSC_SUCCESS); 2819 } 2820 2821 /*@ 2822 PCFieldSplitSetSchurScale - Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`. 2823 2824 Collective 2825 2826 Input Parameters: 2827 + pc - the preconditioner context 2828 - scale - scaling factor for the Schur complement 2829 2830 Options Database Key: 2831 . -pc_fieldsplit_schur_scale <scale> - default is -1.0 2832 2833 Level: intermediate 2834 2835 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurFactType()` 2836 @*/ 2837 PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale) 2838 { 2839 PetscFunctionBegin; 2840 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2841 PetscValidLogicalCollectiveScalar(pc, scale, 2); 2842 PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale)); 2843 PetscFunctionReturn(PETSC_SUCCESS); 2844 } 2845 2846 static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale) 2847 { 2848 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2849 2850 PetscFunctionBegin; 2851 jac->schurscale = scale; 2852 PetscFunctionReturn(PETSC_SUCCESS); 2853 } 2854 2855 /*@C 2856 PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement 2857 2858 Collective 2859 2860 Input Parameter: 2861 . pc - the preconditioner context 2862 2863 Output Parameters: 2864 + A00 - the (0,0) block 2865 . A01 - the (0,1) block 2866 . A10 - the (1,0) block 2867 - A11 - the (1,1) block 2868 2869 Level: advanced 2870 2871 Note: 2872 Use `NULL` for any unneeded output arguments 2873 2874 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()` 2875 @*/ 2876 PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11) 2877 { 2878 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2879 2880 PetscFunctionBegin; 2881 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2882 PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach."); 2883 if (A00) *A00 = jac->pmat[0]; 2884 if (A01) *A01 = jac->B; 2885 if (A10) *A10 = jac->C; 2886 if (A11) *A11 = jac->pmat[1]; 2887 PetscFunctionReturn(PETSC_SUCCESS); 2888 } 2889 2890 /*@ 2891 PCFieldSplitSetGKBTol - Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT` 2892 2893 Collective 2894 2895 Input Parameters: 2896 + pc - the preconditioner context 2897 - tolerance - the solver tolerance 2898 2899 Options Database Key: 2900 . -pc_fieldsplit_gkb_tol <tolerance> - default is 1e-5 2901 2902 Level: intermediate 2903 2904 Note: 2905 The generalized GKB algorithm {cite}`arioli2013` uses a lower bound estimate of the error in energy norm as stopping criterion. 2906 It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than 2907 this estimate, the stopping criterion is satisfactory in practical cases. 2908 2909 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()` 2910 @*/ 2911 PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance) 2912 { 2913 PetscFunctionBegin; 2914 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2915 PetscValidLogicalCollectiveReal(pc, tolerance, 2); 2916 PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance)); 2917 PetscFunctionReturn(PETSC_SUCCESS); 2918 } 2919 2920 static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance) 2921 { 2922 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2923 2924 PetscFunctionBegin; 2925 jac->gkbtol = tolerance; 2926 PetscFunctionReturn(PETSC_SUCCESS); 2927 } 2928 2929 /*@ 2930 PCFieldSplitSetGKBMaxit - Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT` 2931 2932 Collective 2933 2934 Input Parameters: 2935 + pc - the preconditioner context 2936 - maxit - the maximum number of iterations 2937 2938 Options Database Key: 2939 . -pc_fieldsplit_gkb_maxit <maxit> - default is 100 2940 2941 Level: intermediate 2942 2943 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()` 2944 @*/ 2945 PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit) 2946 { 2947 PetscFunctionBegin; 2948 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2949 PetscValidLogicalCollectiveInt(pc, maxit, 2); 2950 PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit)); 2951 PetscFunctionReturn(PETSC_SUCCESS); 2952 } 2953 2954 static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit) 2955 { 2956 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2957 2958 PetscFunctionBegin; 2959 jac->gkbmaxit = maxit; 2960 PetscFunctionReturn(PETSC_SUCCESS); 2961 } 2962 2963 /*@ 2964 PCFieldSplitSetGKBDelay - Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization {cite}`arioli2013` in `PCFIELDSPLIT` 2965 preconditioner. 2966 2967 Collective 2968 2969 Input Parameters: 2970 + pc - the preconditioner context 2971 - delay - the delay window in the lower bound estimate 2972 2973 Options Database Key: 2974 . -pc_fieldsplit_gkb_delay <delay> - default is 5 2975 2976 Level: intermediate 2977 2978 Notes: 2979 The algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. The lower bound of the error $ ||u-u^k||_H $ 2980 is expressed as a truncated sum. The error at iteration k can only be measured at iteration (k + `delay`), and thus the algorithm needs 2981 at least (`delay` + 1) iterations to stop. 2982 2983 For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to {cite}`arioli2013` 2984 2985 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()` 2986 @*/ 2987 PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay) 2988 { 2989 PetscFunctionBegin; 2990 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2991 PetscValidLogicalCollectiveInt(pc, delay, 2); 2992 PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay)); 2993 PetscFunctionReturn(PETSC_SUCCESS); 2994 } 2995 2996 static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay) 2997 { 2998 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2999 3000 PetscFunctionBegin; 3001 jac->gkbdelay = delay; 3002 PetscFunctionReturn(PETSC_SUCCESS); 3003 } 3004 3005 /*@ 3006 PCFieldSplitSetGKBNu - Sets the scalar value nu >= 0 in the transformation H = A00 + nu*A01*A01' of the (1,1) block in the 3007 Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT` 3008 3009 Collective 3010 3011 Input Parameters: 3012 + pc - the preconditioner context 3013 - nu - the shift parameter 3014 3015 Options Database Key: 3016 . -pc_fieldsplit_gkb_nu <nu> - default is 1 3017 3018 Level: intermediate 3019 3020 Notes: 3021 This shift is in general done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by choosing `nu` sufficiently large. However, 3022 if `nu` is chosen too large, the matrix H might be badly conditioned and the solution of the linear system $Hx = b$ in the inner loop becomes difficult. It is therefore 3023 necessary to find a good balance in between the convergence of the inner and outer loop. 3024 3025 For `nu` = 0, no shift is done. In this case A00 has to be positive definite. The matrix N in {cite}`arioli2013` is then chosen as identity. 3026 3027 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()` 3028 @*/ 3029 PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu) 3030 { 3031 PetscFunctionBegin; 3032 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 3033 PetscValidLogicalCollectiveReal(pc, nu, 2); 3034 PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu)); 3035 PetscFunctionReturn(PETSC_SUCCESS); 3036 } 3037 3038 static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu) 3039 { 3040 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 3041 3042 PetscFunctionBegin; 3043 jac->gkbnu = nu; 3044 PetscFunctionReturn(PETSC_SUCCESS); 3045 } 3046 3047 static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type) 3048 { 3049 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 3050 3051 PetscFunctionBegin; 3052 jac->type = type; 3053 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL)); 3054 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL)); 3055 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL)); 3056 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL)); 3057 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL)); 3058 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL)); 3059 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL)); 3060 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL)); 3061 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL)); 3062 3063 if (type == PC_COMPOSITE_SCHUR) { 3064 pc->ops->apply = PCApply_FieldSplit_Schur; 3065 pc->ops->applytranspose = PCApplyTranspose_FieldSplit_Schur; 3066 pc->ops->view = PCView_FieldSplit_Schur; 3067 pc->ops->setuponblocks = PCSetUpOnBlocks_FieldSplit_Schur; 3068 3069 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur)); 3070 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit)); 3071 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit)); 3072 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit)); 3073 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit)); 3074 } else if (type == PC_COMPOSITE_GKB) { 3075 pc->ops->apply = PCApply_FieldSplit_GKB; 3076 pc->ops->applytranspose = NULL; 3077 pc->ops->view = PCView_FieldSplit_GKB; 3078 pc->ops->setuponblocks = PCSetUpOnBlocks_FieldSplit_GKB; 3079 3080 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit)); 3081 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit)); 3082 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit)); 3083 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit)); 3084 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit)); 3085 } else { 3086 pc->ops->apply = PCApply_FieldSplit; 3087 pc->ops->applytranspose = PCApplyTranspose_FieldSplit; 3088 pc->ops->view = PCView_FieldSplit; 3089 pc->ops->setuponblocks = PCSetUpOnBlocks_FieldSplit; 3090 3091 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit)); 3092 } 3093 PetscFunctionReturn(PETSC_SUCCESS); 3094 } 3095 3096 static PetscErrorCode PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs) 3097 { 3098 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 3099 3100 PetscFunctionBegin; 3101 PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" PetscInt_FMT, bs); 3102 PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs); 3103 jac->bs = bs; 3104 PetscFunctionReturn(PETSC_SUCCESS); 3105 } 3106 3107 static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[]) 3108 { 3109 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 3110 PC_FieldSplitLink ilink_current = jac->head; 3111 IS is_owned; 3112 3113 PetscFunctionBegin; 3114 jac->coordinates_set = PETSC_TRUE; // Internal flag 3115 PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, NULL)); 3116 3117 while (ilink_current) { 3118 // For each IS, embed it to get local coords indces 3119 IS is_coords; 3120 PetscInt ndofs_block; 3121 const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block 3122 3123 // Setting drop to true for safety. It should make no difference. 3124 PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords)); 3125 PetscCall(ISGetLocalSize(is_coords, &ndofs_block)); 3126 PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration)); 3127 3128 // Allocate coordinates vector and set it directly 3129 PetscCall(PetscMalloc1(ndofs_block * dim, &ilink_current->coords)); 3130 for (PetscInt dof = 0; dof < ndofs_block; ++dof) { 3131 for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d]; 3132 } 3133 ilink_current->dim = dim; 3134 ilink_current->ndofs = ndofs_block; 3135 PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration)); 3136 PetscCall(ISDestroy(&is_coords)); 3137 ilink_current = ilink_current->next; 3138 } 3139 PetscCall(ISDestroy(&is_owned)); 3140 PetscFunctionReturn(PETSC_SUCCESS); 3141 } 3142 3143 /*@ 3144 PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT` 3145 3146 Collective 3147 3148 Input Parameters: 3149 + pc - the preconditioner context 3150 - type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`, 3151 `PC_COMPOSITE_GKB` 3152 3153 Options Database Key: 3154 . -pc_fieldsplit_type <one of multiplicative, additive, symmetric_multiplicative, special, schur> - Sets fieldsplit preconditioner type 3155 3156 Level: intermediate 3157 3158 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`, 3159 `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`, `PCFieldSplitSetSchurFactType()` 3160 @*/ 3161 PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type) 3162 { 3163 PetscFunctionBegin; 3164 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 3165 PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type)); 3166 PetscFunctionReturn(PETSC_SUCCESS); 3167 } 3168 3169 /*@ 3170 PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT` 3171 3172 Not collective 3173 3174 Input Parameter: 3175 . pc - the preconditioner context 3176 3177 Output Parameter: 3178 . type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR` 3179 3180 Level: intermediate 3181 3182 .seealso: [](sec_block_matrices), `PC`, `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`, 3183 `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR` 3184 @*/ 3185 PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type) 3186 { 3187 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 3188 3189 PetscFunctionBegin; 3190 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 3191 PetscAssertPointer(type, 2); 3192 *type = jac->type; 3193 PetscFunctionReturn(PETSC_SUCCESS); 3194 } 3195 3196 /*@ 3197 PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible. 3198 3199 Logically Collective 3200 3201 Input Parameters: 3202 + pc - the preconditioner context 3203 - flg - boolean indicating whether to use field splits defined by the `DM` 3204 3205 Options Database Key: 3206 . -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM` 3207 3208 Level: intermediate 3209 3210 Developer Note: 3211 The name should be `PCFieldSplitSetUseDMSplits()`, similar change to options database 3212 3213 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()` 3214 @*/ 3215 PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg) 3216 { 3217 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 3218 PetscBool isfs; 3219 3220 PetscFunctionBegin; 3221 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 3222 PetscValidLogicalCollectiveBool(pc, flg, 2); 3223 PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs)); 3224 if (isfs) jac->dm_splits = flg; 3225 PetscFunctionReturn(PETSC_SUCCESS); 3226 } 3227 3228 /*@ 3229 PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible. 3230 3231 Logically Collective 3232 3233 Input Parameter: 3234 . pc - the preconditioner context 3235 3236 Output Parameter: 3237 . flg - boolean indicating whether to use field splits defined by the `DM` 3238 3239 Level: intermediate 3240 3241 Developer Note: 3242 The name should be `PCFieldSplitGetUseDMSplits()` 3243 3244 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()` 3245 @*/ 3246 PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg) 3247 { 3248 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 3249 PetscBool isfs; 3250 3251 PetscFunctionBegin; 3252 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 3253 PetscAssertPointer(flg, 2); 3254 PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs)); 3255 if (isfs) { 3256 if (flg) *flg = jac->dm_splits; 3257 } 3258 PetscFunctionReturn(PETSC_SUCCESS); 3259 } 3260 3261 /*@ 3262 PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries. 3263 3264 Logically Collective 3265 3266 Input Parameter: 3267 . pc - the preconditioner context 3268 3269 Output Parameter: 3270 . flg - boolean indicating whether to detect fields or not 3271 3272 Level: intermediate 3273 3274 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()` 3275 @*/ 3276 PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg) 3277 { 3278 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 3279 3280 PetscFunctionBegin; 3281 *flg = jac->detect; 3282 PetscFunctionReturn(PETSC_SUCCESS); 3283 } 3284 3285 /*@ 3286 PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries. 3287 3288 Logically Collective 3289 3290 Input Parameter: 3291 . pc - the preconditioner context 3292 3293 Output Parameter: 3294 . flg - boolean indicating whether to detect fields or not 3295 3296 Options Database Key: 3297 . -pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point 3298 3299 Level: intermediate 3300 3301 Note: 3302 Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`). 3303 3304 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF` 3305 @*/ 3306 PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg) 3307 { 3308 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 3309 3310 PetscFunctionBegin; 3311 jac->detect = flg; 3312 if (jac->detect) { 3313 PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR)); 3314 PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL)); 3315 } 3316 PetscFunctionReturn(PETSC_SUCCESS); 3317 } 3318 3319 /*MC 3320 PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual 3321 collections of variables (that may overlap) called fields or splits. Each field often represents a different continuum variable 3322 represented on a grid, such as velocity, pressure, or temperature. 3323 In the literature these are sometimes called block preconditioners; but should not be confused with `PCBJACOBI`. 3324 See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details. 3325 3326 Options Database Keys: 3327 + -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split 3328 . -pc_fieldsplit_default - automatically add any fields to additional splits that have not 3329 been supplied explicitly by `-pc_fieldsplit_%d_fields` 3330 . -pc_fieldsplit_block_size <bs> - size of block that defines fields (i.e. there are bs fields) 3331 when the matrix is not of `MatType` `MATNEST` 3332 . -pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting 3333 . -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`; see `PCFieldSplitSetSchurPre()` 3334 . -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - set factorization type when using `-pc_fieldsplit_type schur`; 3335 see `PCFieldSplitSetSchurFactType()` 3336 . -pc_fieldsplit_dm_splits <true,false> (default is true) - Whether to use `DMCreateFieldDecomposition()` for splits 3337 - -pc_fieldsplit_detect_saddle_point - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver 3338 3339 Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` . 3340 The options prefix for the inner solver when using the Golub-Kahan biadiagonalization preconditioner is `-fieldsplit_0_` 3341 For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields. 3342 3343 To set options on the solvers for all blocks, prepend `-fieldsplit_` to all the `PC` 3344 options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1`. 3345 3346 To set the options on the solvers separate for each block call `PCFieldSplitGetSubKSP()` 3347 and set the options directly on the resulting `KSP` object 3348 3349 Level: intermediate 3350 3351 Notes: 3352 Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries or with a `MATNEST` and `PCFieldSplitSetIS()` 3353 to define a split by an arbitrary collection of entries. 3354 3355 If no splits are set, the default is used. If a `DM` is associated with the `PC` and it supports 3356 `DMCreateFieldDecomposition()`, then that is used for the default. Otherwise if the matrix is not `MATNEST`, the splits are defined by entries strided by bs, 3357 beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`, 3358 if this is not called the block size defaults to the blocksize of the second matrix passed 3359 to `KSPSetOperators()`/`PCSetOperators()`. 3360 3361 For the Schur complement preconditioner if 3362 ```{math} 3363 J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right] 3364 ``` 3365 3366 the preconditioner using `full` factorization is logically 3367 ```{math} 3368 \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) A_{01} \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{ksp}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right] 3369 ``` 3370 where the action of $\text{ksp}(A_{00})$ is applied using the `KSP` solver with prefix `-fieldsplit_0_`. $S$ is the Schur complement 3371 ```{math} 3372 S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01} 3373 ``` 3374 which is usually dense and not stored explicitly. The action of $\text{ksp}(S)$ is computed using the `KSP` solver with prefix `-fieldsplit_splitname_` (where `splitname` 3375 was given in providing the SECOND split or 1 if not given). Accordingly, if using `PCFieldSplitGetSubKSP()`, the array of sub-`KSP` contexts will hold two `KSP`s: at its 3376 0th index, the `KSP` associated with `-fieldsplit_0_`, and at its 1st index, the `KSP` corresponding to `-fieldsplit_1_`. 3377 By default, $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$. 3378 3379 The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above, 3380 `diag` gives 3381 ```{math} 3382 \left[\begin{array}{cc} \text{ksp}(A_{00}) & 0 \\ 0 & -\text{ksp}(S) \end{array}\right] 3383 ``` 3384 Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$ so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip 3385 can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of 3386 ```{math} 3387 \left[\begin{array}{cc} A_{00} & 0 \\ A_{10} & S \end{array}\right] 3388 ``` 3389 where the inverses of $A_{00}$ and $S$ are applied using `KSP`s. The upper factorization is the inverse of 3390 ```{math} 3391 \left[\begin{array}{cc} A_{00} & A_{01} \\ 0 & S \end{array}\right] 3392 ``` 3393 where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s. 3394 3395 If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS` 3396 is used automatically for a second submatrix. 3397 3398 The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1. 3399 Generally it should be used with the `MATAIJ` or `MATNEST` `MatType` 3400 3401 The forms of these preconditioners are closely related, if not identical, to forms derived as "Distributive Iterations", see, 3402 for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`wesseling2009`. 3403 One can also use `PCFIELDSPLIT` inside a smoother resulting in "Distributive Smoothers". 3404 3405 See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`. 3406 3407 The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the 3408 residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables. 3409 3410 The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape 3411 ```{math} 3412 \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right] 3413 ``` 3414 with $A_{00}$ positive semi-definite. The implementation follows {cite}`arioli2013`. Therein, we choose $N := 1/\nu * I$ and the $(1,1)$-block of the matrix is modified to $H = _{A00} + \nu*A_{01}*A_{01}'$. 3415 A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`. 3416 3417 Some `PCFIELDSPLIT` variants are called physics-based preconditioners, since the preconditioner takes into account the underlying physics of the 3418 problem. But this nomenclature is not well-defined. 3419 3420 Developer Note: 3421 The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their 3422 user API. 3423 3424 .seealso: [](sec_block_matrices), `PC`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`, 3425 `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`, 3426 `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`, 3427 `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()` 3428 M*/ 3429 3430 PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc) 3431 { 3432 PC_FieldSplit *jac; 3433 3434 PetscFunctionBegin; 3435 PetscCall(PetscNew(&jac)); 3436 3437 jac->bs = -1; 3438 jac->type = PC_COMPOSITE_MULTIPLICATIVE; 3439 jac->schurpre = PC_FIELDSPLIT_SCHUR_PRE_USER; /* Try user preconditioner first, fall back on diagonal */ 3440 jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL; 3441 jac->schurscale = -1.0; 3442 jac->dm_splits = PETSC_TRUE; 3443 jac->gkbtol = 1e-5; 3444 jac->gkbdelay = 5; 3445 jac->gkbnu = 1; 3446 jac->gkbmaxit = 100; 3447 3448 pc->data = (void *)jac; 3449 3450 pc->ops->setup = PCSetUp_FieldSplit; 3451 pc->ops->reset = PCReset_FieldSplit; 3452 pc->ops->destroy = PCDestroy_FieldSplit; 3453 pc->ops->setfromoptions = PCSetFromOptions_FieldSplit; 3454 pc->ops->applyrichardson = NULL; 3455 3456 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit)); 3457 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit)); 3458 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit)); 3459 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit)); 3460 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit)); 3461 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit)); 3462 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit)); 3463 3464 /* Initialize function pointers */ 3465 PetscCall(PCFieldSplitSetType(pc, jac->type)); 3466 PetscFunctionReturn(PETSC_SUCCESS); 3467 } 3468