#include <petsc/private/pcimpl.h> /*I "petscpc.h" I*/
#include <petsc/private/kspimpl.h> /* This is needed to provide the appropriate PETSC_EXTERN for KSP_Solve_FS ....*/
#include <petscdm.h>
#include <petscdevice.h>
#if PetscDefined(HAVE_CUDA)
#include <petscdevice_cuda.h>
#endif
#if PetscDefined(HAVE_HIP)
#include <petscdevice_hip.h>
#endif

/* String names matching the PCFieldSplitSchurPreType and PCFieldSplitSchurFactType enums,
   used by the options database and by the viewers */
const char *const PCFieldSplitSchurPreTypes[]  = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL};
const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL};

/* Logging events for the sub-solves performed while applying the fieldsplit preconditioner */
PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4;

/* One entry in the doubly-linked list of splits; each split owns its sub-KSP,
   the index sets that define it, and per-split work vectors */
typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
struct _PC_FieldSplitLink {
  KSP               ksp;                 /* Solver for this split's diagonal block */
  Vec               x, y, z;             /* Work vectors for this split (x/y aliased into PC_FieldSplit's arrays) */
  char             *splitname;           /* Name of the split, used for options prefixes and viewing */
  PetscInt          nfields;             /* Number of fields making up this split (when defined by fields, not by IS) */
  PetscInt         *fields, *fields_col; /* Row and column field indices for this split */
  VecScatter        sctx;                /* Scatter from the full-size vector into this split's work vector */
  IS                is, is_col;          /* Row and column index sets defining this split */
  PC_FieldSplitLink next, previous;      /* Links of the split list */
  PetscLogEvent     event;               /* Logging event for this split's sub-solve (assigned elsewhere in this file) */

  /* Used only when setting coordinates with PCSetCoordinates */
  PetscInt   dim;
  PetscInt   ndofs;
  PetscReal *coords;
};

typedef struct {
  PCCompositeType type;          /* How the split solves are combined (additive, multiplicative, Schur, GKB, ...) */
  PetscBool       defaultsplit;  /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */
  PetscBool       splitdefined;  /* Flag is set after the splits have been defined, to prevent more splits from being added */
  PetscInt        bs;            /* Block size for IS and Mat structures */
  PetscInt        nsplits;       /* Number of field divisions defined */
  Vec            *x, *y, w1, w2; /* Per-split work vectors (x, y) and two whole-vector work vectors */
  Mat            *mat;           /* The diagonal block for each split */
  Mat            *pmat;          /* The preconditioning diagonal block for each split */
  Mat            *Afield;        /* The rows of the matrix associated with each split */
  PetscBool       issetup;       /* Whether the split submatrices, vectors, and scatters have been created */

  /* Only used when Schur complement preconditioning is used */
  Mat                       B;                  /* The (0,1) block */
  Mat                       C;                  /* The (1,0) block */
  Mat                       schur;              /* The Schur complement S = A11 - A10 A00^{-1} A01, the KSP here, kspinner, is H_1 in [El08] */
  Mat                       schurp;             /* Assembled approximation to S built by MatSchurComplement to be used as a preconditioning matrix when solving with S */
  Mat                       schur_user;         /* User-provided preconditioning matrix for the Schur complement */
  PCFieldSplitSchurPreType  schurpre;           /* Determines which preconditioning matrix is used for the Schur complement */
  PCFieldSplitSchurFactType schurfactorization; /* Which block factorization (DIAG, LOWER, UPPER, FULL) is applied */
  KSP                       kspschur;           /* The solver for S */
  KSP                       kspupper;           /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */
  PetscScalar               schurscale;         /* Scaling factor for the Schur complement solution with DIAG factorization */

  /* Only used when Golub-Kahan bidiagonalization preconditioning is used */
  Mat          H;          /* The modified matrix H = A00 + nu*A01*A01' */
  PetscReal    gkbtol;     /* Stopping tolerance for lower bound estimate */
  PetscInt     gkbdelay;   /* The delay window for the stopping criterion */
  PetscReal    gkbnu;      /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */
  PetscInt     gkbmaxit;   /* Maximum number of iterations for outer loop */
  PetscBool    gkbmonitor; /* Monitor for gkb iterations and the lower bound error */
  PetscViewer  gkbviewer;  /* Viewer context for gkbmonitor */
  Vec          u, v, d, Hu; /* Work vectors for the GKB algorithm */
  PetscScalar *vecz;       /* Contains intermediate values, eg for lower bound */

  PC_FieldSplitLink head;             /* Head of the linked list of splits */
  PetscBool         isrestrict;       /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */
  PetscBool         suboptionsset;    /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */
  PetscBool         dm_splits;        /* Whether to use DMCreateFieldDecomposition() whenever possible */
  PetscBool         diag_use_amat;    /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         detect;           /* Whether to form 2-way split by finding zero diagonal entries */
  PetscBool         coordinates_set;  /* Whether PCSetCoordinates has been called */
} PC_FieldSplit;

/*
  Note:
  there is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit instead of
  inside PC_FieldSplitLink, just historical. If you want to be able to add new fields after already using the
  PC you could change this.
*/

/* This helper is so that setting a user-provided preconditioning matrix is orthogonal to choosing to use it. This way the
 * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices. */
static Mat FieldSplitSchurPre(PC_FieldSplit *jac)
{
  switch (jac->schurpre) {
  case PC_FIELDSPLIT_SCHUR_PRE_SELF:
    return jac->schur;
  case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
    return jac->schurp;
  case PC_FIELDSPLIT_SCHUR_PRE_A11:
    return jac->pmat[1];
  case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */
  case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */
  default:
    return jac->schur_user ?
jac->schur_user : jac->pmat[1];
  }
}

#include <petscdraw.h>

/* Viewer for the non-Schur, non-GKB composite types: prints (ASCII) or draws (DRAW) the
   composition type, the fields or IS defining each split, and each split's sub-KSP */
static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for off-diagonal blocks\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Solver info for each split is in the following KSP objects:\n"));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        /* tabs disabled so the comma-separated field list stays on one line */
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      PetscCall(KSPView(ilink->ksp, viewer));
      ilink = ilink->next;
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* lay the sub-KSP views out side by side, centered on the current point */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Viewer for the Schur-complement composite type: reports the factorization form, which matrix
   preconditions the Schur complement, the split definitions, and the A00/upper/Schur sub-KSPs */
static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer)
{
  PC_FieldSplit             *jac   = (PC_FieldSplit *)pc->data;
  PetscBool                  iascii, isdraw;
  PetscInt                   i, j;
  PC_FieldSplitLink          ilink = jac->head;
  MatSchurComplementAinvType atype;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for blocks\n"));
    switch (jac->schurpre) {
    case PC_FIELDSPLIT_SCHUR_PRE_SELF:
      PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from S itself\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
      if (jac->schur) {
        PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype));
        PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's "))));
      }
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_A11:
      PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from A11\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_FULL:
      PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from the exact Schur complement\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_USER:
      /* falls back to A11 when no user matrix was actually supplied (see FieldSplitSchurPre()) */
      if (jac->schur_user) {
        PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from user provided matrix\n"));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from A11\n"));
      }
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre);
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, " Split info:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      ilink = ilink->next;
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->head) {
      PetscCall(KSPView(jac->head->ksp, viewer));
    } else PetscCall(PetscViewerASCIIPrintf(viewer, " not yet available\n"));
    PetscCall(PetscViewerASCIIPopTab(viewer));
    /* a distinct kspupper is only shown when it differs from the A00 solver */
    if (jac->head && jac->kspupper != jac->head->ksp) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer));
      else PetscCall(PetscViewerASCIIPrintf(viewer, " not yet available\n"));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->kspschur) {
      PetscCall(KSPView(jac->kspschur, viewer));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " not yet available\n"));
    }
    PetscCall(PetscViewerASCIIPopTab(viewer));
    PetscCall(PetscViewerASCIIPopTab(viewer));
  } else if (isdraw && jac->head) {
    PetscDraw draw;
    PetscReal x, y, w, wd, h;
    PetscInt  cnt = 2; /* A00 solver + Schur solver; +1 below if a distinct upper solver exists */
    char      str[32];

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (jac->kspupper != jac->head->ksp) cnt++;
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (cnt + 1);

    PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11]));
    } else {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre]));
    }
    PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    x = x - wd * (cnt - 1) / 2.0;

    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->head->ksp, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
    if (jac->kspupper != jac->head->ksp) {
      x += wd;
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(jac->kspupper, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
    }
    x += wd;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->kspschur, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Viewer for the Golub-Kahan bidiagonalization composite type: reports GKB tolerances and
   the solver used for the augmented matrix H = A00 + nu*A01*A01' */
static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not 
Pmat) as operator for diagonal blocks\n")); 306 if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for off-diagonal blocks\n")); 307 308 PetscCall(PetscViewerASCIIPrintf(viewer, " Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit)); 309 PetscCall(PetscViewerASCIIPrintf(viewer, " Solver info for H = A00 + nu*A01*A01' matrix:\n")); 310 PetscCall(PetscViewerASCIIPushTab(viewer)); 311 312 if (ilink->fields) { 313 PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields ")); 314 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); 315 for (j = 0; j < ilink->nfields; j++) { 316 if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ",")); 317 PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j])); 318 } 319 PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); 320 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE)); 321 } else { 322 PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n")); 323 } 324 PetscCall(KSPView(ilink->ksp, viewer)); 325 326 PetscCall(PetscViewerASCIIPopTab(viewer)); 327 } 328 329 if (isdraw) { 330 PetscDraw draw; 331 PetscReal x, y, w, wd; 332 333 PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw)); 334 PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y)); 335 w = 2 * PetscMin(1.0 - x, x); 336 wd = w / (jac->nsplits + 1); 337 x = x - wd * (jac->nsplits - 1) / 2.0; 338 for (i = 0; i < jac->nsplits; i++) { 339 PetscCall(PetscDrawPushCurrentPoint(draw, x, y)); 340 PetscCall(KSPView(ilink->ksp, viewer)); 341 PetscCall(PetscDrawPopCurrentPoint(draw)); 342 x += wd; 343 ilink = ilink->next; 344 } 345 } 346 PetscFunctionReturn(PETSC_SUCCESS); 347 } 348 349 /* Precondition: jac->bs is set to a meaningful value or MATNEST */ 350 static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc) 351 { 352 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 353 
PetscInt bs, i, nfields, *ifields, nfields_col, *ifields_col; 354 PetscBool flg, flg_col, mnest; 355 char optionname[128], splitname[8], optionname_col[128]; 356 357 PetscFunctionBegin; 358 PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &mnest)); 359 if (mnest) { 360 PetscCall(MatNestGetSize(pc->pmat, &bs, NULL)); 361 } else { 362 bs = jac->bs; 363 } 364 PetscCall(PetscMalloc2(bs, &ifields, bs, &ifields_col)); 365 for (i = 0, flg = PETSC_TRUE;; i++) { 366 PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i)); 367 PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i)); 368 PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i)); 369 nfields = bs; 370 nfields_col = bs; 371 PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg)); 372 PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col)); 373 if (!flg) break; 374 else if (flg && !flg_col) { 375 PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields"); 376 PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields)); 377 } else { 378 PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields"); 379 PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match"); 380 PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col)); 381 } 382 } 383 if (i > 0) { 384 /* Makes command-line setting of splits take precedence over setting them in code. 385 Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would 386 create new splits, which would probably not be what the user wanted. 
*/ 387 jac->splitdefined = PETSC_TRUE; 388 } 389 PetscCall(PetscFree2(ifields, ifields_col)); 390 PetscFunctionReturn(PETSC_SUCCESS); 391 } 392 393 static PetscErrorCode PCFieldSplitSetDefaults(PC pc) 394 { 395 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 396 PC_FieldSplitLink ilink = jac->head; 397 PetscBool fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE; 398 PetscInt i; 399 400 PetscFunctionBegin; 401 /* 402 Kinda messy, but at least this now uses DMCreateFieldDecomposition(). 403 Should probably be rewritten. 404 */ 405 if (!ilink) { 406 PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL)); 407 if (pc->dm && jac->dm_splits && !jac->detect && !coupling) { 408 PetscInt numFields, f, i, j; 409 char **fieldNames; 410 IS *fields; 411 DM *dms; 412 DM subdm[128]; 413 PetscBool flg; 414 415 PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms)); 416 /* Allow the user to prescribe the splits */ 417 for (i = 0, flg = PETSC_TRUE;; i++) { 418 PetscInt ifields[128]; 419 IS compField; 420 char optionname[128], splitname[8]; 421 PetscInt nfields = numFields; 422 423 PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i)); 424 PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg)); 425 if (!flg) break; 426 PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields); 427 PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i])); 428 if (nfields == 1) { 429 PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField)); 430 } else { 431 PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i)); 432 PetscCall(PCFieldSplitSetIS(pc, splitname, compField)); 433 } 434 PetscCall(ISDestroy(&compField)); 435 for (j = 0; 
j < nfields; ++j) {
          f = ifields[j];
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      }
      if (i == 0) {
        /* no splits were prescribed via options: one split per DM field */
        for (f = 0; f < numFields; ++f) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f]));
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      } else {
        /* replace the per-field DMs with the sub-DMs created for the prescribed splits */
        for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j));
        PetscCall(PetscFree(dms));
        PetscCall(PetscMalloc1(i, &dms));
        for (j = 0; j < i; ++j) dms[j] = subdm[j];
      }
      PetscCall(PetscFree(fieldNames));
      PetscCall(PetscFree(fields));
      if (dms) {
        PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n"));
        for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) {
          const char *prefix;
          PetscCall(PetscObjectGetOptionsPrefix((PetscObject)ilink->ksp, &prefix));
          PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dms[i], prefix));
          PetscCall(KSPSetDM(ilink->ksp, dms[i]));
          PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE));
          PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0));
          PetscCall(DMDestroy(&dms[i]));
        }
        PetscCall(PetscFree(dms));
      }
    } else {
      if (jac->bs <= 0) {
        if (pc->pmat) {
          PetscCall(MatGetBlockSize(pc->pmat, &jac->bs));
        } else jac->bs = 1;
      }

      if (jac->detect) {
        /* 2-way split: rows with zero diagonal entries vs. the rest */
        IS       zerodiags, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->diag_use_amat) {
          PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags));
        } else {
          PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags));
        }
        PetscCall(ISComplement(zerodiags, nmin, nmax, &rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags));
        PetscCall(ISDestroy(&zerodiags));
        PetscCall(ISDestroy(&rest));
      } else if (coupling) {
        /* NOTE(review): this IS shadows the PetscBool 'coupling' from the enclosing scope */
        IS       coupling, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->offdiag_use_amat) {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling));
        } else {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling));
        }
        PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest));
        PetscCall(ISSetIdentity(rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", coupling));
        PetscCall(ISDestroy(&coupling));
        PetscCall(ISDestroy(&rest));
      } else {
        PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL));
        if (!fieldsplit_default) {
          /* Allow user to set fields from command line, if bs was known at the time of PCSetFromOptions_FieldSplit()
             then it is set there. This is not ideal because we should only have options set in XXSetFromOptions(). */
          PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
          if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
        }
        if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) {
          Mat       M = pc->pmat;
          PetscBool isnest;
          PetscInt  nf;

          PetscCall(PetscInfo(pc, "Using default splitting of fields\n"));
          PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest));
          if (!isnest) {
            M = pc->mat;
            PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest));
          }
          /* one split per block of the nest matrix, or per field of the blocksize */
          if (!isnest) nf = jac->bs;
          else PetscCall(MatNestGetSize(M, &nf, NULL));
          for (i = 0; i < nf; i++) {
            char splitname[8];

            PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
            PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i));
          }
          jac->defaultsplit = PETSC_TRUE;
        }
      }
    }
  } else if (jac->nsplits == 1) {
    /* exactly one split given: define the second split as its complement */
    IS       is2;
    PetscInt nmin, nmax;

    PetscCheck(ilink->is, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()");
    PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
    PetscCall(ISComplement(ilink->is, nmin, nmax, &is2));
    PetscCall(PCFieldSplitSetIS(pc, "1", is2));
    PetscCall(ISDestroy(&is2));
  }

  PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Build the explicit augmented-Lagrangian operator H = A00 + nu*A01*A01' used by the GKB
   preconditioner, after verifying that C is (numerically) the Hermitian transpose of B */
static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu)
{
  Mat       BT, T;
  PetscReal nrmT, nrmB;

  PetscFunctionBegin;
  PetscCall(MatHermitianTranspose(C, MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */
  PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN));
  PetscCall(MatNorm(T, NORM_1, &nrmT));
  PetscCall(MatNorm(B, NORM_1, &nrmB));
  /* relative test ||C' - B|| / ||B||; skipped when B is the zero matrix */
  PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/hermitian, GKB is not applicable.");

  /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */
  /* setting N := 1/nu*I in [Ar13]. 
*/ 566 PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT)); 567 PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_CURRENT, H)); /* H = A01*A01' */ 568 PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN)); /* H = A00 + nu*A01*A01' */ 569 570 PetscCall(MatDestroy(&BT)); 571 PetscCall(MatDestroy(&T)); 572 PetscFunctionReturn(PETSC_SUCCESS); 573 } 574 575 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *option[], const char *value[], PetscBool *flg); 576 577 static PetscErrorCode PCSetUp_FieldSplit(PC pc) 578 { 579 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 580 PC_FieldSplitLink ilink; 581 PetscInt i, nsplit; 582 PetscBool sorted, sorted_col, matnest = PETSC_FALSE; 583 584 PetscFunctionBegin; 585 pc->failedreason = PC_NOERROR; 586 PetscCall(PCFieldSplitSetDefaults(pc)); 587 nsplit = jac->nsplits; 588 ilink = jac->head; 589 if (pc->pmat) PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest)); 590 591 /* get the matrices for each split */ 592 if (!jac->issetup) { 593 PetscInt rstart, rend, nslots, bs; 594 595 jac->issetup = PETSC_TRUE; 596 597 /* This is done here instead of in PCFieldSplitSetFields() because may not have matrix at that point */ 598 if (jac->defaultsplit || !ilink->is) { 599 if (jac->bs <= 0) jac->bs = nsplit; 600 } 601 602 /* MatCreateSubMatrix() for [S]BAIJ matrices can only work if the indices include entire blocks of the matrix */ 603 PetscCall(MatGetBlockSize(pc->pmat, &bs)); 604 if (bs > 1 && (jac->bs <= bs || jac->bs % bs)) { 605 PetscBool blk; 606 607 PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &blk, MATBAIJ, MATSBAIJ, MATSEQBAIJ, MATSEQSBAIJ, MATMPIBAIJ, MATMPISBAIJ, NULL)); 608 PetscCheck(!blk, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Cannot use MATBAIJ with PCFIELDSPLIT and currently set matrix and PC blocksizes"); 609 } 610 611 if (!matnest) { /* use the matrix blocksize and stride IS to 
determine the index sets that define the submatrices */ 612 bs = jac->bs; 613 PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend)); 614 nslots = (rend - rstart) / bs; 615 for (i = 0; i < nsplit; i++) { 616 if (jac->defaultsplit) { 617 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is)); 618 PetscCall(ISDuplicate(ilink->is, &ilink->is_col)); 619 } else if (!ilink->is) { 620 if (ilink->nfields > 1) { 621 PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col; 622 623 PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii)); 624 PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj)); 625 for (j = 0; j < nslots; j++) { 626 for (k = 0; k < nfields; k++) { 627 ii[nfields * j + k] = rstart + bs * j + fields[k]; 628 jj[nfields * j + k] = rstart + bs * j + fields_col[k]; 629 } 630 } 631 PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is)); 632 PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col)); 633 PetscCall(ISSetBlockSize(ilink->is, nfields)); 634 PetscCall(ISSetBlockSize(ilink->is_col, nfields)); 635 } else { 636 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is)); 637 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col)); 638 } 639 } 640 PetscCall(ISSorted(ilink->is, &sorted)); 641 if (ilink->is_col) PetscCall(ISSorted(ilink->is_col, &sorted_col)); 642 PetscCheck(sorted && sorted_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Fields must be sorted when creating split"); 643 ilink = ilink->next; 644 } 645 } else { /* use the IS that define the MATNEST to determine the index sets that define the submatrices */ 646 IS *rowis, *colis, *ises = NULL; 647 PetscInt mis, nis; 648 649 PetscCall(MatNestGetSize(pc->pmat, &mis, &nis)); 650 
PetscCall(PetscMalloc2(mis, &rowis, nis, &colis)); 651 PetscCall(MatNestGetISs(pc->pmat, rowis, colis)); 652 if (!jac->defaultsplit) PetscCall(PetscMalloc1(mis, &ises)); 653 654 for (i = 0; i < nsplit; i++) { 655 if (jac->defaultsplit) { 656 PetscCall(ISDuplicate(rowis[i], &ilink->is)); 657 PetscCall(ISDuplicate(ilink->is, &ilink->is_col)); 658 } else if (!ilink->is) { 659 if (ilink->nfields > 1) { 660 for (PetscInt j = 0; j < ilink->nfields; j++) ises[j] = rowis[ilink->fields[j]]; 661 PetscCall(ISConcatenate(PetscObjectComm((PetscObject)pc), ilink->nfields, ises, &ilink->is)); 662 } else { 663 PetscCall(ISDuplicate(rowis[ilink->fields[0]], &ilink->is)); 664 } 665 PetscCall(ISDuplicate(ilink->is, &ilink->is_col)); 666 } 667 ilink = ilink->next; 668 } 669 PetscCall(PetscFree2(rowis, colis)); 670 PetscCall(PetscFree(ises)); 671 } 672 } 673 674 ilink = jac->head; 675 if (!jac->pmat) { 676 Vec xtmp; 677 678 PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL)); 679 PetscCall(PetscMalloc1(nsplit, &jac->pmat)); 680 PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y)); 681 for (i = 0; i < nsplit; i++) { 682 MatNullSpace sp; 683 684 /* Check for preconditioning matrix attached to IS */ 685 PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i])); 686 if (jac->pmat[i]) { 687 PetscCall(PetscObjectReference((PetscObject)jac->pmat[i])); 688 if (jac->type == PC_COMPOSITE_SCHUR) { 689 jac->schur_user = jac->pmat[i]; 690 691 PetscCall(PetscObjectReference((PetscObject)jac->schur_user)); 692 } 693 } else { 694 const char *prefix; 695 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i])); 696 PetscCall(MatGetOptionsPrefix(jac->pmat[i], &prefix)); 697 if (!prefix) { 698 PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix)); 699 PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix)); 700 } 701 PetscCall(MatSetFromOptions(jac->pmat[i])); 702 PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view")); 703 } 
704 /* create work vectors for each split */ 705 PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i])); 706 ilink->x = jac->x[i]; 707 ilink->y = jac->y[i]; 708 ilink->z = NULL; 709 /* compute scatter contexts needed by multiplicative versions and non-default splits */ 710 PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx)); 711 PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp)); 712 if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp)); 713 ilink = ilink->next; 714 } 715 PetscCall(VecDestroy(&xtmp)); 716 } else { 717 MatReuse scall; 718 MatNullSpace *nullsp = NULL; 719 720 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 721 PetscCall(MatGetNullSpaces(nsplit, jac->pmat, &nullsp)); 722 for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i])); 723 scall = MAT_INITIAL_MATRIX; 724 } else scall = MAT_REUSE_MATRIX; 725 726 for (i = 0; i < nsplit; i++) { 727 Mat pmat; 728 729 /* Check for preconditioning matrix attached to IS */ 730 PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat)); 731 if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i])); 732 ilink = ilink->next; 733 } 734 if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->pmat, &nullsp)); 735 } 736 if (jac->diag_use_amat) { 737 ilink = jac->head; 738 if (!jac->mat) { 739 PetscCall(PetscMalloc1(nsplit, &jac->mat)); 740 for (i = 0; i < nsplit; i++) { 741 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i])); 742 ilink = ilink->next; 743 } 744 } else { 745 MatReuse scall; 746 MatNullSpace *nullsp = NULL; 747 748 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 749 PetscCall(MatGetNullSpaces(nsplit, jac->mat, &nullsp)); 750 for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i])); 751 scall = MAT_INITIAL_MATRIX; 752 } else scall = MAT_REUSE_MATRIX; 753 754 for (i = 0; i < nsplit; i++) { 755 
PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i])); 756 ilink = ilink->next; 757 } 758 if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->mat, &nullsp)); 759 } 760 } else { 761 jac->mat = jac->pmat; 762 } 763 764 /* Check for null space attached to IS */ 765 ilink = jac->head; 766 for (i = 0; i < nsplit; i++) { 767 MatNullSpace sp; 768 769 PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp)); 770 if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp)); 771 ilink = ilink->next; 772 } 773 774 if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) { 775 /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */ 776 /* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? */ 777 ilink = jac->head; 778 if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) { 779 /* special case need where Afield[0] is not needed and only certain columns of Afield[1] are needed since update is only on those rows of the solution */ 780 if (!jac->Afield) { 781 PetscCall(PetscCalloc1(nsplit, &jac->Afield)); 782 if (jac->offdiag_use_amat) { 783 PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1])); 784 } else { 785 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1])); 786 } 787 } else { 788 MatReuse scall; 789 790 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 791 PetscCall(MatDestroy(&jac->Afield[1])); 792 scall = MAT_INITIAL_MATRIX; 793 } else scall = MAT_REUSE_MATRIX; 794 795 if (jac->offdiag_use_amat) { 796 PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1])); 797 } else { 798 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, &jac->Afield[1])); 799 } 800 } 801 } else { 802 if (!jac->Afield) { 803 
PetscCall(PetscMalloc1(nsplit, &jac->Afield)); 804 for (i = 0; i < nsplit; i++) { 805 if (jac->offdiag_use_amat) { 806 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i])); 807 } else { 808 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i])); 809 } 810 ilink = ilink->next; 811 } 812 } else { 813 MatReuse scall; 814 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 815 for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i])); 816 scall = MAT_INITIAL_MATRIX; 817 } else scall = MAT_REUSE_MATRIX; 818 819 for (i = 0; i < nsplit; i++) { 820 if (jac->offdiag_use_amat) { 821 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i])); 822 } else { 823 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, &jac->Afield[i])); 824 } 825 ilink = ilink->next; 826 } 827 } 828 } 829 } 830 831 if (jac->type == PC_COMPOSITE_SCHUR) { 832 IS ccis; 833 PetscBool isset, isspd = PETSC_FALSE, issym = PETSC_FALSE, flg; 834 PetscInt rstart, rend; 835 char lscname[256]; 836 PetscObject LSC_L; 837 838 PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields"); 839 840 /* If pc->mat is SPD, don't scale by -1 the Schur complement */ 841 PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd)); 842 if (jac->schurscale == (PetscScalar)-1.0) jac->schurscale = (isset && isspd) ? 1.0 : -1.0; 843 PetscCall(MatIsSymmetricKnown(pc->pmat, &isset, &issym)); 844 845 /* When extracting off-diagonal submatrices, we take complements from this range */ 846 PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend)); 847 PetscCall(PetscObjectTypeCompareAny(jac->offdiag_use_amat ? 
(PetscObject)pc->mat : (PetscObject)pc->pmat, &flg, MATSEQSBAIJ, MATMPISBAIJ, "")); 848 849 if (jac->schur) { 850 KSP kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper; 851 MatReuse scall; 852 853 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 854 scall = MAT_INITIAL_MATRIX; 855 PetscCall(MatDestroy(&jac->B)); 856 PetscCall(MatDestroy(&jac->C)); 857 } else scall = MAT_REUSE_MATRIX; 858 859 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner)); 860 ilink = jac->head; 861 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 862 if (jac->offdiag_use_amat) { 863 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->B)); 864 } else { 865 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->B)); 866 } 867 PetscCall(ISDestroy(&ccis)); 868 if (!flg) { 869 ilink = ilink->next; 870 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 871 if (jac->offdiag_use_amat) { 872 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->C)); 873 } else { 874 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->C)); 875 } 876 PetscCall(ISDestroy(&ccis)); 877 } else { 878 PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? 
pc->mat : pc->pmat, &isset, &flg)); 879 if (isset && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C)); 880 else PetscCall(MatCreateTranspose(jac->B, &jac->C)); 881 } 882 PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1])); 883 if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) { 884 PetscCall(MatDestroy(&jac->schurp)); 885 PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp)); 886 } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) { 887 PetscCall(MatDestroy(&jac->schur_user)); 888 if (jac->kspupper == jac->head->ksp) { 889 Mat AinvB; 890 891 PetscCall(MatCreate(PetscObjectComm((PetscObject)jac->schur), &AinvB)); 892 PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", (PetscObject)AinvB)); 893 PetscCall(MatDestroy(&AinvB)); 894 } 895 PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user)); 896 } 897 if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0])); 898 if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0])); 899 PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac))); 900 } else { 901 const char *Dprefix; 902 char schurprefix[256], schurmatprefix[256]; 903 char schurtestoption[256]; 904 MatNullSpace sp; 905 KSP kspt; 906 907 /* extract the A01 and A10 matrices */ 908 ilink = jac->head; 909 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 910 if (jac->offdiag_use_amat) { 911 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 912 } else { 913 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 914 } 915 PetscCall(ISDestroy(&ccis)); 916 ilink = ilink->next; 917 if (!flg) { 918 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 919 if (jac->offdiag_use_amat) { 920 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 
921 } else { 922 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 923 } 924 PetscCall(ISDestroy(&ccis)); 925 } else { 926 PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &isset, &flg)); 927 if (isset && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C)); 928 else PetscCall(MatCreateTranspose(jac->B, &jac->C)); 929 } 930 /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */ 931 PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur)); 932 PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT)); 933 PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1])); 934 PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname)); 935 PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix)); 936 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt)); 937 PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix)); 938 939 /* Note: this is not true in general */ 940 PetscCall(MatGetNullSpace(jac->mat[1], &sp)); 941 if (sp) PetscCall(MatSetNullSpace(jac->schur, sp)); 942 943 PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname)); 944 PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg)); 945 if (flg) { 946 DM dmInner; 947 KSP kspInner; 948 PC pcInner; 949 950 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner)); 951 PetscCall(KSPReset(kspInner)); 952 PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0])); 953 PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname)); 954 /* Indent this deeper to emphasize the "inner" nature of this solver. 
*/ 955 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2)); 956 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2)); 957 PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix)); 958 959 /* Set DM for new solver */ 960 PetscCall(KSPGetDM(jac->head->ksp, &dmInner)); 961 PetscCall(KSPSetDM(kspInner, dmInner)); 962 PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE)); 963 964 /* Defaults to PCKSP as preconditioner */ 965 PetscCall(KSPGetPC(kspInner, &pcInner)); 966 PetscCall(PCSetType(pcInner, PCKSP)); 967 PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp)); 968 } else { 969 /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or 970 * PCFieldSplitSetIS_FieldSplit. We don't want KSPPREONLY because it makes the Schur complement inexact, 971 * preventing Schur complement reduction to be an accurate solve. Usually when an iterative solver is used for 972 * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make 973 * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used 974 * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. 
*/ 975 PetscCall(KSPSetType(jac->head->ksp, KSPGMRES)); 976 PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp)); 977 } 978 PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0])); 979 PetscCall(KSPSetFromOptions(jac->head->ksp)); 980 PetscCall(MatSetFromOptions(jac->schur)); 981 982 PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg)); 983 if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */ 984 KSP kspInner; 985 PC pcInner; 986 987 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner)); 988 PetscCall(KSPGetPC(kspInner, &pcInner)); 989 PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg)); 990 if (flg) { 991 KSP ksp; 992 993 PetscCall(PCKSPGetKSP(pcInner, &ksp)); 994 if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE)); 995 } 996 } 997 PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname)); 998 PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg)); 999 if (flg) { 1000 DM dmInner; 1001 1002 PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? 
((PetscObject)pc)->prefix : "", ilink->splitname)); 1003 PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper)); 1004 PetscCall(KSPSetNestLevel(jac->kspupper, pc->kspnestlevel)); 1005 PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure)); 1006 PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix)); 1007 PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1)); 1008 PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1)); 1009 PetscCall(KSPGetDM(jac->head->ksp, &dmInner)); 1010 PetscCall(KSPSetDM(jac->kspupper, dmInner)); 1011 PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE)); 1012 PetscCall(KSPSetFromOptions(jac->kspupper)); 1013 PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0])); 1014 PetscCall(VecDuplicate(jac->head->x, &jac->head->z)); 1015 } else { 1016 jac->kspupper = jac->head->ksp; 1017 PetscCall(PetscObjectReference((PetscObject)jac->head->ksp)); 1018 } 1019 1020 if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp)); 1021 PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur)); 1022 PetscCall(KSPSetNestLevel(jac->kspschur, pc->kspnestlevel)); 1023 PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure)); 1024 PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1)); 1025 if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) { 1026 PC pcschur; 1027 PetscCall(KSPGetPC(jac->kspschur, &pcschur)); 1028 PetscCall(PCSetType(pcschur, PCNONE)); 1029 /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */ 1030 } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) { 1031 if (jac->schurfactorization == PC_FIELDSPLIT_SCHUR_FACT_FULL && jac->kspupper == jac->head->ksp) { 1032 Mat AinvB; 1033 1034 PetscCall(MatCreate(PetscObjectComm((PetscObject)jac->schur), &AinvB)); 1035 
PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", (PetscObject)AinvB)); 1036 PetscCall(MatDestroy(&AinvB)); 1037 } 1038 PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user)); 1039 } 1040 PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac))); 1041 PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix)); 1042 PetscCall(KSPSetOptionsPrefix(jac->kspschur, Dprefix)); 1043 /* propagate DM */ 1044 { 1045 DM sdm; 1046 PetscCall(KSPGetDM(jac->head->next->ksp, &sdm)); 1047 if (sdm) { 1048 PetscCall(KSPSetDM(jac->kspschur, sdm)); 1049 PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE)); 1050 } 1051 } 1052 /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */ 1053 /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */ 1054 PetscCall(KSPSetFromOptions(jac->kspschur)); 1055 } 1056 PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY)); 1057 PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY)); 1058 if (issym) PetscCall(MatSetOption(jac->schur, MAT_SYMMETRIC, PETSC_TRUE)); 1059 if (isspd) PetscCall(MatSetOption(jac->schur, MAT_SPD, PETSC_TRUE)); 1060 1061 /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */ 1062 PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname)); 1063 PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, &LSC_L)); 1064 if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, &LSC_L)); 1065 if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", LSC_L)); 1066 PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname)); 1067 PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, &LSC_L)); 1068 if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, &LSC_L)); 1069 if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", 
LSC_L)); 1070 } else if (jac->type == PC_COMPOSITE_GKB) { 1071 IS ccis; 1072 PetscInt rstart, rend; 1073 1074 PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields"); 1075 1076 ilink = jac->head; 1077 1078 /* When extracting off-diagonal submatrices, we take complements from this range */ 1079 PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend)); 1080 1081 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 1082 if (jac->offdiag_use_amat) { 1083 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 1084 } else { 1085 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 1086 } 1087 PetscCall(ISDestroy(&ccis)); 1088 /* Create work vectors for GKB algorithm */ 1089 PetscCall(VecDuplicate(ilink->x, &jac->u)); 1090 PetscCall(VecDuplicate(ilink->x, &jac->Hu)); 1091 PetscCall(VecDuplicate(ilink->x, &jac->w2)); 1092 ilink = ilink->next; 1093 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 1094 if (jac->offdiag_use_amat) { 1095 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 1096 } else { 1097 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 1098 } 1099 PetscCall(ISDestroy(&ccis)); 1100 /* Create work vectors for GKB algorithm */ 1101 PetscCall(VecDuplicate(ilink->x, &jac->v)); 1102 PetscCall(VecDuplicate(ilink->x, &jac->d)); 1103 PetscCall(VecDuplicate(ilink->x, &jac->w1)); 1104 PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu)); 1105 PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz)); 1106 1107 ilink = jac->head; 1108 PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H)); 1109 if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp)); 1110 /* Create gkb_monitor context */ 1111 if (jac->gkbmonitor) { 1112 PetscInt tablevel; 1113 
PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer)); 1114 PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII)); 1115 PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel)); 1116 PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel)); 1117 PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1)); 1118 } 1119 } else { 1120 /* set up the individual splits' PCs */ 1121 i = 0; 1122 ilink = jac->head; 1123 while (ilink) { 1124 PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i])); 1125 /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */ 1126 if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp)); 1127 i++; 1128 ilink = ilink->next; 1129 } 1130 } 1131 1132 /* Set coordinates to the sub PC objects whenever these are set */ 1133 if (jac->coordinates_set) { 1134 PC pc_coords; 1135 if (jac->type == PC_COMPOSITE_SCHUR) { 1136 // Head is first block. 1137 PetscCall(KSPGetPC(jac->head->ksp, &pc_coords)); 1138 PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords)); 1139 // Second one is Schur block, but its KSP object is in kspschur. 
1140 PetscCall(KSPGetPC(jac->kspschur, &pc_coords)); 1141 PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords)); 1142 } else if (jac->type == PC_COMPOSITE_GKB) { 1143 PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB Fieldpslit preconditioner\n")); 1144 } else { 1145 ilink = jac->head; 1146 while (ilink) { 1147 PetscCall(KSPGetPC(ilink->ksp, &pc_coords)); 1148 PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords)); 1149 ilink = ilink->next; 1150 } 1151 } 1152 } 1153 1154 jac->suboptionsset = PETSC_TRUE; 1155 PetscFunctionReturn(PETSC_SUCCESS); 1156 } 1157 1158 #define FieldSplitSplitSolveAdd(ilink, xx, yy) \ 1159 ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \ 1160 KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \ 1161 VecScatterEnd(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE))) 1162 1163 static PetscErrorCode PCSetUpOnBlocks_FieldSplit_Schur(PC pc) 1164 { 1165 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1166 PC_FieldSplitLink ilinkA = jac->head; 1167 KSP kspA = ilinkA->ksp, kspUpper = jac->kspupper; 1168 1169 PetscFunctionBegin; 1170 if (jac->schurfactorization == PC_FIELDSPLIT_SCHUR_FACT_FULL && kspUpper != kspA) { 1171 PetscCall(KSPSetUp(kspUpper)); 1172 PetscCall(KSPSetUpOnBlocks(kspUpper)); 1173 } 1174 PetscCall(KSPSetUp(kspA)); 1175 PetscCall(KSPSetUpOnBlocks(kspA)); 1176 if (jac->schurpre != PC_FIELDSPLIT_SCHUR_PRE_FULL) { 1177 PetscCall(KSPSetUp(jac->kspschur)); 1178 PetscCall(KSPSetUpOnBlocks(jac->kspschur)); 1179 } else if (kspUpper == kspA) { 1180 Mat 
AinvB, A; 1181 PetscInt m, M, N; 1182 1183 PetscCall(PetscObjectQuery((PetscObject)jac->schur, "AinvB", (PetscObject *)&AinvB)); 1184 if (AinvB) { 1185 PetscCall(MatGetSize(AinvB, NULL, &N)); 1186 if (N == -1) { // first time PCSetUpOnBlocks_FieldSplit_Schur() is called 1187 VecType vtype; 1188 PetscMemType mtype; 1189 PetscScalar *array; 1190 1191 PetscCall(MatGetSize(jac->B, &M, &N)); 1192 PetscCall(MatGetLocalSize(jac->B, &m, NULL)); 1193 PetscCall(MatGetVecType(jac->B, &vtype)); 1194 PetscCall(VecGetArrayAndMemType(ilinkA->x, &array, &mtype)); 1195 PetscCall(VecRestoreArrayAndMemType(ilinkA->x, &array)); 1196 if (PetscMemTypeHost(mtype) || (!PetscDefined(HAVE_CUDA) && !PetscDefined(HAVE_HIP))) PetscCall(PetscMalloc1(m * (N + 1), &array)); 1197 #if PetscDefined(HAVE_CUDA) 1198 else if (PetscMemTypeCUDA(mtype)) PetscCallCUDA(cudaMalloc((void **)&array, sizeof(PetscScalar) * m * (N + 1))); 1199 #endif 1200 #if PetscDefined(HAVE_HIP) 1201 else if (PetscMemTypeHIP(mtype)) PetscCallHIP(hipMalloc((void **)&array, sizeof(PetscScalar) * m * (N + 1))); 1202 #endif 1203 PetscCall(MatCreateDenseFromVecType(PetscObjectComm((PetscObject)jac->schur), vtype, m, PETSC_DECIDE, M, N + 1, -1, array, &A)); // number of columns of the Schur complement plus one 1204 PetscCall(MatHeaderReplace(AinvB, &A)); 1205 } 1206 } 1207 } 1208 PetscFunctionReturn(PETSC_SUCCESS); 1209 } 1210 1211 static PetscErrorCode PCSetUpOnBlocks_FieldSplit(PC pc) 1212 { 1213 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1214 PC_FieldSplitLink ilink = jac->head; 1215 1216 PetscFunctionBegin; 1217 while (ilink) { 1218 PetscCall(KSPSetUp(ilink->ksp)); 1219 PetscCall(KSPSetUpOnBlocks(ilink->ksp)); 1220 ilink = ilink->next; 1221 } 1222 PetscFunctionReturn(PETSC_SUCCESS); 1223 } 1224 1225 static PetscErrorCode PCSetUpOnBlocks_FieldSplit_GKB(PC pc) 1226 { 1227 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1228 PC_FieldSplitLink ilinkA = jac->head; 1229 KSP ksp = ilinkA->ksp; 1230 1231 PetscFunctionBegin; 1232 
PetscCall(KSPSetUp(ksp)); 1233 PetscCall(KSPSetUpOnBlocks(ksp)); 1234 PetscFunctionReturn(PETSC_SUCCESS); 1235 } 1236 1237 static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y) 1238 { 1239 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1240 PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; 1241 KSP kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper; 1242 Mat AinvB = NULL; 1243 PetscInt N, P; 1244 1245 PetscFunctionBegin; 1246 switch (jac->schurfactorization) { 1247 case PC_FIELDSPLIT_SCHUR_FACT_DIAG: 1248 /* [A00 0; 0 -S], positive definite, suitable for MINRES */ 1249 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1250 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1251 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1252 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1253 PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y)); 1254 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1255 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1256 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1257 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1258 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1259 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); 1260 PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y)); 1261 PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1262 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); 1263 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1264 PetscCall(VecScale(ilinkD->y, jac->schurscale)); 1265 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1266 
PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1267 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1268 break; 1269 case PC_FIELDSPLIT_SCHUR_FACT_LOWER: 1270 /* [A00 0; A10 S], suitable for left preconditioning */ 1271 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1272 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1273 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1274 PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y)); 1275 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1276 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1277 PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x)); 1278 PetscCall(VecScale(ilinkD->x, -1.)); 1279 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1280 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1281 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1282 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1283 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); 1284 PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y)); 1285 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); 1286 PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1287 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1288 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1289 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1290 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1291 break; 1292 case PC_FIELDSPLIT_SCHUR_FACT_UPPER: 1293 /* [A00 A01; 0 S], suitable for right preconditioning */ 
1294 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1295 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1296 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1297 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); 1298 PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y)); 1299 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); 1300 PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1301 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1302 PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x)); 1303 PetscCall(VecScale(ilinkA->x, -1.)); 1304 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD)); 1305 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1306 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD)); 1307 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1308 PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y)); 1309 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1310 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1311 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1312 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1313 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1314 break; 1315 case PC_FIELDSPLIT_SCHUR_FACT_FULL: 1316 /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */ 1317 PetscCall(MatGetSize(jac->B, NULL, &P)); 1318 N = P; 1319 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1320 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1321 PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, 
kspLower, ilinkA->x, ilinkA->y, NULL)); 1322 if (kspUpper == kspA) { 1323 PetscCall(PetscObjectQuery((PetscObject)jac->schur, "AinvB", (PetscObject *)&AinvB)); 1324 if (AinvB) { 1325 PetscCall(MatGetSize(AinvB, NULL, &N)); 1326 if (N > P) { // first time PCApply_FieldSplit_Schur() is called 1327 PetscMemType mtype; 1328 Vec c = NULL; 1329 PetscScalar *array; 1330 PetscInt m, M; 1331 1332 PetscCall(MatGetSize(jac->B, &M, NULL)); 1333 PetscCall(MatGetLocalSize(jac->B, &m, NULL)); 1334 PetscCall(MatDenseGetArrayAndMemType(AinvB, &array, &mtype)); 1335 if (PetscMemTypeHost(mtype) || (!PetscDefined(HAVE_CUDA) && !PetscDefined(HAVE_HIP))) PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c)); 1336 #if PetscDefined(HAVE_CUDA) 1337 else if (PetscMemTypeCUDA(mtype)) PetscCall(VecCreateMPICUDAWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c)); 1338 #endif 1339 #if PetscDefined(HAVE_HIP) 1340 else if (PetscMemTypeHIP(mtype)) PetscCall(VecCreateMPIHIPWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c)); 1341 #endif 1342 PetscCall(MatDenseRestoreArrayAndMemType(AinvB, &array)); 1343 PetscCall(VecCopy(ilinkA->x, c)); 1344 PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user)); 1345 PetscCall(KSPSetOperators(jac->kspschur, jac->schur, jac->schur_user)); 1346 PetscCall(VecCopy(c, ilinkA->y)); // retrieve the solution as the last column of the composed Mat 1347 PetscCall(VecDestroy(&c)); 1348 } 1349 } 1350 } 1351 if (N == P) PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y)); 1352 PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y)); 1353 PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL)); 1354 PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x)); 1355 PetscCall(VecScale(ilinkD->x, -1.0)); 1356 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1357 PetscCall(VecScatterEnd(ilinkD->sctx, x, 
ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1358 1359 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1360 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); 1361 PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y)); 1362 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); 1363 PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1364 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1365 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1366 1367 if (kspUpper == kspA) { 1368 if (!AinvB) { 1369 PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y)); 1370 PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y)); 1371 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1372 PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y)); 1373 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1374 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1375 } else PetscCall(MatMultAdd(AinvB, ilinkD->y, ilinkA->y, ilinkA->y)); 1376 } else { 1377 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1378 PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y)); 1379 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1380 PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x)); 1381 PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL)); 1382 PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z)); 1383 PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z)); 1384 PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL)); 1385 PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z)); 1386 } 1387 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1388 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1389 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, 
INSERT_VALUES, SCATTER_REVERSE)); 1390 } 1391 PetscFunctionReturn(PETSC_SUCCESS); 1392 } 1393 1394 static PetscErrorCode PCApplyTranspose_FieldSplit_Schur(PC pc, Vec x, Vec y) 1395 { 1396 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1397 PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; 1398 KSP kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper; 1399 1400 PetscFunctionBegin; 1401 switch (jac->schurfactorization) { 1402 case PC_FIELDSPLIT_SCHUR_FACT_DIAG: 1403 /* [A00 0; 0 -S], positive definite, suitable for MINRES */ 1404 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1405 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1406 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1407 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1408 PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y)); 1409 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1410 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1411 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1412 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1413 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1414 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); 1415 PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y)); 1416 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); 1417 PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1418 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1419 PetscCall(VecScale(ilinkD->y, jac->schurscale)); 1420 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1421 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, 
INSERT_VALUES, SCATTER_REVERSE)); 1422 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1423 break; 1424 case PC_FIELDSPLIT_SCHUR_FACT_UPPER: 1425 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1426 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1427 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1428 PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y)); 1429 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1430 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1431 PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x)); 1432 PetscCall(VecScale(ilinkD->x, -1.)); 1433 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1434 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1435 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1436 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1437 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); 1438 PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y)); 1439 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); 1440 PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1441 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1442 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1443 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1444 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1445 break; 1446 case PC_FIELDSPLIT_SCHUR_FACT_LOWER: 1447 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1448 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, 
INSERT_VALUES, SCATTER_FORWARD)); 1449 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1450 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); 1451 PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y)); 1452 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); 1453 PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1454 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1455 PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x)); 1456 PetscCall(VecScale(ilinkA->x, -1.)); 1457 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD)); 1458 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1459 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD)); 1460 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1461 PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y)); 1462 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1463 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1464 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1465 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1466 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1467 break; 1468 case PC_FIELDSPLIT_SCHUR_FACT_FULL: 1469 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1470 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1471 PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL)); 1472 PetscCall(KSPSolveTranspose(kspUpper, ilinkA->x, ilinkA->y)); 1473 PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->y)); 1474 PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, 
NULL)); 1475 PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x)); 1476 PetscCall(VecScale(ilinkD->x, -1.0)); 1477 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1478 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1479 1480 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1481 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); 1482 PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y)); 1483 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); 1484 PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1485 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1486 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1487 1488 if (kspLower == kspA) { 1489 PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->y)); 1490 PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y)); 1491 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1492 PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y)); 1493 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1494 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1495 } else { 1496 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1497 PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y)); 1498 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1499 PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x)); 1500 PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL)); 1501 PetscCall(KSPSolveTranspose(kspLower, ilinkA->x, ilinkA->z)); 1502 PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->z)); 1503 PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL)); 1504 PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z)); 1505 } 1506 
PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1507 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1508 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1509 } 1510 PetscFunctionReturn(PETSC_SUCCESS); 1511 } 1512 1513 static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y) 1514 { 1515 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1516 PC_FieldSplitLink ilink = jac->head; 1517 PetscInt cnt, bs; 1518 1519 PetscFunctionBegin; 1520 if (jac->type == PC_COMPOSITE_ADDITIVE) { 1521 PetscBool matnest; 1522 1523 PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest)); 1524 if (jac->defaultsplit && !matnest) { 1525 PetscCall(VecGetBlockSize(x, &bs)); 1526 PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs); 1527 PetscCall(VecGetBlockSize(y, &bs)); 1528 PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs); 1529 PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES)); 1530 while (ilink) { 1531 PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1532 PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y)); 1533 PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y)); 1534 PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1535 ilink = ilink->next; 1536 } 1537 PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES)); 1538 } else { 1539 PetscCall(VecSet(y, 0.0)); 1540 while (ilink) { 1541 PetscCall(FieldSplitSplitSolveAdd(ilink, x, y)); 1542 ilink = ilink->next; 1543 } 1544 } 1545 } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) { 1546 PetscCall(VecSet(y, 
0.0)); 1547 /* solve on first block for first block variables */ 1548 PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD)); 1549 PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD)); 1550 PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1551 PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y)); 1552 PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y)); 1553 PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1554 PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE)); 1555 PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE)); 1556 1557 /* compute the residual only onto second block variables using first block variables */ 1558 PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x)); 1559 ilink = ilink->next; 1560 PetscCall(VecScale(ilink->x, -1.0)); 1561 PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD)); 1562 PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD)); 1563 1564 /* solve on second block variables */ 1565 PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1566 PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y)); 1567 PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y)); 1568 PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1569 PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE)); 1570 PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE)); 1571 } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) { 1572 if (!jac->w1) { 1573 PetscCall(VecDuplicate(x, &jac->w1)); 1574 PetscCall(VecDuplicate(x, &jac->w2)); 1575 } 1576 PetscCall(VecSet(y, 0.0)); 1577 PetscCall(FieldSplitSplitSolveAdd(ilink, x, y)); 1578 cnt = 1; 1579 while (ilink->next) { 1580 ilink = 
ilink->next; 1581 /* compute the residual only over the part of the vector needed */ 1582 PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x)); 1583 PetscCall(VecScale(ilink->x, -1.0)); 1584 PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD)); 1585 PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD)); 1586 PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1587 PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y)); 1588 PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y)); 1589 PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1590 PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE)); 1591 PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE)); 1592 } 1593 if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) { 1594 cnt -= 2; 1595 while (ilink->previous) { 1596 ilink = ilink->previous; 1597 /* compute the residual only over the part of the vector needed */ 1598 PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x)); 1599 PetscCall(VecScale(ilink->x, -1.0)); 1600 PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD)); 1601 PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD)); 1602 PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1603 PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y)); 1604 PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y)); 1605 PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1606 PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE)); 1607 PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE)); 1608 } 1609 } 1610 } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type); 1611 PetscFunctionReturn(PETSC_SUCCESS); 1612 } 1613 1614 static PetscErrorCode 
PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y) 1615 { 1616 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1617 PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; 1618 KSP ksp = ilinkA->ksp; 1619 Vec u, v, Hu, d, work1, work2; 1620 PetscScalar alpha, z, nrmz2, *vecz; 1621 PetscReal lowbnd, nu, beta; 1622 PetscInt j, iterGKB; 1623 1624 PetscFunctionBegin; 1625 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1626 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1627 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1628 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1629 1630 u = jac->u; 1631 v = jac->v; 1632 Hu = jac->Hu; 1633 d = jac->d; 1634 work1 = jac->w1; 1635 work2 = jac->w2; 1636 vecz = jac->vecz; 1637 1638 /* Change RHS to comply with matrix regularization H = A + nu*B*B' */ 1639 /* Add q = q + nu*B*b */ 1640 if (jac->gkbnu) { 1641 nu = jac->gkbnu; 1642 PetscCall(VecScale(ilinkD->x, jac->gkbnu)); 1643 PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */ 1644 } else { 1645 /* Situation when no augmented Lagrangian is used. Then we set inner */ 1646 /* matrix N = I in [Ar13], and thus nu = 1. 
*/ 1647 nu = 1; 1648 } 1649 1650 /* Transform rhs from [q,tilde{b}] to [0,b] */ 1651 PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL)); 1652 PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y)); 1653 PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y)); 1654 PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL)); 1655 PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1)); 1656 PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x */ 1657 1658 /* First step of algorithm */ 1659 PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c)*/ 1660 KSPCheckDot(ksp, beta); 1661 beta = PetscSqrtReal(nu) * beta; 1662 PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c */ 1663 PetscCall(MatMult(jac->B, v, work2)); /* u = H^{-1}*B*v */ 1664 PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL)); 1665 PetscCall(KSPSolve(ksp, work2, u)); 1666 PetscCall(KSPCheckSolve(ksp, pc, u)); 1667 PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL)); 1668 PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u */ 1669 PetscCall(VecDot(Hu, u, &alpha)); 1670 KSPCheckDot(ksp, alpha); 1671 PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite"); 1672 alpha = PetscSqrtReal(PetscAbsScalar(alpha)); 1673 PetscCall(VecScale(u, 1.0 / alpha)); 1674 PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* v = nu/beta *c */ 1675 1676 z = beta / alpha; 1677 vecz[1] = z; 1678 1679 /* Computation of first iterate x(1) and p(1) */ 1680 PetscCall(VecAXPY(ilinkA->y, z, u)); 1681 PetscCall(VecCopy(d, ilinkD->y)); 1682 PetscCall(VecScale(ilinkD->y, -z)); 1683 1684 iterGKB = 1; 1685 lowbnd = 2 * jac->gkbtol; 1686 if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd)); 1687 1688 while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) { 1689 
iterGKB += 1; 1690 PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */ 1691 PetscCall(VecAXPBY(v, nu, -alpha, work1)); 1692 PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = sqrt(nu)*v'*v */ 1693 beta = beta / PetscSqrtReal(nu); 1694 PetscCall(VecScale(v, 1.0 / beta)); 1695 PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */ 1696 PetscCall(MatMult(jac->H, u, Hu)); 1697 PetscCall(VecAXPY(work2, -beta, Hu)); 1698 PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL)); 1699 PetscCall(KSPSolve(ksp, work2, u)); 1700 PetscCall(KSPCheckSolve(ksp, pc, u)); 1701 PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL)); 1702 PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u */ 1703 PetscCall(VecDot(Hu, u, &alpha)); 1704 KSPCheckDot(ksp, alpha); 1705 PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite"); 1706 alpha = PetscSqrtReal(PetscAbsScalar(alpha)); 1707 PetscCall(VecScale(u, 1.0 / alpha)); 1708 1709 z = -beta / alpha * z; /* z <- beta/alpha*z */ 1710 vecz[0] = z; 1711 1712 /* Computation of new iterate x(i+1) and p(i+1) */ 1713 PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */ 1714 PetscCall(VecAXPY(ilinkA->y, z, u)); /* r = r + z*u */ 1715 PetscCall(VecAXPY(ilinkD->y, -z, d)); /* p = p - z*d */ 1716 PetscCall(MatMult(jac->H, ilinkA->y, Hu)); /* ||u||_H = u'*H*u */ 1717 PetscCall(VecDot(Hu, ilinkA->y, &nrmz2)); 1718 1719 /* Compute Lower Bound estimate */ 1720 if (iterGKB > jac->gkbdelay) { 1721 lowbnd = 0.0; 1722 for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]); 1723 lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2)); 1724 } 1725 1726 for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2]; 1727 if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound 
estimate %14.12e\n", iterGKB, (double)lowbnd)); 1728 } 1729 1730 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1731 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1732 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1733 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1734 PetscFunctionReturn(PETSC_SUCCESS); 1735 } 1736 1737 #define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \ 1738 ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \ 1739 KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \ 1740 VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE))) 1741 1742 static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y) 1743 { 1744 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1745 PC_FieldSplitLink ilink = jac->head; 1746 PetscInt bs; 1747 1748 PetscFunctionBegin; 1749 if (jac->type == PC_COMPOSITE_ADDITIVE) { 1750 PetscBool matnest; 1751 1752 PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest)); 1753 if (jac->defaultsplit && !matnest) { 1754 PetscCall(VecGetBlockSize(x, &bs)); 1755 PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs); 1756 PetscCall(VecGetBlockSize(y, &bs)); 1757 PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " 
does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs); 1758 PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES)); 1759 while (ilink) { 1760 PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1761 PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y)); 1762 PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y)); 1763 PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1764 ilink = ilink->next; 1765 } 1766 PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES)); 1767 } else { 1768 PetscCall(VecSet(y, 0.0)); 1769 while (ilink) { 1770 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y)); 1771 ilink = ilink->next; 1772 } 1773 } 1774 } else { 1775 if (!jac->w1) { 1776 PetscCall(VecDuplicate(x, &jac->w1)); 1777 PetscCall(VecDuplicate(x, &jac->w2)); 1778 } 1779 PetscCall(VecSet(y, 0.0)); 1780 if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) { 1781 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y)); 1782 while (ilink->next) { 1783 ilink = ilink->next; 1784 PetscCall(MatMultTranspose(pc->mat, y, jac->w1)); 1785 PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); 1786 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y)); 1787 } 1788 while (ilink->previous) { 1789 ilink = ilink->previous; 1790 PetscCall(MatMultTranspose(pc->mat, y, jac->w1)); 1791 PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); 1792 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y)); 1793 } 1794 } else { 1795 while (ilink->next) { /* get to last entry in linked list */ 1796 ilink = ilink->next; 1797 } 1798 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y)); 1799 while (ilink->previous) { 1800 ilink = ilink->previous; 1801 PetscCall(MatMultTranspose(pc->mat, y, jac->w1)); 1802 PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); 1803 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y)); 1804 } 1805 } 1806 } 1807 PetscFunctionReturn(PETSC_SUCCESS); 1808 } 1809 1810 static PetscErrorCode 
PCReset_FieldSplit(PC pc) 1811 { 1812 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1813 PC_FieldSplitLink ilink = jac->head, next; 1814 1815 PetscFunctionBegin; 1816 while (ilink) { 1817 PetscCall(KSPDestroy(&ilink->ksp)); 1818 PetscCall(VecDestroy(&ilink->x)); 1819 PetscCall(VecDestroy(&ilink->y)); 1820 PetscCall(VecDestroy(&ilink->z)); 1821 PetscCall(VecScatterDestroy(&ilink->sctx)); 1822 PetscCall(ISDestroy(&ilink->is)); 1823 PetscCall(ISDestroy(&ilink->is_col)); 1824 PetscCall(PetscFree(ilink->splitname)); 1825 PetscCall(PetscFree(ilink->fields)); 1826 PetscCall(PetscFree(ilink->fields_col)); 1827 next = ilink->next; 1828 PetscCall(PetscFree(ilink)); 1829 ilink = next; 1830 } 1831 jac->head = NULL; 1832 PetscCall(PetscFree2(jac->x, jac->y)); 1833 if (jac->mat && jac->mat != jac->pmat) { 1834 PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat)); 1835 } else if (jac->mat) { 1836 jac->mat = NULL; 1837 } 1838 if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat)); 1839 if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield)); 1840 jac->nsplits = 0; 1841 PetscCall(VecDestroy(&jac->w1)); 1842 PetscCall(VecDestroy(&jac->w2)); 1843 if (jac->schur) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", NULL)); 1844 PetscCall(MatDestroy(&jac->schur)); 1845 PetscCall(MatDestroy(&jac->schurp)); 1846 PetscCall(MatDestroy(&jac->schur_user)); 1847 PetscCall(KSPDestroy(&jac->kspschur)); 1848 PetscCall(KSPDestroy(&jac->kspupper)); 1849 PetscCall(MatDestroy(&jac->B)); 1850 PetscCall(MatDestroy(&jac->C)); 1851 PetscCall(MatDestroy(&jac->H)); 1852 PetscCall(VecDestroy(&jac->u)); 1853 PetscCall(VecDestroy(&jac->v)); 1854 PetscCall(VecDestroy(&jac->Hu)); 1855 PetscCall(VecDestroy(&jac->d)); 1856 PetscCall(PetscFree(jac->vecz)); 1857 PetscCall(PetscViewerDestroy(&jac->gkbviewer)); 1858 jac->isrestrict = PETSC_FALSE; 1859 PetscFunctionReturn(PETSC_SUCCESS); 1860 } 1861 1862 static PetscErrorCode PCDestroy_FieldSplit(PC pc) 1863 { 1864 
PetscFunctionBegin; 1865 PetscCall(PCReset_FieldSplit(pc)); 1866 PetscCall(PetscFree(pc->data)); 1867 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL)); 1868 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL)); 1869 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL)); 1870 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL)); 1871 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL)); 1872 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL)); 1873 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL)); 1874 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL)); 1875 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL)); 1876 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL)); 1877 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL)); 1878 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL)); 1879 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL)); 1880 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL)); 1881 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL)); 1882 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL)); 1883 PetscFunctionReturn(PETSC_SUCCESS); 1884 } 1885 1886 static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems PetscOptionsObject) 1887 { 1888 PetscInt bs; 1889 PetscBool flg; 1890 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1891 PCCompositeType ctype; 1892 1893 PetscFunctionBegin; 1894 PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options"); 1895 
PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL)); 1896 PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg)); 1897 if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs)); 1898 jac->diag_use_amat = pc->useAmat; 1899 PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL)); 1900 jac->offdiag_use_amat = pc->useAmat; 1901 PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL)); 1902 PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL)); 1903 PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */ 1904 PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg)); 1905 if (flg) PetscCall(PCFieldSplitSetType(pc, ctype)); 1906 /* Only setup fields once */ 1907 if ((jac->bs > 0) && (jac->nsplits == 0)) { 1908 /* only allow user to set fields from command line. 
1909 otherwise user can set them in PCFieldSplitSetDefaults() */ 1910 PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc)); 1911 if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n")); 1912 } 1913 if (jac->type == PC_COMPOSITE_SCHUR) { 1914 PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg)); 1915 if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n")); 1916 PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL)); 1917 PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL)); 1918 PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL)); 1919 } else if (jac->type == PC_COMPOSITE_GKB) { 1920 PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitSetGKBTol", jac->gkbtol, &jac->gkbtol, NULL)); 1921 PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitSetGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL)); 1922 PetscCall(PetscOptionsBoundedReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitSetGKBNu", jac->gkbnu, &jac->gkbnu, NULL, 0.0)); 1923 PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", "Maximum allowed number of iterations", "PCFieldSplitSetGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL)); 1924 
PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL)); 1925 } 1926 /* 1927 In the initial call to this routine the sub-solver data structures do not exist so we cannot call KSPSetFromOptions() on them yet. 1928 But after the initial setup of ALL the layers of sub-solvers is completed we do want to call KSPSetFromOptions() on the sub-solvers every time it 1929 is called on the outer solver in case changes were made in the options database 1930 1931 But even after PCSetUp_FieldSplit() is called all the options inside the inner levels of sub-solvers may still not have been set thus we only call the KSPSetFromOptions() 1932 if we know that the entire stack of sub-solvers below this have been complete instantiated, we check this by seeing if any solver iterations are complete. 1933 Without this extra check test p2p1fetidp_olof_full and others fail with incorrect matrix types. 1934 1935 There could be a negative side effect of calling the KSPSetFromOptions() below. 
1936 1937 If one captured the PetscObjectState of the options database one could skip these calls if the database has not changed from the previous call 1938 */ 1939 if (jac->issetup) { 1940 PC_FieldSplitLink ilink = jac->head; 1941 if (jac->type == PC_COMPOSITE_SCHUR) { 1942 if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper)); 1943 if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur)); 1944 } 1945 while (ilink) { 1946 if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp)); 1947 ilink = ilink->next; 1948 } 1949 } 1950 PetscOptionsHeadEnd(); 1951 PetscFunctionReturn(PETSC_SUCCESS); 1952 } 1953 1954 static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col) 1955 { 1956 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1957 PC_FieldSplitLink ilink, next = jac->head; 1958 char prefix[128]; 1959 PetscInt i; 1960 PetscLogEvent nse; 1961 1962 PetscFunctionBegin; 1963 if (jac->splitdefined) { 1964 PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname)); 1965 PetscFunctionReturn(PETSC_SUCCESS); 1966 } 1967 for (i = 0; i < n; i++) { PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]); } 1968 PetscCall(PetscNew(&ilink)); 1969 if (splitname) { 1970 PetscCall(PetscStrallocpy(splitname, &ilink->splitname)); 1971 } else { 1972 PetscCall(PetscMalloc1(3, &ilink->splitname)); 1973 PetscCall(PetscSNPrintf(ilink->splitname, 2, "%" PetscInt_FMT, jac->nsplits)); 1974 } 1975 PetscCall(PetscMPIIntCast(jac->nsplits, &nse)); 1976 ilink->event = jac->nsplits < 5 ? 
KSP_Solve_FS_0 + nse : KSP_Solve_FS_0 + 4; /* Splits greater than 4 logged in 4th split */ 1977 PetscCall(PetscMalloc1(n, &ilink->fields)); 1978 PetscCall(PetscArraycpy(ilink->fields, fields, n)); 1979 PetscCall(PetscMalloc1(n, &ilink->fields_col)); 1980 PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n)); 1981 1982 ilink->nfields = n; 1983 ilink->next = NULL; 1984 PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp)); 1985 PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel)); 1986 PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure)); 1987 PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1)); 1988 PetscCall(KSPSetType(ilink->ksp, KSPPREONLY)); 1989 1990 PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname)); 1991 PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix)); 1992 1993 if (!next) { 1994 jac->head = ilink; 1995 ilink->previous = NULL; 1996 } else { 1997 while (next->next) next = next->next; 1998 next->next = ilink; 1999 ilink->previous = next; 2000 } 2001 jac->nsplits++; 2002 PetscFunctionReturn(PETSC_SUCCESS); 2003 } 2004 2005 static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp) 2006 { 2007 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2008 2009 PetscFunctionBegin; 2010 *subksp = NULL; 2011 if (n) *n = 0; 2012 if (jac->type == PC_COMPOSITE_SCHUR) { 2013 PetscInt nn; 2014 2015 PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()"); 2016 PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits); 2017 nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 
1 : 0); /* a third KSP is reported only when the upper-factor A00 solver differs from the main A00 solver */
    PetscCall(PetscMalloc1(nn, subksp));
    (*subksp)[0] = jac->head->ksp;
    (*subksp)[1] = jac->kspschur;
    if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper;
    if (n) *n = nn;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Returns the KSPs of a Schur-type fieldsplit: slot 0 is filled by MatSchurComplementGetKSP()
   (the solver used inside the Schur complement), slot 1 is the outer Schur complement solver.
*/
static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()");
  PetscCall(PetscMalloc1(jac->nsplits, subksp));
  PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp));

  (*subksp)[1] = jac->kspschur;
  if (n) *n = jac->nsplits;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Collects the KSP of every split, walking the linked list in order; caller frees the array. */
static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscInt          cnt = 0;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscMalloc1(jac->nsplits, subksp));
  while (ilink) {
    (*subksp)[cnt++] = ilink->ksp;
    ilink            = ilink->next;
  }
  PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits);
  if (n) *n = jac->nsplits;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`.

  Input Parameters:
+ pc  - the preconditioner context
- isy - the index set that defines the indices to which the fieldsplit is to be restricted

  Level: advanced

  Developer Notes:
  It seems the resulting `IS`s will not cover the entire space, so
  how can they define a convergent preconditioner?
  Needs explaining.

.seealso: [](sec_block_matrices), `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(isy, IS_CLASSID, 2);
  PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Replaces each split's IS by its embedding into isy, renumbered into the restricted
   global numbering, and recurses into nested PCFIELDSPLITs found on the sub-KSPs.
*/
static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;
  PetscInt          localsize, size, sizez, i;
  const PetscInt   *ind, *indz;
  PetscInt         *indc, *indcz;
  PetscBool         flg;

  PetscFunctionBegin;
  PetscCall(ISGetLocalSize(isy, &localsize));
  /* exclusive prefix sum: 'size' becomes this rank's starting offset in the restricted numbering */
  PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy)));
  size -= localsize;
  while (ilink) {
    IS isrl, isr;
    PC subpc;
    PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl));
    PetscCall(ISGetLocalSize(isrl, &localsize));
    PetscCall(PetscMalloc1(localsize, &indc));
    PetscCall(ISGetIndices(isrl, &ind));
    PetscCall(PetscArraycpy(indc, ind, localsize));
    PetscCall(ISRestoreIndices(isrl, &ind));
    PetscCall(ISDestroy(&isrl));
    for (i = 0; i < localsize; i++) *(indc + i) += size; /* shift local indices to the global offset */
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr));
    /* both ilink->is and ilink->is_col take their own reference to isr */
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is));
    ilink->is = isr;
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is_col));
    ilink->is_col = isr;
    PetscCall(ISDestroy(&isr));
    PetscCall(KSPGetPC(ilink->ksp, &subpc));
    PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg));
    if (flg) {
      IS       iszl, isz;
      MPI_Comm comm;

      PetscCall(ISGetLocalSize(ilink->is, &localsize));
      comm = PetscObjectComm((PetscObject)ilink->is);
      PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl));
      PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm));
      sizez -= localsize;
      PetscCall(ISGetLocalSize(iszl, &localsize));
      PetscCall(PetscMalloc1(localsize, &indcz));
      PetscCall(ISGetIndices(iszl, &indz));
      PetscCall(PetscArraycpy(indcz, indz, localsize));
      PetscCall(ISRestoreIndices(iszl, &indz));
      PetscCall(ISDestroy(&iszl));
      for (i = 0; i < localsize; i++) *(indcz + i) += sizez;
      PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz));
      PetscCall(PCFieldSplitRestrictIS(subpc, isz));
      PetscCall(ISDestroy(&isz));
    }
    next  = ilink->next;
    ilink = next;
  }
  jac->isrestrict = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Appends a new split, defined by an explicit IS, to the linked list of splits and creates
   the inner KSP for it with options prefix <pcprefix>fieldsplit_<splitname>_.
*/
static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink, next = jac->head;
  char              prefix[128];
  PetscLogEvent     nse;

  PetscFunctionBegin;
  if (jac->splitdefined) {
    PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(PetscNew(&ilink));
  if (splitname) {
    PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
  } else {
    /* auto-generated name is the split number */
    PetscCall(PetscMalloc1(8, &ilink->splitname));
    PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits));
  }
  PetscCall(PetscMPIIntCast(jac->nsplits, &nse));
  ilink->event = jac->nsplits < 5 ?
KSP_Solve_FS_0 + nse : KSP_Solve_FS_0 + 4; /* Splits greater than 4 logged in 4th split */
  /* both ilink->is and ilink->is_col take their own reference to is */
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is));
  ilink->is = is;
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is_col));
  ilink->is_col = is;
  ilink->next   = NULL;
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
  PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
  PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
  PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));

  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
  PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));

  /* append to the tail of the split linked list */
  if (!next) {
    jac->head       = ilink;
    ilink->previous = NULL;
  } else {
    while (next->next) next = next->next;
    next->next      = ilink;
    ilink->previous = next;
  }
  jac->nsplits++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetFields - Sets the fields that define one particular split in `PCFIELDSPLIT`

  Logically Collective

  Input Parameters:
+ pc         - the preconditioner context
. splitname  - name of this split, if `NULL` the number of the split is used
. n          - the number of fields in this split
. fields     - the fields in this split
- fields_col - generally the same as `fields`, if it does not match `fields` then the submatrix that is solved for this set of fields comes from an off-diagonal block
               of the matrix and `fields_col` provides the column indices for that block

  Options Database Key:
.
 -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split

  Level: intermediate

  Notes:
  Use `PCFieldSplitSetIS()` to set a general set of indices as a split.

  If the matrix used to construct the preconditioner is `MATNEST` then field i refers to the `is_row[i]` `IS` passed to `MatCreateNest()`.

  If the matrix used to construct the preconditioner is not `MATNEST` then
  `PCFieldSplitSetFields()` is for defining fields as strided blocks (based on the block size provided to the matrix with `MatSetBlockSize()` or
  to the `PC` with `PCFieldSplitSetBlockSize()`). For example, if the block
  size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean
  0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x23x56x8.. x12x45x78x....
  where the numbered entries indicate what is in the split.

  This function is called once per split (it creates a new split each time). Solve options
  for this split will be available under the prefix `-fieldsplit_SPLITNAME_`.

  `PCFieldSplitSetIS()` does not support having a `fields_col` different from `fields`

  Developer Notes:
  This routine does not actually create the `IS` representing the split, that is delayed
  until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be
  available when this routine is called.

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()`,
          `MatSetBlockSize()`, `MatCreateNest()`
@*/
PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt fields[], const PetscInt fields_col[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(splitname, 2);
  PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname);
  PetscAssertPointer(fields, 4);
  PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetDiagUseAmat - set flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
  the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.

  Logically Collective

  Input Parameters:
+ pc  - the preconditioner object
- flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from

  Options Database Key:
.
 -pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT`
@*/
PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  jac->diag_use_amat = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
  the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.

  Logically Collective

  Input Parameter:
. pc - the preconditioner object

  Output Parameter:
.
 flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT`
@*/
PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(flg, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  *flg = jac->diag_use_amat;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetOffDiagUseAmat - set flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
  the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.

  Logically Collective

  Input Parameters:
+ pc  - the preconditioner object
- flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from

  Options Database Key:
.
 -pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT`
@*/
PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  jac->offdiag_use_amat = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
  the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.

  Logically Collective

  Input Parameter:
. pc - the preconditioner object

  Output Parameter:
.
 flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT`
@*/
PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(flg, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  *flg = jac->offdiag_use_amat;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT`

  Logically Collective

  Input Parameters:
+ pc        - the preconditioner context
. splitname - name of this split, if `NULL` the number of the split is used
- is        - the index set that defines the elements in this split

  Level: intermediate

  Notes:
  Use `PCFieldSplitSetFields()`, for splits defined by strided `IS` based on the matrix block size or the `is_rows[]` passed into `MATNEST`

  This function is called once per split (it creates a new split each time). Solve options
  for this split will be available under the prefix -fieldsplit_SPLITNAME_.

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetFields()`
@*/
PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (splitname) PetscAssertPointer(splitname, 2);
  PetscValidHeaderSpecific(is, IS_CLASSID, 3);
  PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitGetIS - Retrieves the elements for a split as an `IS`

  Logically Collective

  Input Parameters:
+ pc        - the preconditioner context
- splitname - name of this split

  Output Parameter:
. is - the index set that defines the elements in this split, or `NULL` if the split is not found

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`, `PCFieldSplitGetISByIndex()`
@*/
PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(splitname, 2);
  PetscAssertPointer(is, 3);
  {
    PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
    PC_FieldSplitLink ilink = jac->head;
    PetscBool         found;

    /* linear search of the split linked list by name; *is stays NULL when no split matches */
    *is = NULL;
    while (ilink) {
      PetscCall(PetscStrcmp(ilink->splitname, splitname, &found));
      if (found) {
        *is = ilink->is;
        break;
      }
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS`

  Logically Collective

  Input Parameters:
+ pc    - the preconditioner context
- index - index of this split

  Output Parameter:
.
 is - the index set that defines the elements in this split

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`,

@*/
PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is)
{
  PetscFunctionBegin;
  PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index);
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(is, 3);
  {
    PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
    PC_FieldSplitLink ilink = jac->head;
    PetscInt          i     = 0;
    PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits);

    /* walk to the index'th link, then look it up by name */
    while (i < index) {
      ilink = ilink->next;
      ++i;
    }
    PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the
  fieldsplit preconditioner when calling `PCFieldSplitSetFields()`. If not set the matrix block size is used.

  Logically Collective

  Input Parameters:
+ pc - the preconditioner context
- bs - the block size

  Level: intermediate

  Note:
  If the matrix is a `MATNEST` then the `is_rows[]` passed to `MatCreateNest()` determines the fields.

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, bs, 2);
  PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits

  Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ n      - the number of splits
- subksp - the array of `KSP` contexts

  Level: advanced

  Notes:
  After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
  (not the `KSP`, just the array that contains them).

  You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`.

  If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the
  Schur complement and the `KSP` object used to iterate over the Schur complement.
  To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`.

  If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the
  inner linear system defined by the matrix H in each loop.

  Fortran Notes:
  You must pass in a `KSP` array that is large enough to contain all the `KSP`s.
  You can call `PCFieldSplitGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
  `KSP` array must be.

  Developer Notes:
  There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`

  The Fortran interface could be modernized to return directly the array of values.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()`
@*/
PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2);
  PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT`

  Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ n      - the number of splits
- subksp - the array of `KSP` contexts

  Level: advanced

  Notes:
  After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
  (not the `KSP` just the array that contains them).

  You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`.

  If the fieldsplit type is of type `PC_COMPOSITE_SCHUR`, it returns (in order)
+ 1 - the `KSP` used for the (1,1) block
. 2 - the `KSP` used for the Schur complement (not the one used for the interior Schur solver)
- 3 - the `KSP` used for the (1,1) block in the upper triangular factor (if different from that of the (1,1) block).

  It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`.

  Fortran Notes:
  You must pass in a `KSP` array that is large enough to contain all the local `KSP`s.
  You can call `PCFieldSplitSchurGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
  `KSP` array must be.

  Developer Notes:
  There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`

  Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged?

  The Fortran interface should be modernized to return directly the array of values.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()`
@*/
PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2);
  PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetSchurPre - Indicates from what operator the preconditioner is constructed for the Schur complement.
  The default is the A11 matrix.

  Collective

  Input Parameters:
+ pc    - the preconditioner context
. ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default),
          `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`,
          `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL`
- pre   - matrix to use for preconditioning, or `NULL`

  Options Database Keys:
+ -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`. See notes for meaning of various arguments
- -fieldsplit_1_pc_type <pctype> - the preconditioner algorithm that is used to construct the preconditioner from the operator

  Level: intermediate

  Notes:
  If ptype is
+ a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner
        matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix
.
 self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix:
        The only preconditioners that currently work with this symbolic representation matrix object are `PCLSC` and `PCHPDDM`
. user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument
         to this function).
. selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation $ Sp = A11 - A10 inv(diag(A00)) A01 $
          This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be
          lumped before extracting the diagonal using the additional option `-fieldsplit_1_mat_schur_complement_ainv_type lump`
- full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation
         computed internally by `PCFIELDSPLIT` (this is expensive)
         useful mostly as a test that the Schur complement approach can work for your problem

  When solving a saddle point problem, where the A11 block is identically zero, using `a11` as the ptype only makes sense
  with the additional option `-fieldsplit_1_pc_type none`. Usually for saddle point problems one would use a `ptype` of `self` and
  `-fieldsplit_1_pc_type lsc` which uses the least squares commutator to compute a preconditioner for the Schur complement.

  Developer Note:
  The name of this function and the option `-pc_fieldsplit_schur_precondition` are inconsistent; precondition should be used everywhere.

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`,
          `MatSchurComplementSetAinvType()`, `PCLSC`, `PCFieldSplitSetSchurFactType()`
@*/
PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  return PCFieldSplitSetSchurPre(pc, ptype, pre);
} /* Deprecated name */

/*@
  PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be
  preconditioned. See `PCFieldSplitSetSchurPre()` for details.

  Logically Collective

  Input Parameter:
.
pc - the preconditioner context

  Output Parameters:
+ ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`
- pre   - matrix to use for preconditioning (with `PC_FIELDSPLIT_SCHUR_PRE_USER`), or `NULL`

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC`
@*/
PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscUseMethod (not Try): errors if the PC is not a PCFIELDSPLIT with the method composed */
  PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSchurGetS - extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. S - the Schur complement matrix

  Level: advanced

  Note:
  This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`.
.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`,
          `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()`
@*/
PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S)
{
  const char    *t;
  PetscBool      isfs;
  PC_FieldSplit *jac;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* jac->schur only exists for a PCFIELDSPLIT of type SCHUR, so verify both before touching pc->data */
  PetscCall(PetscObjectGetType((PetscObject)pc, &t));
  PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
  jac = (PC_FieldSplit *)pc->data;
  PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
  if (S) *S = jac->schur;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSchurRestoreS - returns the `MATSCHURCOMPLEMENT` matrix used by this `PC`

  Not Collective

  Input Parameters:
+ pc - the preconditioner context
- S  - the Schur complement matrix

  Level: advanced

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MatSchurComplement`, `PCFieldSplitSchurGetS()`
@*/
PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S)
{
  const char    *t;
  PetscBool      isfs;
  PC_FieldSplit *jac;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCall(PetscObjectGetType((PetscObject)pc, &t));
  PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
  jac = (PC_FieldSplit
*)pc->data; 2740 PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type); 2741 PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten"); 2742 PetscFunctionReturn(PETSC_SUCCESS); 2743 } 2744 2745 static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre) 2746 { 2747 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2748 2749 PetscFunctionBegin; 2750 jac->schurpre = ptype; 2751 if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) { 2752 PetscCall(MatDestroy(&jac->schur_user)); 2753 jac->schur_user = pre; 2754 PetscCall(PetscObjectReference((PetscObject)jac->schur_user)); 2755 } 2756 PetscFunctionReturn(PETSC_SUCCESS); 2757 } 2758 2759 static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre) 2760 { 2761 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2762 2763 PetscFunctionBegin; 2764 if (ptype) *ptype = jac->schurpre; 2765 if (pre) *pre = jac->schur_user; 2766 PetscFunctionReturn(PETSC_SUCCESS); 2767 } 2768 2769 /*@ 2770 PCFieldSplitSetSchurFactType - sets which blocks of the approximate block factorization to retain in the preconditioner {cite}`murphy2000note` and {cite}`ipsen2001note` 2771 2772 Collective 2773 2774 Input Parameters: 2775 + pc - the preconditioner context 2776 - ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default 2777 2778 Options Database Key: 2779 . 
-pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is `full`

  Level: intermediate

  Notes:
  The FULL factorization is

  ```{math}
  \left(\begin{array}{cc} A & B \\
  C & E \\
  \end{array}\right) =
  \left(\begin{array}{cc} 1 & 0 \\
  C*A^{-1} & I \\
  \end{array}\right)
  \left(\begin{array}{cc} A & 0 \\
  0 & S \\
  \end{array}\right)
  \left(\begin{array}{cc} I & A^{-1}B \\
  0 & I \\
  \end{array}\right) = L D U.
  ```

  where $ S = E - C*A^{-1}*B $. In practice, the full factorization is applied via block triangular solves with the grouping $L*(D*U)$. UPPER uses $D*U$, LOWER uses $L*D$,
  and DIAG is the diagonal part with the sign of $ S $ flipped (because this makes the preconditioner positive definite for many formulations,
  thus allowing the use of `KSPMINRES`). Sign flipping of $ S $ can be turned off with `PCFieldSplitSetSchurScale()`.

  If $A$ and $S$ are solved exactly
+ 1 - FULL factorization is a direct solver.
. 2 - The preconditioned operator with LOWER or UPPER has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations.
- 3 - With DIAG, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations.

  If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner
  application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice.

  For symmetric problems in which $A$ is positive definite and $S$ is negative definite, DIAG can be used with `KSPMINRES`.

  A flexible method like `KSPFGMRES` or `KSPGCR`, [](sec_flexibleksp), must be used if the fieldsplit preconditioner is nonlinear (e.g. a few iterations of a Krylov method is used to solve with A or S).

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`,
          [](sec_flexibleksp), `PCFieldSplitSetSchurPre()`
@*/
PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* no-op if the PC is not a PCFIELDSPLIT of type SCHUR */
  PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Backend for PCFieldSplitSetSchurFactType(), composed as "PCFieldSplitSetSchurFactType_C" */
static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurfactorization = ftype;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetSchurScale - Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`.

  Collective

  Input Parameters:
+ pc    - the preconditioner context
- scale - scaling factor for the Schur complement

  Options Database Key:
.
-pc_fieldsplit_schur_scale <scale> - default is -1.0

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurFactType()`
@*/
PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveScalar(pc, scale, 2);
  /* no-op if the PC is not a PCFIELDSPLIT of type SCHUR */
  PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Backend for PCFieldSplitSetSchurScale(), composed as "PCFieldSplitSetSchurScale_C" */
static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurscale = scale;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement

  Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ A00 - the (0,0) block
. A01 - the (0,1) block
.
A10 - the (1,0) block
- A11 - the (1,1) block

  Level: advanced

  Note:
  Use `NULL` for any unneeded output arguments

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()`
@*/
PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach.");
  /* diagonal blocks come from the per-split preconditioning matrices, off-diagonal blocks are stored directly */
  if (A00) *A00 = jac->pmat[0];
  if (A01) *A01 = jac->B;
  if (A10) *A10 = jac->C;
  if (A11) *A11 = jac->pmat[1];
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetGKBTol - Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc        - the preconditioner context
- tolerance - the solver tolerance

  Options Database Key:
. -pc_fieldsplit_gkb_tol <tolerance> - default is 1e-5

  Level: intermediate

  Note:
  The generalized GKB algorithm {cite}`arioli2013` uses a lower bound estimate of the error in energy norm as stopping criterion.
  It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than
  this estimate, the stopping criterion is satisfactory in practical cases.
.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()`
@*/
PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, tolerance, 2);
  /* no-op if the PC is not a PCFIELDSPLIT of type GKB */
  PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Backend for PCFieldSplitSetGKBTol(), composed as "PCFieldSplitSetGKBTol_C" */
static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbtol = tolerance;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetGKBMaxit - Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc    - the preconditioner context
- maxit - the maximum number of iterations

  Options Database Key:
.
-pc_fieldsplit_gkb_maxit <maxit> - default is 100

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()`
@*/
PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, maxit, 2);
  /* no-op if the PC is not a PCFIELDSPLIT of type GKB */
  PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Backend for PCFieldSplitSetGKBMaxit(), composed as "PCFieldSplitSetGKBMaxit_C" */
static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbmaxit = maxit;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetGKBDelay - Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization {cite}`arioli2013` in `PCFIELDSPLIT`
  preconditioner.

  Collective

  Input Parameters:
+ pc    - the preconditioner context
- delay - the delay window in the lower bound estimate

  Options Database Key:
. -pc_fieldsplit_gkb_delay <delay> - default is 5

  Level: intermediate

  Notes:
  The algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. The lower bound of the error $ ||u-u^k||_H $
  is expressed as a truncated sum. The error at iteration k can only be measured at iteration (k + `delay`), and thus the algorithm needs
  at least (`delay` + 1) iterations to stop.
  For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to {cite}`arioli2013`

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
@*/
PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, delay, 2);
  /* no-op if the PC is not a PCFIELDSPLIT of type GKB */
  PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Backend for PCFieldSplitSetGKBDelay(), composed as "PCFieldSplitSetGKBDelay_C" */
static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbdelay = delay;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetGKBNu - Sets the scalar value nu >= 0 in the transformation H = A00 + nu*A01*A01' of the (1,1) block in the
  Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc - the preconditioner context
- nu - the shift parameter

  Options Database Key:
. -pc_fieldsplit_gkb_nu <nu> - default is 1

  Level: intermediate

  Notes:
  This shift is in general done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by choosing `nu` sufficiently large. However,
  if `nu` is chosen too large, the matrix H might be badly conditioned and the solution of the linear system $Hx = b$ in the inner loop becomes difficult. It is therefore
  necessary to find a good balance in between the convergence of the inner and outer loop.

  For `nu` = 0, no shift is done. In this case A00 has to be positive definite. The matrix N in {cite}`arioli2013` is then chosen as identity.
.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
@*/
PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, nu, 2);
  /* no-op if the PC is not a PCFIELDSPLIT of type GKB */
  PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Backend for PCFieldSplitSetGKBNu(), composed as "PCFieldSplitSetGKBNu_C" */
static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbnu = nu;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Backend for PCFieldSplitSetType(): installs the apply/view/setup function pointers
   and the composed methods appropriate for the chosen composite type */
static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->type = type;
  /* Remove all type-specific composed methods; the relevant ones are re-composed below */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));

  if (type == PC_COMPOSITE_SCHUR) {
    pc->ops->apply          = PCApply_FieldSplit_Schur;
    pc->ops->applytranspose = PCApplyTranspose_FieldSplit_Schur;
    pc->ops->view           = PCView_FieldSplit_Schur;
    pc->ops->setuponblocks = PCSetUpOnBlocks_FieldSplit_Schur;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit));
  } else if (type == PC_COMPOSITE_GKB) {
    pc->ops->apply          = PCApply_FieldSplit_GKB;
    pc->ops->applytranspose = NULL; /* no transpose apply is implemented for GKB */
    pc->ops->view           = PCView_FieldSplit_GKB;
    pc->ops->setuponblocks  = PCSetUpOnBlocks_FieldSplit_GKB;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit));
    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit));
  } else {
    /* additive / multiplicative / symmetric multiplicative variants share one implementation */
    pc->ops->apply          = PCApply_FieldSplit;
    pc->ops->applytranspose = PCApplyTranspose_FieldSplit;
    pc->ops->view           = PCView_FieldSplit;
    pc->ops->setuponblocks  = PCSetUpOnBlocks_FieldSplit;

    PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode
PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" PetscInt_FMT, bs);
  /* once set (jac->bs > 0), the block size may not change */
  PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs);
  jac->bs = bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Distributes the coordinates supplied via PCSetCoordinates() to each split,
   restricting the global coordinate array to the dofs owned by each split's IS */
static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
{
  PC_FieldSplit    *jac           = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink_current = jac->head;
  IS                is_owned;

  PetscFunctionBegin;
  jac->coordinates_set = PETSC_TRUE; // Internal flag
  PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, NULL));

  while (ilink_current) {
    // For each IS, embed it to get local coords indices
    IS              is_coords;
    PetscInt        ndofs_block;
    const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block

    // Setting drop to true for safety. It should make no difference.
    PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords));
    PetscCall(ISGetLocalSize(is_coords, &ndofs_block));
    PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration));

    // Allocate coordinates vector and set it directly
    PetscCall(PetscMalloc1(ndofs_block * dim, &ilink_current->coords));
    for (PetscInt dof = 0; dof < ndofs_block; ++dof) {
      for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d];
    }
    ilink_current->dim   = dim;
    ilink_current->ndofs = ndofs_block;
    PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration));
    PetscCall(ISDestroy(&is_coords));
    ilink_current = ilink_current->next;
  }
  PetscCall(ISDestroy(&is_owned));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc   - the preconditioner context
- type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`,
  `PC_COMPOSITE_GKB`

  Options Database Key:
.
-pc_fieldsplit_type <one of multiplicative, additive, symmetric_multiplicative, special, schur, gkb> - Sets fieldsplit preconditioner type

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
          `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`, `PCFieldSplitSetSchurFactType()`
@*/
PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT`

  Not collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
          `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
@*/
PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(type, 2);
  *type = jac->type;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
  Logically Collective

  Input Parameters:
+ pc  - the preconditioner context
- flg - boolean indicating whether to use field splits defined by the `DM`

  Options Database Key:
. -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM`

  Level: intermediate

  Developer Note:
  The name should be `PCFieldSplitSetUseDMSplits()`, similar change to options database

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flg, 2);
  /* silently ignored when the PC is not a PCFIELDSPLIT */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  if (isfs) jac->dm_splits = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.

  Logically Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
.
flg - boolean indicating whether to use field splits defined by the `DM` 3254 3255 Level: intermediate 3256 3257 Developer Note: 3258 The name should be `PCFieldSplitGetUseDMSplits()` 3259 3260 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()` 3261 @*/ 3262 PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg) 3263 { 3264 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 3265 PetscBool isfs; 3266 3267 PetscFunctionBegin; 3268 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 3269 PetscAssertPointer(flg, 2); 3270 PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs)); 3271 if (isfs) { 3272 if (flg) *flg = jac->dm_splits; 3273 } 3274 PetscFunctionReturn(PETSC_SUCCESS); 3275 } 3276 3277 /*@ 3278 PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries. 3279 3280 Logically Collective 3281 3282 Input Parameter: 3283 . pc - the preconditioner context 3284 3285 Output Parameter: 3286 . flg - boolean indicating whether to detect fields or not 3287 3288 Level: intermediate 3289 3290 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()` 3291 @*/ 3292 PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg) 3293 { 3294 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 3295 3296 PetscFunctionBegin; 3297 *flg = jac->detect; 3298 PetscFunctionReturn(PETSC_SUCCESS); 3299 } 3300 3301 /*@ 3302 PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries. 3303 3304 Logically Collective 3305 3306 Input Parameter: 3307 . pc - the preconditioner context 3308 3309 Output Parameter: 3310 . flg - boolean indicating whether to detect fields or not 3311 3312 Options Database Key: 3313 . 
-pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point

  Level: intermediate

  Note:
  Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`).

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`
@*/
PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->detect = flg;
  if (jac->detect) {
    /* saddle-point detection implies a Schur complement split preconditioned by the Schur complement itself */
    PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
    PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual
  collections of variables (that may overlap) called fields or splits. Each field often represents a different continuum variable
  represented on a grid, such as velocity, pressure, or temperature.
  In the literature these are sometimes called block preconditioners; but should not be confused with `PCBJACOBI`.
  See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details.

  Options Database Keys:
+ -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split
. -pc_fieldsplit_default - automatically add any fields to additional splits that have not
  been supplied explicitly by `-pc_fieldsplit_%d_fields`
. -pc_fieldsplit_block_size <bs> - size of block that defines fields (i.e. there are bs fields)
  when the matrix is not of `MatType` `MATNEST`
.
-pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting
. -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`; see `PCFieldSplitSetSchurPre()`
. -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - set factorization type when using `-pc_fieldsplit_type schur`;
  see `PCFieldSplitSetSchurFactType()`
. -pc_fieldsplit_dm_splits <true,false> (default is true) - Whether to use `DMCreateFieldDecomposition()` for splits
- -pc_fieldsplit_detect_saddle_point - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver

  Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` .
  The options prefix for the inner solver when using the Golub-Kahan bidiagonalization preconditioner is `-fieldsplit_0_`
  For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields.

  To set options on the solvers for all blocks, prepend `-fieldsplit_` to all the `PC`
  options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1`.

  To set the options on the solvers separate for each block call `PCFieldSplitGetSubKSP()`
  and set the options directly on the resulting `KSP` object

  Level: intermediate

  Notes:
  Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries or with a `MATNEST` and `PCFieldSplitSetIS()`
  to define a split by an arbitrary collection of entries.

  If no splits are set, the default is used. If a `DM` is associated with the `PC` and it supports
  `DMCreateFieldDecomposition()`, then that is used for the default. Otherwise if the matrix is not `MATNEST`, the splits are defined by entries strided by bs,
  beginning at 0 then 1, etc to bs-1.
The block size can be set with `PCFieldSplitSetBlockSize()`, 3374 if this is not called the block size defaults to the blocksize of the second matrix passed 3375 to `KSPSetOperators()`/`PCSetOperators()`. 3376 3377 For the Schur complement preconditioner if 3378 ```{math} 3379 J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right] 3380 ``` 3381 3382 the preconditioner using `full` factorization is logically 3383 ```{math} 3384 \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) A_{01} \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{ksp}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right] 3385 ``` 3386 where the action of $\text{ksp}(A_{00})$ is applied using the `KSP` solver with prefix `-fieldsplit_0_`. $S$ is the Schur complement 3387 ```{math} 3388 S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01} 3389 ``` 3390 which is usually dense and not stored explicitly. The action of $\text{ksp}(S)$ is computed using the `KSP` solver with prefix `-fieldsplit_splitname_` (where `splitname` 3391 was given in providing the SECOND split or 1 if not given). Accordingly, if using `PCFieldSplitGetSubKSP()`, the array of sub-`KSP` contexts will hold two `KSP`s: at its 3392 0th index, the `KSP` associated with `-fieldsplit_0_`, and at its 1st index, the `KSP` corresponding to `-fieldsplit_1_`. 3393 By default, $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$. 3394 3395 The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above, 3396 `diag` gives 3397 ```{math} 3398 \left[\begin{array}{cc} \text{ksp}(A_{00}) & 0 \\ 0 & -\text{ksp}(S) \end{array}\right] 3399 ``` 3400 Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$ so that the preconditioner is positive definite. 
For SPD matrices $J$, the sign flip 3401 can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of 3402 ```{math} 3403 \left[\begin{array}{cc} A_{00} & 0 \\ A_{10} & S \end{array}\right] 3404 ``` 3405 where the inverses of $A_{00}$ and $S$ are applied using `KSP`s. The upper factorization is the inverse of 3406 ```{math} 3407 \left[\begin{array}{cc} A_{00} & A_{01} \\ 0 & S \end{array}\right] 3408 ``` 3409 where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s. 3410 3411 If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS` 3412 is used automatically for a second submatrix. 3413 3414 The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1. 3415 Generally it should be used with the `MATAIJ` or `MATNEST` `MatType` 3416 3417 The forms of these preconditioners are closely related, if not identical, to forms derived as "Distributive Iterations", see, 3418 for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`wesseling2009`. 3419 One can also use `PCFIELDSPLIT` inside a smoother resulting in "Distributive Smoothers". 3420 3421 See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`. 3422 3423 The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the 3424 residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables. 
3425 3426 The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape 3427 ```{math} 3428 \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right] 3429 ``` 3430 with $A_{00}$ positive semi-definite. The implementation follows {cite}`arioli2013`. Therein, we choose $N := 1/\nu * I$ and the $(1,1)$-block of the matrix is modified to $H = A_{00} + \nu*A_{01}*A_{01}'$. 3431 A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`. 3432 3433 Some `PCFIELDSPLIT` variants are called physics-based preconditioners, since the preconditioner takes into account the underlying physics of the 3434 problem. But this nomenclature is not well-defined. 3435 3436 Developer Note: 3437 The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their 3438 user API.
3439 3440 .seealso: [](sec_block_matrices), `PC`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`, 3441 `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`, 3442 `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`, 3443 `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()` 3444 M*/ 3445 3446 PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc) 3447 { 3448 PC_FieldSplit *jac; 3449 3450 PetscFunctionBegin; 3451 PetscCall(PetscNew(&jac)); 3452 3453 jac->bs = -1; 3454 jac->type = PC_COMPOSITE_MULTIPLICATIVE; 3455 jac->schurpre = PC_FIELDSPLIT_SCHUR_PRE_USER; /* Try user preconditioner first, fall back on diagonal */ 3456 jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL; 3457 jac->schurscale = -1.0; 3458 jac->dm_splits = PETSC_TRUE; 3459 jac->gkbtol = 1e-5; 3460 jac->gkbdelay = 5; 3461 jac->gkbnu = 1; 3462 jac->gkbmaxit = 100; 3463 3464 pc->data = (void *)jac; 3465 3466 pc->ops->setup = PCSetUp_FieldSplit; 3467 pc->ops->reset = PCReset_FieldSplit; 3468 pc->ops->destroy = PCDestroy_FieldSplit; 3469 pc->ops->setfromoptions = PCSetFromOptions_FieldSplit; 3470 pc->ops->applyrichardson = NULL; 3471 3472 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit)); 3473 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit)); 3474 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit)); 3475 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit)); 3476 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit)); 3477 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit)); 
3478 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit)); 3479 3480 /* Initialize function pointers */ 3481 PetscCall(PCFieldSplitSetType(pc, jac->type)); 3482 PetscFunctionReturn(PETSC_SUCCESS); 3483 } 3484