#include <petsc/private/pcimpl.h> /*I "petscpc.h" I*/
#include <petsc/private/kspimpl.h> /* This is needed to provide the appropriate PETSC_EXTERN for KSP_Solve_FS ....*/
#include <petscdm.h>

const char *const PCFieldSplitSchurPreTypes[]  = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL};
const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL};

PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4;

/* One entry per split; entries form a doubly-linked list rooted at PC_FieldSplit.head */
typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
struct _PC_FieldSplitLink {
  KSP               ksp;        /* Solver for this split's diagonal block */
  Vec               x, y, z;    /* Work vectors for this split */
  char             *splitname;
  PetscInt          nfields;
  PetscInt         *fields, *fields_col;
  VecScatter        sctx;       /* Scatter from the full vector to this split's sub-vector */
  IS                is, is_col; /* Row/column index sets defining the split */
  PC_FieldSplitLink next, previous;
  PetscLogEvent     event;

  /* Used only when setting coordinates with PCSetCoordinates */
  PetscInt   dim;
  PetscInt   ndofs;
  PetscReal *coords;
};

typedef struct {
  PCCompositeType type;
  PetscBool       defaultsplit; /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */
  PetscBool       splitdefined; /* Flag is set after the splits have been defined, to prevent more splits from being added */
  PetscInt        bs;           /* Block size for IS and Mat structures */
  PetscInt        nsplits;      /* Number of field divisions defined */
  Vec            *x, *y, w1, w2;
  Mat            *mat;    /* The diagonal block for each split */
  Mat            *pmat;   /* The preconditioning diagonal block for each split */
  Mat            *Afield; /* The rows of the matrix associated with each split */
  PetscBool       issetup;

  /* Only used when Schur complement preconditioning is used */
  Mat                       B;          /* The (0,1) block */
  Mat                       C;          /* The (1,0) block */
  Mat                       schur;      /* The Schur complement S = A11 - A10 A00^{-1} A01, the KSP here, kspinner, is H_1 in [El08] */
  Mat                       schurp;     /* Assembled approximation to S built by MatSchurComplement to be used as a preconditioning matrix when solving with S */
  Mat                       schur_user; /* User-provided preconditioning matrix for the Schur complement */
  PCFieldSplitSchurPreType  schurpre;   /* Determines which preconditioning matrix is used for the Schur complement */
  PCFieldSplitSchurFactType schurfactorization;
  KSP                       kspschur;   /* The solver for S */
  KSP                       kspupper;   /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */
  PetscScalar               schurscale; /* Scaling factor for the Schur complement solution with DIAG factorization */

  /* Only used when Golub-Kahan bidiagonalization preconditioning is used */
  Mat          H;          /* The modified matrix H = A00 + nu*A01*A01' */
  PetscReal    gkbtol;     /* Stopping tolerance for lower bound estimate */
  PetscInt     gkbdelay;   /* The delay window for the stopping criterion */
  PetscReal    gkbnu;      /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */
  PetscInt     gkbmaxit;   /* Maximum number of iterations for outer loop */
  PetscBool    gkbmonitor; /* Monitor for gkb iterations and the lower bound error */
  PetscViewer  gkbviewer;  /* Viewer context for gkbmonitor */
  Vec          u, v, d, Hu; /* Work vectors for the GKB algorithm */
  PetscScalar *vecz;        /* Contains intermediate values, eg for lower bound */

  PC_FieldSplitLink head;             /* First split in the linked list */
  PetscBool         isrestrict;       /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */
  PetscBool         suboptionsset;    /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */
  PetscBool         dm_splits;        /* Whether to use DMCreateFieldDecomposition() whenever possible */
  PetscBool         diag_use_amat;    /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
  PetscBool         detect;           /* Whether to form 2-way split by finding zero diagonal entries */
  PetscBool         coordinates_set;  /* Whether PCSetCoordinates has been called */
} PC_FieldSplit;

/*
   Note:
   there is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit instead of
   inside PC_FieldSplitLink, just historical. If you want to be able to add new fields after already using the
   PC you could change this.
*/

/* This helper is so that setting a user-provided preconditioning matrix is orthogonal to choosing to use it. This way the
 * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices.
 * Returns the matrix used to precondition the Schur complement, selected by jac->schurpre. */
static Mat FieldSplitSchurPre(PC_FieldSplit *jac)
{
  switch (jac->schurpre) {
  case PC_FIELDSPLIT_SCHUR_PRE_SELF:
    return jac->schur;
  case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
    return jac->schurp;
  case PC_FIELDSPLIT_SCHUR_PRE_A11:
    return jac->pmat[1];
  case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */
  case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */
  default:
    return jac->schur_user ? jac->schur_user : jac->pmat[1];
  }
}

#include <petscdraw.h>
/* Viewer for the plain (non-Schur, non-GKB) fieldsplit PC: ASCII summary of each split and
   its KSP, or a boxed drawing of the sub-KSPs side by side for draw viewers. */
static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for off-diagonal blocks\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Solver info for each split is in the following KSP objects:\n"));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); /* suppress tabbing so the field list stays on one line */
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      PetscCall(KSPView(ilink->ksp, viewer));
      ilink = ilink->next;
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* spread the nsplits sub-KSP drawings evenly around the current point */
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Viewer for the Schur-complement variant of fieldsplit: reports the factorization type,
   which matrix preconditions S, and the A00/upper/S sub-solvers. */
static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer)
{
  PC_FieldSplit             *jac = (PC_FieldSplit *)pc->data;
  PetscBool                  iascii, isdraw;
  PetscInt                   i, j;
  PC_FieldSplitLink          ilink = jac->head;
  MatSchurComplementAinvType atype;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for blocks\n"));
    switch (jac->schurpre) {
    case PC_FIELDSPLIT_SCHUR_PRE_SELF:
      PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from S itself\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
      if (jac->schur) {
        PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype));
        PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's "))));
      }
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_A11:
      PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from A11\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_FULL:
      PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from the exact Schur complement\n"));
      break;
    case PC_FIELDSPLIT_SCHUR_PRE_USER:
      /* USER without a user matrix falls back to A11; mirror that in the report */
      if (jac->schur_user) {
        PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from user provided matrix\n"));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from A11\n"));
      }
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre);
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, " Split info:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    for (i = 0; i < jac->nsplits; i++) {
      if (ilink->fields) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); /* keep the field list on one line */
        for (j = 0; j < ilink->nfields; j++) {
          if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
          PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
        }
        PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
        PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
      }
      ilink = ilink->next;
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->head) {
      PetscCall(KSPView(jac->head->ksp, viewer));
    } else PetscCall(PetscViewerASCIIPrintf(viewer, " not yet available\n"));
    PetscCall(PetscViewerASCIIPopTab(viewer));
    /* Only shown when the upper-factor solver differs from the A00 solver */
    if (jac->head && jac->kspupper != jac->head->ksp) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer));
      else PetscCall(PetscViewerASCIIPrintf(viewer, " not yet available\n"));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->kspschur) {
      PetscCall(KSPView(jac->kspschur, viewer));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " not yet available\n"));
    }
    PetscCall(PetscViewerASCIIPopTab(viewer));
    PetscCall(PetscViewerASCIIPopTab(viewer));
  } else if (isdraw && jac->head) {
    PetscDraw draw;
    PetscReal x, y, w, wd, h;
    PetscInt  cnt = 2; /* A00 solver + Schur solver; +1 below if a distinct upper solver exists */
    char      str[32];

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (jac->kspupper != jac->head->ksp) cnt++;
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (cnt + 1);

    PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) {
      /* USER requested but no matrix supplied: A11 is what is actually used */
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11]));
    } else {
      PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre]));
    }
    PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    y -= h;
    x = x - wd * (cnt - 1) / 2.0;

    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->head->ksp, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
    if (jac->kspupper != jac->head->ksp) {
      x += wd;
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(jac->kspupper, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
    }
    x += wd;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
    PetscCall(KSPView(jac->kspschur, viewer));
    PetscCall(PetscDrawPopCurrentPoint(draw));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Viewer for the Golub-Kahan bidiagonalization variant: GKB parameters plus the solver
   for the augmented matrix H = A00 + nu*A01*A01' (only split 0 has its own solver here). */
static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PetscBool         iascii, isdraw;
  PetscInt          i, j;
  PC_FieldSplitLink ilink = jac->head;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    if (jac->bs > 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
    }
    if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for blocks\n"));
    if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for diagonal blocks\n"));
    if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for off-diagonal blocks\n"));

    PetscCall(PetscViewerASCIIPrintf(viewer, " Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Solver info for H = A00 + nu*A01*A01' matrix:\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));

    if (ilink->fields) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields "));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      for (j = 0; j < ilink->nfields; j++) {
        if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
        PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
      }
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n"));
    }
    PetscCall(KSPView(ilink->ksp, viewer));

    PetscCall(PetscViewerASCIIPopTab(viewer));
  }

  if (isdraw) {
    PetscDraw draw;
    PetscReal x, y, w, wd;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    w  = 2 * PetscMin(1.0 - x, x);
    wd = w / (jac->nsplits + 1);
    x  = x - wd * (jac->nsplits - 1) / 2.0;
    for (i = 0; i < jac->nsplits; i++) {
      PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
      PetscCall(KSPView(ilink->ksp, viewer));
      PetscCall(PetscDrawPopCurrentPoint(draw));
      x += wd;
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Precondition: jac->bs is set to a meaningful value.
   Scans -pc_fieldsplit_<k>_fields (and optional _fields_col) options for k = 0,1,2,...
   and creates one split per option found, stopping at the first missing index. */
static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscInt       i, nfields, *ifields, nfields_col, *ifields_col;
  PetscBool      flg, flg_col;
  char           optionname[128], splitname[8], optionname_col[128];

  PetscFunctionBegin;
  PetscCall(PetscMalloc1(jac->bs, &ifields));
  PetscCall(PetscMalloc1(jac->bs, &ifields_col));
  for (i = 0, flg = PETSC_TRUE;; i++) {
    PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
    PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
    PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i));
    nfields     = jac->bs;
    nfields_col = jac->bs;
    PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
    PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col));
    if (!flg) break;
    else if (flg && !flg_col) {
      /* No separate column fields given: use the row fields for both */
      PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
      PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields));
    } else {
      PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
      PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match");
      PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col));
    }
  }
  if (i > 0) {
    /* Makes command-line setting of splits take precedence over setting them in code.
       Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would
       create new splits, which would probably not be what the user wanted. */
    jac->splitdefined = PETSC_TRUE;
  }
  PetscCall(PetscFree(ifields));
  PetscCall(PetscFree(ifields_col));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCFieldSplitSetDefaults(PC pc)
{
  PC_FieldSplit    *jac                = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink              = jac->head;
  PetscBool         fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE;
  PetscInt          i;

  PetscFunctionBegin;
  /*
   Kinda messy, but at least this now uses DMCreateFieldDecomposition().
   Should probably be rewritten.
   */
  if (!ilink) {
    PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL));
    if (pc->dm && jac->dm_splits && !jac->detect && !coupling) {
      PetscInt  numFields, f, i, j; /* note: this i shadows the outer i */
      char    **fieldNames;
      IS       *fields;
      DM       *dms;
      DM        subdm[128];
      PetscBool flg;

      PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms));
      /* Allow the user to prescribe the splits */
      for (i = 0, flg = PETSC_TRUE;; i++) {
        PetscInt ifields[128];
        IS       compField;
        char     optionname[128], splitname[8];
        PetscInt nfields = numFields;

        PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
        /* NOTE(review): nfields starts at numFields while ifields holds only 128 entries, and the
           numFields <= 128 check below runs after this call — confirm this cannot overrun when
           a decomposition yields more than 128 fields */
        PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
        if (!flg) break;
        PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields);
        PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i]));
        if (nfields == 1) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField));
        } else {
          PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
          PetscCall(PCFieldSplitSetIS(pc, splitname, compField));
        }
        PetscCall(ISDestroy(&compField));
        /* release the names/ISs of the fields consumed by this user-prescribed split */
        for (j = 0; j < nfields; ++j) {
          f = ifields[j];
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      }
      if (i == 0) {
        /* No splits prescribed on the command line: one split per DM field */
        for (f = 0; f < numFields; ++f) {
          PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f]));
          PetscCall(PetscFree(fieldNames[f]));
          PetscCall(ISDestroy(&fields[f]));
        }
      } else {
        /* Replace the per-field DMs with the i sub-DMs created above */
        for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j));
        PetscCall(PetscFree(dms));
        PetscCall(PetscMalloc1(i, &dms));
        for (j = 0; j < i; ++j) dms[j] = subdm[j];
      }
      PetscCall(PetscFree(fieldNames));
      PetscCall(PetscFree(fields));
      if (dms) {
        PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n"));
        /* Attach one DM per split to its KSP; ownership of each dms[i] transfers here */
        for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) {
          const char *prefix;
          PetscCall(PetscObjectGetOptionsPrefix((PetscObject)ilink->ksp, &prefix));
          PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dms[i], prefix));
          PetscCall(KSPSetDM(ilink->ksp, dms[i]));
          PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE));
          PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0));
          PetscCall(DMDestroy(&dms[i]));
        }
        PetscCall(PetscFree(dms));
      }
    } else {
      if (jac->bs <= 0) {
        if (pc->pmat) {
          PetscCall(MatGetBlockSize(pc->pmat, &jac->bs));
        } else jac->bs = 1;
      }

      if (jac->detect) {
        /* 2-way split: rows with zero diagonal vs. the rest */
        IS       zerodiags, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->diag_use_amat) {
          PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags));
        } else {
          PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags));
        }
        PetscCall(ISComplement(zerodiags, nmin, nmax, &rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags));
        PetscCall(ISDestroy(&zerodiags));
        PetscCall(ISDestroy(&rest));
      } else if (coupling) {
        /* 2-way split: rows with off-block-diagonal coupling vs. all local rows.
           Note: this IS shadows the PetscBool 'coupling' tested just above. */
        IS       coupling, rest;
        PetscInt nmin, nmax;

        PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
        if (jac->offdiag_use_amat) {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling));
        } else {
          PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling));
        }
        PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest));
        PetscCall(ISSetIdentity(rest));
        PetscCall(PCFieldSplitSetIS(pc, "0", rest));
        PetscCall(PCFieldSplitSetIS(pc, "1", coupling));
        PetscCall(ISDestroy(&coupling));
        PetscCall(ISDestroy(&rest));
      } else {
        PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL));
        if (!fieldsplit_default) {
          /* Allow user to set fields from command line, if bs was known at the time of PCSetFromOptions_FieldSplit()
             then it is set there. This is not ideal because we should only have options set in XXSetFromOptions(). */
          PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
          if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
        }
        if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) {
          Mat       M = pc->pmat;
          PetscBool isnest;

          PetscCall(PetscInfo(pc, "Using default splitting of fields\n"));
          /* Prefer a MATNEST Pmat, else a MATNEST Amat, to define the splits from its blocks */
          PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest));
          if (!isnest) {
            M = pc->mat;
            PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest));
          }
          if (isnest) {
            IS      *fields;
            PetscInt nf;

            PetscCall(MatNestGetSize(M, &nf, NULL));
            PetscCall(PetscMalloc1(nf, &fields));
            PetscCall(MatNestGetISs(M, fields, NULL));
            for (i = 0; i < nf; i++) PetscCall(PCFieldSplitSetIS(pc, NULL, fields[i]));
            PetscCall(PetscFree(fields));
          } else {
            /* One scalar field per entry of the block: bs splits of one field each */
            for (i = 0; i < jac->bs; i++) {
              char splitname[8];
              PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
              PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i));
            }
            jac->defaultsplit = PETSC_TRUE;
          }
        }
      }
    }
  } else if (jac->nsplits == 1) {
    /* Exactly one split given: synthesize the second as the complement of the first */
    IS       is2;
    PetscInt nmin, nmax;

    PetscCheck(ilink->is, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()");
    PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
    PetscCall(ISComplement(ilink->is, nmin, nmax, &is2));
    PetscCall(PCFieldSplitSetIS(pc, "1", is2));
    PetscCall(ISDestroy(&is2));
  }

  PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Builds the augmented Lagrangian operator H = A00 + nu*A01*A01' used by the GKB
   preconditioner, after verifying C is (numerically) the Hermitian transpose of B. */
static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu)
{
  Mat       BT, T;
  PetscReal nrmT, nrmB;

  PetscFunctionBegin;
PetscCall(MatHermitianTranspose(C, MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */ 555 PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN)); 556 PetscCall(MatNorm(T, NORM_1, &nrmT)); 557 PetscCall(MatNorm(B, NORM_1, &nrmB)); 558 PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/hermitian, GKB is not applicable."); 559 560 /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */ 561 /* setting N := 1/nu*I in [Ar13]. */ 562 PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT)); 563 PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_DEFAULT, H)); /* H = A01*A01' */ 564 PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN)); /* H = A00 + nu*A01*A01' */ 565 566 PetscCall(MatDestroy(&BT)); 567 PetscCall(MatDestroy(&T)); 568 PetscFunctionReturn(PETSC_SUCCESS); 569 } 570 571 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *option[], const char *value[], PetscBool *flg); 572 573 static PetscErrorCode PCSetUp_FieldSplit(PC pc) 574 { 575 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 576 PC_FieldSplitLink ilink; 577 PetscInt i, nsplit; 578 PetscBool sorted, sorted_col; 579 580 PetscFunctionBegin; 581 pc->failedreason = PC_NOERROR; 582 PetscCall(PCFieldSplitSetDefaults(pc)); 583 nsplit = jac->nsplits; 584 ilink = jac->head; 585 586 /* get the matrices for each split */ 587 if (!jac->issetup) { 588 PetscInt rstart, rend, nslots, bs; 589 590 jac->issetup = PETSC_TRUE; 591 592 /* This is done here instead of in PCFieldSplitSetFields() because may not have matrix at that point */ 593 if (jac->defaultsplit || !ilink->is) { 594 if (jac->bs <= 0) jac->bs = nsplit; 595 } 596 597 /* MatCreateSubMatrix() for [S]BAIJ matrices can only work if the indices include entire blocks of the matrix */ 598 PetscCall(MatGetBlockSize(pc->pmat, &bs)); 599 if (bs > 1 && (jac->bs <= bs || 
jac->bs % bs)) { 600 PetscBool blk; 601 602 PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &blk, MATBAIJ, MATSBAIJ, MATSEQBAIJ, MATSEQSBAIJ, MATMPIBAIJ, MATMPISBAIJ, NULL)); 603 PetscCheck(!blk, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Cannot use MATBAIJ with PCFIELDSPLIT and currently set matrix and PC blocksizes"); 604 } 605 606 bs = jac->bs; 607 PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend)); 608 nslots = (rend - rstart) / bs; 609 for (i = 0; i < nsplit; i++) { 610 if (jac->defaultsplit) { 611 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is)); 612 PetscCall(ISDuplicate(ilink->is, &ilink->is_col)); 613 } else if (!ilink->is) { 614 if (ilink->nfields > 1) { 615 PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col; 616 PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii)); 617 PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj)); 618 for (j = 0; j < nslots; j++) { 619 for (k = 0; k < nfields; k++) { 620 ii[nfields * j + k] = rstart + bs * j + fields[k]; 621 jj[nfields * j + k] = rstart + bs * j + fields_col[k]; 622 } 623 } 624 PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is)); 625 PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col)); 626 PetscCall(ISSetBlockSize(ilink->is, nfields)); 627 PetscCall(ISSetBlockSize(ilink->is_col, nfields)); 628 } else { 629 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is)); 630 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col)); 631 } 632 } 633 PetscCall(ISSorted(ilink->is, &sorted)); 634 if (ilink->is_col) PetscCall(ISSorted(ilink->is_col, &sorted_col)); 635 PetscCheck(sorted && sorted_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Fields must be 
sorted when creating split"); 636 ilink = ilink->next; 637 } 638 } 639 640 ilink = jac->head; 641 if (!jac->pmat) { 642 Vec xtmp; 643 644 PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL)); 645 PetscCall(PetscMalloc1(nsplit, &jac->pmat)); 646 PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y)); 647 for (i = 0; i < nsplit; i++) { 648 MatNullSpace sp; 649 650 /* Check for preconditioning matrix attached to IS */ 651 PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i])); 652 if (jac->pmat[i]) { 653 PetscCall(PetscObjectReference((PetscObject)jac->pmat[i])); 654 if (jac->type == PC_COMPOSITE_SCHUR) { 655 jac->schur_user = jac->pmat[i]; 656 657 PetscCall(PetscObjectReference((PetscObject)jac->schur_user)); 658 } 659 } else { 660 const char *prefix; 661 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i])); 662 PetscCall(MatGetOptionsPrefix(jac->pmat[i], &prefix)); 663 if (!prefix) { 664 PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix)); 665 PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix)); 666 } 667 PetscCall(MatSetFromOptions(jac->pmat[i])); 668 PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view")); 669 } 670 /* create work vectors for each split */ 671 PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i])); 672 ilink->x = jac->x[i]; 673 ilink->y = jac->y[i]; 674 ilink->z = NULL; 675 /* compute scatter contexts needed by multiplicative versions and non-default splits */ 676 PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx)); 677 PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp)); 678 if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp)); 679 ilink = ilink->next; 680 } 681 PetscCall(VecDestroy(&xtmp)); 682 } else { 683 MatReuse scall; 684 MatNullSpace *nullsp = NULL; 685 686 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 687 PetscCall(MatGetNullSpaces(nsplit, jac->pmat, &nullsp)); 688 for (i = 0; i < 
nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i])); 689 scall = MAT_INITIAL_MATRIX; 690 } else scall = MAT_REUSE_MATRIX; 691 692 for (i = 0; i < nsplit; i++) { 693 Mat pmat; 694 695 /* Check for preconditioning matrix attached to IS */ 696 PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat)); 697 if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i])); 698 ilink = ilink->next; 699 } 700 if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->pmat, &nullsp)); 701 } 702 if (jac->diag_use_amat) { 703 ilink = jac->head; 704 if (!jac->mat) { 705 PetscCall(PetscMalloc1(nsplit, &jac->mat)); 706 for (i = 0; i < nsplit; i++) { 707 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i])); 708 ilink = ilink->next; 709 } 710 } else { 711 MatReuse scall; 712 MatNullSpace *nullsp = NULL; 713 714 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 715 PetscCall(MatGetNullSpaces(nsplit, jac->mat, &nullsp)); 716 for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i])); 717 scall = MAT_INITIAL_MATRIX; 718 } else scall = MAT_REUSE_MATRIX; 719 720 for (i = 0; i < nsplit; i++) { 721 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i])); 722 ilink = ilink->next; 723 } 724 if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->mat, &nullsp)); 725 } 726 } else { 727 jac->mat = jac->pmat; 728 } 729 730 /* Check for null space attached to IS */ 731 ilink = jac->head; 732 for (i = 0; i < nsplit; i++) { 733 MatNullSpace sp; 734 735 PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp)); 736 if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp)); 737 ilink = ilink->next; 738 } 739 740 if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) { 741 /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */ 742 
/* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? */ 743 ilink = jac->head; 744 if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) { 745 /* special case need where Afield[0] is not needed and only certain columns of Afield[1] are needed since update is only on those rows of the solution */ 746 if (!jac->Afield) { 747 PetscCall(PetscCalloc1(nsplit, &jac->Afield)); 748 if (jac->offdiag_use_amat) { 749 PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1])); 750 } else { 751 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1])); 752 } 753 } else { 754 MatReuse scall; 755 756 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 757 PetscCall(MatDestroy(&jac->Afield[1])); 758 scall = MAT_INITIAL_MATRIX; 759 } else scall = MAT_REUSE_MATRIX; 760 761 if (jac->offdiag_use_amat) { 762 PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1])); 763 } else { 764 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, &jac->Afield[1])); 765 } 766 } 767 } else { 768 if (!jac->Afield) { 769 PetscCall(PetscMalloc1(nsplit, &jac->Afield)); 770 for (i = 0; i < nsplit; i++) { 771 if (jac->offdiag_use_amat) { 772 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i])); 773 } else { 774 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i])); 775 } 776 ilink = ilink->next; 777 } 778 } else { 779 MatReuse scall; 780 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 781 for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i])); 782 scall = MAT_INITIAL_MATRIX; 783 } else scall = MAT_REUSE_MATRIX; 784 785 for (i = 0; i < nsplit; i++) { 786 if (jac->offdiag_use_amat) { 787 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i])); 788 } else { 789 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, 
&jac->Afield[i])); 790 } 791 ilink = ilink->next; 792 } 793 } 794 } 795 } 796 797 if (jac->type == PC_COMPOSITE_SCHUR) { 798 IS ccis; 799 PetscBool isset, isspd; 800 PetscInt rstart, rend; 801 char lscname[256]; 802 PetscObject LSC_L; 803 PetscBool set, flg; 804 805 PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields"); 806 807 /* If pc->mat is SPD, don't scale by -1 the Schur complement */ 808 if (jac->schurscale == (PetscScalar)-1.0) { 809 PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd)); 810 jac->schurscale = (isset && isspd) ? 1.0 : -1.0; 811 } 812 813 /* When extracting off-diagonal submatrices, we take complements from this range */ 814 PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend)); 815 PetscCall(PetscObjectTypeCompareAny(jac->offdiag_use_amat ? (PetscObject)pc->mat : (PetscObject)pc->pmat, &flg, MATSEQSBAIJ, MATMPISBAIJ, "")); 816 817 if (jac->schur) { 818 KSP kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper; 819 MatReuse scall; 820 821 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 822 scall = MAT_INITIAL_MATRIX; 823 PetscCall(MatDestroy(&jac->B)); 824 PetscCall(MatDestroy(&jac->C)); 825 } else scall = MAT_REUSE_MATRIX; 826 827 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner)); 828 ilink = jac->head; 829 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 830 if (jac->offdiag_use_amat) { 831 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->B)); 832 } else { 833 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->B)); 834 } 835 PetscCall(ISDestroy(&ccis)); 836 if (!flg) { 837 ilink = ilink->next; 838 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 839 if (jac->offdiag_use_amat) { 840 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, scall, &jac->C)); 841 } else { 842 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, scall, &jac->C)); 843 } 844 
PetscCall(ISDestroy(&ccis)); 845 } else { 846 PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &set, &flg)); 847 if (set && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C)); 848 else PetscCall(MatCreateTranspose(jac->B, &jac->C)); 849 } 850 PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1])); 851 if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) { 852 PetscCall(MatDestroy(&jac->schurp)); 853 PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp)); 854 } 855 if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0])); 856 if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0])); 857 PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac))); 858 } else { 859 const char *Dprefix; 860 char schurprefix[256], schurmatprefix[256]; 861 char schurtestoption[256]; 862 MatNullSpace sp; 863 KSP kspt; 864 865 /* extract the A01 and A10 matrices */ 866 ilink = jac->head; 867 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 868 if (jac->offdiag_use_amat) { 869 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 870 } else { 871 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 872 } 873 PetscCall(ISDestroy(&ccis)); 874 ilink = ilink->next; 875 if (!flg) { 876 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 877 if (jac->offdiag_use_amat) { 878 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 879 } else { 880 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 881 } 882 PetscCall(ISDestroy(&ccis)); 883 } else { 884 PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? 
pc->mat : pc->pmat, &set, &flg)); 885 if (set && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C)); 886 else PetscCall(MatCreateTranspose(jac->B, &jac->C)); 887 } 888 /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */ 889 PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur)); 890 PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT)); 891 PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1])); 892 PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname)); 893 PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix)); 894 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt)); 895 PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix)); 896 897 /* Note: this is not true in general */ 898 PetscCall(MatGetNullSpace(jac->mat[1], &sp)); 899 if (sp) PetscCall(MatSetNullSpace(jac->schur, sp)); 900 901 PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname)); 902 PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg)); 903 if (flg) { 904 DM dmInner; 905 KSP kspInner; 906 PC pcInner; 907 908 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner)); 909 PetscCall(KSPReset(kspInner)); 910 PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0])); 911 PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname)); 912 /* Indent this deeper to emphasize the "inner" nature of this solver. 
*/ 913 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2)); 914 PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2)); 915 PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix)); 916 917 /* Set DM for new solver */ 918 PetscCall(KSPGetDM(jac->head->ksp, &dmInner)); 919 PetscCall(KSPSetDM(kspInner, dmInner)); 920 PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE)); 921 922 /* Defaults to PCKSP as preconditioner */ 923 PetscCall(KSPGetPC(kspInner, &pcInner)); 924 PetscCall(PCSetType(pcInner, PCKSP)); 925 PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp)); 926 } else { 927 /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or 928 * PCFieldSplitSetIS_FieldSplit. We don't want KSPPREONLY because it makes the Schur complement inexact, 929 * preventing Schur complement reduction to be an accurate solve. Usually when an iterative solver is used for 930 * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make 931 * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used 932 * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. 
*/ 933 PetscCall(KSPSetType(jac->head->ksp, KSPGMRES)); 934 PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp)); 935 } 936 PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0])); 937 PetscCall(KSPSetFromOptions(jac->head->ksp)); 938 PetscCall(MatSetFromOptions(jac->schur)); 939 940 PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg)); 941 if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */ 942 KSP kspInner; 943 PC pcInner; 944 945 PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner)); 946 PetscCall(KSPGetPC(kspInner, &pcInner)); 947 PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg)); 948 if (flg) { 949 KSP ksp; 950 951 PetscCall(PCKSPGetKSP(pcInner, &ksp)); 952 if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE)); 953 } 954 } 955 PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname)); 956 PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg)); 957 if (flg) { 958 DM dmInner; 959 960 PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? 
((PetscObject)pc)->prefix : "", ilink->splitname)); 961 PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper)); 962 PetscCall(KSPSetNestLevel(jac->kspupper, pc->kspnestlevel)); 963 PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure)); 964 PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix)); 965 PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1)); 966 PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1)); 967 PetscCall(KSPGetDM(jac->head->ksp, &dmInner)); 968 PetscCall(KSPSetDM(jac->kspupper, dmInner)); 969 PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE)); 970 PetscCall(KSPSetFromOptions(jac->kspupper)); 971 PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0])); 972 PetscCall(VecDuplicate(jac->head->x, &jac->head->z)); 973 } else { 974 jac->kspupper = jac->head->ksp; 975 PetscCall(PetscObjectReference((PetscObject)jac->head->ksp)); 976 } 977 978 if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp)); 979 PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur)); 980 PetscCall(KSPSetNestLevel(jac->kspschur, pc->kspnestlevel)); 981 PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure)); 982 PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1)); 983 if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) { 984 PC pcschur; 985 PetscCall(KSPGetPC(jac->kspschur, &pcschur)); 986 PetscCall(PCSetType(pcschur, PCNONE)); 987 /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */ 988 } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) { 989 PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user)); 990 } 991 PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac))); 992 PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix)); 
993 PetscCall(KSPSetOptionsPrefix(jac->kspschur, Dprefix)); 994 /* propagate DM */ 995 { 996 DM sdm; 997 PetscCall(KSPGetDM(jac->head->next->ksp, &sdm)); 998 if (sdm) { 999 PetscCall(KSPSetDM(jac->kspschur, sdm)); 1000 PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE)); 1001 } 1002 } 1003 /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */ 1004 /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */ 1005 PetscCall(KSPSetFromOptions(jac->kspschur)); 1006 } 1007 PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY)); 1008 PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY)); 1009 1010 /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */ 1011 PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname)); 1012 PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L)); 1013 if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L)); 1014 if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", (PetscObject)LSC_L)); 1015 PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname)); 1016 PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, (PetscObject *)&LSC_L)); 1017 if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, (PetscObject *)&LSC_L)); 1018 if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", (PetscObject)LSC_L)); 1019 } else if (jac->type == PC_COMPOSITE_GKB) { 1020 IS ccis; 1021 PetscInt rstart, rend; 1022 1023 PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields"); 1024 1025 ilink = jac->head; 1026 1027 /* When extracting off-diagonal submatrices, we take complements from this range */ 1028 PetscCall(MatGetOwnershipRangeColumn(pc->mat, &rstart, &rend)); 1029 1030 
PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 1031 if (jac->offdiag_use_amat) { 1032 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 1033 } else { 1034 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->B)); 1035 } 1036 PetscCall(ISDestroy(&ccis)); 1037 /* Create work vectors for GKB algorithm */ 1038 PetscCall(VecDuplicate(ilink->x, &jac->u)); 1039 PetscCall(VecDuplicate(ilink->x, &jac->Hu)); 1040 PetscCall(VecDuplicate(ilink->x, &jac->w2)); 1041 ilink = ilink->next; 1042 PetscCall(ISComplement(ilink->is_col, rstart, rend, &ccis)); 1043 if (jac->offdiag_use_amat) { 1044 PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 1045 } else { 1046 PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ccis, MAT_INITIAL_MATRIX, &jac->C)); 1047 } 1048 PetscCall(ISDestroy(&ccis)); 1049 /* Create work vectors for GKB algorithm */ 1050 PetscCall(VecDuplicate(ilink->x, &jac->v)); 1051 PetscCall(VecDuplicate(ilink->x, &jac->d)); 1052 PetscCall(VecDuplicate(ilink->x, &jac->w1)); 1053 PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu)); 1054 PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz)); 1055 1056 ilink = jac->head; 1057 PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H)); 1058 if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp)); 1059 /* Create gkb_monitor context */ 1060 if (jac->gkbmonitor) { 1061 PetscInt tablevel; 1062 PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer)); 1063 PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII)); 1064 PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel)); 1065 PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel)); 1066 PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1)); 1067 } 1068 } else { 1069 /* set up the individual splits' PCs */ 1070 i = 0; 1071 ilink = jac->head; 1072 while 
(ilink) { 1073 PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i])); 1074 /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */ 1075 if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp)); 1076 i++; 1077 ilink = ilink->next; 1078 } 1079 } 1080 1081 /* Set coordinates to the sub PC objects whenever these are set */ 1082 if (jac->coordinates_set) { 1083 PC pc_coords; 1084 if (jac->type == PC_COMPOSITE_SCHUR) { 1085 // Head is first block. 1086 PetscCall(KSPGetPC(jac->head->ksp, &pc_coords)); 1087 PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords)); 1088 // Second one is Schur block, but its KSP object is in kspschur. 1089 PetscCall(KSPGetPC(jac->kspschur, &pc_coords)); 1090 PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords)); 1091 } else if (jac->type == PC_COMPOSITE_GKB) { 1092 PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB Fieldpslit preconditioner\n")); 1093 } else { 1094 ilink = jac->head; 1095 while (ilink) { 1096 PetscCall(KSPGetPC(ilink->ksp, &pc_coords)); 1097 PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords)); 1098 ilink = ilink->next; 1099 } 1100 } 1101 } 1102 1103 jac->suboptionsset = PETSC_TRUE; 1104 PetscFunctionReturn(PETSC_SUCCESS); 1105 } 1106 1107 #define FieldSplitSplitSolveAdd(ilink, xx, yy) \ 1108 ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \ 1109 KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \ 1110 VecScatterEnd(ilink->sctx, ilink->y, yy, 
ADD_VALUES, SCATTER_REVERSE))) 1111 1112 static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y) 1113 { 1114 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1115 PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; 1116 KSP kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper; 1117 1118 PetscFunctionBegin; 1119 switch (jac->schurfactorization) { 1120 case PC_FIELDSPLIT_SCHUR_FACT_DIAG: 1121 /* [A00 0; 0 -S], positive definite, suitable for MINRES */ 1122 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1123 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1124 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1125 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1126 PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y)); 1127 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1128 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1129 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1130 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1131 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1132 PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y)); 1133 PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1134 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1135 PetscCall(VecScale(ilinkD->y, jac->schurscale)); 1136 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1137 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1138 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1139 break; 1140 case PC_FIELDSPLIT_SCHUR_FACT_LOWER: 1141 /* [A00 0; A10 S], suitable for left preconditioning */ 1142 
PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1143 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1144 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1145 PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y)); 1146 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1147 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1148 PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x)); 1149 PetscCall(VecScale(ilinkD->x, -1.)); 1150 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1151 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1152 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1153 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1154 PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y)); 1155 PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1156 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1157 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1158 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1159 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1160 break; 1161 case PC_FIELDSPLIT_SCHUR_FACT_UPPER: 1162 /* [A00 A01; 0 S], suitable for right preconditioning */ 1163 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1164 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); 1165 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1166 PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y)); 1167 PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1168 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, 
ilinkD->x, ilinkD->y, NULL)); 1169 PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x)); 1170 PetscCall(VecScale(ilinkA->x, -1.)); 1171 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD)); 1172 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1173 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD)); 1174 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1175 PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y)); 1176 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1177 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1178 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1179 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1180 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1181 break; 1182 case PC_FIELDSPLIT_SCHUR_FACT_FULL: 1183 /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */ 1184 PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1185 PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD)); 1186 PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL)); 1187 PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y)); 1188 PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y)); 1189 PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL)); 1190 PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x)); 1191 PetscCall(VecScale(ilinkD->x, -1.0)); 1192 PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1193 PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); 1194 1195 PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1196 PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y)); 1197 
PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); 1198 PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); 1199 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1200 1201 if (kspUpper == kspA) { 1202 PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y)); 1203 PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y)); 1204 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1205 PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y)); 1206 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1207 PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1208 } else { 1209 PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL)); 1210 PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y)); 1211 PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y)); 1212 PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x)); 1213 PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL)); 1214 PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z)); 1215 PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z)); 1216 PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL)); 1217 PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z)); 1218 } 1219 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1220 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1221 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1222 } 1223 PetscFunctionReturn(PETSC_SUCCESS); 1224 } 1225 1226 static PetscErrorCode PCApplyTranspose_FieldSplit_Schur(PC pc, Vec x, Vec y) 1227 { 1228 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1229 PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next; 1230 KSP kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper; 1231 1232 PetscFunctionBegin; 1233 switch (jac->schurfactorization) { 1234 case PC_FIELDSPLIT_SCHUR_FACT_DIAG: 
    /* [A00 0; 0 -S], positive definite, suitable for MINRES */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* same sign convention as the forward apply: schurscale is -1 unless pmat is known SPD */
    PetscCall(VecScale(ilinkD->y, jac->schurscale));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
    /* transpose of the UPPER factor is block lower triangular: A00^T solve first, then S^T */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    /* second-block rhs: x_D - A01^T * (A00^{-T} x_A) */
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
    /* transpose of the LOWER factor is block upper triangular: S^T solve first, then A00^T */
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    /* first-block rhs: x_A - A10^T * (S^{-T} x_D) */
    PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
    PetscCall(VecScale(ilinkA->x, -1.));
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    break;
  case PC_FIELDSPLIT_SCHUR_FACT_FULL:
    /* transpose of FULL: apply the three transposed factors in reverse order; kspUpper acts
       first and kspLower last (mirroring the forward apply) */
    PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(KSPSolveTranspose(kspUpper, ilinkA->x, ilinkA->y));
    PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
    PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
    PetscCall(VecScale(ilinkD->x, -1.0));
    PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
    PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
    PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
    PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));

    if (kspLower == kspA) {
      /* no separate lower solver: reuse kspA, fold A10^T*y_D into the rhs of one transposed solve */
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->y));
      PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
    } else {
      /* distinct lower solver: two transposed solves, combined with VecAXPY */
      PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
      PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
      PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
      PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
      PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(KSPSolveTranspose(kspLower, ilinkA->x, ilinkA->z));
      PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->z));
      PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
      PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
    }
    PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* PCApply_FieldSplit - Apply the (non-Schur, non-GKB) fieldsplit preconditioner; the visible
   portion handles the additive composition with a default (strided) split. */
static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head;
  PetscInt          cnt, bs;

  PetscFunctionBegin;
  if (jac->type == PC_COMPOSITE_ADDITIVE) {
    if (jac->defaultsplit) {
      /* sanity-check that the vectors' block size matches the fieldsplit block size */
      PetscCall(VecGetBlockSize(x, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      PetscCall(VecGetBlockSize(y, &bs));
      PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
      /* split x into the per-field work vectors by stride */
      PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
      /* solve each diagonal block independently on the gathered strided sub-vectors */
      while (ilink) {
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        ilink = ilink->next;
      }
      PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
    } else {
      /* general (IS-defined) splits: scatter, solve, and add each split's contribution */
      PetscCall(VecSet(y, 0.0));
      while (ilink) {
        PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
        ilink = ilink->next;
      }
    }
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) {
    /* specialized 2-split Gauss-Seidel sweep using the stored row blocks jac->Afield */
    PetscCall(VecSet(y, 0.0));
    /* solve on first block for first block variables */
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));

    /* compute the residual only onto second block variables using first block variables */
    PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x));
    ilink = ilink->next;
    PetscCall(VecScale(ilink->x, -1.0));
    PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));

    /* solve on second block variables */
    PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
    PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
    PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
    PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
  } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
    /* general multiplicative sweep; the symmetric variant adds a reverse sweep afterwards */
    if (!jac->w1) {
      PetscCall(VecDuplicate(x, &jac->w1));
      PetscCall(VecDuplicate(x, &jac->w2));
    }
    PetscCall(VecSet(y, 0.0));
    PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
    cnt = 1;
    while (ilink->next) {
      ilink = ilink->next;
      /* compute the residual only over the part of the vector needed */
      PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x));
      PetscCall(VecScale(ilink->x, -1.0));
      PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
      PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
      PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
      PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
    }
    if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
      /* reverse sweep, skipping the last split which was just solved */
      cnt -= 2;
      while (ilink->previous) {
        ilink = ilink->previous;
        /* compute the residual only over the part of the vector needed */
        PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x));
        PetscCall(VecScale(ilink->x, -1.0));
        PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
        PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
        PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
        PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
      }
    }
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Apply the Golub-Kahan bidiagonalization (GKB) preconditioner for a 2x2 block system
   [A B; B' 0]; iterates until the lower-bound error estimate drops below jac->gkbtol
   or jac->gkbmaxit iterations are reached. See [Ar13] referenced in the setup code. */
static PetscErrorCode PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y)
{
  PC_FieldSplit    *jac    = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
  KSP               ksp = ilinkA->ksp;
  Vec               u, v, Hu, d, work1, work2;
  PetscScalar       alpha, z, nrmz2, *vecz;
  PetscReal         lowbnd, nu, beta;
  PetscInt          j, iterGKB;

  PetscFunctionBegin;
  PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));

  u     = jac->u;
  v     = jac->v;
  Hu    = jac->Hu;
  d     = jac->d;
  work1 = jac->w1;
  work2 = jac->w2;
  vecz  = jac->vecz;

  /* Change RHS to comply with matrix regularization H = A + nu*B*B' */
  /* Add q = q + nu*B*b */
  if (jac->gkbnu) {
    nu = jac->gkbnu;
    PetscCall(VecScale(ilinkD->x, jac->gkbnu));
    PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */
  } else {
    /* Situation when no augmented Lagrangian is used. Then we set inner */
    /* matrix N = I in [Ar13], and thus nu = 1.                          */
    nu = 1;
  }

  /* Transform rhs from [q,tilde{b}] to [0,b] */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y));
  PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
  PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1));
  PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x */

  /* First step of algorithm */
  PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c)*/
  KSPCheckDot(ksp, beta);
  beta = PetscSqrtReal(nu) * beta;
  PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c */
  PetscCall(MatMult(jac->B, v, work2));          /* u = H^{-1}*B*v */
  PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(KSPSolve(ksp, work2, u));
  PetscCall(KSPCheckSolve(ksp, pc, u));
  PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
  PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u */
  PetscCall(VecDot(Hu, u, &alpha));
  KSPCheckDot(ksp, alpha);
  PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
  alpha = PetscSqrtReal(PetscAbsScalar(alpha));
  PetscCall(VecScale(u, 1.0 / alpha));
  PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* d = v/alpha (comment corrected; was mislabeled as v = nu/beta*c) */

  z       = beta / alpha;
  vecz[1] = z;

  /* Computation of first iterate x(1) and p(1) */
  PetscCall(VecAXPY(ilinkA->y, z, u));
  PetscCall(VecCopy(d, ilinkD->y));
  PetscCall(VecScale(ilinkD->y, -z));

  iterGKB = 1;
  lowbnd  = 2 * jac->gkbtol; /* ensure at least one iteration of the loop below runs */
  if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));

  while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) {
iterGKB += 1; 1511 PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */ 1512 PetscCall(VecAXPBY(v, nu, -alpha, work1)); 1513 PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = sqrt(nu)*v'*v */ 1514 beta = beta / PetscSqrtReal(nu); 1515 PetscCall(VecScale(v, 1.0 / beta)); 1516 PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */ 1517 PetscCall(MatMult(jac->H, u, Hu)); 1518 PetscCall(VecAXPY(work2, -beta, Hu)); 1519 PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL)); 1520 PetscCall(KSPSolve(ksp, work2, u)); 1521 PetscCall(KSPCheckSolve(ksp, pc, u)); 1522 PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL)); 1523 PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u */ 1524 PetscCall(VecDot(Hu, u, &alpha)); 1525 KSPCheckDot(ksp, alpha); 1526 PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite"); 1527 alpha = PetscSqrtReal(PetscAbsScalar(alpha)); 1528 PetscCall(VecScale(u, 1.0 / alpha)); 1529 1530 z = -beta / alpha * z; /* z <- beta/alpha*z */ 1531 vecz[0] = z; 1532 1533 /* Computation of new iterate x(i+1) and p(i+1) */ 1534 PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */ 1535 PetscCall(VecAXPY(ilinkA->y, z, u)); /* r = r + z*u */ 1536 PetscCall(VecAXPY(ilinkD->y, -z, d)); /* p = p - z*d */ 1537 PetscCall(MatMult(jac->H, ilinkA->y, Hu)); /* ||u||_H = u'*H*u */ 1538 PetscCall(VecDot(Hu, ilinkA->y, &nrmz2)); 1539 1540 /* Compute Lower Bound estimate */ 1541 if (iterGKB > jac->gkbdelay) { 1542 lowbnd = 0.0; 1543 for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]); 1544 lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2)); 1545 } 1546 1547 for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2]; 1548 if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound 
estimate %14.12e\n", iterGKB, (double)lowbnd)); 1549 } 1550 1551 PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1552 PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1553 PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1554 PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); 1555 PetscFunctionReturn(PETSC_SUCCESS); 1556 } 1557 1558 #define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \ 1559 ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \ 1560 KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \ 1561 VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE))) 1562 1563 static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y) 1564 { 1565 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1566 PC_FieldSplitLink ilink = jac->head; 1567 PetscInt bs; 1568 1569 PetscFunctionBegin; 1570 if (jac->type == PC_COMPOSITE_ADDITIVE) { 1571 if (jac->defaultsplit) { 1572 PetscCall(VecGetBlockSize(x, &bs)); 1573 PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs); 1574 PetscCall(VecGetBlockSize(y, &bs)); 1575 PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs); 1576 PetscCall(VecStrideGatherAll(x, jac->x, 
INSERT_VALUES)); 1577 while (ilink) { 1578 PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1579 PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y)); 1580 PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y)); 1581 PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL)); 1582 ilink = ilink->next; 1583 } 1584 PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES)); 1585 } else { 1586 PetscCall(VecSet(y, 0.0)); 1587 while (ilink) { 1588 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y)); 1589 ilink = ilink->next; 1590 } 1591 } 1592 } else { 1593 if (!jac->w1) { 1594 PetscCall(VecDuplicate(x, &jac->w1)); 1595 PetscCall(VecDuplicate(x, &jac->w2)); 1596 } 1597 PetscCall(VecSet(y, 0.0)); 1598 if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) { 1599 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y)); 1600 while (ilink->next) { 1601 ilink = ilink->next; 1602 PetscCall(MatMultTranspose(pc->mat, y, jac->w1)); 1603 PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); 1604 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y)); 1605 } 1606 while (ilink->previous) { 1607 ilink = ilink->previous; 1608 PetscCall(MatMultTranspose(pc->mat, y, jac->w1)); 1609 PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); 1610 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y)); 1611 } 1612 } else { 1613 while (ilink->next) { /* get to last entry in linked list */ 1614 ilink = ilink->next; 1615 } 1616 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y)); 1617 while (ilink->previous) { 1618 ilink = ilink->previous; 1619 PetscCall(MatMultTranspose(pc->mat, y, jac->w1)); 1620 PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x)); 1621 PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y)); 1622 } 1623 } 1624 } 1625 PetscFunctionReturn(PETSC_SUCCESS); 1626 } 1627 1628 static PetscErrorCode PCReset_FieldSplit(PC pc) 1629 { 1630 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1631 PC_FieldSplitLink ilink 
= jac->head, next; 1632 1633 PetscFunctionBegin; 1634 while (ilink) { 1635 PetscCall(KSPDestroy(&ilink->ksp)); 1636 PetscCall(VecDestroy(&ilink->x)); 1637 PetscCall(VecDestroy(&ilink->y)); 1638 PetscCall(VecDestroy(&ilink->z)); 1639 PetscCall(VecScatterDestroy(&ilink->sctx)); 1640 PetscCall(ISDestroy(&ilink->is)); 1641 PetscCall(ISDestroy(&ilink->is_col)); 1642 PetscCall(PetscFree(ilink->splitname)); 1643 PetscCall(PetscFree(ilink->fields)); 1644 PetscCall(PetscFree(ilink->fields_col)); 1645 next = ilink->next; 1646 PetscCall(PetscFree(ilink)); 1647 ilink = next; 1648 } 1649 jac->head = NULL; 1650 PetscCall(PetscFree2(jac->x, jac->y)); 1651 if (jac->mat && jac->mat != jac->pmat) { 1652 PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat)); 1653 } else if (jac->mat) { 1654 jac->mat = NULL; 1655 } 1656 if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat)); 1657 if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield)); 1658 jac->nsplits = 0; 1659 PetscCall(VecDestroy(&jac->w1)); 1660 PetscCall(VecDestroy(&jac->w2)); 1661 PetscCall(MatDestroy(&jac->schur)); 1662 PetscCall(MatDestroy(&jac->schurp)); 1663 PetscCall(MatDestroy(&jac->schur_user)); 1664 PetscCall(KSPDestroy(&jac->kspschur)); 1665 PetscCall(KSPDestroy(&jac->kspupper)); 1666 PetscCall(MatDestroy(&jac->B)); 1667 PetscCall(MatDestroy(&jac->C)); 1668 PetscCall(MatDestroy(&jac->H)); 1669 PetscCall(VecDestroy(&jac->u)); 1670 PetscCall(VecDestroy(&jac->v)); 1671 PetscCall(VecDestroy(&jac->Hu)); 1672 PetscCall(VecDestroy(&jac->d)); 1673 PetscCall(PetscFree(jac->vecz)); 1674 PetscCall(PetscViewerDestroy(&jac->gkbviewer)); 1675 jac->isrestrict = PETSC_FALSE; 1676 PetscFunctionReturn(PETSC_SUCCESS); 1677 } 1678 1679 static PetscErrorCode PCDestroy_FieldSplit(PC pc) 1680 { 1681 PetscFunctionBegin; 1682 PetscCall(PCReset_FieldSplit(pc)); 1683 PetscCall(PetscFree(pc->data)); 1684 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL)); 1685 
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
  /* NOTE(review): PCFieldSplitGetSubKSP_C was already cleared above; this repeat is harmless but redundant */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Process options-database settings for the fieldsplit PC (composite type, Schur and GKB
   parameters, runtime-defined splits) and, once set up, propagate options to the sub-KSPs */
static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PetscInt        bs;
  PetscBool       flg;
  PC_FieldSplit  *jac = (PC_FieldSplit *)pc->data;
  PCCompositeType ctype;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options");
  PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL));
  PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg));
  if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs));
  /* -pc_use_amat provides the default for the two finer-grained flags below */
  jac->diag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL));
  jac->offdiag_use_amat = pc->useAmat;
  PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL));
  PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL));
  PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */
  PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg));
  if (flg) PetscCall(PCFieldSplitSetType(pc, ctype));
  /* Only setup fields once */
  if ((jac->bs > 0) && (jac->nsplits == 0)) {
    /* only allow user to set fields from command line if bs is already known.
       otherwise user can set them in PCFieldSplitSetDefaults() */
    PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
    if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
  }
  if (jac->type == PC_COMPOSITE_SCHUR) {
    /* the deprecated spelling is read first so the current option can override it */
    PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg));
    if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n"));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL));
    PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL));
    PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL));
  } else if (jac->type == PC_COMPOSITE_GKB) {
    PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitSetGKBTol", jac->gkbtol, &jac->gkbtol, NULL));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitSetGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL));
    PetscCall(PetscOptionsBoundedReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitSetGKBNu", jac->gkbnu, &jac->gkbnu, NULL, 0.0));
    PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", "Maximum allowed number of iterations", "PCFieldSplitSetGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL));
PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL)); 1744 } 1745 /* 1746 In the initial call to this routine the sub-solver data structures do not exist so we cannot call KSPSetFromOptions() on them yet. 1747 But after the initial setup of ALL the layers of sub-solvers is completed we do want to call KSPSetFromOptions() on the sub-solvers every time it 1748 is called on the outer solver in case changes were made in the options database 1749 1750 But even after PCSetUp_FieldSplit() is called all the options inside the inner levels of sub-solvers may still not have been set thus we only call the KSPSetFromOptions() 1751 if we know that the entire stack of sub-solvers below this have been complete instantiated, we check this by seeing if any solver iterations are complete. 1752 Without this extra check test p2p1fetidp_olof_full and others fail with incorrect matrix types. 1753 1754 There could be a negative side effect of calling the KSPSetFromOptions() below. 
1755 1756 If one captured the PetscObjectState of the options database one could skip these calls if the database has not changed from the previous call 1757 */ 1758 if (jac->issetup) { 1759 PC_FieldSplitLink ilink = jac->head; 1760 if (jac->type == PC_COMPOSITE_SCHUR) { 1761 if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper)); 1762 if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur)); 1763 } 1764 while (ilink) { 1765 if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp)); 1766 ilink = ilink->next; 1767 } 1768 } 1769 PetscOptionsHeadEnd(); 1770 PetscFunctionReturn(PETSC_SUCCESS); 1771 } 1772 1773 static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col) 1774 { 1775 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1776 PC_FieldSplitLink ilink, next = jac->head; 1777 char prefix[128]; 1778 PetscInt i; 1779 1780 PetscFunctionBegin; 1781 if (jac->splitdefined) { 1782 PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname)); 1783 PetscFunctionReturn(PETSC_SUCCESS); 1784 } 1785 for (i = 0; i < n; i++) { 1786 PetscCheck(fields[i] < jac->bs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", fields[i], jac->bs); 1787 PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]); 1788 } 1789 PetscCall(PetscNew(&ilink)); 1790 if (splitname) { 1791 PetscCall(PetscStrallocpy(splitname, &ilink->splitname)); 1792 } else { 1793 PetscCall(PetscMalloc1(3, &ilink->splitname)); 1794 PetscCall(PetscSNPrintf(ilink->splitname, 2, "%" PetscInt_FMT, jac->nsplits)); 1795 } 1796 ilink->event = jac->nsplits < 5 ? 
KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split great than 4 gets logged in the 4th split */ 1797 PetscCall(PetscMalloc1(n, &ilink->fields)); 1798 PetscCall(PetscArraycpy(ilink->fields, fields, n)); 1799 PetscCall(PetscMalloc1(n, &ilink->fields_col)); 1800 PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n)); 1801 1802 ilink->nfields = n; 1803 ilink->next = NULL; 1804 PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp)); 1805 PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel)); 1806 PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure)); 1807 PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1)); 1808 PetscCall(KSPSetType(ilink->ksp, KSPPREONLY)); 1809 1810 PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname)); 1811 PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix)); 1812 1813 if (!next) { 1814 jac->head = ilink; 1815 ilink->previous = NULL; 1816 } else { 1817 while (next->next) next = next->next; 1818 next->next = ilink; 1819 ilink->previous = next; 1820 } 1821 jac->nsplits++; 1822 PetscFunctionReturn(PETSC_SUCCESS); 1823 } 1824 1825 static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp) 1826 { 1827 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1828 1829 PetscFunctionBegin; 1830 *subksp = NULL; 1831 if (n) *n = 0; 1832 if (jac->type == PC_COMPOSITE_SCHUR) { 1833 PetscInt nn; 1834 1835 PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()"); 1836 PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits); 1837 nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 
1 : 0); 1838 PetscCall(PetscMalloc1(nn, subksp)); 1839 (*subksp)[0] = jac->head->ksp; 1840 (*subksp)[1] = jac->kspschur; 1841 if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper; 1842 if (n) *n = nn; 1843 } 1844 PetscFunctionReturn(PETSC_SUCCESS); 1845 } 1846 1847 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp) 1848 { 1849 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1850 1851 PetscFunctionBegin; 1852 PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()"); 1853 PetscCall(PetscMalloc1(jac->nsplits, subksp)); 1854 PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp)); 1855 1856 (*subksp)[1] = jac->kspschur; 1857 if (n) *n = jac->nsplits; 1858 PetscFunctionReturn(PETSC_SUCCESS); 1859 } 1860 1861 static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp) 1862 { 1863 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 1864 PetscInt cnt = 0; 1865 PC_FieldSplitLink ilink = jac->head; 1866 1867 PetscFunctionBegin; 1868 PetscCall(PetscMalloc1(jac->nsplits, subksp)); 1869 while (ilink) { 1870 (*subksp)[cnt++] = ilink->ksp; 1871 ilink = ilink->next; 1872 } 1873 PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits); 1874 if (n) *n = jac->nsplits; 1875 PetscFunctionReturn(PETSC_SUCCESS); 1876 } 1877 1878 /*@C 1879 PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`. 1880 1881 Input Parameters: 1882 + pc - the preconditioner context 1883 - isy - the index set that defines the indices to which the fieldsplit is to be restricted 1884 1885 Level: advanced 1886 1887 Developer Notes: 1888 It seems the resulting `IS`s will not cover the entire space, so 1889 how can they define a convergent preconditioner? 
Needs explaining.

.seealso: [](sec_block_matrices), `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(isy, IS_CLASSID, 2);
  PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Replaces each split's IS by its embedding into isy (shifted to the global numbering of isy),
   recursing into nested PCFIELDSPLIT sub-preconditioners. */
static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy)
{
  PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink = jac->head, next;
  PetscInt          localsize, size, sizez, i;
  const PetscInt   *ind, *indz;
  PetscInt         *indc, *indcz;
  PetscBool         flg;

  PetscFunctionBegin;
  PetscCall(ISGetLocalSize(isy, &localsize));
  /* Exclusive prefix sum: size becomes the global offset of this rank's portion of isy */
  PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy)));
  size -= localsize;
  while (ilink) {
    IS isrl, isr;
    PC subpc;
    /* Embed this split's IS into isy, copy the local indices, and shift them by the rank offset */
    PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl));
    PetscCall(ISGetLocalSize(isrl, &localsize));
    PetscCall(PetscMalloc1(localsize, &indc));
    PetscCall(ISGetIndices(isrl, &ind));
    PetscCall(PetscArraycpy(indc, ind, localsize));
    PetscCall(ISRestoreIndices(isrl, &ind));
    PetscCall(ISDestroy(&isrl));
    for (i = 0; i < localsize; i++) *(indc + i) += size;
    /* isr takes ownership of indc; extra references are taken before it replaces is and is_col */
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr));
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is));
    ilink->is = isr;
    PetscCall(PetscObjectReference((PetscObject)isr));
    PetscCall(ISDestroy(&ilink->is_col));
    ilink->is_col = isr;
    PetscCall(ISDestroy(&isr));
    /* If the split's sub-PC is itself a PCFIELDSPLIT, restrict it recursively */
    PetscCall(KSPGetPC(ilink->ksp, &subpc));
    PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg));
    if (flg) {
      IS       iszl, isz;
      MPI_Comm comm;
      PetscCall(ISGetLocalSize(ilink->is, &localsize));
      comm = PetscObjectComm((PetscObject)ilink->is);
      PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl));
      PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm));
      sizez -= localsize;
      PetscCall(ISGetLocalSize(iszl, &localsize));
      PetscCall(PetscMalloc1(localsize, &indcz));
      PetscCall(ISGetIndices(iszl, &indz));
      PetscCall(PetscArraycpy(indcz, indz, localsize));
      PetscCall(ISRestoreIndices(iszl, &indz));
      PetscCall(ISDestroy(&iszl));
      for (i = 0; i < localsize; i++) *(indcz + i) += sizez;
      PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz));
      PetscCall(PCFieldSplitRestrictIS(subpc, isz));
      PetscCall(ISDestroy(&isz));
    }
    next  = ilink->next;
    ilink = next;
  }
  jac->isrestrict = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Appends a new split, defined by the index set is, to the linked list of splits */
static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is)
{
  PC_FieldSplit    *jac = (PC_FieldSplit *)pc->data;
  PC_FieldSplitLink ilink, next = jac->head;
  char              prefix[128];

  PetscFunctionBegin;
  if (jac->splitdefined) {
    PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(PetscNew(&ilink));
  if (splitname) {
    PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
  } else {
    /* No name given: use the split number as its name */
    PetscCall(PetscMalloc1(8, &ilink->splitname));
    PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits));
  }
  ilink->event = jac->nsplits < 5 ?
KSP_Solve_FS_0 + jac->nsplits : KSP_Solve_FS_0 + 4; /* Any split greater than 4 gets logged in the 4th split */
  /* The link takes its own references to is for both the row and column index sets */
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is));
  ilink->is = is;
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&ilink->is_col));
  ilink->is_col = is;
  ilink->next   = NULL;
  /* Each split gets its own KSP, defaulting to KSPPREONLY, with prefix -fieldsplit_<name>_ */
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
  PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
  PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
  PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));

  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
  PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));

  /* Append the new link at the end of the list of splits */
  if (!next) {
    jac->head       = ilink;
    ilink->previous = NULL;
  } else {
    while (next->next) next = next->next;
    next->next      = ilink;
    ilink->previous = next;
  }
  jac->nsplits++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCFieldSplitSetFields - Sets the fields that define one particular split in `PCFIELDSPLIT`

  Logically Collective

  Input Parameters:
+ pc         - the preconditioner context
. splitname  - name of this split, if `NULL` the number of the split is used
. n          - the number of fields in this split
. fields     - the fields in this split
- fields_col - generally the same as fields, if it does not match fields then the matrix block that is solved for this set of fields comes from an off-diagonal block
               of the matrix and fields_col provides the column indices for that block

  Level: intermediate

  Notes:
  Use `PCFieldSplitSetIS()` to set a general set of indices as a split.
2026 2027 `PCFieldSplitSetFields()` is for defining fields as strided blocks. For example, if the block 2028 size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean 2029 0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x1x3x5x7.. x12x45x78x.... 2030 where the numbered entries indicate what is in the split. 2031 2032 This function is called once per split (it creates a new split each time). Solve options 2033 for this split will be available under the prefix `-fieldsplit_SPLITNAME_`. 2034 2035 `PCFieldSplitSetIS()` does not support having a fields_col different from fields 2036 2037 Developer Notes: 2038 This routine does not actually create the `IS` representing the split, that is delayed 2039 until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be 2040 available when this routine is called. 2041 2042 .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()` 2043 @*/ 2044 PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col) 2045 { 2046 PetscFunctionBegin; 2047 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2048 PetscAssertPointer(splitname, 2); 2049 PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname); 2050 PetscAssertPointer(fields, 4); 2051 PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col)); 2052 PetscFunctionReturn(PETSC_SUCCESS); 2053 } 2054 2055 /*@ 2056 PCFieldSplitSetDiagUseAmat - set flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build 2057 the sub-matrices associated with each split. 
Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.

  Logically Collective

  Input Parameters:
+ pc  - the preconditioner object
- flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from

  Options Database Key:
. -pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT`
@*/
PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Direct setter (no PetscTryMethod), so the PC type must be checked explicitly here */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  jac->diag_use_amat = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract diagonal blocks from Amat (rather than Pmat) to build
  the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.

  Logically Collective

  Input Parameter:
. pc - the preconditioner object

  Output Parameter:
. flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT`
@*/
PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(flg, 2);
  /* Direct getter, so the PC type must be checked explicitly here */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  *flg = jac->diag_use_amat;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetOffDiagUseAmat - set flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
  the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.

  Logically Collective

  Input Parameters:
+ pc  - the preconditioner object
- flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from

  Options Database Key:
.
-pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT`
@*/
PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Direct setter (no PetscTryMethod), so the PC type must be checked explicitly here */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  jac->offdiag_use_amat = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether to extract off-diagonal blocks from Amat (rather than Pmat) to build
  the sub-matrices associated with each split. Where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.

  Logically Collective

  Input Parameter:
. pc - the preconditioner object

  Output Parameter:
. flg - boolean flag indicating whether or not to use Amat to extract the off-diagonal blocks from

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT`
@*/
PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
  PetscBool      isfs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(flg, 2);
  /* Direct getter, so the PC type must be checked explicitly here */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
  *flg = jac->offdiag_use_amat;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT`

  Logically Collective

  Input Parameters:
+ pc        - the preconditioner context
. splitname - name of this split, if `NULL` the number of the split is used
- is        - the index set that defines the elements in this split

  Level: intermediate

  Notes:
  Use `PCFieldSplitSetFields()`, for splits defined by strided types.

  This function is called once per split (it creates a new split each time). Solve options
  for this split will be available under the prefix -fieldsplit_SPLITNAME_.

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`
@*/
PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* splitname is optional; only validate the pointer when one is provided */
  if (splitname) PetscAssertPointer(splitname, 2);
  PetscValidHeaderSpecific(is, IS_CLASSID, 3);
  PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCFieldSplitGetIS - Retrieves the elements for a split as an `IS`

  Logically Collective

  Input Parameters:
+ pc        - the preconditioner context
- splitname - name of this split

  Output Parameter:
. is - the index set that defines the elements in this split, or `NULL` if the split is not found

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(splitname, 2);
  PetscAssertPointer(is, 3);
  {
    PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
    PC_FieldSplitLink ilink = jac->head;
    PetscBool         found;

    /* Linear search of the linked list by split name; *is stays NULL when no split matches */
    *is = NULL;
    while (ilink) {
      PetscCall(PetscStrcmp(ilink->splitname, splitname, &found));
      if (found) {
        *is = ilink->is;
        break;
      }
      ilink = ilink->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS`

  Logically Collective

  Input Parameters:
+ pc    - the preconditioner context
- index - index of this split

  Output Parameter:
. is - the index set that defines the elements in this split

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is)
{
  PetscFunctionBegin;
  PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index);
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(is, 3);
  {
    PC_FieldSplit    *jac   = (PC_FieldSplit *)pc->data;
    PC_FieldSplitLink ilink = jac->head;
    PetscInt          i     = 0;
    PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits);

    /* Walk to the index-th link, then look it up by name through PCFieldSplitGetIS() */
    while (i < index) {
      ilink = ilink->next;
      ++i;
    }
    PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the
  fieldsplit preconditioner when calling `PCFieldSplitSetIS()`. If not set the matrix block size is used.

  Logically Collective

  Input Parameters:
+ pc - the preconditioner context
- bs - the block size

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
@*/
PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, bs, 2);
  PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits

  Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ n      - the number of splits
- subksp - the array of `KSP` contexts

  Level: advanced

  Notes:
  After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
  (not the `KSP`, just the array that contains them).

  You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`.

  If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the
  Schur complement and the `KSP` object used to iterate over the Schur complement.
  To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`.

  If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the
  inner linear system defined by the matrix H in each loop.

  Fortran Notes:
  You must pass in a `KSP` array that is large enough to contain all the `KSP`s.
  You can call `PCFieldSplitGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
  `KSP` array must be.

  Developer Notes:
  There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`

  The Fortran interface should be modernized to return directly the array of values.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()`
@*/
PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2);
  /* Dispatches to PCFieldSplitGetSubKSP_FieldSplit() or the Schur variant, depending on the composite type */
  PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT`

  Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ n      - the number of splits
- subksp - the array of `KSP` contexts

  Level: advanced

  Notes:
  After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
  (not the `KSP` just the array that contains them).

  You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`.

  If the fieldsplit type is of type `PC_COMPOSITE_SCHUR`, it returns (in order)
+ 1 - the `KSP` used for the (1,1) block
. 2 - the `KSP` used for the Schur complement (not the one used for the interior Schur solver)
- 3 - the `KSP` used for the (1,1) block in the upper triangular factor (if different from that of the (1,1) block).

  It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`.

  Fortran Notes:
  You must pass in a `KSP` array that is large enough to contain all the local `KSP`s.
  You can call `PCFieldSplitSchurGetSubKSP`(pc,n,`PETSC_NULL_KSP`,ierr) to determine how large the
  `KSP` array must be.

  Developer Notes:
  There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`

  Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged?

  The Fortran interface should be modernized to return directly the array of values.

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()`
@*/
PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2);
  PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetSchurPre - Indicates from what operator the preconditioner is constructed for the Schur complement.
  The default is the A11 matrix.

  Collective

  Input Parameters:
+ pc    - the preconditioner context
. ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default),
          `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`,
          `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL`
- pre   - matrix to use for preconditioning, or `NULL`

  Options Database Keys:
+ -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`.
See notes for meaning of various arguments
- -fieldsplit_1_pc_type <pctype> - the preconditioner algorithm that is used to construct the preconditioner from the operator

  Level: intermediate

  Notes:
  If ptype is
+ a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner
        matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix
. self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix:
         The only preconditioners that currently work with this symbolic representation matrix object are `PCLSC` and `PCHPDDM`
. user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument
         to this function).
. selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation $ Sp = A11 - A10 inv(diag(A00)) A01 $
          This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be
          lumped before extracting the diagonal using the additional option `-fieldsplit_1_mat_schur_complement_ainv_type lump`
- full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation
         computed internally by `PCFIELDSPLIT` (this is expensive)
         useful mostly as a test that the Schur complement approach can work for your problem

  When solving a saddle point problem, where the A11 block is identically zero, using `a11` as the ptype only makes sense
  with the additional option `-fieldsplit_1_pc_type none`. Usually for saddle point problems one would use a `ptype` of `self` and
  `-fieldsplit_1_pc_type lsc` which uses the least squares commutator to compute a preconditioner for the Schur complement.

  Developer Note:
  The name of this function and the option `-pc_fieldsplit_schur_precondition` are inconsistent; precondition should be used everywhere.

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`,
          `MatSchurComplementSetAinvType()`, `PCLSC`, `PCFieldSplitSetSchurFactType()`
@*/
PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Deprecated name retained for backward compatibility; use PCFieldSplitSetSchurPre() */
PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  return PCFieldSplitSetSchurPre(pc, ptype, pre);
} /* Deprecated name */

/*@
  PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be
  preconditioned. See `PCFieldSplitSetSchurPre()` for details.

  Logically Collective

  Input Parameter:
.
pc - the preconditioner context

  Output Parameters:
+ ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`
- pre   - matrix to use for preconditioning (with `PC_FIELDSPLIT_SCHUR_PRE_USER`), or `NULL`

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC`
@*/
PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSchurGetS - extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. S - the Schur complement matrix

  Level: advanced

  Note:
  This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`.

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`,
          `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()`
@*/
PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S)
{
  const char    *t;
  PetscBool      isfs;
  PC_FieldSplit *jac;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Validate both the PC type and the composite type before touching pc->data */
  PetscCall(PetscObjectGetType((PetscObject)pc, &t));
  PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
  jac = (PC_FieldSplit *)pc->data;
  PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
  if (S) *S = jac->schur;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSchurRestoreS - returns the `MATSCHURCOMPLEMENT` matrix used by this `PC`

  Not Collective

  Input Parameters:
+ pc - the preconditioner context
- S  - the Schur complement matrix

  Level: advanced

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MatSchurComplement`, `PCFieldSplitSchurGetS()`
@*/
PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S)
{
  const char    *t;
  PetscBool      isfs;
  PC_FieldSplit *jac;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCall(PetscObjectGetType((PetscObject)pc, &t));
  PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
  PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
  jac = (PC_FieldSplit
*)pc->data;
  PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
  /* The restore must hand back exactly the matrix obtained from PCFieldSplitSchurGetS() */
  PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten");
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Records how the Schur complement preconditioner will be built; keeps a reference to a user-provided matrix */
static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurpre = ptype;
  if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) {
    /* Replace any previously set user matrix, taking a new reference */
    PetscCall(MatDestroy(&jac->schur_user));
    jac->schur_user = pre;
    PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Returns the Schur preconditioning type and (borrowed, not referenced) user matrix */
static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  if (ptype) *ptype = jac->schurpre;
  if (pre) *pre = jac->schur_user;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetSchurFactType - sets which blocks of the approximate block factorization to retain in the preconditioner {cite}`murphy2000note` and {cite}`ipsen2001note`

  Collective

  Input Parameters:
+ pc    - the preconditioner context
- ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default

  Options Database Key:
.
-pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is `full`

  Level: intermediate

  Notes:
  The FULL factorization is

  ```{math}
  \left(\begin{array}{cc} A & B \\
  C & E \\
  \end{array}\right) =
  \left(\begin{array}{cc} 1 & 0 \\
  C*A^{-1} & I \\
  \end{array}\right)
  \left(\begin{array}{cc} A & 0 \\
  0 & S \\
  \end{array}\right)
  \left(\begin{array}{cc} I & A^{-1}B \\
  0 & I \\
  \end{array}\right) = L D U.
  ```

  where $ S = E - C*A^{-1}*B $. In practice, the full factorization is applied via block triangular solves with the grouping $L*(D*U)$. UPPER uses $D*U$, LOWER uses $L*D$,
  and DIAG is the diagonal part with the sign of $ S $ flipped (because this makes the preconditioner positive definite for many formulations,
  thus allowing the use of `KSPMINRES)`. Sign flipping of $ S $ can be turned off with `PCFieldSplitSetSchurScale()`.

  If $A$ and $S$ are solved exactly
+ 1 - FULL factorization is a direct solver.
. 2 - The preconditioned operator with LOWER or UPPER has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations.
- 3 - With DIAG, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations.

  If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner
  application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice.

  For symmetric problems in which $A$ is positive definite and $S$ is negative definite, DIAG can be used with `KSPMINRES`.

  A flexible method like `KSPFGMRES` or `KSPGCR`, [](sec_flexibleksp), must be used if the fieldsplit preconditioner is nonlinear (e.g.
a few iterations of a Krylov method is used to solve with A or S).

.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`,
          [](sec_flexibleksp), `PCFieldSplitSetSchurPre()`
@*/
PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Records which factorization blocks are retained; read during PCApply for the Schur composite type */
static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurfactorization = ftype;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetSchurScale - Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`.

  Collective

  Input Parameters:
+ pc    - the preconditioner context
- scale - scaling factor for the Schur complement

  Options Database Key:
.
-pc_fieldsplit_schur_scale <scale> - default is -1.0

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurFactType()`
@*/
PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveScalar(pc, scale, 2);
  PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Records the scaling applied to the Schur complement solution for the DIAG factorization */
static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->schurscale = scale;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement

  Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ A00 - the (0,0) block
. A01 - the (0,1) block
.
A10 - the (1,0) block
- A11 - the (1,1) block

  Level: advanced

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()`
@*/
PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Only meaningful once a Schur-complement split has been selected with PCFieldSplitSetType() */
  PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach.");
  /* Each output argument is optional; pass NULL for any block that is not needed.
     The returned matrices are borrowed references into the PC; callers must not destroy them. */
  if (A00) *A00 = jac->pmat[0];
  if (A01) *A01 = jac->B;
  if (A10) *A10 = jac->C;
  if (A11) *A11 = jac->pmat[1];
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetGKBTol - Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc        - the preconditioner context
- tolerance - the solver tolerance

  Options Database Key:
. -pc_fieldsplit_gkb_tol <tolerance> - default is 1e-5

  Level: intermediate

  Note:
  The generalized GKB algorithm {cite}`arioli2013` uses a lower bound estimate of the error in energy norm as stopping criterion.
  It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than
  this estimate, the stopping criterion is satisfactory in practical cases.
.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()`
@*/
PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveReal(pc, tolerance, 2);
  /* Invokes the implementation composed under this name, if any (composed only when the type is PC_COMPOSITE_GKB) */
  PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* FieldSplit-specific implementation: records the GKB stopping tolerance in the PC context */
static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbtol = tolerance;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetGKBMaxit - Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner in `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc    - the preconditioner context
- maxit - the maximum number of iterations

  Options Database Key:
.
-pc_fieldsplit_gkb_maxit <maxit> - default is 100

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()`
@*/
PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, maxit, 2);
  /* Invokes the implementation composed under this name, if any (composed only when the type is PC_COMPOSITE_GKB) */
  PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* FieldSplit-specific implementation: records the GKB outer-iteration limit in the PC context */
static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbmaxit = maxit;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetGKBDelay - Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization {cite}`arioli2013` in `PCFIELDSPLIT`
  preconditioner.

  Collective

  Input Parameters:
+ pc    - the preconditioner context
- delay - the delay window in the lower bound estimate

  Options Database Key:
. -pc_fieldsplit_gkb_delay <delay> - default is 5

  Level: intermediate

  Notes:
  The algorithm uses a lower bound estimate of the error in energy norm as stopping criterion. The lower bound of the error $ ||u-u^k||_H $
  is expressed as a truncated sum. The error at iteration k can only be measured at iteration (k + `delay`), and thus the algorithm needs
  at least (`delay` + 1) iterations to stop.
  For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to {cite}`arioli2013`

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
@*/
PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, delay, 2);
  /* Invokes the implementation composed under this name, if any (composed only when the type is PC_COMPOSITE_GKB) */
  PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* FieldSplit-specific implementation: records the delay window of the GKB lower-bound stopping criterion */
static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->gkbdelay = delay;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetGKBNu - Sets the scalar value nu >= 0 in the transformation H = A00 + nu*A01*A01' of the (1,1) block in the
  Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`

  Collective

  Input Parameters:
+ pc - the preconditioner context
- nu - the shift parameter

  Options Database Key:
. -pc_fieldsplit_gkb_nu <nu> - default is 1

  Level: intermediate

  Notes:
  This shift is in general done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by choosing `nu` sufficiently large. However,
  if `nu` is chosen too large, the matrix H might be badly conditioned and the solution of the linear system $Hx = b$ in the inner loop becomes difficult. It is therefore
  necessary to find a good balance in between the convergence of the inner and outer loop.

  For `nu` = 0, no shift is done. In this case A00 has to be positive definite. The matrix N in {cite}`arioli2013` is then chosen as identity.
2845 2846 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()` 2847 @*/ 2848 PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu) 2849 { 2850 PetscFunctionBegin; 2851 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2852 PetscValidLogicalCollectiveReal(pc, nu, 2); 2853 PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu)); 2854 PetscFunctionReturn(PETSC_SUCCESS); 2855 } 2856 2857 static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu) 2858 { 2859 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2860 2861 PetscFunctionBegin; 2862 jac->gkbnu = nu; 2863 PetscFunctionReturn(PETSC_SUCCESS); 2864 } 2865 2866 static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type) 2867 { 2868 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2869 2870 PetscFunctionBegin; 2871 jac->type = type; 2872 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL)); 2873 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL)); 2874 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL)); 2875 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL)); 2876 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL)); 2877 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL)); 2878 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL)); 2879 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL)); 2880 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL)); 2881 2882 if (type == PC_COMPOSITE_SCHUR) { 2883 pc->ops->apply = PCApply_FieldSplit_Schur; 2884 pc->ops->applytranspose = PCApplyTranspose_FieldSplit_Schur; 2885 pc->ops->view = PCView_FieldSplit_Schur; 
2886 2887 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur)); 2888 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit)); 2889 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit)); 2890 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit)); 2891 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit)); 2892 } else if (type == PC_COMPOSITE_GKB) { 2893 pc->ops->apply = PCApply_FieldSplit_GKB; 2894 pc->ops->view = PCView_FieldSplit_GKB; 2895 2896 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit)); 2897 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit)); 2898 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit)); 2899 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit)); 2900 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit)); 2901 } else { 2902 pc->ops->apply = PCApply_FieldSplit; 2903 pc->ops->view = PCView_FieldSplit; 2904 2905 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit)); 2906 } 2907 PetscFunctionReturn(PETSC_SUCCESS); 2908 } 2909 2910 static PetscErrorCode PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs) 2911 { 2912 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2913 2914 PetscFunctionBegin; 2915 PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" 
PetscInt_FMT, bs); 2916 PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs); 2917 jac->bs = bs; 2918 PetscFunctionReturn(PETSC_SUCCESS); 2919 } 2920 2921 static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[]) 2922 { 2923 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 2924 PC_FieldSplitLink ilink_current = jac->head; 2925 IS is_owned; 2926 2927 PetscFunctionBegin; 2928 jac->coordinates_set = PETSC_TRUE; // Internal flag 2929 PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, NULL)); 2930 2931 while (ilink_current) { 2932 // For each IS, embed it to get local coords indces 2933 IS is_coords; 2934 PetscInt ndofs_block; 2935 const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block 2936 2937 // Setting drop to true for safety. It should make no difference. 
2938 PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords)); 2939 PetscCall(ISGetLocalSize(is_coords, &ndofs_block)); 2940 PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration)); 2941 2942 // Allocate coordinates vector and set it directly 2943 PetscCall(PetscMalloc1(ndofs_block * dim, &ilink_current->coords)); 2944 for (PetscInt dof = 0; dof < ndofs_block; ++dof) { 2945 for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d]; 2946 } 2947 ilink_current->dim = dim; 2948 ilink_current->ndofs = ndofs_block; 2949 PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration)); 2950 PetscCall(ISDestroy(&is_coords)); 2951 ilink_current = ilink_current->next; 2952 } 2953 PetscCall(ISDestroy(&is_owned)); 2954 PetscFunctionReturn(PETSC_SUCCESS); 2955 } 2956 2957 /*@ 2958 PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT` 2959 2960 Collective 2961 2962 Input Parameters: 2963 + pc - the preconditioner context 2964 - type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`, 2965 `PC_COMPOSITE_GKB` 2966 2967 Options Database Key: 2968 . 
-pc_fieldsplit_type <one of multiplicative, additive, symmetric_multiplicative, special, schur> - Sets fieldsplit preconditioner type

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
          `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`, `PCFieldSplitSetSchurFactType()`
@*/
PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Invokes PCFieldSplitSetType_FieldSplit() if composed on this PC; a no-op for other PC types */
  PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT`

  Not collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`

  Level: intermediate

.seealso: [](sec_block_matrices), `PC`, `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
          `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
@*/
PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(type, 2);
  *type = jac->type;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3012 3013 Logically Collective 3014 3015 Input Parameters: 3016 + pc - the preconditioner context 3017 - flg - boolean indicating whether to use field splits defined by the `DM` 3018 3019 Options Database Key: 3020 . -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM` 3021 3022 Level: intermediate 3023 3024 Developer Note: 3025 The name should be `PCFieldSplitSetUseDMSplits()`, similar change to options database 3026 3027 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldsplitSetIS()` 3028 @*/ 3029 PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg) 3030 { 3031 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 3032 PetscBool isfs; 3033 3034 PetscFunctionBegin; 3035 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 3036 PetscValidLogicalCollectiveBool(pc, flg, 2); 3037 PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs)); 3038 if (isfs) jac->dm_splits = flg; 3039 PetscFunctionReturn(PETSC_SUCCESS); 3040 } 3041 3042 /*@ 3043 PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible. 3044 3045 Logically Collective 3046 3047 Input Parameter: 3048 . pc - the preconditioner context 3049 3050 Output Parameter: 3051 . 
flg - boolean indicating whether to use field splits defined by the `DM` 3052 3053 Level: intermediate 3054 3055 Developer Note: 3056 The name should be `PCFieldSplitGetUseDMSplits()` 3057 3058 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldsplitSetIS()` 3059 @*/ 3060 PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg) 3061 { 3062 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 3063 PetscBool isfs; 3064 3065 PetscFunctionBegin; 3066 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 3067 PetscAssertPointer(flg, 2); 3068 PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs)); 3069 if (isfs) { 3070 if (flg) *flg = jac->dm_splits; 3071 } 3072 PetscFunctionReturn(PETSC_SUCCESS); 3073 } 3074 3075 /*@ 3076 PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries. 3077 3078 Logically Collective 3079 3080 Input Parameter: 3081 . pc - the preconditioner context 3082 3083 Output Parameter: 3084 . flg - boolean indicating whether to detect fields or not 3085 3086 Level: intermediate 3087 3088 .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()` 3089 @*/ 3090 PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg) 3091 { 3092 PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; 3093 3094 PetscFunctionBegin; 3095 *flg = jac->detect; 3096 PetscFunctionReturn(PETSC_SUCCESS); 3097 } 3098 3099 /*@ 3100 PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries. 3101 3102 Logically Collective 3103 3104 Input Parameter: 3105 . pc - the preconditioner context 3106 3107 Output Parameter: 3108 . flg - boolean indicating whether to detect fields or not 3109 3110 Options Database Key: 3111 . 
-pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point

  Level: intermediate

  Note:
  Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`).

.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`
@*/
PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg)
{
  PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;

  PetscFunctionBegin;
  jac->detect = flg;
  if (jac->detect) {
    /* Saddle-point detection implies a Schur-complement split, preconditioned by the Schur complement itself */
    PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
    PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual
  collections of variables (that may overlap) called splits. See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details.

  Options Database Keys:
+ -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split
. -pc_fieldsplit_default - automatically add any fields to additional splits that have not
                           been supplied explicitly by `-pc_fieldsplit_%d_fields`
. -pc_fieldsplit_block_size <bs> - size of block that defines fields (i.e. there are bs fields)
. -pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting
. -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`; see `PCFieldSplitSetSchurPre()`
.
-pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - set factorization type when using `-pc_fieldsplit_type schur`;
                           see `PCFieldSplitSetSchurFactType()`
. -pc_fieldsplit_dm_splits <true,false> (default is true) - Whether to use `DMCreateFieldDecomposition()` for splits
- -pc_fieldsplit_detect_saddle_point - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver

  Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` .
  The options prefix for the inner solver when using the Golub-Kahan bidiagonalization preconditioner is `-fieldsplit_0_`
  For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields.

  To set options on the solvers for each block append `-fieldsplit_` to all the `PC`
  options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1`

  To set the options on the solvers separately for each block call `PCFieldSplitGetSubKSP()`
  and set the options directly on the resulting `KSP` object

  Level: intermediate

  Notes:
  Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries and `PCFieldSplitSetIS()`
  to define a split by an arbitrary collection of entries.

  If no splits are set, the default is used. If a `DM` is associated with the `PC` and it supports
  `DMCreateFieldDecomposition()`, then that is used for the default. Otherwise, the splits are defined by entries strided by bs,
  beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`,
  if this is not called the block size defaults to the blocksize of the second matrix passed
  to `KSPSetOperators()`/`PCSetOperators()`.
3170 3171 For the Schur complement preconditioner if 3172 3173 ```{math} 3174 J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right] 3175 ``` 3176 3177 the preconditioner using `full` factorization is logically 3178 ```{math} 3179 \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) A_{01} \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right] 3180 ``` 3181 where the action of $\text{inv}(A_{00})$ is applied using the KSP solver with prefix `-fieldsplit_0_`. $S$ is the Schur complement 3182 ```{math} 3183 S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01} 3184 ``` 3185 which is usually dense and not stored explicitly. The action of $\text{ksp}(S)$ is computed using the KSP solver with prefix `-fieldsplit_splitname_` (where `splitname` was given 3186 in providing the SECOND split or 1 if not given). For `PCFieldSplitGetSubKSP()` when field number is 0, 3187 it returns the KSP associated with `-fieldsplit_0_` while field number 1 gives `-fieldsplit_1_` KSP. By default 3188 $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$. 3189 3190 The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above, 3191 `diag` gives 3192 ```{math} 3193 \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & -\text{ksp}(S) \end{array}\right] 3194 ``` 3195 Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$ so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip 3196 can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. 
The `lower` factorization is the inverse of 3197 ```{math} 3198 \left[\begin{array}{cc} A_{00} & 0 \\ A_{10} & S \end{array}\right] 3199 ``` 3200 where the inverses of A_{00} and S are applied using KSPs. The upper factorization is the inverse of 3201 ```{math} 3202 \left[\begin{array}{cc} A_{00} & A_{01} \\ 0 & S \end{array}\right] 3203 ``` 3204 where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s. 3205 3206 If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS` 3207 is used automatically for a second block. 3208 3209 The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1. 3210 Generally it should be used with the `MATAIJ` format. 3211 3212 The forms of these preconditioners are closely related if not identical to forms derived as "Distributive Iterations", see, 3213 for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`wesseling2009`. 3214 One can also use `PCFIELDSPLIT` 3215 inside a smoother resulting in "Distributive Smoothers". 3216 3217 See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`. 3218 3219 The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the 3220 residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables. 3221 3222 The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape 3223 ```{math} 3224 \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right] 3225 ``` 3226 with $A_{00}$ positive semi-definite. The implementation follows {cite}`arioli2013`. 
  Therein, we choose $N := 1/\nu * I$ and the $(1,1)$-block of the matrix is modified to $H = A_{00} + \nu*A_{01}*A_{01}'$.
  A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`.

  Developer Note:
  The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their
  user API.

.seealso: [](sec_block_matrices), `PC`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`,
          `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`,
          `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`,
          `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()`
M*/

/* Constructor for PCFIELDSPLIT: allocates the context, sets the default parameters, and installs the
   function table and the composed methods that implement the PCFieldSplit API for this PC type */
PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc)
{
  PC_FieldSplit *jac;

  PetscFunctionBegin;
  PetscCall(PetscNew(&jac));

  /* Defaults; these can be overridden via the API or the options database before setup */
  jac->bs                 = -1; /* non-positive means "not yet set"; see PCFieldSplitSetBlockSize() */
  jac->nsplits            = 0;
  jac->type               = PC_COMPOSITE_MULTIPLICATIVE;
  jac->schurpre           = PC_FIELDSPLIT_SCHUR_PRE_USER; /* Try user preconditioner first, fall back on diagonal */
  jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL;
  jac->schurscale         = -1.0; /* default sign flip of ksp(S) for the DIAG factorization */
  jac->dm_splits          = PETSC_TRUE;
  jac->detect             = PETSC_FALSE;
  jac->gkbtol             = 1e-5;
  jac->gkbdelay           = 5;
  jac->gkbnu              = 1;
  jac->gkbmaxit           = 100;
  jac->gkbmonitor         = PETSC_FALSE;
  jac->coordinates_set    = PETSC_FALSE;

  pc->data = (void *)jac;

  pc->ops->apply           = PCApply_FieldSplit;
  pc->ops->applytranspose  = PCApplyTranspose_FieldSplit;
  pc->ops->setup           = PCSetUp_FieldSplit;
  pc->ops->reset           = PCReset_FieldSplit;
  pc->ops->destroy         = PCDestroy_FieldSplit;
  pc->ops->setfromoptions  = PCSetFromOptions_FieldSplit;
  pc->ops->view            = PCView_FieldSplit;
  pc->ops->applyrichardson = NULL;

  /* Composed methods implementing the PCFieldSplit API; type-specific ones (Schur/GKB setters) are
     composed later by PCFieldSplitSetType_FieldSplit() */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit));
  PetscFunctionReturn(PETSC_SUCCESS);
}