/*
   The PC (preconditioner) interface routines, callable by users.
*/
#include <petsc/private/pcimpl.h> /*I "petscksp.h" I*/
#include <petscdm.h>

/* Logging support */
PetscClassId  PC_CLASSID;
PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_MatApply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks;
PetscInt      PetscMGLevelId;
PetscLogStage PCMPIStage;

/* Chooses a reasonable default PC type from the properties of pc->pmat:
   factorization availability, known symmetry, and serial vs parallel layout.
   Sets *type to NULL when no pmat has been provided yet. */
PETSC_INTERN PetscErrorCode PCGetDefaultType_Private(PC pc, const char *type[])
{
  PetscMPIInt size;
  PetscBool   hasopblock, hasopsolve, flg1, flg2, set, flg3, isnormal;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
  if (pc->pmat) {
    PetscCall(MatHasOperation(pc->pmat, MATOP_GET_DIAGONAL_BLOCK, &hasopblock));
    PetscCall(MatHasOperation(pc->pmat, MATOP_SOLVE, &hasopsolve));
    if (size == 1) {
      PetscCall(MatGetFactorAvailable(pc->pmat, "petsc", MAT_FACTOR_ICC, &flg1));
      PetscCall(MatGetFactorAvailable(pc->pmat, "petsc", MAT_FACTOR_ILU, &flg2));
      PetscCall(MatIsSymmetricKnown(pc->pmat, &set, &flg3));
      PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &isnormal, MATNORMAL, MATNORMALHERMITIAN, NULL));
      /* prefer ICC only when ILU is unavailable or the matrix is known to be symmetric */
      if (flg1 && (!flg2 || (set && flg3))) {
        *type = PCICC;
      } else if (flg2) {
        *type = PCILU;
      } else if (isnormal) {
        *type = PCNONE;
      } else if (hasopblock) { /* likely is a parallel matrix run on one processor */
        *type = PCBJACOBI;
      } else if (hasopsolve) {
        *type = PCMAT;
      } else {
        *type = PCNONE;
      }
    } else {
      if (hasopblock) {
        *type = PCBJACOBI;
      } else if (hasopsolve) {
        *type = PCMAT;
      } else {
        *type = PCNONE;
      }
    }
  } else *type = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* do not log solves, setup and applications of preconditioners while constructing preconditioners; perhaps they should be logged separately from the regular
solves */
PETSC_EXTERN PetscLogEvent KSP_Solve, KSP_SetUp;

/* Temporarily disable logging of KSP/PC solve and setup events; must be matched by PCLogEventsDeactivatePop() */
static PetscErrorCode PCLogEventsDeactivatePush(void)
{
  PetscFunctionBegin;
  PetscCall(KSPInitializePackage());
  PetscCall(PetscLogEventDeactivatePush(KSP_Solve));
  PetscCall(PetscLogEventDeactivatePush(KSP_SetUp));
  PetscCall(PetscLogEventDeactivatePush(PC_Apply));
  PetscCall(PetscLogEventDeactivatePush(PC_SetUp));
  PetscCall(PetscLogEventDeactivatePush(PC_SetUpOnBlocks));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Re-enable the event logging disabled by PCLogEventsDeactivatePush() */
static PetscErrorCode PCLogEventsDeactivatePop(void)
{
  PetscFunctionBegin;
  PetscCall(KSPInitializePackage());
  PetscCall(PetscLogEventDeactivatePop(KSP_Solve));
  PetscCall(PetscLogEventDeactivatePop(KSP_SetUp));
  PetscCall(PetscLogEventDeactivatePop(PC_Apply));
  PetscCall(PetscLogEventDeactivatePop(PC_SetUp));
  PetscCall(PetscLogEventDeactivatePop(PC_SetUpOnBlocks));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCReset - Resets a `PC` context to the pcsetupcalled = 0 state and removes any allocated `Vec`s and `Mat`s

  Collective

  Input Parameter:
. pc - the preconditioner context

  Level: developer

  Note:
  This allows a `PC` to be reused for a different sized linear system but using the same options that have been previously set in `pc`

.seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCSetUp()`
@*/
PetscErrorCode PCReset(PC pc)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* give the implementation a chance to release its own data first */
  PetscTryTypeMethod(pc, reset);
  PetscCall(VecDestroy(&pc->diagonalscaleright));
  PetscCall(VecDestroy(&pc->diagonalscaleleft));
  PetscCall(MatDestroy(&pc->pmat));
  PetscCall(MatDestroy(&pc->mat));

  pc->setupcalled = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCDestroy - Destroys `PC` context that was created with `PCCreate()`.

  Collective

  Input Parameter:
.
pc - the preconditioner context

  Level: developer

.seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCSetUp()`
@*/
PetscErrorCode PCDestroy(PC *pc)
{
  PetscFunctionBegin;
  if (!*pc) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(*pc, PC_CLASSID, 1);
  /* only the final reference actually destroys the object */
  if (--((PetscObject)*pc)->refct > 0) {
    *pc = NULL;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PCReset(*pc));

  /* if memory was published with SAWs then destroy it */
  PetscCall(PetscObjectSAWsViewOff((PetscObject)*pc));
  PetscTryTypeMethod(*pc, destroy);
  PetscCall(DMDestroy(&(*pc)->dm));
  PetscCall(PetscHeaderDestroy(pc));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right
  scaling as needed by certain time-stepping codes.

  Logically Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. flag - `PETSC_TRUE` if it applies the scaling

  Level: developer

  Note:
  If this returns `PETSC_TRUE` then the system solved via the Krylov method is, for left and right preconditioning,

  $$
  \begin{align*}
  D M A D^{-1} y = D M b \\
  D A M D^{-1} z = D b.
  \end{align*}
  $$

.seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCSetDiagonalScale()`
@*/
PetscErrorCode PCGetDiagonalScale(PC pc, PetscBool *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(flag, 2);
  *flag = pc->diagonalscale;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right
  scaling as needed by certain time-stepping codes.

  Logically Collective

  Input Parameters:
+ pc - the preconditioner context
- s  - scaling vector

  Level: intermediate

  Notes:
  The system solved via the Krylov method is, for left and right preconditioning,
  $$
  \begin{align*}
  D M A D^{-1} y = D M b \\
  D A M D^{-1} z = D b.
  \end{align*}
  $$

  `PCDiagonalScaleLeft()` scales a vector by $D$. `PCDiagonalScaleRight()` scales a vector by $D^{-1}$.

.seealso: [](ch_ksp), `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCGetDiagonalScale()`
@*/
PetscErrorCode PCSetDiagonalScale(PC pc, Vec s)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(s, VEC_CLASSID, 2);
  pc->diagonalscale = PETSC_TRUE;

  /* take a reference before destroying any previously set scaling (safe even if s is the current one) */
  PetscCall(PetscObjectReference((PetscObject)s));
  PetscCall(VecDestroy(&pc->diagonalscaleleft));

  pc->diagonalscaleleft = s;

  /* the right scaling is the elementwise reciprocal of the left scaling */
  PetscCall(VecDuplicate(s, &pc->diagonalscaleright));
  PetscCall(VecCopy(s, pc->diagonalscaleright));
  PetscCall(VecReciprocal(pc->diagonalscaleright));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes.

  Logically Collective

  Input Parameters:
+ pc  - the preconditioner context
. in  - input vector
- out - scaled vector (maybe the same as in)

  Level: intermediate

  Notes:
  The system solved via the Krylov method is, for left and right preconditioning,

  $$
  \begin{align*}
  D M A D^{-1} y = D M b \\
  D A M D^{-1} z = D b.
  \end{align*}
  $$

  `PCDiagonalScaleLeft()` scales a vector by $D$. `PCDiagonalScaleRight()` scales a vector by $D^{-1}$.

  If diagonal scaling is turned off and `in` is not `out` then `in` is copied to `out`

.seealso: [](ch_ksp), `PCCreate()`, `PCSetUp()`, `PCSetDiagonalScale()`, `PCDiagonalScaleRight()`, `MatDiagonalScale()`
@*/
PetscErrorCode PCDiagonalScaleLeft(PC pc, Vec in, Vec out)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(in, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(out, VEC_CLASSID, 3);
  if (pc->diagonalscale) {
    PetscCall(VecPointwiseMult(out, pc->diagonalscaleleft, in));
  } else if (in != out) {
    PetscCall(VecCopy(in, out));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes.

  Logically Collective

  Input Parameters:
+ pc  - the preconditioner context
. in  - input vector
- out - scaled vector (maybe the same as in)

  Level: intermediate

  Notes:
  The system solved via the Krylov method is, for left and right preconditioning,

  $$
  \begin{align*}
  D M A D^{-1} y = D M b \\
  D A M D^{-1} z = D b.
  \end{align*}
  $$

  `PCDiagonalScaleLeft()` scales a vector by $D$. `PCDiagonalScaleRight()` scales a vector by $D^{-1}$.
286 287 If diagonal scaling is turned off and `in` is not `out` then `in` is copied to `out` 288 289 .seealso: [](ch_ksp), `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCSetDiagonalScale()`, `MatDiagonalScale()` 290 @*/ 291 PetscErrorCode PCDiagonalScaleRight(PC pc, Vec in, Vec out) 292 { 293 PetscFunctionBegin; 294 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 295 PetscValidHeaderSpecific(in, VEC_CLASSID, 2); 296 PetscValidHeaderSpecific(out, VEC_CLASSID, 3); 297 if (pc->diagonalscale) { 298 PetscCall(VecPointwiseMult(out, pc->diagonalscaleright, in)); 299 } else if (in != out) { 300 PetscCall(VecCopy(in, out)); 301 } 302 PetscFunctionReturn(PETSC_SUCCESS); 303 } 304 305 /*@ 306 PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the 307 operator during the preconditioning process it applies the Amat provided to `TSSetRHSJacobian()`, 308 `TSSetIJacobian()`, `SNESSetJacobian()`, `KSPSetOperators()` or `PCSetOperators()` not the Pmat. 309 310 Logically Collective 311 312 Input Parameters: 313 + pc - the preconditioner context 314 - flg - `PETSC_TRUE` to use the Amat, `PETSC_FALSE` to use the Pmat (default is false) 315 316 Options Database Key: 317 . -pc_use_amat <true,false> - use the amat argument to `KSPSetOperators()` or `PCSetOperators()` to apply the operator 318 319 Level: intermediate 320 321 Note: 322 For the common case in which the linear system matrix and the matrix used to construct the 323 preconditioner are identical, this routine has no affect. 
.seealso: [](ch_ksp), `PC`, `PCGetUseAmat()`, `PCBJACOBI`, `PCMG`, `PCFIELDSPLIT`, `PCCOMPOSITE`,
          `KSPSetOperators()`, `PCSetOperators()`
@*/
PetscErrorCode PCSetUseAmat(PC pc, PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  pc->useAmat = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCSetErrorIfFailure - Causes `PC` to generate an error if a floating point exception, for example a zero pivot, is detected.

  Logically Collective

  Input Parameters:
+ pc  - iterative context obtained from `PCCreate()`
- flg - `PETSC_TRUE` indicates you want the error generated

  Level: advanced

  Notes:
  Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner, you can call `KSPGetConvergedReason()` after a `KSPSolve()`
  to determine if it has converged or failed. Or use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is
  detected.

  This is propagated into `KSP`s used by this `PC`, which then propagate it into `PC`s used by those `KSP`s

.seealso: [](ch_ksp), `PC`, `KSPSetErrorIfNotConverged()`, `PCGetInitialGuessNonzero()`, `PCSetInitialGuessKnoll()`, `PCGetInitialGuessKnoll()`
@*/
PetscErrorCode PCSetErrorIfFailure(PC pc, PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flg, 2);
  pc->erroriffailure = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the
  operator during the preconditioning process it applies the Amat provided to `TSSetRHSJacobian()`,
  `TSSetIJacobian()`, `SNESSetJacobian()`, `KSPSetOperators()` or `PCSetOperators()` not the Pmat.

  Logically Collective

  Input Parameter:
.
pc - the preconditioner context 374 375 Output Parameter: 376 . flg - `PETSC_TRUE` to use the Amat, `PETSC_FALSE` to use the Pmat (default is false) 377 378 Level: intermediate 379 380 Note: 381 For the common case in which the linear system matrix and the matrix used to construct the 382 preconditioner are identical, this routine is does nothing. 383 384 .seealso: [](ch_ksp), `PC`, `PCSetUseAmat()`, `PCBJACOBI`, `PCMG`, `PCFIELDSPLIT`, `PCCOMPOSITE` 385 @*/ 386 PetscErrorCode PCGetUseAmat(PC pc, PetscBool *flg) 387 { 388 PetscFunctionBegin; 389 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 390 *flg = pc->useAmat; 391 PetscFunctionReturn(PETSC_SUCCESS); 392 } 393 394 /*@ 395 PCSetKSPNestLevel - sets the amount of nesting the `KSP` that contains this `PC` has 396 397 Collective 398 399 Input Parameters: 400 + pc - the `PC` 401 - level - the nest level 402 403 Level: developer 404 405 .seealso: [](ch_ksp), `KSPSetUp()`, `KSPSolve()`, `KSPDestroy()`, `KSP`, `KSPGMRES`, `KSPType`, `KSPGetNestLevel()`, `PCGetKSPNestLevel()`, `KSPSetNestLevel()` 406 @*/ 407 PetscErrorCode PCSetKSPNestLevel(PC pc, PetscInt level) 408 { 409 PetscFunctionBegin; 410 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 411 PetscValidLogicalCollectiveInt(pc, level, 2); 412 pc->kspnestlevel = level; 413 PetscFunctionReturn(PETSC_SUCCESS); 414 } 415 416 /*@ 417 PCGetKSPNestLevel - gets the amount of nesting the `KSP` that contains this `PC` has 418 419 Not Collective 420 421 Input Parameter: 422 . pc - the `PC` 423 424 Output Parameter: 425 . 
level - the nest level

  Level: developer

.seealso: [](ch_ksp), `KSPSetUp()`, `KSPSolve()`, `KSPDestroy()`, `KSP`, `KSPGMRES`, `KSPType`, `KSPSetNestLevel()`, `PCSetKSPNestLevel()`, `KSPGetNestLevel()`
@*/
PetscErrorCode PCGetKSPNestLevel(PC pc, PetscInt *level)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(level, 2);
  *level = pc->kspnestlevel;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCCreate - Creates a preconditioner context, `PC`

  Collective

  Input Parameter:
. comm - MPI communicator

  Output Parameter:
. newpc - location to put the preconditioner context

  Level: developer

  Note:
  The default preconditioner for sparse matrices is `PCILU` or `PCICC` with 0 fill on one process and block Jacobi (`PCBJACOBI`) with `PCILU` or `PCICC`
  in parallel. For dense matrices it is always `PCNONE`.

.seealso: [](ch_ksp), `PC`, `PCSetUp()`, `PCApply()`, `PCDestroy()`
@*/
PetscErrorCode PCCreate(MPI_Comm comm, PC *newpc)
{
  PC pc;

  PetscFunctionBegin;
  PetscAssertPointer(newpc, 2);
  PetscCall(PCInitializePackage());

  PetscCall(PetscHeaderCreate(pc, PC_CLASSID, "PC", "Preconditioner", "PC", comm, PCDestroy, PCView));
  /* explicitly initialize the fields consulted before the first PCSetUp() */
  pc->mat                  = NULL;
  pc->pmat                 = NULL;
  pc->setupcalled          = 0;
  pc->setfromoptionscalled = 0;
  pc->data                 = NULL;
  pc->diagonalscale        = PETSC_FALSE;
  pc->diagonalscaleleft    = NULL;
  pc->diagonalscaleright   = NULL;

  pc->modifysubmatrices  = NULL;
  pc->modifysubmatricesP = NULL;

  *newpc = pc;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApply - Applies the preconditioner to a vector.

  Collective

  Input Parameters:
+ pc - the preconditioner context
- x  - input vector

  Output Parameter:
. y - output vector

  Level: developer

.seealso: [](ch_ksp), `PC`, `PCApplyTranspose()`, `PCApplyBAorAB()`
@*/
PetscErrorCode PCApply(PC pc, Vec x, Vec y)
{
  PetscInt m, n, mv, nv;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  /* use pmat to check vector sizes since for KSPLSQR the pmat may be of a different size than mat */
  PetscCall(MatGetLocalSize(pc->pmat, &m, &n));
  PetscCall(VecGetLocalSize(x, &mv));
  PetscCall(VecGetLocalSize(y, &nv));
  /* check pmat * y = x is feasible */
  PetscCheck(mv == m, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Preconditioner number of local rows %" PetscInt_FMT " does not equal input vector size %" PetscInt_FMT, m, mv);
  PetscCheck(nv == n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Preconditioner number of local columns %" PetscInt_FMT " does not equal output vector size %" PetscInt_FMT, n, nv);
  PetscCall(VecSetErrorIfLocked(y, 3));

  PetscCall(PCSetUp(pc));
  /* the input vector must not change while the implementation applies the preconditioner */
  PetscCall(VecLockReadPush(x));
  PetscCall(PetscLogEventBegin(PC_Apply, pc, x, y, 0));
  PetscUseTypeMethod(pc, apply, x, y);
  PetscCall(PetscLogEventEnd(PC_Apply, pc, x, y, 0));
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscCall(VecLockReadPop(x));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMatApply - Applies the preconditioner to multiple vectors stored as a `MATDENSE`. Like `PCApply()`, `Y` and `X` must be different matrices.

  Collective

  Input Parameters:
+ pc - the preconditioner context
- X  - block of input vectors

  Output Parameter:
.
Y - block of output vectors

  Level: developer

.seealso: [](ch_ksp), `PC`, `PCApply()`, `KSPMatSolve()`
@*/
PetscErrorCode PCMatApply(PC pc, Mat X, Mat Y)
{
  Mat       A;
  Vec       cy, cx;
  PetscInt  m1, M1, m2, M2, n1, N1, n2, N2, m3, M3, n3, N3;
  PetscBool match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(X, MAT_CLASSID, 2);
  PetscValidHeaderSpecific(Y, MAT_CLASSID, 3);
  PetscCheckSameComm(pc, 1, X, 2);
  PetscCheckSameComm(pc, 1, Y, 3);
  PetscCheck(Y != X, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "Y and X must be different matrices");
  PetscCall(PCGetOperators(pc, NULL, &A));
  /* (m,n) = local sizes, (M,N) = global sizes; suffixes: 1 = Y, 2 = X, 3 = Pmat */
  PetscCall(MatGetLocalSize(A, &m3, &n3));
  PetscCall(MatGetLocalSize(X, &m2, &n2));
  PetscCall(MatGetLocalSize(Y, &m1, &n1));
  PetscCall(MatGetSize(A, &M3, &N3));
  PetscCall(MatGetSize(X, &M2, &N2));
  PetscCall(MatGetSize(Y, &M1, &N1));
  PetscCheck(n1 == n2 && N1 == N2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of input vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and block of output vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")", n2, N2, n1, N1);
  PetscCheck(m2 == m3 && M2 == M3, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of input vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m2, M2, m3, M3, n3, N3);
  PetscCheck(m1 == n3 && M1 == N3, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of output vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m1, M1, m3, M3, n3, N3);
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)Y, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of output vectors not stored in a dense Mat");
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of input vectors not stored in a dense Mat");
  PetscCall(PCSetUp(pc));
  if (pc->ops->matapply) {
    PetscCall(PetscLogEventBegin(PC_MatApply, pc, X, Y, 0));
    PetscUseTypeMethod(pc, matapply, X, Y);
    PetscCall(PetscLogEventEnd(PC_MatApply, pc, X, Y, 0));
  } else {
    /* no block implementation available: fall back to one PCApply() per column */
    PetscCall(PetscInfo(pc, "PC type %s applying column by column\n", ((PetscObject)pc)->type_name));
    for (n1 = 0; n1 < N1; ++n1) {
      PetscCall(MatDenseGetColumnVecRead(X, n1, &cx));
      PetscCall(MatDenseGetColumnVecWrite(Y, n1, &cy));
      PetscCall(PCApply(pc, cx, cy));
      PetscCall(MatDenseRestoreColumnVecWrite(Y, n1, &cy));
      PetscCall(MatDenseRestoreColumnVecRead(X, n1, &cx));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.

  Collective

  Input Parameters:
+ pc - the preconditioner context
- x  - input vector

  Output Parameter:
. y - output vector

  Level: developer

  Note:
  Currently, this routine is implemented only for `PCICC` and `PCJACOBI` preconditioners.
.seealso: [](ch_ksp), `PC`, `PCApply()`, `PCApplySymmetricRight()`
@*/
PetscErrorCode PCApplySymmetricLeft(PC pc, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  PetscCall(VecLockReadPush(x));
  PetscCall(PetscLogEventBegin(PC_ApplySymmetricLeft, pc, x, y, 0));
  PetscUseTypeMethod(pc, applysymmetricleft, x, y);
  PetscCall(PetscLogEventEnd(PC_ApplySymmetricLeft, pc, x, y, 0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.

  Collective

  Input Parameters:
+ pc - the preconditioner context
- x  - input vector

  Output Parameter:
. y - output vector

  Level: developer

  Note:
  Currently, this routine is implemented only for `PCICC` and `PCJACOBI` preconditioners.

.seealso: [](ch_ksp), `PC`, `PCApply()`, `PCApplySymmetricLeft()`
@*/
PetscErrorCode PCApplySymmetricRight(PC pc, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  PetscCall(VecLockReadPush(x));
  PetscCall(PetscLogEventBegin(PC_ApplySymmetricRight, pc, x, y, 0));
  PetscUseTypeMethod(pc, applysymmetricright, x, y);
  PetscCall(PetscLogEventEnd(PC_ApplySymmetricRight, pc, x, y, 0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplyTranspose - Applies the transpose of preconditioner to a vector.

  Collective

  Input Parameters:
+ pc - the preconditioner context
- x  - input vector

  Output Parameter:
. y - output vector

  Level: developer

  Note:
  For complex numbers this applies the non-Hermitian transpose.
Developer Note:
  We need to implement a `PCApplyHermitianTranspose()`

.seealso: [](ch_ksp), `PC`, `PCApply()`, `PCApplyBAorAB()`, `PCApplyBAorABTranspose()`, `PCApplyTransposeExists()`
@*/
PetscErrorCode PCApplyTranspose(PC pc, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  PetscCall(VecLockReadPush(x));
  /* transpose applications are logged under the same PC_Apply event as forward applications */
  PetscCall(PetscLogEventBegin(PC_Apply, pc, x, y, 0));
  PetscUseTypeMethod(pc, applytranspose, x, y);
  PetscCall(PetscLogEventEnd(PC_Apply, pc, x, y, 0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation

  Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. flg - `PETSC_TRUE` if a transpose operation is defined

  Level: developer

.seealso: [](ch_ksp), `PC`, `PCApplyTranspose()`
@*/
PetscErrorCode PCApplyTransposeExists(PC pc, PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(flg, 2);
  if (pc->ops->applytranspose) *flg = PETSC_TRUE;
  else *flg = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplyBAorAB - Applies the preconditioner and operator to a vector. $y = B*A*x $ or $ y = A*B*x$.

  Collective

  Input Parameters:
+ pc   - the preconditioner context
. side - indicates the preconditioner side, one of `PC_LEFT`, `PC_RIGHT`, or `PC_SYMMETRIC`
.
x    - input vector
- work - work vector

  Output Parameter:
. y - output vector

  Level: developer

  Note:
  If the `PC` has had `PCSetDiagonalScale()` set then $ D M A D^{-1} $ for left preconditioning or $ D A M D^{-1} $ is actually applied.
  The specific `KSPSolve()` method must also be written to handle the post-solve "correction" for the diagonal scaling.

.seealso: [](ch_ksp), `PC`, `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorABTranspose()`
@*/
PetscErrorCode PCApplyBAorAB(PC pc, PCSide side, Vec x, Vec y, Vec work)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, side, 2);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 3);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 4);
  PetscValidHeaderSpecific(work, VEC_CLASSID, 5);
  PetscCheckSameComm(pc, 1, x, 3);
  PetscCheckSameComm(pc, 1, y, 4);
  PetscCheckSameComm(pc, 1, work, 5);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  PetscCheck(side == PC_LEFT || side == PC_SYMMETRIC || side == PC_RIGHT, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Side must be right, left, or symmetric");
  PetscCheck(!pc->diagonalscale || side != PC_SYMMETRIC, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot include diagonal scaling with symmetric preconditioner application");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 3, PETSC_TRUE));

  PetscCall(PCSetUp(pc));
  if (pc->diagonalscale) {
    if (pc->ops->applyBA) {
      Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
      PetscCall(VecDuplicate(x, &work2));
      PetscCall(PCDiagonalScaleRight(pc, x, work2));
      PetscUseTypeMethod(pc, applyBA, side, work2, y, work);
      PetscCall(PCDiagonalScaleLeft(pc, y, y));
      PetscCall(VecDestroy(&work2));
    } else if (side == PC_RIGHT) {
      /* y = D A B D^{-1} x, built in stages with y doubling as scratch */
      PetscCall(PCDiagonalScaleRight(pc, x, y));
      PetscCall(PCApply(pc, y, work));
      PetscCall(MatMult(pc->mat, work, y));
      PetscCall(PCDiagonalScaleLeft(pc, y, y));
    } else if (side == PC_LEFT) {
      /* y = D B A D^{-1} x */
      PetscCall(PCDiagonalScaleRight(pc, x, y));
      PetscCall(MatMult(pc->mat, y, work));
      PetscCall(PCApply(pc, work, y));
      PetscCall(PCDiagonalScaleLeft(pc, y, y));
    } else PetscCheck(side != PC_SYMMETRIC, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot provide diagonal scaling with symmetric application of preconditioner");
  } else {
    if (pc->ops->applyBA) {
      PetscUseTypeMethod(pc, applyBA, side, x, y, work);
    } else if (side == PC_RIGHT) {
      PetscCall(PCApply(pc, x, work));
      PetscCall(MatMult(pc->mat, work, y));
    } else if (side == PC_LEFT) {
      PetscCall(MatMult(pc->mat, x, work));
      PetscCall(PCApply(pc, work, y));
    } else if (side == PC_SYMMETRIC) {
      /* There's an extra copy here; maybe should provide 2 work vectors instead? */
      PetscCall(PCApplySymmetricRight(pc, x, work));
      PetscCall(MatMult(pc->mat, work, y));
      PetscCall(VecCopy(y, work));
      PetscCall(PCApplySymmetricLeft(pc, work, y));
    }
  }
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 4, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplyBAorABTranspose - Applies the transpose of the preconditioner
  and operator to a vector. That is, applies $B^T * A^T$ with left preconditioning,
  NOT $(B*A)^T = A^T*B^T$.

  Collective

  Input Parameters:
+ pc   - the preconditioner context
. side - indicates the preconditioner side, one of `PC_LEFT`, `PC_RIGHT`, or `PC_SYMMETRIC`
. x    - input vector
- work - work vector

  Output Parameter:
. y - output vector

  Level: developer

  Note:
  This routine is used internally so that the same Krylov code can be used to solve $A x = b$ and $A^T x = b$, with a preconditioner
  defined by $B^T$.
This is why this has the funny form that it computes $B^T * A^T$

.seealso: [](ch_ksp), `PC`, `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorAB()`
@*/
PetscErrorCode PCApplyBAorABTranspose(PC pc, PCSide side, Vec x, Vec y, Vec work)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 3);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 4);
  PetscValidHeaderSpecific(work, VEC_CLASSID, 5);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 3, PETSC_TRUE));
  /* a PC implementation may supply a fused (BA)^T application; use it when available */
  if (pc->ops->applyBAtranspose) {
    PetscUseTypeMethod(pc, applyBAtranspose, side, x, y, work);
    if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 4, PETSC_FALSE));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCheck(side == PC_LEFT || side == PC_RIGHT, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Side must be right or left");

  PetscCall(PCSetUp(pc));
  if (side == PC_RIGHT) {
    PetscCall(PCApplyTranspose(pc, x, work));
    PetscCall(MatMultTranspose(pc->mat, work, y));
  } else if (side == PC_LEFT) {
    PetscCall(MatMultTranspose(pc->mat, x, work));
    PetscCall(PCApplyTranspose(pc, work, y));
  }
  /* add support for PC_SYMMETRIC */
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 4, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplyRichardsonExists - Determines whether a particular preconditioner has a
  built-in fast application of Richardson's method.

  Not Collective

  Input Parameter:
. pc - the preconditioner

  Output Parameter:
. exists - `PETSC_TRUE` or `PETSC_FALSE`

  Level: developer

.seealso: [](ch_ksp), `PC`, `KSPRICHARDSON`, `PCApplyRichardson()`
@*/
PetscErrorCode PCApplyRichardsonExists(PC pc, PetscBool *exists)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(exists, 2);
  if (pc->ops->applyrichardson) *exists = PETSC_TRUE;
  else *exists = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplyRichardson - Applies several steps of Richardson iteration with
  the particular preconditioner. This routine is usually used by the
  Krylov solvers and not the application code directly.

  Collective

  Input Parameters:
+ pc        - the preconditioner context
. b         - the right-hand side
. w         - one work vector
. rtol      - relative decrease in residual norm convergence criteria
. abstol    - absolute residual norm convergence criteria
. dtol      - divergence residual norm increase criteria
. its       - the number of iterations to apply.
- guesszero - if the input x contains nonzero initial guess

  Output Parameters:
+ outits - number of iterations actually used (for SOR this always equals its)
. reason - the reason the apply terminated
- y      - the solution (also contains initial guess if guesszero is `PETSC_FALSE`)

  Level: developer

  Notes:
  Most preconditioners do not support this function. Use the command
  `PCApplyRichardsonExists()` to determine if one does.

  Except for the `PCMG` this routine ignores the convergence tolerances
  and always runs for the number of iterations

.seealso: [](ch_ksp), `PC`, `PCApplyRichardsonExists()`
@*/
PetscErrorCode PCApplyRichardson(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(b, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscValidHeaderSpecific(w, VEC_CLASSID, 4);
  PetscCheck(b != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "b and y must be different vectors");
  PetscCall(PCSetUp(pc));
  /* errors (PETSC_ERR_SUP) if the PC type does not implement applyrichardson */
  PetscUseTypeMethod(pc, applyrichardson, b, y, w, rtol, abstol, dtol, its, guesszero, outits, reason);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCSetFailedReason - Sets the reason a `PCSetUp()` failed or `PC_NOERROR` if it did not fail

  Logically Collective

  Input Parameters:
+ pc     - the preconditioner context
- reason - the reason it failed

  Level: advanced

.seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCFailedReason`
@*/
PetscErrorCode PCSetFailedReason(PC pc, PCFailedReason reason)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  pc->failedreason = reason;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetFailedReason - Gets the reason a `PCSetUp()` failed or `PC_NOERROR` if it did not fail

  Logically Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. reason - the reason it failed

  Level: advanced

  Note:
  This is the maximum of the reasons over all ranks in the PC communicator.
  It is only valid after
  a call `KSPCheckDot()` or `KSPCheckNorm()` inside a `KSPSolve()` or `PCReduceFailedReason()`.
  It is not valid immediately after a `PCSetUp()` or `PCApply()`, then use `PCGetFailedReasonRank()`

.seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReasonRank()`, `PCSetFailedReason()`
@*/
PetscErrorCode PCGetFailedReason(PC pc, PCFailedReason *reason)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* a negative setupcalled records a failed PCSetUp(); the value encodes the failure reason */
  if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
  else *reason = pc->failedreason;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetFailedReasonRank - Gets the reason a `PCSetUp()` failed or `PC_NOERROR` if it did not fail on this MPI rank

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. reason - the reason it failed

  Level: advanced

  Note:
  Different processes may have different reasons or no reason, see `PCGetFailedReason()`

.seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReason()`, `PCSetFailedReason()`, `PCReduceFailedReason()`
@*/
PetscErrorCode PCGetFailedReasonRank(PC pc, PCFailedReason *reason)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* same local-only logic as PCGetFailedReason(); no reduction is performed here */
  if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
  else *reason = pc->failedreason;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCReduceFailedReason - Reduce the failed reason among the MPI processes that share the `PC`

  Collective

  Input Parameter:
. pc - the preconditioner context

  Level: advanced

  Note:
  Different MPI processes may have different reasons or no reason, see `PCGetFailedReason()`. This routine
  makes them have a common value (failure if any MPI process had a failure).
1024 1025 .seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReason()`, `PCSetFailedReason()` 1026 @*/ 1027 PetscErrorCode PCReduceFailedReason(PC pc) 1028 { 1029 PetscInt buf; 1030 1031 PetscFunctionBegin; 1032 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1033 buf = (PetscInt)pc->failedreason; 1034 PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &buf, 1, MPIU_INT, MPI_MAX, PetscObjectComm((PetscObject)pc))); 1035 pc->failedreason = (PCFailedReason)buf; 1036 PetscFunctionReturn(PETSC_SUCCESS); 1037 } 1038 1039 /* 1040 a setupcall of 0 indicates never setup, 1041 1 indicates has been previously setup 1042 -1 indicates a PCSetUp() was attempted and failed 1043 */ 1044 /*@ 1045 PCSetUp - Prepares for the use of a preconditioner. 1046 1047 Collective 1048 1049 Input Parameter: 1050 . pc - the preconditioner context 1051 1052 Level: developer 1053 1054 .seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()` 1055 @*/ 1056 PetscErrorCode PCSetUp(PC pc) 1057 { 1058 const char *def; 1059 PetscObjectState matstate, matnonzerostate; 1060 1061 PetscFunctionBegin; 1062 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1063 PetscCheck(pc->mat, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be set first"); 1064 1065 if (pc->setupcalled && pc->reusepreconditioner) { 1066 PetscCall(PetscInfo(pc, "Leaving PC with identical preconditioner since reuse preconditioner is set\n")); 1067 PetscFunctionReturn(PETSC_SUCCESS); 1068 } 1069 1070 PetscCall(PetscObjectStateGet((PetscObject)pc->pmat, &matstate)); 1071 PetscCall(MatGetNonzeroState(pc->pmat, &matnonzerostate)); 1072 if (!pc->setupcalled) { 1073 //PetscCall(PetscInfo(pc, "Setting up PC for first time\n")); 1074 pc->flag = DIFFERENT_NONZERO_PATTERN; 1075 } else if (matstate == pc->matstate) PetscFunctionReturn(PETSC_SUCCESS); 1076 else { 1077 if (matnonzerostate != pc->matnonzerostate) { 1078 PetscCall(PetscInfo(pc, "Setting up PC with different nonzero pattern\n")); 1079 
pc->flag = DIFFERENT_NONZERO_PATTERN; 1080 } else { 1081 //PetscCall(PetscInfo(pc, "Setting up PC with same nonzero pattern\n")); 1082 pc->flag = SAME_NONZERO_PATTERN; 1083 } 1084 } 1085 pc->matstate = matstate; 1086 pc->matnonzerostate = matnonzerostate; 1087 1088 if (!((PetscObject)pc)->type_name) { 1089 PetscCall(PCGetDefaultType_Private(pc, &def)); 1090 PetscCall(PCSetType(pc, def)); 1091 } 1092 1093 PetscCall(MatSetErrorIfFailure(pc->pmat, pc->erroriffailure)); 1094 PetscCall(MatSetErrorIfFailure(pc->mat, pc->erroriffailure)); 1095 PetscCall(PetscLogEventBegin(PC_SetUp, pc, 0, 0, 0)); 1096 if (pc->ops->setup) { 1097 PetscCall(PCLogEventsDeactivatePush()); 1098 PetscUseTypeMethod(pc, setup); 1099 PetscCall(PCLogEventsDeactivatePop()); 1100 } 1101 PetscCall(PetscLogEventEnd(PC_SetUp, pc, 0, 0, 0)); 1102 if (!pc->setupcalled) pc->setupcalled = 1; 1103 PetscFunctionReturn(PETSC_SUCCESS); 1104 } 1105 1106 /*@ 1107 PCSetUpOnBlocks - Sets up the preconditioner for each block in 1108 the block Jacobi, overlapping Schwarz, and fieldsplit methods. 1109 1110 Collective 1111 1112 Input Parameter: 1113 . pc - the preconditioner context 1114 1115 Level: developer 1116 1117 Notes: 1118 For nested preconditioners such as `PCBJACOBI`, `PCSetUp()` is not called on each sub-`KSP` when `PCSetUp()` is 1119 called on the outer `PC`, this routine ensures it is called. 1120 1121 It calls `PCSetUp()` if not yet called. 

.seealso: [](ch_ksp), `PC`, `PCSetUp()`, `PCCreate()`, `PCApply()`, `PCDestroy()`
@*/
PetscErrorCode PCSetUpOnBlocks(PC pc)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (!pc->setupcalled) PetscCall(PCSetUp(pc)); /* "if" to prevent -info extra prints */
  /* not all PC types have per-block setup; quietly succeed when absent */
  if (!pc->ops->setuponblocks) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(PetscLogEventBegin(PC_SetUpOnBlocks, pc, 0, 0, 0));
  /* suppress logging of the inner per-block setups, as in PCSetUp() */
  PetscCall(PCLogEventsDeactivatePush());
  PetscUseTypeMethod(pc, setuponblocks);
  PetscCall(PCLogEventsDeactivatePop());
  PetscCall(PetscLogEventEnd(PC_SetUpOnBlocks, pc, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCSetModifySubMatrices - Sets a user-defined routine for modifying the
  submatrices that arise within certain subdomain-based preconditioners such as `PCASM`

  Logically Collective

  Input Parameters:
+ pc   - the preconditioner context
. func - routine for modifying the submatrices
- ctx  - optional user-defined context (may be `NULL`)

  Calling sequence of `func`:
+ pc     - the preconditioner context
. nsub   - number of index sets
. row    - an array of index sets that contain the global row numbers
           that comprise each local submatrix
. col    - an array of index sets that contain the global column numbers
           that comprise each local submatrix
. submat - array of local submatrices
- ctx    - optional user-defined context for private data for the
           user-defined func routine (may be `NULL`)

  Level: advanced

  Notes:
  The basic submatrices are extracted from the preconditioner matrix as
  usual; the user can then alter these (for example, to set different boundary
  conditions for each submatrix) before they are used for the local solves.

  `PCSetModifySubMatrices()` MUST be called before `KSPSetUp()` and
  `KSPSolve()`.

  A routine set by `PCSetModifySubMatrices()` is currently called within
  the block Jacobi (`PCBJACOBI`) and additive Schwarz (`PCASM`)
  preconditioners. All other preconditioners ignore this routine.

.seealso: [](ch_ksp), `PC`, `PCBJACOBI`, `PCASM`, `PCModifySubMatrices()`
@*/
PetscErrorCode PCSetModifySubMatrices(PC pc, PetscErrorCode (*func)(PC pc, PetscInt nsub, const IS row[], const IS col[], Mat submat[], void *ctx), void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* stored on the PC; invoked later by PCModifySubMatrices() from supporting PC types */
  pc->modifysubmatrices  = func;
  pc->modifysubmatricesP = ctx;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCModifySubMatrices - Calls an optional user-defined routine within
  certain preconditioners if one has been set with `PCSetModifySubMatrices()`.

  Collective

  Input Parameters:
+ pc     - the preconditioner context
. nsub   - the number of local submatrices
. row    - an array of index sets that contain the global row numbers
           that comprise each local submatrix
. col    - an array of index sets that contain the global column numbers
           that comprise each local submatrix
. submat - array of local submatrices
- ctx    - optional user-defined context for private data for the
           user-defined routine (may be `NULL`)

  Output Parameter:
. submat - array of local submatrices (the entries of which may
           have been modified)

  Level: developer

  Note:
  The user should NOT generally call this routine, as it will
  automatically be called within certain preconditioners.

.seealso: [](ch_ksp), `PC`, `PCSetModifySubMatrices()`
@*/
PetscErrorCode PCModifySubMatrices(PC pc, PetscInt nsub, const IS row[], const IS col[], Mat submat[], void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* silently a no-op unless the user registered a callback with PCSetModifySubMatrices() */
  if (!pc->modifysubmatrices) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(PetscLogEventBegin(PC_ModifySubMatrices, pc, 0, 0, 0));
  PetscCall((*pc->modifysubmatrices)(pc, nsub, row, col, submat, ctx));
  PetscCall(PetscLogEventEnd(PC_ModifySubMatrices, pc, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCSetOperators - Sets the matrix associated with the linear system and
  a (possibly) different one associated with the preconditioner.

  Logically Collective

  Input Parameters:
+ pc   - the preconditioner context
. Amat - the matrix that defines the linear system
- Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat.

  Level: intermediate

  Notes:
  Passing a `NULL` for `Amat` or `Pmat` removes the matrix that is currently used.

  If you wish to replace either `Amat` or `Pmat` but leave the other one untouched then
  first call `KSPGetOperators()` to get the one you wish to keep, call `PetscObjectReference()`
  on it and then pass it back in your call to `KSPSetOperators()`.

  More Notes about Repeated Solution of Linear Systems:
  PETSc does NOT reset the matrix entries of either `Amat` or `Pmat`
  to zero after a linear solve; the user is completely responsible for
  matrix assembly. See the routine `MatZeroEntries()` if desiring to
  zero all elements of a matrix.

.seealso: [](ch_ksp), `PC`, `PCGetOperators()`, `MatZeroEntries()`
@*/
PetscErrorCode PCSetOperators(PC pc, Mat Amat, Mat Pmat)
{
  PetscInt m1, n1, m2, n2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (Amat) PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2);
  if (Pmat) PetscValidHeaderSpecific(Pmat, MAT_CLASSID, 3);
  if (Amat) PetscCheckSameComm(pc, 1, Amat, 2);
  if (Pmat) PetscCheckSameComm(pc, 1, Pmat, 3);
  /* once set up, the local sizes of the replacement matrices must match the originals */
  if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) {
    PetscCall(MatGetLocalSize(Amat, &m1, &n1));
    PetscCall(MatGetLocalSize(pc->mat, &m2, &n2));
    PetscCheck(m1 == m2 && n1 == n2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot change local size of Amat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT, m2, n2, m1, n1);
    PetscCall(MatGetLocalSize(Pmat, &m1, &n1));
    PetscCall(MatGetLocalSize(pc->pmat, &m2, &n2));
    PetscCheck(m1 == m2 && n1 == n2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot change local size of Pmat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT, m2, n2, m1, n1);
  }

  if (Pmat != pc->pmat) {
    /* changing the operator that defines the preconditioner, thus we need to clear the current states so a new preconditioner is built */
    pc->matnonzerostate = -1;
    pc->matstate        = -1;
  }

  /* reference first in case the matrices are the same */
  if (Amat) PetscCall(PetscObjectReference((PetscObject)Amat));
  PetscCall(MatDestroy(&pc->mat));
  if (Pmat) PetscCall(PetscObjectReference((PetscObject)Pmat));
  PetscCall(MatDestroy(&pc->pmat));
  pc->mat  = Amat;
  pc->pmat = Pmat;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner has changed.

  Logically Collective

  Input Parameters:
+ pc   - the preconditioner context
- flag - `PETSC_TRUE` do not compute a new preconditioner, `PETSC_FALSE` do compute a new preconditioner

  Level: intermediate

  Note:
  Normally if a matrix inside a `PC` changes the `PC` automatically updates itself using information from the changed matrix. This option
  prevents this.

.seealso: [](ch_ksp), `PC`, `PCGetOperators()`, `MatZeroEntries()`, `PCGetReusePreconditioner()`, `KSPSetReusePreconditioner()`
@*/
PetscErrorCode PCSetReusePreconditioner(PC pc, PetscBool flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flag, 2);
  /* checked by PCSetUp() to short-circuit rebuilding the preconditioner */
  pc->reusepreconditioner = flag;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetReusePreconditioner - Determines if the `PC` reuses the current preconditioner even if the operator in the preconditioner has changed.

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. flag - `PETSC_TRUE` do not compute a new preconditioner, `PETSC_FALSE` do compute a new preconditioner

  Level: intermediate

.seealso: [](ch_ksp), `PC`, `PCGetOperators()`, `MatZeroEntries()`, `PCSetReusePreconditioner()`
@*/
PetscErrorCode PCGetReusePreconditioner(PC pc, PetscBool *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(flag, 2);
  *flag = pc->reusepreconditioner;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetOperators - Gets the matrix associated with the linear system and
  possibly a different one associated with the preconditioner.

  Not Collective, though parallel `Mat`s are returned if `pc` is parallel

  Input Parameter:
.
  pc - the preconditioner context

  Output Parameters:
+ Amat - the matrix defining the linear system
- Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat.

  Level: intermediate

  Note:
  Does not increase the reference count of the matrices, so you should not destroy them

  Alternative usage: If the operators have NOT been set with `KSPSetOperators()`/`PCSetOperators()` then the operators
  are created in `PC` and returned to the user. In this case, if both operators
  mat and pmat are requested, two DIFFERENT operators will be returned. If
  only one is requested both operators in the PC will be the same (i.e. as
  if one had called `KSPSetOperators()`/`PCSetOperators()` with the same argument for both Mats).
  The user must set the sizes of the returned matrices and their type etc just
  as if the user created them with `MatCreate()`. For example,

.vb
  KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to
    set size, type, etc of Amat

  MatCreate(comm,&Amat);
  KSP/PCSetOperators(ksp/pc,Amat,Amat);
  PetscObjectDereference((PetscObject)Amat);
    set size, type, etc of Amat
.ve

  and

.vb
  KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to
    set size, type, etc of Amat and Pmat

  MatCreate(comm,&Amat);
  MatCreate(comm,&Pmat);
  KSP/PCSetOperators(ksp/pc,Amat,Pmat);
  PetscObjectDereference((PetscObject)Amat);
  PetscObjectDereference((PetscObject)Pmat);
    set size, type, etc of Amat and Pmat
.ve

  The rationale for this support is so that when creating a `TS`, `SNES`, or `KSP` the hierarchy
  of underlying objects (i.e. `SNES`, `KSP`, `PC`, `Mat`) and their lifespans can be completely
  managed by the top most level object (i.e. the `TS`, `SNES`, or `KSP`). Another way to look
  at this is when you create a `SNES` you do not NEED to create a `KSP` and attach it to
  the `SNES` object (the `SNES` object manages it for you). Similarly when you create a KSP
  you do not need to attach a `PC` to it (the `KSP` object manages the `PC` object for you).
  Thus, why should YOU have to create the `Mat` and attach it to the `SNES`/`KSP`/`PC`, when
  it can be created for you?

.seealso: [](ch_ksp), `PC`, `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperatorsSet()`
@*/
PetscErrorCode PCGetOperators(PC pc, Mat *Amat, Mat *Pmat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (Amat) {
    if (!pc->mat) {
      if (pc->pmat && !Pmat) { /* Pmat has been set, but user did not request it, so use for Amat */
        pc->mat = pc->pmat;
        PetscCall(PetscObjectReference((PetscObject)pc->mat));
      } else { /* both Amat and Pmat are empty */
        PetscCall(MatCreate(PetscObjectComm((PetscObject)pc), &pc->mat));
        if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */
          pc->pmat = pc->mat;
          PetscCall(PetscObjectReference((PetscObject)pc->pmat));
        }
      }
    }
    *Amat = pc->mat;
  }
  if (Pmat) {
    if (!pc->pmat) {
      if (pc->mat && !Amat) { /* Amat has been set but was not requested, so use for pmat */
        pc->pmat = pc->mat;
        PetscCall(PetscObjectReference((PetscObject)pc->pmat));
      } else { /* both empty: create Pmat, mirroring the Amat branch above */
        PetscCall(MatCreate(PetscObjectComm((PetscObject)pc), &pc->pmat));
        if (!Amat) { /* user did NOT request Amat, so make same as Pmat */
          pc->mat = pc->pmat;
          PetscCall(PetscObjectReference((PetscObject)pc->mat));
        }
      }
    }
    *Pmat = pc->pmat;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetOperatorsSet - Determines if the matrix associated with the linear system and
  possibly a different one associated with the preconditioner have been set
  in the `PC`.

  Not Collective, though the results on all processes should be the same

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ mat  - the matrix associated with the linear system was set
- pmat - matrix associated with the preconditioner was set, usually the same

  Level: intermediate

.seealso: [](ch_ksp), `PC`, `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperators()`
@*/
PetscErrorCode PCGetOperatorsSet(PC pc, PetscBool *mat, PetscBool *pmat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (mat) *mat = (pc->mat) ? PETSC_TRUE : PETSC_FALSE;
  if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFactorGetMatrix - Gets the factored matrix from the
  preconditioner context. This routine is valid only for the `PCLU`,
  `PCILU`, `PCCHOLESKY`, and `PCICC` methods.

  Not Collective though `mat` is parallel if `pc` is parallel

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. mat - the factored matrix

  Level: advanced

  Note:
  Does not increase the reference count for `mat` so DO NOT destroy it

.seealso: [](ch_ksp), `PC`, `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC`
@*/
PetscErrorCode PCFactorGetMatrix(PC pc, Mat *mat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(mat, 2);
  /* ensure the solver package is selected before asking for the factor */
  PetscCall(PCFactorSetUpMatSolverType(pc));
  PetscUseTypeMethod(pc, getfactoredmatrix, mat);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCSetOptionsPrefix - Sets the prefix used for searching for all
  `PC` options in the database.

  Logically Collective

  Input Parameters:
+ pc     - the preconditioner context
- prefix - the prefix string to prepend to all `PC` option requests

  Note:
  A hyphen (-) must NOT be given at the beginning of the prefix name.
  The first character of all runtime options is AUTOMATICALLY the
  hyphen.

  Level: advanced

.seealso: [](ch_ksp), `PC`, `PCSetFromOptions`, `PCAppendOptionsPrefix()`, `PCGetOptionsPrefix()`
@*/
PetscErrorCode PCSetOptionsPrefix(PC pc, const char prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)pc, prefix));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCAppendOptionsPrefix - Appends to the prefix used for searching for all
  `PC` options in the database.

  Logically Collective

  Input Parameters:
+ pc     - the preconditioner context
- prefix - the prefix string to prepend to all `PC` option requests

  Note:
  A hyphen (-) must NOT be given at the beginning of the prefix name.
  The first character of all runtime options is AUTOMATICALLY the
  hyphen.

  Level: advanced

.seealso: [](ch_ksp), `PC`, `PCSetFromOptions`, `PCSetOptionsPrefix()`, `PCGetOptionsPrefix()`
@*/
PetscErrorCode PCAppendOptionsPrefix(PC pc, const char prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)pc, prefix));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetOptionsPrefix - Gets the prefix used for searching for all
  PC options in the database.

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
.
  prefix - pointer to the prefix string used, is returned

  Level: advanced

  Fortran Note:
  The user should pass in a string `prefix` of
  sufficient length to hold the prefix.

.seealso: [](ch_ksp), `PC`, `PCSetFromOptions`, `PCSetOptionsPrefix()`, `PCAppendOptionsPrefix()`
@*/
PetscErrorCode PCGetOptionsPrefix(PC pc, const char *prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(prefix, 2);
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc, prefix));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Indicates the right-hand side will be changed by KSPSolve(), this occurs for a few
   preconditioners including BDDC and Eisenstat that transform the equations before applying
   the Krylov methods
*/
PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC pc, PetscBool *change)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(change, 2);
  /* defaults to PETSC_FALSE; a PC type may override via its PCPreSolveChangeRHS_C method */
  *change = PETSC_FALSE;
  PetscTryMethod(pc, "PCPreSolveChangeRHS_C", (PC, PetscBool *), (pc, change));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCPreSolve - Optional pre-solve phase, intended for any
  preconditioner-specific actions that must be performed before
  the iterative solve itself.

  Collective

  Input Parameters:
+ pc  - the preconditioner context
- ksp - the Krylov subspace context

  Level: developer

  Example Usage:
.vb
  PCPreSolve(pc,ksp);
  KSPSolve(ksp,b,x);
  PCPostSolve(pc,ksp);
.ve

  Notes:
  The pre-solve phase is distinct from the `PCSetUp()` phase.

  `KSPSolve()` calls this directly, so is rarely called by the user.

.seealso: [](ch_ksp), `PC`, `PCPostSolve()`
@*/
PetscErrorCode PCPreSolve(PC pc, KSP ksp)
{
  Vec x, rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(ksp, KSP_CLASSID, 2);
  /* presolvedone counts nesting depth; PCPostSolve() decrements it */
  pc->presolvedone++;
  PetscCheck(pc->presolvedone <= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot embed PCPreSolve() more than twice");
  PetscCall(KSPGetSolution(ksp, &x));
  PetscCall(KSPGetRhs(ksp, &rhs));

  /* the type-specific presolve takes precedence over a user-set callback */
  if (pc->ops->presolve) PetscUseTypeMethod(pc, presolve, ksp, rhs, x);
  else if (pc->presolve) PetscCall(pc->presolve(pc, ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCSetPreSolve - Sets function used by `PCPreSolve()` which is intended for any
  preconditioner-specific actions that must be performed before
  the iterative solve itself.

  Logically Collective

  Input Parameters:
+ pc       - the preconditioner object
- presolve - the function to call before the solve

  Calling sequence of `presolve`:
+ pc  - the `PC` context
- ksp - the `KSP` context

  Level: developer

.seealso: [](ch_ksp), `PC`, `PCSetUp()`, `PCPreSolve()`
@*/
PetscErrorCode PCSetPreSolve(PC pc, PetscErrorCode (*presolve)(PC pc, KSP ksp))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  pc->presolve = presolve;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCPostSolve - Optional post-solve phase, intended for any
  preconditioner-specific actions that must be performed after
  the iterative solve itself.

  Collective

  Input Parameters:
+ pc  - the preconditioner context
- ksp - the Krylov subspace context

  Example Usage:
.vb
  PCPreSolve(pc,ksp);
  KSPSolve(ksp,b,x);
  PCPostSolve(pc,ksp);
.ve

  Level: developer

  Note:
  `KSPSolve()` calls this routine directly, so it is rarely called by the user.

.seealso: [](ch_ksp), `PC`, `PCSetPostSolve()`, `PCSetPreSolve()`, `PCPreSolve()`, `KSPSolve()`
@*/
PetscErrorCode PCPostSolve(PC pc, KSP ksp)
{
  Vec x, rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(ksp, KSP_CLASSID, 2);
  /* undo the nesting increment done in PCPreSolve() */
  pc->presolvedone--;
  PetscCall(KSPGetSolution(ksp, &x));
  PetscCall(KSPGetRhs(ksp, &rhs));
  PetscTryTypeMethod(pc, postsolve, ksp, rhs, x);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCLoad - Loads a `PC` that has been stored in binary with `PCView()`.

  Collective

  Input Parameters:
+ newdm  - the newly loaded `PC`, this needs to have been created with `PCCreate()` or
           some related function before a call to `PCLoad()`.
- viewer - binary file viewer, obtained from `PetscViewerBinaryOpen()`

  Level: intermediate

  Note:
  The type is determined by the data in the file, any `PCType` set into the `PC` before this call is ignored.

.seealso: [](ch_ksp), `PC`, `PetscViewerBinaryOpen()`, `PCView()`, `MatLoad()`, `VecLoad()`
@*/
PetscErrorCode PCLoad(PC newdm, PetscViewer viewer)
{
  PetscBool isbinary;
  PetscInt  classid;
  char      type[256]; /* type name is stored as a fixed 256-byte record, see PCView() binary branch */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm, PC_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
  PetscCheck(isbinary, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* verify the next record in the file really is a PC before reading its type */
  PetscCall(PetscViewerBinaryRead(viewer, &classid, 1, NULL, PETSC_INT));
  PetscCheck(classid == PC_FILE_CLASSID, PetscObjectComm((PetscObject)newdm), PETSC_ERR_ARG_WRONG, "Not PC next in file");
  PetscCall(PetscViewerBinaryRead(viewer, type, 256, NULL, PETSC_CHAR));
  PetscCall(PCSetType(newdm, type));
  PetscTryTypeMethod(newdm, load, viewer);
  PetscFunctionReturn(PETSC_SUCCESS);
}

#include <petscdraw.h>
#if defined(PETSC_HAVE_SAWS)
  #include <petscviewersaws.h>
#endif

/*@
  PCViewFromOptions - View from the `PC` based on options in the options database

  Collective

  Input Parameters:
+ A - the `PC` context
.
  obj  - Optional object that provides the options prefix
- name - command line option

  Level: intermediate

.seealso: [](ch_ksp), `PC`, `PCView`, `PetscObjectViewFromOptions()`, `PCCreate()`
@*/
PetscErrorCode PCViewFromOptions(PC A, PetscObject obj, const char name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, PC_CLASSID, 1);
  PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCView - Prints information about the `PC`

  Collective

  Input Parameters:
+ pc     - the `PC` context
- viewer - optional visualization context

  Level: developer

  Notes:
  The available visualization contexts include
+ `PETSC_VIEWER_STDOUT_SELF`  - standard output (default)
- `PETSC_VIEWER_STDOUT_WORLD` - synchronized standard
  output where only the first processor opens
  the file. All other processors send their
  data to the first processor to print.

  The user can open an alternative visualization contexts with
  `PetscViewerASCIIOpen()` (output to a specified file).

.seealso: [](ch_ksp), `PC`, `PetscViewer`, `KSPView()`, `PetscViewerASCIIOpen()`
@*/
PetscErrorCode PCView(PC pc, PetscViewer viewer)
{
  PCType            cstr;
  PetscViewerFormat format;
  PetscBool         iascii, isstring, isbinary, isdraw, pop = PETSC_FALSE;
#if defined(PETSC_HAVE_SAWS)
  PetscBool issaws;
#endif

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* no viewer supplied: default to ASCII stdout on the PC's communicator */
  if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc), &viewer));
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCheckSameComm(pc, 1, viewer, 2);

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
#if defined(PETSC_HAVE_SAWS)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
#endif

  if (iascii) {
    PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)pc, viewer));
    if (!pc->setupcalled) PetscCall(PetscViewerASCIIPrintf(viewer, "  PC has not been set up so information may be incomplete\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    PetscTryTypeMethod(pc, view, viewer);
    PetscCall(PetscViewerASCIIPopTab(viewer));
    if (pc->mat) {
      PetscCall(PetscViewerGetFormat(viewer, &format));
      /* view the matrices at INFO level unless the caller asked for full detail; pop the format afterwards */
      if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
        PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_INFO));
        pop = PETSC_TRUE;
      }
      if (pc->pmat == pc->mat) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  linear system matrix = precond matrix:\n"));
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(MatView(pc->mat, viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      } else {
        if (pc->pmat) {
          PetscCall(PetscViewerASCIIPrintf(viewer, "  linear system matrix followed by preconditioner matrix:\n"));
        } else {
          PetscCall(PetscViewerASCIIPrintf(viewer, "  linear system matrix:\n"));
        }
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(MatView(pc->mat, viewer));
        if (pc->pmat) PetscCall(MatView(pc->pmat, viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      }
      if (pop) PetscCall(PetscViewerPopFormat(viewer));
    }
  } else if (isstring) {
    PetscCall(PCGetType(pc, &cstr));
    PetscCall(PetscViewerStringSPrintf(viewer, " PCType: %-7.7s", cstr));
    PetscTryTypeMethod(pc, view, viewer);
    if (pc->mat) PetscCall(MatView(pc->mat, viewer));
    if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat, viewer));
  } else if (isbinary) {
    PetscInt    classid = PC_FILE_CLASSID;
    MPI_Comm    comm;
    PetscMPIInt rank;
    char        type[256]; /* fixed 256-byte type record; must match what PCLoad() reads back */

    PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
    PetscCallMPI(MPI_Comm_rank(comm, &rank));
    if (rank == 0) {
      PetscCall(PetscViewerBinaryWrite(viewer, &classid, 1, PETSC_INT));
      PetscCall(PetscStrncpy(type, ((PetscObject)pc)->type_name, 256));
      PetscCall(PetscViewerBinaryWrite(viewer, type, 256, PETSC_CHAR));
    }
    PetscTryTypeMethod(pc, view, viewer);
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x, y, bottom, h;
    PetscInt  n;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (pc->mat) {
      PetscCall(MatGetSize(pc->mat, &n, NULL));
      PetscCall(PetscSNPrintf(str, 25, "PC: %s (%" PetscInt_FMT ")", ((PetscObject)pc)->type_name, n));
    } else {
      PetscCall(PetscSNPrintf(str, 25, "PC: %s", ((PetscObject)pc)->type_name));
    }
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    bottom = y - h;
    /* move the draw cursor below the label so nested views stack vertically */
    PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom));
    PetscTryTypeMethod(pc, view, viewer);
    PetscCall(PetscDrawPopCurrentPoint(draw));
#if defined(PETSC_HAVE_SAWS)
  } else if (issaws) {
    PetscMPIInt rank;

    PetscCall(PetscObjectName((PetscObject)pc));
    PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
    if (!((PetscObject)pc)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)pc, viewer));
    if (pc->mat) PetscCall(MatView(pc->mat, viewer));
    if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat, viewer));
#endif
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCRegister - Adds a method (`PCType`) to the preconditioner package.

  Not collective. No Fortran Support

  Input Parameters:
+ sname    - name of a new user-defined solver
- function - routine to create method context

  Example Usage:
.vb
  PCRegister("my_solver", MySolverCreate);
.ve

  Then, your solver can be chosen with the procedural interface via
$ PCSetType(pc, "my_solver")
  or at runtime via the option
$ -pc_type my_solver

  Level: advanced

  Note:
  `PCRegister()` may be called multiple times to add several user-defined preconditioners.

.seealso: [](ch_ksp), `PC`, `PCType`, `PCRegisterAll()`
@*/
PetscErrorCode PCRegister(const char sname[], PetscErrorCode (*function)(PC))
{
  PetscFunctionBegin;
  PetscCall(PCInitializePackage());
  PetscCall(PetscFunctionListAdd(&PCList, sname, function));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* MatShell MatMult callback that applies the PC stored in the shell context; used to expose a PC as a Mat */
static PetscErrorCode MatMult_PC(Mat A, Vec X, Vec Y)
{
  PC pc;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(A, &pc));
  PetscCall(PCApply(pc, X, Y));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCComputeOperator - Computes the explicit preconditioned operator.
1941 1942 Collective 1943 1944 Input Parameters: 1945 + pc - the preconditioner object 1946 - mattype - the `MatType` to be used for the operator 1947 1948 Output Parameter: 1949 . mat - the explicit preconditioned operator 1950 1951 Level: advanced 1952 1953 Note: 1954 This computation is done by applying the operators to columns of the identity matrix. 1955 This routine is costly in general, and is recommended for use only with relatively small systems. 1956 Currently, this routine uses a dense matrix format when `mattype` == `NULL` 1957 1958 .seealso: [](ch_ksp), `PC`, `KSPComputeOperator()`, `MatType` 1959 @*/ 1960 PetscErrorCode PCComputeOperator(PC pc, MatType mattype, Mat *mat) 1961 { 1962 PetscInt N, M, m, n; 1963 Mat A, Apc; 1964 1965 PetscFunctionBegin; 1966 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1967 PetscAssertPointer(mat, 3); 1968 PetscCall(PCGetOperators(pc, &A, NULL)); 1969 PetscCall(MatGetLocalSize(A, &m, &n)); 1970 PetscCall(MatGetSize(A, &M, &N)); 1971 PetscCall(MatCreateShell(PetscObjectComm((PetscObject)pc), m, n, M, N, pc, &Apc)); 1972 PetscCall(MatShellSetOperation(Apc, MATOP_MULT, (void (*)(void))MatMult_PC)); 1973 PetscCall(MatComputeOperator(Apc, mattype, mat)); 1974 PetscCall(MatDestroy(&Apc)); 1975 PetscFunctionReturn(PETSC_SUCCESS); 1976 } 1977 1978 /*@ 1979 PCSetCoordinates - sets the coordinates of all the nodes on the local process 1980 1981 Collective 1982 1983 Input Parameters: 1984 + pc - the solver context 1985 . dim - the dimension of the coordinates 1, 2, or 3 1986 . nloc - the blocked size of the coordinates array 1987 - coords - the coordinates array 1988 1989 Level: intermediate 1990 1991 Note: 1992 `coords` is an array of the dim coordinates for the nodes on 1993 the local processor, of size `dim`*`nloc`. 
  If there are 108 equations on a processor
  for a displacement finite element discretization of elasticity (so
  that there are nloc = 36 = 108/3 nodes) then the array must have 108
  double precision values (ie, 3 * 36). These x y z coordinates
  should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x,
  ... , N-1.z ].

.seealso: [](ch_ksp), `PC`, `MatSetNearNullSpace()`
@*/
PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, dim, 2);
  /* dispatch to the type-specific implementation if one is registered; silently a no-op otherwise */
  PetscTryMethod(pc, "PCSetCoordinates_C", (PC, PetscInt, PetscInt, PetscReal[]), (pc, dim, nloc, coords));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetInterpolations - Gets interpolation matrices for all levels (except level 0)

  Logically Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ num_levels     - the number of levels
- interpolations - the interpolation matrices (size of `num_levels`-1)

  Level: advanced

  Developer Note:
  Why is this here instead of in `PCMG` etc?

.seealso: [](ch_ksp), `PC`, `PCMG`, `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetInterpolation()`, `PCGetCoarseOperators()`
@*/
PetscErrorCode PCGetInterpolations(PC pc, PetscInt *num_levels, Mat *interpolations[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(num_levels, 2);
  PetscAssertPointer(interpolations, 3);
  /* unlike PetscTryMethod(), this errors if the PC type does not implement the query */
  PetscUseMethod(pc, "PCGetInterpolations_C", (PC, PetscInt *, Mat *[]), (pc, num_levels, interpolations));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level)

  Logically Collective

  Input Parameter:
.
pc - the preconditioner context

  Output Parameters:
+ num_levels      - the number of levels
- coarseOperators - the coarse operator matrices (size of `num_levels`-1)

  Level: advanced

  Developer Note:
  Why is this here instead of in `PCMG` etc?

.seealso: [](ch_ksp), `PC`, `PCMG`, `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetRScale()`, `PCMGGetInterpolation()`, `PCGetInterpolations()`
@*/
PetscErrorCode PCGetCoarseOperators(PC pc, PetscInt *num_levels, Mat *coarseOperators[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(num_levels, 2);
  PetscAssertPointer(coarseOperators, 3);
  /* errors if the PC type (e.g. anything other than a multigrid-like type) does not implement the query */
  PetscUseMethod(pc, "PCGetCoarseOperators_C", (PC, PetscInt *, Mat *[]), (pc, num_levels, coarseOperators));
  PetscFunctionReturn(PETSC_SUCCESS);
}