/*
   The PC (preconditioner) interface routines, callable by users.
*/
#include <petsc/private/pcimpl.h> /*I "petscksp.h" I*/
#include <petscdm.h>

/* Logging support */
PetscClassId  PC_CLASSID;
PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_MatApply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks;
PetscInt      PetscMGLevelId;
PetscLogStage PCMPIStage;

/*
  Picks a reasonable default PC type for pc->pmat: an incomplete factorization (ICC/ILU) for
  factorable sequential matrices, block Jacobi when a diagonal block can be extracted, PCMAT
  when the matrix supports MatSolve(), and PCNONE otherwise. Sets *type to NULL when no
  matrix has been provided yet.
*/
PETSC_INTERN PetscErrorCode PCGetDefaultType_Private(PC pc, const char *type[])
{
  PetscMPIInt size;
  PetscBool   hasopblock, hasopsolve, flg1, flg2, set, flg3, isnormal;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
  if (pc->pmat) {
    PetscCall(MatHasOperation(pc->pmat, MATOP_GET_DIAGONAL_BLOCK, &hasopblock));
    PetscCall(MatHasOperation(pc->pmat, MATOP_SOLVE, &hasopsolve));
    if (size == 1) {
      /* sequential: prefer an incomplete factorization when the matrix type supports it */
      PetscCall(MatGetFactorAvailable(pc->pmat, "petsc", MAT_FACTOR_ICC, &flg1));
      PetscCall(MatGetFactorAvailable(pc->pmat, "petsc", MAT_FACTOR_ILU, &flg2));
      PetscCall(MatIsSymmetricKnown(pc->pmat, &set, &flg3));
      PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &isnormal, MATNORMAL, MATNORMALHERMITIAN, NULL));
      if (flg1 && (!flg2 || (set && flg3))) { /* use ICC only when ILU is unavailable or the matrix is known symmetric */
        *type = PCICC;
      } else if (flg2) {
        *type = PCILU;
      } else if (isnormal) {
        *type = PCNONE;
      } else if (hasopblock) { /* likely is a parallel matrix run on one processor */
        *type = PCBJACOBI;
      } else if (hasopsolve) {
        *type = PCMAT;
      } else {
        *type = PCNONE;
      }
    } else {
      if (hasopblock) {
        *type = PCBJACOBI;
      } else if (hasopsolve) {
        *type = PCMAT;
      } else {
        *type = PCNONE;
      }
    }
  } else *type = NULL; /* no matrix set yet, so no sensible default exists */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* do not log solves, setup and applications of preconditioners while constructing preconditioners; perhaps they should be logged separately from the regular
solves */
PETSC_EXTERN PetscLogEvent KSP_Solve, KSP_SetUp;

/* Push a deactivation of the solve/setup logging events (see comment above); must be matched by PCLogEventsDeactivatePop() */
static PetscErrorCode PCLogEventsDeactivatePush(void)
{
  PetscFunctionBegin;
  PetscCall(KSPInitializePackage());
  PetscCall(PetscLogEventDeactivatePush(KSP_Solve));
  PetscCall(PetscLogEventDeactivatePush(KSP_SetUp));
  PetscCall(PetscLogEventDeactivatePush(PC_Apply));
  PetscCall(PetscLogEventDeactivatePush(PC_SetUp));
  PetscCall(PetscLogEventDeactivatePush(PC_SetUpOnBlocks));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Pop the event deactivations pushed by PCLogEventsDeactivatePush(), restoring normal logging */
static PetscErrorCode PCLogEventsDeactivatePop(void)
{
  PetscFunctionBegin;
  PetscCall(KSPInitializePackage());
  PetscCall(PetscLogEventDeactivatePop(KSP_Solve));
  PetscCall(PetscLogEventDeactivatePop(KSP_SetUp));
  PetscCall(PetscLogEventDeactivatePop(PC_Apply));
  PetscCall(PetscLogEventDeactivatePop(PC_SetUp));
  PetscCall(PetscLogEventDeactivatePop(PC_SetUpOnBlocks));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCReset - Resets a `PC` context to the pcsetupcalled = 0 state and removes any allocated `Vec`s and `Mat`s

  Collective

  Input Parameter:
. pc - the preconditioner context

  Level: developer

  Note:
  This allows a `PC` to be reused for a different sized linear system but using the same options that have been previously set in `pc`

.seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCSetUp()`
@*/
PetscErrorCode PCReset(PC pc)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryTypeMethod(pc, reset); /* implementation-specific reset, if the PC type provides one */
  PetscCall(VecDestroy(&pc->diagonalscaleright));
  PetscCall(VecDestroy(&pc->diagonalscaleleft));
  PetscCall(MatDestroy(&pc->pmat));
  PetscCall(MatDestroy(&pc->mat));

  pc->setupcalled = 0; /* forces a full PCSetUp() on the next apply */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCDestroy - Destroys `PC` context that was created with `PCCreate()`.

  Collective

  Input Parameter:
.
pc - the preconditioner context

  Level: developer

.seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCSetUp()`
@*/
PetscErrorCode PCDestroy(PC *pc)
{
  PetscFunctionBegin;
  if (!*pc) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(*pc, PC_CLASSID, 1);
  /* only tear down the object when the last reference is released */
  if (--((PetscObject)*pc)->refct > 0) {
    *pc = NULL;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PCReset(*pc));

  /* if memory was published with SAWs then destroy it */
  PetscCall(PetscObjectSAWsViewOff((PetscObject)*pc));
  PetscTryTypeMethod(*pc, destroy);
  PetscCall(DMDestroy(&(*pc)->dm));
  PetscCall(PetscHeaderDestroy(pc));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right
  scaling as needed by certain time-stepping codes.

  Logically Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. flag - `PETSC_TRUE` if it applies the scaling

  Level: developer

  Note:
  If this returns `PETSC_TRUE` then the system solved via the Krylov method is, for left and right preconditioning,

  $$
  \begin{align*}
  D M A D^{-1} y = D M b \\
  D A M D^{-1} z = D b.
  \end{align*}
  $$

.seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCSetDiagonalScale()`
@*/
PetscErrorCode PCGetDiagonalScale(PC pc, PetscBool *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(flag, 2);
  *flag = pc->diagonalscale;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right
  scaling as needed by certain time-stepping codes.
  Logically Collective

  Input Parameters:
+ pc - the preconditioner context
- s  - scaling vector

  Level: intermediate

  Notes:
  The system solved via the Krylov method is, for left and right preconditioning,
  $$
  \begin{align*}
  D M A D^{-1} y = D M b \\
  D A M D^{-1} z = D b.
  \end{align*}
  $$

  `PCDiagonalScaleLeft()` scales a vector by $D$. `PCDiagonalScaleRight()` scales a vector by $D^{-1}$.

.seealso: [](ch_ksp), `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCGetDiagonalScale()`
@*/
PetscErrorCode PCSetDiagonalScale(PC pc, Vec s)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(s, VEC_CLASSID, 2);
  pc->diagonalscale = PETSC_TRUE;

  /* take a reference before destroying the old vector in case s is the same vector */
  PetscCall(PetscObjectReference((PetscObject)s));
  PetscCall(VecDestroy(&pc->diagonalscaleleft));

  pc->diagonalscaleleft = s;

  /* the right scaling is the elementwise reciprocal of the left scaling */
  PetscCall(VecDuplicate(s, &pc->diagonalscaleright));
  PetscCall(VecCopy(s, pc->diagonalscaleright));
  PetscCall(VecReciprocal(pc->diagonalscaleright));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes.

  Logically Collective

  Input Parameters:
+ pc  - the preconditioner context
. in  - input vector
- out - scaled vector (may be the same as `in`)

  Level: intermediate

  Notes:
  The system solved via the Krylov method is, for left and right preconditioning,

  $$
  \begin{align*}
  D M A D^{-1} y = D M b \\
  D A M D^{-1} z = D b.
  \end{align*}
  $$

  `PCDiagonalScaleLeft()` scales a vector by $D$. `PCDiagonalScaleRight()` scales a vector by $D^{-1}$.
  If diagonal scaling is turned off and `in` is not `out` then `in` is copied to `out`

.seealso: [](ch_ksp), `PCCreate()`, `PCSetUp()`, `PCSetDiagonalScale()`, `PCDiagonalScaleRight()`, `MatDiagonalScale()`
@*/
PetscErrorCode PCDiagonalScaleLeft(PC pc, Vec in, Vec out)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(in, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(out, VEC_CLASSID, 3);
  if (pc->diagonalscale) {
    PetscCall(VecPointwiseMult(out, pc->diagonalscaleleft, in));
  } else if (in != out) {
    /* scaling disabled: behave as the identity */
    PetscCall(VecCopy(in, out));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes.

  Logically Collective

  Input Parameters:
+ pc  - the preconditioner context
. in  - input vector
- out - scaled vector (may be the same as `in`)

  Level: intermediate

  Notes:
  The system solved via the Krylov method is, for left and right preconditioning,

  $$
  \begin{align*}
  D M A D^{-1} y = D M b \\
  D A M D^{-1} z = D b.
  \end{align*}
  $$

  `PCDiagonalScaleLeft()` scales a vector by $D$. `PCDiagonalScaleRight()` scales a vector by $D^{-1}$.
  If diagonal scaling is turned off and `in` is not `out` then `in` is copied to `out`

.seealso: [](ch_ksp), `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCSetDiagonalScale()`, `MatDiagonalScale()`
@*/
PetscErrorCode PCDiagonalScaleRight(PC pc, Vec in, Vec out)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(in, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(out, VEC_CLASSID, 3);
  if (pc->diagonalscale) {
    PetscCall(VecPointwiseMult(out, pc->diagonalscaleright, in));
  } else if (in != out) {
    /* scaling disabled: behave as the identity */
    PetscCall(VecCopy(in, out));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the
  operator during the preconditioning process it applies the Amat provided to `TSSetRHSJacobian()`,
  `TSSetIJacobian()`, `SNESSetJacobian()`, `KSPSetOperators()` or `PCSetOperators()` not the Pmat.

  Logically Collective

  Input Parameters:
+ pc  - the preconditioner context
- flg - `PETSC_TRUE` to use the Amat, `PETSC_FALSE` to use the Pmat (default is false)

  Options Database Key:
. -pc_use_amat <true,false> - use the amat argument to `KSPSetOperators()` or `PCSetOperators()` to apply the operator

  Level: intermediate

  Note:
  For the common case in which the linear system matrix and the matrix used to construct the
  preconditioner are identical, this routine has no effect.
.seealso: [](ch_ksp), `PC`, `PCGetUseAmat()`, `PCBJACOBI`, `PCMG`, `PCFIELDSPLIT`, `PCCOMPOSITE`,
          `KSPSetOperators()`, `PCSetOperators()`
@*/
PetscErrorCode PCSetUseAmat(PC pc, PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  pc->useAmat = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCSetErrorIfFailure - Causes `PC` to generate an error if a floating point exception, for example a zero pivot, is detected.

  Logically Collective

  Input Parameters:
+ pc  - iterative context obtained from `PCCreate()`
- flg - `PETSC_TRUE` indicates you want the error generated

  Level: advanced

  Notes:
  Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner, you can call `KSPGetConvergedReason()` after a `KSPSolve()`
  to determine if it has converged or failed. Or use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is
  detected.

  This is propagated into `KSP`s used by this `PC`, which then propagate it into `PC`s used by those `KSP`s

.seealso: [](ch_ksp), `PC`, `KSPSetErrorIfNotConverged()`, `PCGetInitialGuessNonzero()`, `PCSetInitialGuessKnoll()`, `PCGetInitialGuessKnoll()`
@*/
PetscErrorCode PCSetErrorIfFailure(PC pc, PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flg, 2);
  pc->erroriffailure = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the
  operator during the preconditioning process it applies the Amat provided to `TSSetRHSJacobian()`,
  `TSSetIJacobian()`, `SNESSetJacobian()`, `KSPSetOperators()` or `PCSetOperators()` not the Pmat.

  Logically Collective

  Input Parameter:
.
pc - the preconditioner context

  Output Parameter:
. flg - `PETSC_TRUE` to use the Amat, `PETSC_FALSE` to use the Pmat (default is false)

  Level: intermediate

  Note:
  For the common case in which the linear system matrix and the matrix used to construct the
  preconditioner are identical, this routine does nothing.

.seealso: [](ch_ksp), `PC`, `PCSetUseAmat()`, `PCBJACOBI`, `PCMG`, `PCFIELDSPLIT`, `PCCOMPOSITE`
@*/
PetscErrorCode PCGetUseAmat(PC pc, PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  *flg = pc->useAmat;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCSetKSPNestLevel - sets the amount of nesting the `KSP` that contains this `PC` has

  Collective

  Input Parameters:
+ pc    - the `PC`
- level - the nest level

  Level: developer

.seealso: [](ch_ksp), `KSPSetUp()`, `KSPSolve()`, `KSPDestroy()`, `KSP`, `KSPGMRES`, `KSPType`, `KSPGetNestLevel()`, `PCGetKSPNestLevel()`, `KSPSetNestLevel()`
@*/
PetscErrorCode PCSetKSPNestLevel(PC pc, PetscInt level)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, level, 2);
  pc->kspnestlevel = level;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetKSPNestLevel - gets the amount of nesting the `KSP` that contains this `PC` has

  Not Collective

  Input Parameter:
. pc - the `PC`

  Output Parameter:
.
level - the nest level

  Level: developer

.seealso: [](ch_ksp), `KSPSetUp()`, `KSPSolve()`, `KSPDestroy()`, `KSP`, `KSPGMRES`, `KSPType`, `KSPSetNestLevel()`, `PCSetKSPNestLevel()`, `KSPGetNestLevel()`
@*/
PetscErrorCode PCGetKSPNestLevel(PC pc, PetscInt *level)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(level, 2);
  *level = pc->kspnestlevel;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCCreate - Creates a preconditioner context, `PC`

  Collective

  Input Parameter:
. comm - MPI communicator

  Output Parameter:
. newpc - location to put the preconditioner context

  Level: developer

  Note:
  The default preconditioner for sparse matrices is `PCILU` or `PCICC` with 0 fill on one process and block Jacobi (`PCBJACOBI`) with `PCILU` or `PCICC`
  in parallel. For dense matrices it is always `PCNONE`.

.seealso: [](ch_ksp), `PC`, `PCSetUp()`, `PCApply()`, `PCDestroy()`
@*/
PetscErrorCode PCCreate(MPI_Comm comm, PC *newpc)
{
  PC pc;

  PetscFunctionBegin;
  PetscAssertPointer(newpc, 2);
  PetscCall(PCInitializePackage());

  PetscCall(PetscHeaderCreate(pc, PC_CLASSID, "PC", "Preconditioner", "PC", comm, PCDestroy, PCView));
  /* explicitly initialize the fields callers may inspect before PCSetUp() */
  pc->mat                 = NULL;
  pc->pmat                = NULL;
  pc->setupcalled         = 0;
  pc->setfromoptionscalled = 0;
  pc->data                = NULL;
  pc->diagonalscale       = PETSC_FALSE;
  pc->diagonalscaleleft   = NULL;
  pc->diagonalscaleright  = NULL;

  pc->modifysubmatrices  = NULL;
  pc->modifysubmatricesP = NULL;

  *newpc = pc;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApply - Applies the preconditioner to a vector.

  Collective

  Input Parameters:
+ pc - the preconditioner context
- x  - input vector

  Output Parameter:
.
y - output vector

  Level: developer

.seealso: [](ch_ksp), `PC`, `PCApplyTranspose()`, `PCApplyBAorAB()`
@*/
PetscErrorCode PCApply(PC pc, Vec x, Vec y)
{
  PetscInt m, n, mv, nv;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  /* use pmat to check vector sizes since for KSPLSQR the pmat may be of a different size than mat */
  PetscCall(MatGetLocalSize(pc->pmat, &m, &n));
  PetscCall(VecGetLocalSize(x, &mv));
  PetscCall(VecGetLocalSize(y, &nv));
  /* check pmat * y = x is feasible */
  PetscCheck(mv == m, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Preconditioner number of local rows %" PetscInt_FMT " does not equal input vector size %" PetscInt_FMT, m, mv);
  PetscCheck(nv == n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Preconditioner number of local columns %" PetscInt_FMT " does not equal output vector size %" PetscInt_FMT, n, nv);
  PetscCall(VecSetErrorIfLocked(y, 3));

  PetscCall(PCSetUp(pc));
  PetscCall(VecLockReadPush(x)); /* x must not be modified by the type-specific apply */
  PetscCall(PetscLogEventBegin(PC_Apply, pc, x, y, 0));
  PetscUseTypeMethod(pc, apply, x, y);
  PetscCall(PetscLogEventEnd(PC_Apply, pc, x, y, 0));
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscCall(VecLockReadPop(x));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMatApply - Applies the preconditioner to multiple vectors stored as a `MATDENSE`. Like `PCApply()`, `Y` and `X` must be different matrices.

  Collective

  Input Parameters:
+ pc - the preconditioner context
- X  - block of input vectors

  Output Parameter:
.
Y - block of output vectors

  Level: developer

.seealso: [](ch_ksp), `PC`, `PCApply()`, `KSPMatSolve()`
@*/
PetscErrorCode PCMatApply(PC pc, Mat X, Mat Y)
{
  Mat       A;
  Vec       cy, cx;
  PetscInt  m1, M1, m2, M2, n1, N1, n2, N2, m3, M3, n3, N3;
  PetscBool match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(X, MAT_CLASSID, 2);
  PetscValidHeaderSpecific(Y, MAT_CLASSID, 3);
  PetscCheckSameComm(pc, 1, X, 2);
  PetscCheckSameComm(pc, 1, Y, 3);
  PetscCheck(Y != X, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "Y and X must be different matrices");
  PetscCall(PCGetOperators(pc, NULL, &A));
  /* gather local and global sizes of Pmat (3), input block (2), and output block (1) */
  PetscCall(MatGetLocalSize(A, &m3, &n3));
  PetscCall(MatGetLocalSize(X, &m2, &n2));
  PetscCall(MatGetLocalSize(Y, &m1, &n1));
  PetscCall(MatGetSize(A, &M3, &N3));
  PetscCall(MatGetSize(X, &M2, &N2));
  PetscCall(MatGetSize(Y, &M1, &N1));
  PetscCheck(n1 == n2 && N1 == N2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of input vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and block of output vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")", n2, N2, n1, N1);
  PetscCheck(m2 == m3 && M2 == M3, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of input vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m2, M2, m3, M3, n3, N3);
  PetscCheck(m1 == n3 && M1 == N3, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of output vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m1, M1, m3, M3, n3, N3);
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)Y, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided 
block of output vectors not stored in a dense Mat");
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of input vectors not stored in a dense Mat");
  PetscCall(PCSetUp(pc));
  if (pc->ops->matapply) {
    PetscCall(PetscLogEventBegin(PC_MatApply, pc, X, Y, 0));
    PetscUseTypeMethod(pc, matapply, X, Y);
    PetscCall(PetscLogEventEnd(PC_MatApply, pc, X, Y, 0));
  } else {
    /* no block implementation: fall back to applying the preconditioner one column at a time */
    PetscCall(PetscInfo(pc, "PC type %s applying column by column\n", ((PetscObject)pc)->type_name));
    for (n1 = 0; n1 < N1; ++n1) {
      PetscCall(MatDenseGetColumnVecRead(X, n1, &cx));
      PetscCall(MatDenseGetColumnVecWrite(Y, n1, &cy));
      PetscCall(PCApply(pc, cx, cy));
      PetscCall(MatDenseRestoreColumnVecWrite(Y, n1, &cy));
      PetscCall(MatDenseRestoreColumnVecRead(X, n1, &cx));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.

  Collective

  Input Parameters:
+ pc - the preconditioner context
- x  - input vector

  Output Parameter:
. y - output vector

  Level: developer

  Note:
  Currently, this routine is implemented only for `PCICC` and `PCJACOBI` preconditioners.
.seealso: [](ch_ksp), `PC`, `PCApply()`, `PCApplySymmetricRight()`
@*/
PetscErrorCode PCApplySymmetricLeft(PC pc, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  PetscCall(VecLockReadPush(x)); /* x must not be modified by the type-specific apply */
  PetscCall(PetscLogEventBegin(PC_ApplySymmetricLeft, pc, x, y, 0));
  PetscUseTypeMethod(pc, applysymmetricleft, x, y);
  PetscCall(PetscLogEventEnd(PC_ApplySymmetricLeft, pc, x, y, 0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.

  Collective

  Input Parameters:
+ pc - the preconditioner context
- x  - input vector

  Output Parameter:
. y - output vector

  Level: developer

  Note:
  Currently, this routine is implemented only for `PCICC` and `PCJACOBI` preconditioners.
.seealso: [](ch_ksp), `PC`, `PCApply()`, `PCApplySymmetricLeft()`
@*/
PetscErrorCode PCApplySymmetricRight(PC pc, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  PetscCall(VecLockReadPush(x)); /* x must not be modified by the type-specific apply */
  PetscCall(PetscLogEventBegin(PC_ApplySymmetricRight, pc, x, y, 0));
  PetscUseTypeMethod(pc, applysymmetricright, x, y);
  PetscCall(PetscLogEventEnd(PC_ApplySymmetricRight, pc, x, y, 0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplyTranspose - Applies the transpose of preconditioner to a vector.

  Collective

  Input Parameters:
+ pc - the preconditioner context
- x  - input vector

  Output Parameter:
. y - output vector

  Level: developer

  Note:
  For complex numbers this applies the non-Hermitian transpose.
  Developer Note:
  We need to implement a `PCApplyHermitianTranspose()`

.seealso: [](ch_ksp), `PC`, `PCApply()`, `PCApplyBAorAB()`, `PCApplyBAorABTranspose()`, `PCApplyTransposeExists()`
@*/
PetscErrorCode PCApplyTranspose(PC pc, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  PetscCall(VecLockReadPush(x)); /* x must not be modified by the type-specific apply */
  PetscCall(PetscLogEventBegin(PC_Apply, pc, x, y, 0)); /* logged under the same event as PCApply() */
  PetscUseTypeMethod(pc, applytranspose, x, y);
  PetscCall(PetscLogEventEnd(PC_Apply, pc, x, y, 0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation

  Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. flg - `PETSC_TRUE` if a transpose operation is defined

  Level: developer

.seealso: [](ch_ksp), `PC`, `PCApplyTranspose()`
@*/
PetscErrorCode PCApplyTransposeExists(PC pc, PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(flg, 2);
  if (pc->ops->applytranspose) *flg = PETSC_TRUE;
  else *flg = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplyBAorAB - Applies the preconditioner and operator to a vector. $y = B*A*x $ or $ y = A*B*x$.

  Collective

  Input Parameters:
+ pc   - the preconditioner context
. side - indicates the preconditioner side, one of `PC_LEFT`, `PC_RIGHT`, or `PC_SYMMETRIC`
.
x - input vector
- work - work vector

  Output Parameter:
. y - output vector

  Level: developer

  Note:
  If the `PC` has had `PCSetDiagonalScale()` set then $ D M A D^{-1} $ for left preconditioning or $ D A M D^{-1} $ is actually applied.
  The specific `KSPSolve()` method must also be written to handle the post-solve "correction" for the diagonal scaling.

.seealso: [](ch_ksp), `PC`, `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorABTranspose()`
@*/
PetscErrorCode PCApplyBAorAB(PC pc, PCSide side, Vec x, Vec y, Vec work)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, side, 2);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 3);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 4);
  PetscValidHeaderSpecific(work, VEC_CLASSID, 5);
  PetscCheckSameComm(pc, 1, x, 3);
  PetscCheckSameComm(pc, 1, y, 4);
  PetscCheckSameComm(pc, 1, work, 5);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  PetscCheck(side == PC_LEFT || side == PC_SYMMETRIC || side == PC_RIGHT, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Side must be right, left, or symmetric");
  PetscCheck(!pc->diagonalscale || side != PC_SYMMETRIC, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot include diagonal scaling with symmetric preconditioner application");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 3, PETSC_TRUE));

  PetscCall(PCSetUp(pc));
  if (pc->diagonalscale) {
    if (pc->ops->applyBA) {
      Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
      PetscCall(VecDuplicate(x, &work2));
      PetscCall(PCDiagonalScaleRight(pc, x, work2));
      PetscUseTypeMethod(pc, applyBA, side, work2, y, work);
      PetscCall(PCDiagonalScaleLeft(pc, y, y));
      PetscCall(VecDestroy(&work2));
    } else if (side == PC_RIGHT) {
      PetscCall(PCDiagonalScaleRight(pc, x, y));
      PetscCall(PCApply(pc, y, work));
      PetscCall(MatMult(pc->mat, work, y));
      PetscCall(PCDiagonalScaleLeft(pc, y, y));
    } else if (side == PC_LEFT) {
      PetscCall(PCDiagonalScaleRight(pc, x, y));
      PetscCall(MatMult(pc->mat, y, work));
      PetscCall(PCApply(pc, work, y));
      PetscCall(PCDiagonalScaleLeft(pc, y, y));
    } else PetscCheck(side != PC_SYMMETRIC, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot provide diagonal scaling with symmetric application of preconditioner");
  } else {
    if (pc->ops->applyBA) {
      PetscUseTypeMethod(pc, applyBA, side, x, y, work);
    } else if (side == PC_RIGHT) {
      PetscCall(PCApply(pc, x, work));
      PetscCall(MatMult(pc->mat, work, y));
    } else if (side == PC_LEFT) {
      PetscCall(MatMult(pc->mat, x, work));
      PetscCall(PCApply(pc, work, y));
    } else if (side == PC_SYMMETRIC) {
      /* There's an extra copy here; maybe should provide 2 work vectors instead? */
      PetscCall(PCApplySymmetricRight(pc, x, work));
      PetscCall(MatMult(pc->mat, work, y));
      PetscCall(VecCopy(y, work));
      PetscCall(PCApplySymmetricLeft(pc, work, y));
    }
  }
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 4, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplyBAorABTranspose - Applies the transpose of the preconditioner
  and operator to a vector. That is, applies $B^T * A^T$ with left preconditioning,
  NOT $(B*A)^T = A^T*B^T$.

  Collective

  Input Parameters:
+ pc   - the preconditioner context
. side - indicates the preconditioner side, one of `PC_LEFT`, `PC_RIGHT`, or `PC_SYMMETRIC`
. x    - input vector
- work - work vector

  Output Parameter:
. y - output vector

  Level: developer

  Note:
  This routine is used internally so that the same Krylov code can be used to solve $A x = b$ and $A^T x = b$, with a preconditioner
  defined by $B^T$.
This is why this has the funny form that it computes $B^T * A^T$

.seealso: [](ch_ksp), `PC`, `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorAB()`
@*/
PetscErrorCode PCApplyBAorABTranspose(PC pc, PCSide side, Vec x, Vec y, Vec work)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 3);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 4);
  PetscValidHeaderSpecific(work, VEC_CLASSID, 5);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 3, PETSC_TRUE));
  /* a type-specific combined implementation, when available, handles all sides itself */
  if (pc->ops->applyBAtranspose) {
    PetscUseTypeMethod(pc, applyBAtranspose, side, x, y, work);
    if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 4, PETSC_FALSE));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCheck(side == PC_LEFT || side == PC_RIGHT, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Side must be right or left");

  PetscCall(PCSetUp(pc));
  if (side == PC_RIGHT) {
    PetscCall(PCApplyTranspose(pc, x, work));
    PetscCall(MatMultTranspose(pc->mat, work, y));
  } else if (side == PC_LEFT) {
    PetscCall(MatMultTranspose(pc->mat, x, work));
    PetscCall(PCApplyTranspose(pc, work, y));
  }
  /* add support for PC_SYMMETRIC */
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 4, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplyRichardsonExists - Determines whether a particular preconditioner has a
  built-in fast application of Richardson's method.

  Not Collective

  Input Parameter:
. pc - the preconditioner

  Output Parameter:
.
exists - `PETSC_TRUE` or `PETSC_FALSE`

  Level: developer

.seealso: [](ch_ksp), `PC`, `KSPRICHARDSON`, `PCApplyRichardson()`
@*/
PetscErrorCode PCApplyRichardsonExists(PC pc, PetscBool *exists)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(exists, 2);
  if (pc->ops->applyrichardson) *exists = PETSC_TRUE;
  else *exists = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplyRichardson - Applies several steps of Richardson iteration with
  the particular preconditioner. This routine is usually used by the
  Krylov solvers and not the application code directly.

  Collective

  Input Parameters:
+ pc        - the preconditioner context
. b         - the right-hand side
. w         - one work vector
. rtol      - relative decrease in residual norm convergence criteria
. abstol    - absolute residual norm convergence criteria
. dtol      - divergence residual norm increase criteria
. its       - the number of iterations to apply.
- guesszero - if the input x contains nonzero initial guess

  Output Parameters:
+ outits - number of iterations actually used (for SOR this always equals its)
. reason - the reason the apply terminated
- y      - the solution (also contains initial guess if guesszero is `PETSC_FALSE`)

  Level: developer

  Notes:
  Most preconditioners do not support this function. Use the command
  `PCApplyRichardsonExists()` to determine if one does.
915 916 Except for the `PCMG` this routine ignores the convergence tolerances 917 and always runs for the number of iterations 918 919 .seealso: [](ch_ksp), `PC`, `PCApplyRichardsonExists()` 920 @*/ 921 PetscErrorCode PCApplyRichardson(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason) 922 { 923 PetscFunctionBegin; 924 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 925 PetscValidHeaderSpecific(b, VEC_CLASSID, 2); 926 PetscValidHeaderSpecific(y, VEC_CLASSID, 3); 927 PetscValidHeaderSpecific(w, VEC_CLASSID, 4); 928 PetscCheck(b != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "b and y must be different vectors"); 929 PetscCall(PCSetUp(pc)); 930 PetscUseTypeMethod(pc, applyrichardson, b, y, w, rtol, abstol, dtol, its, guesszero, outits, reason); 931 PetscFunctionReturn(PETSC_SUCCESS); 932 } 933 934 /*@ 935 PCSetFailedReason - Sets the reason a `PCSetUp()` failed or `PC_NOERROR` if it did not fail 936 937 Logically Collective 938 939 Input Parameters: 940 + pc - the preconditioner context 941 - reason - the reason it failedx 942 943 Level: advanced 944 945 .seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCFailedReason` 946 @*/ 947 PetscErrorCode PCSetFailedReason(PC pc, PCFailedReason reason) 948 { 949 PetscFunctionBegin; 950 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 951 pc->failedreason = reason; 952 PetscFunctionReturn(PETSC_SUCCESS); 953 } 954 955 /*@ 956 PCGetFailedReason - Gets the reason a `PCSetUp()` failed or `PC_NOERROR` if it did not fail 957 958 Logically Collective 959 960 Input Parameter: 961 . pc - the preconditioner context 962 963 Output Parameter: 964 . reason - the reason it failed 965 966 Level: advanced 967 968 Note: 969 This is the maximum over reason over all ranks in the PC communicator. 
It is only valid after 970 a call `KSPCheckDot()` or `KSPCheckNorm()` inside a `KSPSolve()` or `PCReduceFailedReason()`. 971 It is not valid immediately after a `PCSetUp()` or `PCApply()`, then use `PCGetFailedReasonRank()` 972 973 .seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReasonRank()`, `PCSetFailedReason()` 974 @*/ 975 PetscErrorCode PCGetFailedReason(PC pc, PCFailedReason *reason) 976 { 977 PetscFunctionBegin; 978 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 979 if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled; 980 else *reason = pc->failedreason; 981 PetscFunctionReturn(PETSC_SUCCESS); 982 } 983 984 /*@ 985 PCGetFailedReasonRank - Gets the reason a `PCSetUp()` failed or `PC_NOERROR` if it did not fail on this MPI rank 986 987 Not Collective 988 989 Input Parameter: 990 . pc - the preconditioner context 991 992 Output Parameter: 993 . reason - the reason it failed 994 995 Level: advanced 996 997 Note: 998 Different processes may have different reasons or no reason, see `PCGetFailedReason()` 999 1000 .seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReason()`, `PCSetFailedReason()`, `PCReduceFailedReason()` 1001 @*/ 1002 PetscErrorCode PCGetFailedReasonRank(PC pc, PCFailedReason *reason) 1003 { 1004 PetscFunctionBegin; 1005 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1006 if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled; 1007 else *reason = pc->failedreason; 1008 PetscFunctionReturn(PETSC_SUCCESS); 1009 } 1010 1011 /*@ 1012 PCReduceFailedReason - Reduce the failed reason among the MPI processes that share the `PC` 1013 1014 Collective 1015 1016 Input Parameter: 1017 . pc - the preconditioner context 1018 1019 Level: advanced 1020 1021 Note: 1022 Different MPI processes may have different reasons or no reason, see `PCGetFailedReason()`. This routine 1023 makes them have a common value (failure if any MPI process had a failure). 
1024 1025 .seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReason()`, `PCSetFailedReason()` 1026 @*/ 1027 PetscErrorCode PCReduceFailedReason(PC pc) 1028 { 1029 PetscInt buf; 1030 1031 PetscFunctionBegin; 1032 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1033 buf = (PetscInt)pc->failedreason; 1034 PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &buf, 1, MPIU_INT, MPI_MAX, PetscObjectComm((PetscObject)pc))); 1035 pc->failedreason = (PCFailedReason)buf; 1036 PetscFunctionReturn(PETSC_SUCCESS); 1037 } 1038 1039 /* 1040 a setupcall of 0 indicates never setup, 1041 1 indicates has been previously setup 1042 -1 indicates a PCSetUp() was attempted and failed 1043 */ 1044 /*@ 1045 PCSetUp - Prepares for the use of a preconditioner. 1046 1047 Collective 1048 1049 Input Parameter: 1050 . pc - the preconditioner context 1051 1052 Level: developer 1053 1054 .seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()` 1055 @*/ 1056 PetscErrorCode PCSetUp(PC pc) 1057 { 1058 const char *def; 1059 PetscObjectState matstate, matnonzerostate; 1060 1061 PetscFunctionBegin; 1062 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1063 PetscCheck(pc->mat, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be set first"); 1064 1065 if (pc->setupcalled && pc->reusepreconditioner) { 1066 PetscCall(PetscInfo(pc, "Leaving PC with identical preconditioner since reuse preconditioner is set\n")); 1067 PetscFunctionReturn(PETSC_SUCCESS); 1068 } 1069 1070 PetscCall(PetscObjectStateGet((PetscObject)pc->pmat, &matstate)); 1071 PetscCall(MatGetNonzeroState(pc->pmat, &matnonzerostate)); 1072 if (!pc->setupcalled) { 1073 //PetscCall(PetscInfo(pc, "Setting up PC for first time\n")); 1074 pc->flag = DIFFERENT_NONZERO_PATTERN; 1075 } else if (matstate == pc->matstate) PetscFunctionReturn(PETSC_SUCCESS); 1076 else { 1077 if (matnonzerostate != pc->matnonzerostate) { 1078 PetscCall(PetscInfo(pc, "Setting up PC with different nonzero pattern\n")); 1079 
pc->flag = DIFFERENT_NONZERO_PATTERN; 1080 } else { 1081 //PetscCall(PetscInfo(pc, "Setting up PC with same nonzero pattern\n")); 1082 pc->flag = SAME_NONZERO_PATTERN; 1083 } 1084 } 1085 pc->matstate = matstate; 1086 pc->matnonzerostate = matnonzerostate; 1087 1088 if (!((PetscObject)pc)->type_name) { 1089 PetscCall(PCGetDefaultType_Private(pc, &def)); 1090 PetscCall(PCSetType(pc, def)); 1091 } 1092 1093 PetscCall(MatSetErrorIfFailure(pc->pmat, pc->erroriffailure)); 1094 PetscCall(MatSetErrorIfFailure(pc->mat, pc->erroriffailure)); 1095 PetscCall(PetscLogEventBegin(PC_SetUp, pc, 0, 0, 0)); 1096 if (pc->ops->setup) { 1097 PetscCall(PCLogEventsDeactivatePush()); 1098 PetscUseTypeMethod(pc, setup); 1099 PetscCall(PCLogEventsDeactivatePop()); 1100 } 1101 PetscCall(PetscLogEventEnd(PC_SetUp, pc, 0, 0, 0)); 1102 if (!pc->setupcalled) pc->setupcalled = 1; 1103 PetscFunctionReturn(PETSC_SUCCESS); 1104 } 1105 1106 /*@ 1107 PCSetUpOnBlocks - Sets up the preconditioner for each block in 1108 the block Jacobi, block Gauss-Seidel, and overlapping Schwarz 1109 methods. 1110 1111 Collective 1112 1113 Input Parameter: 1114 . pc - the preconditioner context 1115 1116 Level: developer 1117 1118 Note: 1119 For nested preconditioners such as `PCBJACOBI` `PCSetUp()` is not called on each sub-`KSP` when `PCSetUp()` is 1120 called on the outer `PC`, this routine ensures it is called. 
1121 1122 .seealso: [](ch_ksp), `PC`, `PCSetUp()`, `PCCreate()`, `PCApply()`, `PCDestroy()` 1123 @*/ 1124 PetscErrorCode PCSetUpOnBlocks(PC pc) 1125 { 1126 PetscFunctionBegin; 1127 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1128 if (!pc->ops->setuponblocks) PetscFunctionReturn(PETSC_SUCCESS); 1129 PetscCall(PetscLogEventBegin(PC_SetUpOnBlocks, pc, 0, 0, 0)); 1130 PetscCall(PCLogEventsDeactivatePush()); 1131 PetscUseTypeMethod(pc, setuponblocks); 1132 PetscCall(PCLogEventsDeactivatePop()); 1133 PetscCall(PetscLogEventEnd(PC_SetUpOnBlocks, pc, 0, 0, 0)); 1134 PetscFunctionReturn(PETSC_SUCCESS); 1135 } 1136 1137 /*@C 1138 PCSetModifySubMatrices - Sets a user-defined routine for modifying the 1139 submatrices that arise within certain subdomain-based preconditioners such as `PCASM` 1140 1141 Logically Collective 1142 1143 Input Parameters: 1144 + pc - the preconditioner context 1145 . func - routine for modifying the submatrices 1146 - ctx - optional user-defined context (may be `NULL`) 1147 1148 Calling sequence of `func`: 1149 + pc - the preconditioner context 1150 . nsub - number of index sets 1151 . row - an array of index sets that contain the global row numbers 1152 that comprise each local submatrix 1153 . col - an array of index sets that contain the global column numbers 1154 that comprise each local submatrix 1155 . submat - array of local submatrices 1156 - ctx - optional user-defined context for private data for the 1157 user-defined func routine (may be `NULL`) 1158 1159 Level: advanced 1160 1161 Notes: 1162 The basic submatrices are extracted from the preconditioner matrix as 1163 usual; the user can then alter these (for example, to set different boundary 1164 conditions for each submatrix) before they are used for the local solves. 1165 1166 `PCSetModifySubMatrices()` MUST be called before `KSPSetUp()` and 1167 `KSPSolve()`. 
1168 1169 A routine set by `PCSetModifySubMatrices()` is currently called within 1170 the block Jacobi (`PCBJACOBI`) and additive Schwarz (`PCASM`) 1171 preconditioners. All other preconditioners ignore this routine. 1172 1173 .seealso: [](ch_ksp), `PC`, `PCBJACOBI`, `PCASM`, `PCModifySubMatrices()` 1174 @*/ 1175 PetscErrorCode PCSetModifySubMatrices(PC pc, PetscErrorCode (*func)(PC pc, PetscInt nsub, const IS row[], const IS col[], Mat submat[], void *ctx), void *ctx) 1176 { 1177 PetscFunctionBegin; 1178 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1179 pc->modifysubmatrices = func; 1180 pc->modifysubmatricesP = ctx; 1181 PetscFunctionReturn(PETSC_SUCCESS); 1182 } 1183 1184 /*@C 1185 PCModifySubMatrices - Calls an optional user-defined routine within 1186 certain preconditioners if one has been set with `PCSetModifySubMatrices()`. 1187 1188 Collective 1189 1190 Input Parameters: 1191 + pc - the preconditioner context 1192 . nsub - the number of local submatrices 1193 . row - an array of index sets that contain the global row numbers 1194 that comprise each local submatrix 1195 . col - an array of index sets that contain the global column numbers 1196 that comprise each local submatrix 1197 . submat - array of local submatrices 1198 - ctx - optional user-defined context for private data for the 1199 user-defined routine (may be `NULL`) 1200 1201 Output Parameter: 1202 . submat - array of local submatrices (the entries of which may 1203 have been modified) 1204 1205 Level: developer 1206 1207 Note: 1208 The user should NOT generally call this routine, as it will 1209 automatically be called within certain preconditioners. 
1210 1211 .seealso: [](ch_ksp), `PC`, `PCSetModifySubMatrices()` 1212 @*/ 1213 PetscErrorCode PCModifySubMatrices(PC pc, PetscInt nsub, const IS row[], const IS col[], Mat submat[], void *ctx) 1214 { 1215 PetscFunctionBegin; 1216 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1217 if (!pc->modifysubmatrices) PetscFunctionReturn(PETSC_SUCCESS); 1218 PetscCall(PetscLogEventBegin(PC_ModifySubMatrices, pc, 0, 0, 0)); 1219 PetscCall((*pc->modifysubmatrices)(pc, nsub, row, col, submat, ctx)); 1220 PetscCall(PetscLogEventEnd(PC_ModifySubMatrices, pc, 0, 0, 0)); 1221 PetscFunctionReturn(PETSC_SUCCESS); 1222 } 1223 1224 /*@ 1225 PCSetOperators - Sets the matrix associated with the linear system and 1226 a (possibly) different one associated with the preconditioner. 1227 1228 Logically Collective 1229 1230 Input Parameters: 1231 + pc - the preconditioner context 1232 . Amat - the matrix that defines the linear system 1233 - Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat. 1234 1235 Level: intermediate 1236 1237 Notes: 1238 Passing a `NULL` for `Amat` or `Pmat` removes the matrix that is currently used. 1239 1240 If you wish to replace either `Amat` or `Pmat` but leave the other one untouched then 1241 first call `KSPGetOperators()` to get the one you wish to keep, call `PetscObjectReference()` 1242 on it and then pass it back in in your call to `KSPSetOperators()`. 1243 1244 More Notes about Repeated Solution of Linear Systems: 1245 PETSc does NOT reset the matrix entries of either `Amat` or `Pmat` 1246 to zero after a linear solve; the user is completely responsible for 1247 matrix assembly. See the routine `MatZeroEntries()` if desiring to 1248 zero all elements of a matrix. 
1249 1250 .seealso: [](ch_ksp), `PC`, `PCGetOperators()`, `MatZeroEntries()` 1251 @*/ 1252 PetscErrorCode PCSetOperators(PC pc, Mat Amat, Mat Pmat) 1253 { 1254 PetscInt m1, n1, m2, n2; 1255 1256 PetscFunctionBegin; 1257 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1258 if (Amat) PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2); 1259 if (Pmat) PetscValidHeaderSpecific(Pmat, MAT_CLASSID, 3); 1260 if (Amat) PetscCheckSameComm(pc, 1, Amat, 2); 1261 if (Pmat) PetscCheckSameComm(pc, 1, Pmat, 3); 1262 if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) { 1263 PetscCall(MatGetLocalSize(Amat, &m1, &n1)); 1264 PetscCall(MatGetLocalSize(pc->mat, &m2, &n2)); 1265 PetscCheck(m1 == m2 && n1 == n2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot change local size of Amat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT, m2, n2, m1, n1); 1266 PetscCall(MatGetLocalSize(Pmat, &m1, &n1)); 1267 PetscCall(MatGetLocalSize(pc->pmat, &m2, &n2)); 1268 PetscCheck(m1 == m2 && n1 == n2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot change local size of Pmat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT, m2, n2, m1, n1); 1269 } 1270 1271 if (Pmat != pc->pmat) { 1272 /* changing the operator that defines the preconditioner thus reneed to clear current states so new preconditioner is built */ 1273 pc->matnonzerostate = -1; 1274 pc->matstate = -1; 1275 } 1276 1277 /* reference first in case the matrices are the same */ 1278 if (Amat) PetscCall(PetscObjectReference((PetscObject)Amat)); 1279 PetscCall(MatDestroy(&pc->mat)); 1280 if (Pmat) PetscCall(PetscObjectReference((PetscObject)Pmat)); 1281 PetscCall(MatDestroy(&pc->pmat)); 1282 pc->mat = Amat; 1283 pc->pmat = Pmat; 1284 PetscFunctionReturn(PETSC_SUCCESS); 1285 } 1286 1287 /*@ 1288 PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner has changed. 
1289 1290 Logically Collective 1291 1292 Input Parameters: 1293 + pc - the preconditioner context 1294 - flag - `PETSC_TRUE` do not compute a new preconditioner, `PETSC_FALSE` do compute a new preconditioner 1295 1296 Level: intermediate 1297 1298 Note: 1299 Normally if a matrix inside a `PC` changes the `PC` automatically updates itself using information from the changed matrix. This option 1300 prevents this. 1301 1302 .seealso: [](ch_ksp), `PC`, `PCGetOperators()`, `MatZeroEntries()`, `PCGetReusePreconditioner()`, `KSPSetReusePreconditioner()` 1303 @*/ 1304 PetscErrorCode PCSetReusePreconditioner(PC pc, PetscBool flag) 1305 { 1306 PetscFunctionBegin; 1307 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1308 PetscValidLogicalCollectiveBool(pc, flag, 2); 1309 pc->reusepreconditioner = flag; 1310 PetscFunctionReturn(PETSC_SUCCESS); 1311 } 1312 1313 /*@ 1314 PCGetReusePreconditioner - Determines if the `PC` reuses the current preconditioner even if the operator in the preconditioner has changed. 1315 1316 Not Collective 1317 1318 Input Parameter: 1319 . pc - the preconditioner context 1320 1321 Output Parameter: 1322 . flag - `PETSC_TRUE` do not compute a new preconditioner, `PETSC_FALSE` do compute a new preconditioner 1323 1324 Level: intermediate 1325 1326 .seealso: [](ch_ksp), `PC`, `PCGetOperators()`, `MatZeroEntries()`, `PCSetReusePreconditioner()` 1327 @*/ 1328 PetscErrorCode PCGetReusePreconditioner(PC pc, PetscBool *flag) 1329 { 1330 PetscFunctionBegin; 1331 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1332 PetscAssertPointer(flag, 2); 1333 *flag = pc->reusepreconditioner; 1334 PetscFunctionReturn(PETSC_SUCCESS); 1335 } 1336 1337 /*@ 1338 PCGetOperators - Gets the matrix associated with the linear system and 1339 possibly a different one associated with the preconditioner. 1340 1341 Not Collective, though parallel `Mat`s are returned if `pc` is parallel 1342 1343 Input Parameter: 1344 . 
pc - the preconditioner context 1345 1346 Output Parameters: 1347 + Amat - the matrix defining the linear system 1348 - Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat. 1349 1350 Level: intermediate 1351 1352 Note: 1353 Does not increase the reference count of the matrices, so you should not destroy them 1354 1355 Alternative usage: If the operators have NOT been set with `KSPSetOperators()`/`PCSetOperators()` then the operators 1356 are created in `PC` and returned to the user. In this case, if both operators 1357 mat and pmat are requested, two DIFFERENT operators will be returned. If 1358 only one is requested both operators in the PC will be the same (i.e. as 1359 if one had called `KSPSetOperators()`/`PCSetOperators()` with the same argument for both Mats). 1360 The user must set the sizes of the returned matrices and their type etc just 1361 as if the user created them with `MatCreate()`. For example, 1362 1363 .vb 1364 KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to 1365 set size, type, etc of Amat 1366 1367 MatCreate(comm,&mat); 1368 KSP/PCSetOperators(ksp/pc,Amat,Amat); 1369 PetscObjectDereference((PetscObject)mat); 1370 set size, type, etc of Amat 1371 .ve 1372 1373 and 1374 1375 .vb 1376 KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to 1377 set size, type, etc of Amat and Pmat 1378 1379 MatCreate(comm,&Amat); 1380 MatCreate(comm,&Pmat); 1381 KSP/PCSetOperators(ksp/pc,Amat,Pmat); 1382 PetscObjectDereference((PetscObject)Amat); 1383 PetscObjectDereference((PetscObject)Pmat); 1384 set size, type, etc of Amat and Pmat 1385 .ve 1386 1387 The rationale for this support is so that when creating a `TS`, `SNES`, or `KSP` the hierarchy 1388 of underlying objects (i.e. `SNES`, `KSP`, `PC`, `Mat`) and their lifespans can be completely 1389 managed by the top most level object (i.e. the `TS`, `SNES`, or `KSP`). 
Another way to look 1390 at this is when you create a `SNES` you do not NEED to create a `KSP` and attach it to 1391 the `SNES` object (the `SNES` object manages it for you). Similarly when you create a KSP 1392 you do not need to attach a `PC` to it (the `KSP` object manages the `PC` object for you). 1393 Thus, why should YOU have to create the `Mat` and attach it to the `SNES`/`KSP`/`PC`, when 1394 it can be created for you? 1395 1396 .seealso: [](ch_ksp), `PC`, `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperatorsSet()` 1397 @*/ 1398 PetscErrorCode PCGetOperators(PC pc, Mat *Amat, Mat *Pmat) 1399 { 1400 PetscFunctionBegin; 1401 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1402 if (Amat) { 1403 if (!pc->mat) { 1404 if (pc->pmat && !Pmat) { /* Pmat has been set, but user did not request it, so use for Amat */ 1405 pc->mat = pc->pmat; 1406 PetscCall(PetscObjectReference((PetscObject)pc->mat)); 1407 } else { /* both Amat and Pmat are empty */ 1408 PetscCall(MatCreate(PetscObjectComm((PetscObject)pc), &pc->mat)); 1409 if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */ 1410 pc->pmat = pc->mat; 1411 PetscCall(PetscObjectReference((PetscObject)pc->pmat)); 1412 } 1413 } 1414 } 1415 *Amat = pc->mat; 1416 } 1417 if (Pmat) { 1418 if (!pc->pmat) { 1419 if (pc->mat && !Amat) { /* Amat has been set but was not requested, so use for pmat */ 1420 pc->pmat = pc->mat; 1421 PetscCall(PetscObjectReference((PetscObject)pc->pmat)); 1422 } else { 1423 PetscCall(MatCreate(PetscObjectComm((PetscObject)pc), &pc->pmat)); 1424 if (!Amat) { /* user did NOT request Amat, so make same as Pmat */ 1425 pc->mat = pc->pmat; 1426 PetscCall(PetscObjectReference((PetscObject)pc->mat)); 1427 } 1428 } 1429 } 1430 *Pmat = pc->pmat; 1431 } 1432 PetscFunctionReturn(PETSC_SUCCESS); 1433 } 1434 1435 /*@ 1436 PCGetOperatorsSet - Determines if the matrix associated with the linear system and 1437 possibly a different one associated with the preconditioner have been set 
in the `PC`. 1438 1439 Not Collective, though the results on all processes should be the same 1440 1441 Input Parameter: 1442 . pc - the preconditioner context 1443 1444 Output Parameters: 1445 + mat - the matrix associated with the linear system was set 1446 - pmat - matrix associated with the preconditioner was set, usually the same 1447 1448 Level: intermediate 1449 1450 .seealso: [](ch_ksp), `PC`, `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperators()` 1451 @*/ 1452 PetscErrorCode PCGetOperatorsSet(PC pc, PetscBool *mat, PetscBool *pmat) 1453 { 1454 PetscFunctionBegin; 1455 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1456 if (mat) *mat = (pc->mat) ? PETSC_TRUE : PETSC_FALSE; 1457 if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE; 1458 PetscFunctionReturn(PETSC_SUCCESS); 1459 } 1460 1461 /*@ 1462 PCFactorGetMatrix - Gets the factored matrix from the 1463 preconditioner context. This routine is valid only for the `PCLU`, 1464 `PCILU`, `PCCHOLESKY`, and `PCICC` methods. 1465 1466 Not Collective though `mat` is parallel if `pc` is parallel 1467 1468 Input Parameter: 1469 . pc - the preconditioner context 1470 1471 Output Parameters: 1472 . mat - the factored matrix 1473 1474 Level: advanced 1475 1476 Note: 1477 Does not increase the reference count for `mat` so DO NOT destroy it 1478 1479 .seealso: [](ch_ksp), `PC`, `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC` 1480 @*/ 1481 PetscErrorCode PCFactorGetMatrix(PC pc, Mat *mat) 1482 { 1483 PetscFunctionBegin; 1484 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1485 PetscAssertPointer(mat, 2); 1486 PetscCall(PCFactorSetUpMatSolverType(pc)); 1487 PetscUseTypeMethod(pc, getfactoredmatrix, mat); 1488 PetscFunctionReturn(PETSC_SUCCESS); 1489 } 1490 1491 /*@ 1492 PCSetOptionsPrefix - Sets the prefix used for searching for all 1493 `PC` options in the database. 
1494 1495 Logically Collective 1496 1497 Input Parameters: 1498 + pc - the preconditioner context 1499 - prefix - the prefix string to prepend to all `PC` option requests 1500 1501 Note: 1502 A hyphen (-) must NOT be given at the beginning of the prefix name. 1503 The first character of all runtime options is AUTOMATICALLY the 1504 hyphen. 1505 1506 Level: advanced 1507 1508 .seealso: [](ch_ksp), `PC`, `PCSetFromOptions`, `PCAppendOptionsPrefix()`, `PCGetOptionsPrefix()` 1509 @*/ 1510 PetscErrorCode PCSetOptionsPrefix(PC pc, const char prefix[]) 1511 { 1512 PetscFunctionBegin; 1513 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1514 PetscCall(PetscObjectSetOptionsPrefix((PetscObject)pc, prefix)); 1515 PetscFunctionReturn(PETSC_SUCCESS); 1516 } 1517 1518 /*@ 1519 PCAppendOptionsPrefix - Appends to the prefix used for searching for all 1520 `PC` options in the database. 1521 1522 Logically Collective 1523 1524 Input Parameters: 1525 + pc - the preconditioner context 1526 - prefix - the prefix string to prepend to all `PC` option requests 1527 1528 Note: 1529 A hyphen (-) must NOT be given at the beginning of the prefix name. 1530 The first character of all runtime options is AUTOMATICALLY the 1531 hyphen. 1532 1533 Level: advanced 1534 1535 .seealso: [](ch_ksp), `PC`, `PCSetFromOptions`, `PCSetOptionsPrefix()`, `PCGetOptionsPrefix()` 1536 @*/ 1537 PetscErrorCode PCAppendOptionsPrefix(PC pc, const char prefix[]) 1538 { 1539 PetscFunctionBegin; 1540 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1541 PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)pc, prefix)); 1542 PetscFunctionReturn(PETSC_SUCCESS); 1543 } 1544 1545 /*@ 1546 PCGetOptionsPrefix - Gets the prefix used for searching for all 1547 PC options in the database. 1548 1549 Not Collective 1550 1551 Input Parameter: 1552 . pc - the preconditioner context 1553 1554 Output Parameter: 1555 . 
prefix - pointer to the prefix string used, is returned 1556 1557 Level: advanced 1558 1559 Fortran Note: 1560 The user should pass in a string `prefix` of 1561 sufficient length to hold the prefix. 1562 1563 .seealso: [](ch_ksp), `PC`, `PCSetFromOptions`, `PCSetOptionsPrefix()`, `PCAppendOptionsPrefix()` 1564 @*/ 1565 PetscErrorCode PCGetOptionsPrefix(PC pc, const char *prefix[]) 1566 { 1567 PetscFunctionBegin; 1568 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1569 PetscAssertPointer(prefix, 2); 1570 PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc, prefix)); 1571 PetscFunctionReturn(PETSC_SUCCESS); 1572 } 1573 1574 /* 1575 Indicates the right-hand side will be changed by KSPSolve(), this occurs for a few 1576 preconditioners including BDDC and Eisentat that transform the equations before applying 1577 the Krylov methods 1578 */ 1579 PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC pc, PetscBool *change) 1580 { 1581 PetscFunctionBegin; 1582 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1583 PetscAssertPointer(change, 2); 1584 *change = PETSC_FALSE; 1585 PetscTryMethod(pc, "PCPreSolveChangeRHS_C", (PC, PetscBool *), (pc, change)); 1586 PetscFunctionReturn(PETSC_SUCCESS); 1587 } 1588 1589 /*@ 1590 PCPreSolve - Optional pre-solve phase, intended for any 1591 preconditioner-specific actions that must be performed before 1592 the iterative solve itself. 1593 1594 Collective 1595 1596 Input Parameters: 1597 + pc - the preconditioner context 1598 - ksp - the Krylov subspace context 1599 1600 Level: developer 1601 1602 Example Usage: 1603 .vb 1604 PCPreSolve(pc,ksp); 1605 KSPSolve(ksp,b,x); 1606 PCPostSolve(pc,ksp); 1607 .ve 1608 1609 Notes: 1610 The pre-solve phase is distinct from the `PCSetUp()` phase. 1611 1612 `KSPSolve()` calls this directly, so is rarely called by the user. 
1613 1614 .seealso: [](ch_ksp), `PC`, `PCPostSolve()` 1615 @*/ 1616 PetscErrorCode PCPreSolve(PC pc, KSP ksp) 1617 { 1618 Vec x, rhs; 1619 1620 PetscFunctionBegin; 1621 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1622 PetscValidHeaderSpecific(ksp, KSP_CLASSID, 2); 1623 pc->presolvedone++; 1624 PetscCheck(pc->presolvedone <= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot embed PCPreSolve() more than twice"); 1625 PetscCall(KSPGetSolution(ksp, &x)); 1626 PetscCall(KSPGetRhs(ksp, &rhs)); 1627 1628 if (pc->ops->presolve) PetscUseTypeMethod(pc, presolve, ksp, rhs, x); 1629 else if (pc->presolve) PetscCall(pc->presolve(pc, ksp)); 1630 PetscFunctionReturn(PETSC_SUCCESS); 1631 } 1632 1633 /*@C 1634 PCSetPreSolve - Sets function used by `PCPreSolve()` which is intended for any 1635 preconditioner-specific actions that must be performed before 1636 the iterative solve itself. 1637 1638 Logically Collective 1639 1640 Input Parameters: 1641 + pc - the preconditioner object 1642 - presolve - the function to call before the solve 1643 1644 Calling sequence of `presolve`: 1645 + pc - the `PC` context 1646 - ksp - the `KSP` context 1647 1648 Level: developer 1649 1650 .seealso: [](ch_ksp), `PC`, `PCSetUp()`, `PCPreSolve()` 1651 @*/ 1652 PetscErrorCode PCSetPreSolve(PC pc, PetscErrorCode (*presolve)(PC pc, KSP ksp)) 1653 { 1654 PetscFunctionBegin; 1655 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1656 pc->presolve = presolve; 1657 PetscFunctionReturn(PETSC_SUCCESS); 1658 } 1659 1660 /*@ 1661 PCPostSolve - Optional post-solve phase, intended for any 1662 preconditioner-specific actions that must be performed after 1663 the iterative solve itself. 
1664 1665 Collective 1666 1667 Input Parameters: 1668 + pc - the preconditioner context 1669 - ksp - the Krylov subspace context 1670 1671 Example Usage: 1672 .vb 1673 PCPreSolve(pc,ksp); 1674 KSPSolve(ksp,b,x); 1675 PCPostSolve(pc,ksp); 1676 .ve 1677 1678 Level: developer 1679 1680 Note: 1681 `KSPSolve()` calls this routine directly, so it is rarely called by the user. 1682 1683 .seealso: [](ch_ksp), `PC`, `PCSetPostSolve()`, `PCSetPresolve()`, `PCPreSolve()`, `KSPSolve()` 1684 @*/ 1685 PetscErrorCode PCPostSolve(PC pc, KSP ksp) 1686 { 1687 Vec x, rhs; 1688 1689 PetscFunctionBegin; 1690 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1691 PetscValidHeaderSpecific(ksp, KSP_CLASSID, 2); 1692 pc->presolvedone--; 1693 PetscCall(KSPGetSolution(ksp, &x)); 1694 PetscCall(KSPGetRhs(ksp, &rhs)); 1695 PetscTryTypeMethod(pc, postsolve, ksp, rhs, x); 1696 PetscFunctionReturn(PETSC_SUCCESS); 1697 } 1698 1699 /*@ 1700 PCLoad - Loads a `PC` that has been stored in binary with `PCView()`. 1701 1702 Collective 1703 1704 Input Parameters: 1705 + newdm - the newly loaded `PC`, this needs to have been created with `PCCreate()` or 1706 some related function before a call to `PCLoad()`. 1707 - viewer - binary file viewer, obtained from `PetscViewerBinaryOpen()` 1708 1709 Level: intermediate 1710 1711 Note: 1712 The type is determined by the data in the file, any `PCType` set into the `PC` before this call is ignored. 
1713 1714 .seealso: [](ch_ksp), `PC`, `PetscViewerBinaryOpen()`, `PCView()`, `MatLoad()`, `VecLoad()` 1715 @*/ 1716 PetscErrorCode PCLoad(PC newdm, PetscViewer viewer) 1717 { 1718 PetscBool isbinary; 1719 PetscInt classid; 1720 char type[256]; 1721 1722 PetscFunctionBegin; 1723 PetscValidHeaderSpecific(newdm, PC_CLASSID, 1); 1724 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 1725 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary)); 1726 PetscCheck(isbinary, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid viewer; open viewer with PetscViewerBinaryOpen()"); 1727 1728 PetscCall(PetscViewerBinaryRead(viewer, &classid, 1, NULL, PETSC_INT)); 1729 PetscCheck(classid == PC_FILE_CLASSID, PetscObjectComm((PetscObject)newdm), PETSC_ERR_ARG_WRONG, "Not PC next in file"); 1730 PetscCall(PetscViewerBinaryRead(viewer, type, 256, NULL, PETSC_CHAR)); 1731 PetscCall(PCSetType(newdm, type)); 1732 PetscTryTypeMethod(newdm, load, viewer); 1733 PetscFunctionReturn(PETSC_SUCCESS); 1734 } 1735 1736 #include <petscdraw.h> 1737 #if defined(PETSC_HAVE_SAWS) 1738 #include <petscviewersaws.h> 1739 #endif 1740 1741 /*@ 1742 PCViewFromOptions - View from the `PC` based on options in the options database 1743 1744 Collective 1745 1746 Input Parameters: 1747 + A - the `PC` context 1748 . 
obj - Optional object that provides the options prefix 1749 - name - command line option 1750 1751 Level: intermediate 1752 1753 .seealso: [](ch_ksp), `PC`, `PCView`, `PetscObjectViewFromOptions()`, `PCCreate()` 1754 @*/ 1755 PetscErrorCode PCViewFromOptions(PC A, PetscObject obj, const char name[]) 1756 { 1757 PetscFunctionBegin; 1758 PetscValidHeaderSpecific(A, PC_CLASSID, 1); 1759 PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name)); 1760 PetscFunctionReturn(PETSC_SUCCESS); 1761 } 1762 1763 /*@ 1764 PCView - Prints information about the `PC` 1765 1766 Collective 1767 1768 Input Parameters: 1769 + pc - the `PC` context 1770 - viewer - optional visualization context 1771 1772 Level: developer 1773 1774 Notes: 1775 The available visualization contexts include 1776 + `PETSC_VIEWER_STDOUT_SELF` - standard output (default) 1777 - `PETSC_VIEWER_STDOUT_WORLD` - synchronized standard 1778 output where only the first processor opens 1779 the file. All other processors send their 1780 data to the first processor to print. 1781 1782 The user can open an alternative visualization contexts with 1783 `PetscViewerASCIIOpen()` (output to a specified file). 

.seealso: [](ch_ksp), `PC`, `PetscViewer`, `KSPView()`, `PetscViewerASCIIOpen()`
@*/
PetscErrorCode PCView(PC pc, PetscViewer viewer)
{
  PCType            cstr;
  PetscViewerFormat format;
  PetscBool         iascii, isstring, isbinary, isdraw, pop = PETSC_FALSE;
#if defined(PETSC_HAVE_SAWS)
  PetscBool issaws;
#endif

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* default to stdout on the PC's communicator when no viewer is supplied */
  if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc), &viewer));
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCheckSameComm(pc, 1, viewer, 2);

  /* dispatch on the concrete viewer type; exactly one of these flags will be set */
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
#if defined(PETSC_HAVE_SAWS)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
#endif

  if (iascii) {
    PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)pc, viewer));
    if (!pc->setupcalled) PetscCall(PetscViewerASCIIPrintf(viewer, "  PC has not been set up so information may be incomplete\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    PetscTryTypeMethod(pc, view, viewer); /* type-specific view, if the implementation provides one */
    PetscCall(PetscViewerASCIIPopTab(viewer));
    if (pc->mat) {
      PetscCall(PetscViewerGetFormat(viewer, &format));
      if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
        /* show matrices in summary form; pop records that we must restore the format below */
        PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_INFO));
        pop = PETSC_TRUE;
      }
      if (pc->pmat == pc->mat) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  linear system matrix = precond matrix:\n"));
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(MatView(pc->mat, viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      } else {
        if (pc->pmat) {
          PetscCall(PetscViewerASCIIPrintf(viewer, "  linear system matrix followed by preconditioner matrix:\n"));
        } else {
          PetscCall(PetscViewerASCIIPrintf(viewer, "  linear system matrix:\n"));
        }
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(MatView(pc->mat, viewer));
        if (pc->pmat) PetscCall(MatView(pc->pmat, viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      }
      if (pop) PetscCall(PetscViewerPopFormat(viewer));
    }
  } else if (isstring) {
    PetscCall(PCGetType(pc, &cstr));
    PetscCall(PetscViewerStringSPrintf(viewer, " PCType: %-7.7s", cstr));
    PetscTryTypeMethod(pc, view, viewer);
    if (pc->mat) PetscCall(MatView(pc->mat, viewer));
    if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat, viewer));
  } else if (isbinary) {
    PetscInt    classid = PC_FILE_CLASSID;
    MPI_Comm    comm;
    PetscMPIInt rank;
    char        type[256];

    PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
    PetscCallMPI(MPI_Comm_rank(comm, &rank));
    /* only rank 0 writes the header (classid + fixed 256-byte type name) */
    if (rank == 0) {
      PetscCall(PetscViewerBinaryWrite(viewer, &classid, 1, PETSC_INT));
      PetscCall(PetscStrncpy(type, ((PetscObject)pc)->type_name, 256));
      PetscCall(PetscViewerBinaryWrite(viewer, type, 256, PETSC_CHAR));
    }
    PetscTryTypeMethod(pc, view, viewer);
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x, y, bottom, h;
    PetscInt  n;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    /* label includes the global row count when an operator is attached; note str may truncate long type names */
    if (pc->mat) {
      PetscCall(MatGetSize(pc->mat, &n, NULL));
      PetscCall(PetscSNPrintf(str, 25, "PC: %s (%" PetscInt_FMT ")", ((PetscObject)pc)->type_name, n));
    } else {
      PetscCall(PetscSNPrintf(str, 25, "PC: %s", ((PetscObject)pc)->type_name));
    }
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    bottom = y - h;
    /* push the drawing point below the box so a type-specific view draws underneath, then restore it */
    PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom));
    PetscTryTypeMethod(pc, view, viewer);
    PetscCall(PetscDrawPopCurrentPoint(draw));
#if defined(PETSC_HAVE_SAWS)
  } else if (issaws) {
    PetscMPIInt rank;

    PetscCall(PetscObjectName((PetscObject)pc));
    /* NOTE(review): rank is taken on PETSC_COMM_WORLD here, not the PC's communicator — confirm intended for SAWS publishing */
    PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
    if (!((PetscObject)pc)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)pc, viewer));
    if (pc->mat) PetscCall(MatView(pc->mat, viewer));
    if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat, viewer));
#endif
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCRegister - Adds a method (`PCType`) to the preconditioner package.

  Not collective. No Fortran Support

  Input Parameters:
+ sname    - name of a new user-defined solver
- function - routine to create method context

  Example Usage:
.vb
   PCRegister("my_solver", MySolverCreate);
.ve

  Then, your solver can be chosen with the procedural interface via
$     PCSetType(pc, "my_solver")
  or at runtime via the option
$     -pc_type my_solver

  Level: advanced

  Note:
  `PCRegister()` may be called multiple times to add several user-defined preconditioners.

.seealso: [](ch_ksp), `PC`, `PCType`, `PCRegisterAll()`
@*/
PetscErrorCode PCRegister(const char sname[], PetscErrorCode (*function)(PC))
{
  PetscFunctionBegin;
  /* ensure the PC package (and PCList) exists before adding to the function list */
  PetscCall(PCInitializePackage());
  PetscCall(PetscFunctionListAdd(&PCList, sname, function));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Shell-matrix multiply used by PCComputeOperator(): applies the preconditioner
   stored as the shell context, Y = PCApply(pc) X */
static PetscErrorCode MatMult_PC(Mat A, Vec X, Vec Y)
{
  PC pc;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(A, &pc));
  PetscCall(PCApply(pc, X, Y));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCComputeOperator - Computes the explicit preconditioned operator.
1939 1940 Collective 1941 1942 Input Parameters: 1943 + pc - the preconditioner object 1944 - mattype - the `MatType` to be used for the operator 1945 1946 Output Parameter: 1947 . mat - the explicit preconditioned operator 1948 1949 Level: advanced 1950 1951 Note: 1952 This computation is done by applying the operators to columns of the identity matrix. 1953 This routine is costly in general, and is recommended for use only with relatively small systems. 1954 Currently, this routine uses a dense matrix format when `mattype` == `NULL` 1955 1956 .seealso: [](ch_ksp), `PC`, `KSPComputeOperator()`, `MatType` 1957 @*/ 1958 PetscErrorCode PCComputeOperator(PC pc, MatType mattype, Mat *mat) 1959 { 1960 PetscInt N, M, m, n; 1961 Mat A, Apc; 1962 1963 PetscFunctionBegin; 1964 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1965 PetscAssertPointer(mat, 3); 1966 PetscCall(PCGetOperators(pc, &A, NULL)); 1967 PetscCall(MatGetLocalSize(A, &m, &n)); 1968 PetscCall(MatGetSize(A, &M, &N)); 1969 PetscCall(MatCreateShell(PetscObjectComm((PetscObject)pc), m, n, M, N, pc, &Apc)); 1970 PetscCall(MatShellSetOperation(Apc, MATOP_MULT, (void (*)(void))MatMult_PC)); 1971 PetscCall(MatComputeOperator(Apc, mattype, mat)); 1972 PetscCall(MatDestroy(&Apc)); 1973 PetscFunctionReturn(PETSC_SUCCESS); 1974 } 1975 1976 /*@ 1977 PCSetCoordinates - sets the coordinates of all the nodes on the local process 1978 1979 Collective 1980 1981 Input Parameters: 1982 + pc - the solver context 1983 . dim - the dimension of the coordinates 1, 2, or 3 1984 . nloc - the blocked size of the coordinates array 1985 - coords - the coordinates array 1986 1987 Level: intermediate 1988 1989 Note: 1990 `coords` is an array of the dim coordinates for the nodes on 1991 the local processor, of size `dim`*`nloc`. 
  If there are 108 equations on a processor
  for a displacement finite element discretization of elasticity (so
  that there are nloc = 36 = 108/3 nodes) then the array must have 108
  double precision values (ie, 3 * 36). These x y z coordinates
  should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x,
  ... , N-1.z ].

.seealso: [](ch_ksp), `PC`, `MatSetNearNullSpace()`
@*/
PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, dim, 2);
  /* Try: silently a no-op if the PC implementation does not compose a PCSetCoordinates_C method */
  PetscTryMethod(pc, "PCSetCoordinates_C", (PC, PetscInt, PetscInt, PetscReal[]), (pc, dim, nloc, coords));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetInterpolations - Gets interpolation matrices for all levels (except level 0)

  Logically Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ num_levels     - the number of levels
- interpolations - the interpolation matrices (size of `num_levels`-1)

  Level: advanced

  Developer Note:
  Why is this here instead of in `PCMG` etc?

.seealso: [](ch_ksp), `PC`, `PCMG`, `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetInterpolation()`, `PCGetCoarseOperators()`
@*/
PetscErrorCode PCGetInterpolations(PC pc, PetscInt *num_levels, Mat *interpolations[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(num_levels, 2);
  PetscAssertPointer(interpolations, 3);
  /* Use: errors if the PC implementation does not compose a PCGetInterpolations_C method */
  PetscUseMethod(pc, "PCGetInterpolations_C", (PC, PetscInt *, Mat *[]), (pc, num_levels, interpolations));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level)

  Logically Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ num_levels      - the number of levels
- coarseOperators - the coarse operator matrices (size of `num_levels`-1)

  Level: advanced

  Developer Note:
  Why is this here instead of in `PCMG` etc?

.seealso: [](ch_ksp), `PC`, `PCMG`, `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetRScale()`, `PCMGGetInterpolation()`, `PCGetInterpolations()`
@*/
PetscErrorCode PCGetCoarseOperators(PC pc, PetscInt *num_levels, Mat *coarseOperators[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(num_levels, 2);
  PetscAssertPointer(coarseOperators, 3);
  /* Use: errors if the PC implementation does not compose a PCGetCoarseOperators_C method */
  PetscUseMethod(pc, "PCGetCoarseOperators_C", (PC, PetscInt *, Mat *[]), (pc, num_levels, coarseOperators));
  PetscFunctionReturn(PETSC_SUCCESS);
}