/*
   The PC (preconditioner) interface routines, callable by users.
*/
#include <petsc/private/pcimpl.h> /*I "petscksp.h" I*/
#include <petscdm.h>

/* Logging support */
PetscClassId  PC_CLASSID;
PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_MatApply, PC_ApplyCoarse, PC_ApplySymmetricLeft;
PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks;
PetscInt      PetscMGLevelId;
PetscLogStage PCMPIStage;

/*
   Selects a reasonable default PCType based on the attached Pmat: on one process prefer an
   incomplete factorization (ICC for symmetric matrices, else ILU) when the "petsc" factor
   package supports the matrix type; in parallel prefer block Jacobi when the matrix can
   extract diagonal blocks. Returns NULL in *type when no Pmat has been set yet.
*/
PETSC_INTERN PetscErrorCode PCGetDefaultType_Private(PC pc, const char *type[])
{
  PetscMPIInt size;
  PetscBool   hasopblock, hasopsolve, flg1, flg2, set, flg3, isnormal;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
  if (pc->pmat) {
    PetscCall(MatHasOperation(pc->pmat, MATOP_GET_DIAGONAL_BLOCK, &hasopblock));
    PetscCall(MatHasOperation(pc->pmat, MATOP_SOLVE, &hasopsolve));
    if (size == 1) {
      /* flg1/flg2: can the built-in "petsc" package factor this matrix type (ICC/ILU)? */
      PetscCall(MatGetFactorAvailable(pc->pmat, "petsc", MAT_FACTOR_ICC, &flg1));
      PetscCall(MatGetFactorAvailable(pc->pmat, "petsc", MAT_FACTOR_ILU, &flg2));
      PetscCall(MatIsSymmetricKnown(pc->pmat, &set, &flg3));
      PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &isnormal, MATNORMAL, MATNORMALHERMITIAN, NULL));
      /* prefer ICC only when ILU is unavailable or the matrix is known symmetric */
      if (flg1 && (!flg2 || (set && flg3))) {
        *type = PCICC;
      } else if (flg2) {
        *type = PCILU;
      } else if (isnormal) {
        *type = PCNONE;
      } else if (hasopblock) { /* likely is a parallel matrix run on one processor */
        *type = PCBJACOBI;
      } else if (hasopsolve) {
        *type = PCMAT;
      } else {
        *type = PCNONE;
      }
    } else {
      if (hasopblock) {
        *type = PCBJACOBI;
      } else if (hasopsolve) {
        *type = PCMAT;
      } else {
        *type = PCNONE;
      }
    }
  } else *type = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* do not log solves, setup and applications of preconditioners while constructing preconditioners; perhaps they should be logged separately from the regular solves */
/* KSP events referenced here are owned by the KSP package; see KSPInitializePackage() */
PETSC_EXTERN PetscLogEvent KSP_Solve, KSP_SetUp;

/* Temporarily disable logging of solve/setup events (paired with PCLogEventsDeactivatePop()) */
static PetscErrorCode PCLogEventsDeactivatePush(void)
{
  PetscFunctionBegin;
  PetscCall(KSPInitializePackage());
  PetscCall(PetscLogEventDeactivatePush(KSP_Solve));
  PetscCall(PetscLogEventDeactivatePush(KSP_SetUp));
  PetscCall(PetscLogEventDeactivatePush(PC_Apply));
  PetscCall(PetscLogEventDeactivatePush(PC_SetUp));
  PetscCall(PetscLogEventDeactivatePush(PC_SetUpOnBlocks));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Re-enable the logging events disabled by PCLogEventsDeactivatePush() */
static PetscErrorCode PCLogEventsDeactivatePop(void)
{
  PetscFunctionBegin;
  PetscCall(KSPInitializePackage());
  PetscCall(PetscLogEventDeactivatePop(KSP_Solve));
  PetscCall(PetscLogEventDeactivatePop(KSP_SetUp));
  PetscCall(PetscLogEventDeactivatePop(PC_Apply));
  PetscCall(PetscLogEventDeactivatePop(PC_SetUp));
  PetscCall(PetscLogEventDeactivatePop(PC_SetUpOnBlocks));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCReset - Resets a `PC` context to the state it was in before `PCSetUp()` was called, and removes any allocated `Vec` and `Mat` from its data structure

  Collective

  Input Parameter:
. pc - the `PC` preconditioner context

  Level: developer

  Notes:
  Any options set, including those set with `KSPSetFromOptions()` remain.

  This allows a `PC` to be reused for a different sized linear system but using the same options that have been previously set in `pc`

.seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCSetUp()`
@*/
PetscErrorCode PCReset(PC pc)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryTypeMethod(pc, reset);
  PetscCall(VecDestroy(&pc->diagonalscaleright));
  PetscCall(VecDestroy(&pc->diagonalscaleleft));
  PetscCall(MatDestroy(&pc->pmat));
  PetscCall(MatDestroy(&pc->mat));

  pc->setupcalled = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCDestroy - Destroys `PC` context that was created with `PCCreate()`.

  Collective

  Input Parameter:
. pc - the `PC` preconditioner context

  Level: developer

.seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCSetUp()`
@*/
PetscErrorCode PCDestroy(PC *pc)
{
  PetscFunctionBegin;
  if (!*pc) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(*pc, PC_CLASSID, 1);
  /* only the last reference actually destroys the object */
  if (--((PetscObject)*pc)->refct > 0) {
    *pc = NULL;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PCReset(*pc));

  /* if memory was published with SAWs then destroy it */
  PetscCall(PetscObjectSAWsViewOff((PetscObject)*pc));
  PetscTryTypeMethod(*pc, destroy);
  PetscCall(DMDestroy(&(*pc)->dm));
  PetscCall(PetscHeaderDestroy(pc));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right
  scaling as needed by certain time-stepping codes.

  Logically Collective

  Input Parameter:
. pc - the `PC` preconditioner context

  Output Parameter:
. flag - `PETSC_TRUE` if it applies the scaling

  Level: developer

  Note:
  If this returns `PETSC_TRUE` then the system solved via the Krylov method is, for left and right preconditioning,

$$
\begin{align*}
D M A D^{-1} y = D M b \\
D A M D^{-1} z = D b.
\end{align*}
$$

.seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCSetDiagonalScale()`
@*/
PetscErrorCode PCGetDiagonalScale(PC pc, PetscBool *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(flag, 2);
  *flag = pc->diagonalscale;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right
  scaling as needed by certain time-stepping codes.

  Logically Collective

  Input Parameters:
+ pc - the `PC` preconditioner context
- s  - scaling vector

  Level: intermediate

  Notes:
  The system solved via the Krylov method is, for left and right preconditioning,
$$
\begin{align*}
D M A D^{-1} y = D M b \\
D A M D^{-1} z = D b.
\end{align*}
$$

  `PCDiagonalScaleLeft()` scales a vector by $D$. `PCDiagonalScaleRight()` scales a vector by $D^{-1}$.

.seealso: [](ch_ksp), `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCGetDiagonalScale()`
@*/
PetscErrorCode PCSetDiagonalScale(PC pc, Vec s)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(s, VEC_CLASSID, 2);
  pc->diagonalscale = PETSC_TRUE;

  /* take a reference before destroying any previously-set left scaling (handles s == diagonalscaleleft) */
  PetscCall(PetscObjectReference((PetscObject)s));
  PetscCall(VecDestroy(&pc->diagonalscaleleft));

  pc->diagonalscaleleft = s;

  /* the right scaling is the elementwise reciprocal of the left scaling */
  PetscCall(VecDuplicate(s, &pc->diagonalscaleright));
  PetscCall(VecCopy(s, pc->diagonalscaleright));
  PetscCall(VecReciprocal(pc->diagonalscaleright));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes.

  Logically Collective

  Input Parameters:
+ pc  - the `PC` preconditioner context
. in  - input vector
- out - scaled vector (may be the same as `in`)

  Level: intermediate

  Notes:
  The system solved via the Krylov method is, for left and right preconditioning,

$$
\begin{align*}
D M A D^{-1} y = D M b \\
D A M D^{-1} z = D b.
\end{align*}
$$

  `PCDiagonalScaleLeft()` scales a vector by $D$. `PCDiagonalScaleRight()` scales a vector by $D^{-1}$.

  If diagonal scaling is turned off and `in` is not `out` then `in` is copied to `out`

.seealso: [](ch_ksp), `PCCreate()`, `PCSetUp()`, `PCSetDiagonalScale()`, `PCDiagonalScaleRight()`, `MatDiagonalScale()`
@*/
PetscErrorCode PCDiagonalScaleLeft(PC pc, Vec in, Vec out)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(in, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(out, VEC_CLASSID, 3);
  if (pc->diagonalscale) {
    PetscCall(VecPointwiseMult(out, pc->diagonalscaleleft, in));
  } else if (in != out) {
    PetscCall(VecCopy(in, out));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes.

  Logically Collective

  Input Parameters:
+ pc  - the `PC` preconditioner context
. in  - input vector
- out - scaled vector (may be the same as `in`)

  Level: intermediate

  Notes:
  The system solved via the Krylov method is, for left and right preconditioning,

$$
\begin{align*}
D M A D^{-1} y = D M b \\
D A M D^{-1} z = D b.
\end{align*}
$$

  `PCDiagonalScaleLeft()` scales a vector by $D$. `PCDiagonalScaleRight()` scales a vector by $D^{-1}$.

  If diagonal scaling is turned off and `in` is not `out` then `in` is copied to `out`

.seealso: [](ch_ksp), `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCSetDiagonalScale()`, `MatDiagonalScale()`
@*/
PetscErrorCode PCDiagonalScaleRight(PC pc, Vec in, Vec out)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(in, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(out, VEC_CLASSID, 3);
  if (pc->diagonalscale) {
    /* diagonalscaleright holds 1/s set up by PCSetDiagonalScale() */
    PetscCall(VecPointwiseMult(out, pc->diagonalscaleright, in));
  } else if (in != out) {
    PetscCall(VecCopy(in, out));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the
  operator during the preconditioning process it applies the Amat provided to `TSSetRHSJacobian()`,
  `TSSetIJacobian()`, `SNESSetJacobian()`, `KSPSetOperators()` or `PCSetOperators()` not the Pmat.

  Logically Collective

  Input Parameters:
+ pc  - the `PC` preconditioner context
- flg - `PETSC_TRUE` to use the Amat, `PETSC_FALSE` to use the Pmat (default is false)

  Options Database Key:
. -pc_use_amat <true,false> - use the amat argument to `KSPSetOperators()` or `PCSetOperators()` to apply the operator

  Level: intermediate

  Note:
  For the common case in which the linear system matrix and the matrix used to construct the
  preconditioner are identical, this routine has no effect.

.seealso: [](ch_ksp), `PC`, `PCGetUseAmat()`, `PCBJACOBI`, `PCMG`, `PCFIELDSPLIT`, `PCCOMPOSITE`,
          `KSPSetOperators()`, `PCSetOperators()`
@*/
PetscErrorCode PCSetUseAmat(PC pc, PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  pc->useAmat = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCSetErrorIfFailure - Causes `PC` to generate an error if a floating point exception, for example a zero pivot, is detected.

  Logically Collective

  Input Parameters:
+ pc  - iterative context obtained from `PCCreate()`
- flg - `PETSC_TRUE` indicates you want the error generated

  Level: advanced

  Notes:
  Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner, you can call `KSPGetConvergedReason()` after a `KSPSolve()`
  to determine if it has converged or failed. Or use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is
  detected.

  This is propagated into `KSP`s used by this `PC`, which then propagate it into `PC`s used by those `KSP`s

.seealso: [](ch_ksp), `PC`, `KSPSetErrorIfNotConverged()`, `PCGetInitialGuessNonzero()`, `PCSetInitialGuessKnoll()`, `PCGetInitialGuessKnoll()`
@*/
PetscErrorCode PCSetErrorIfFailure(PC pc, PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flg, 2);
  pc->erroriffailure = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the
  operator during the preconditioning process it applies the Amat provided to `TSSetRHSJacobian()`,
  `TSSetIJacobian()`, `SNESSetJacobian()`, `KSPSetOperators()` or `PCSetOperators()` not the Pmat.

  Logically Collective

  Input Parameter:
.
pc - the `PC` preconditioner context 376 377 Output Parameter: 378 . flg - `PETSC_TRUE` to use the Amat, `PETSC_FALSE` to use the Pmat (default is false) 379 380 Level: intermediate 381 382 Note: 383 For the common case in which the linear system matrix and the matrix used to construct the 384 preconditioner are identical, this routine is does nothing. 385 386 .seealso: [](ch_ksp), `PC`, `PCSetUseAmat()`, `PCBJACOBI`, `PCMG`, `PCFIELDSPLIT`, `PCCOMPOSITE` 387 @*/ 388 PetscErrorCode PCGetUseAmat(PC pc, PetscBool *flg) 389 { 390 PetscFunctionBegin; 391 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 392 *flg = pc->useAmat; 393 PetscFunctionReturn(PETSC_SUCCESS); 394 } 395 396 /*@ 397 PCSetKSPNestLevel - sets the amount of nesting the `KSP` that contains this `PC` has 398 399 Collective 400 401 Input Parameters: 402 + pc - the `PC` 403 - level - the nest level 404 405 Level: developer 406 407 .seealso: [](ch_ksp), `KSPSetUp()`, `KSPSolve()`, `KSPDestroy()`, `KSP`, `KSPGMRES`, `KSPType`, `KSPGetNestLevel()`, `PCGetKSPNestLevel()`, `KSPSetNestLevel()` 408 @*/ 409 PetscErrorCode PCSetKSPNestLevel(PC pc, PetscInt level) 410 { 411 PetscFunctionBegin; 412 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 413 PetscValidLogicalCollectiveInt(pc, level, 2); 414 pc->kspnestlevel = level; 415 PetscFunctionReturn(PETSC_SUCCESS); 416 } 417 418 /*@ 419 PCGetKSPNestLevel - gets the amount of nesting the `KSP` that contains this `PC` has 420 421 Not Collective 422 423 Input Parameter: 424 . pc - the `PC` 425 426 Output Parameter: 427 . 
level - the nest level 428 429 Level: developer 430 431 .seealso: [](ch_ksp), `KSPSetUp()`, `KSPSolve()`, `KSPDestroy()`, `KSP`, `KSPGMRES`, `KSPType`, `KSPSetNestLevel()`, `PCSetKSPNestLevel()`, `KSPGetNestLevel()` 432 @*/ 433 PetscErrorCode PCGetKSPNestLevel(PC pc, PetscInt *level) 434 { 435 PetscFunctionBegin; 436 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 437 PetscAssertPointer(level, 2); 438 *level = pc->kspnestlevel; 439 PetscFunctionReturn(PETSC_SUCCESS); 440 } 441 442 /*@ 443 PCCreate - Creates a preconditioner context, `PC` 444 445 Collective 446 447 Input Parameter: 448 . comm - MPI communicator 449 450 Output Parameter: 451 . newpc - location to put the `PC` preconditioner context 452 453 Level: developer 454 455 Notes: 456 This is rarely called directly by users since `KSP` manages the `PC` objects it uses. Use `KSPGetPC()` to access the `PC` used by a `KSP`. 457 458 Use `PCSetType()` or `PCSetFromOptions()` with the option `-pc_type pctype` to set the `PCType` for this `PC` 459 460 The default preconditioner type `PCType` for sparse matrices is `PCILU` or `PCICC` with 0 fill on one process and block Jacobi (`PCBJACOBI`) with `PCILU` or `PCICC` 461 in parallel. For dense matrices it is always `PCNONE`. 

.seealso: [](ch_ksp), `PC`, `PCType`, `PCSetType`, `PCSetUp()`, `PCApply()`, `PCDestroy()`, `KSP`, `KSPGetPC()`
@*/
PetscErrorCode PCCreate(MPI_Comm comm, PC *newpc)
{
  PC pc;

  PetscFunctionBegin;
  PetscAssertPointer(newpc, 2);
  PetscCall(PCInitializePackage());

  PetscCall(PetscHeaderCreate(pc, PC_CLASSID, "PC", "Preconditioner", "PC", comm, PCDestroy, PCView));
  /* explicitly zero the fields this interface manages */
  pc->mat                  = NULL;
  pc->pmat                 = NULL;
  pc->setupcalled          = 0;
  pc->setfromoptionscalled = 0;
  pc->data                 = NULL;
  pc->diagonalscale        = PETSC_FALSE;
  pc->diagonalscaleleft    = NULL;
  pc->diagonalscaleright   = NULL;

  pc->modifysubmatrices  = NULL;
  pc->modifysubmatricesP = NULL;

  *newpc = pc;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApply - Applies the preconditioner to a vector.

  Collective

  Input Parameters:
+ pc - the `PC` preconditioner context
- x  - input vector

  Output Parameter:
. y - output vector

  Level: developer

.seealso: [](ch_ksp), `PC`, `PCApplyTranspose()`, `PCApplyBAorAB()`
@*/
PetscErrorCode PCApply(PC pc, Vec x, Vec y)
{
  PetscInt m, n, mv, nv;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  /* use pmat to check vector sizes since for KSPLSQR the pmat may be of a different size than mat */
  PetscCall(MatGetLocalSize(pc->pmat, &m, &n));
  PetscCall(VecGetLocalSize(x, &mv));
  PetscCall(VecGetLocalSize(y, &nv));
  /* check pmat * y = x is feasible */
  PetscCheck(mv == m, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Preconditioner number of local rows %" PetscInt_FMT " does not equal input vector size %" PetscInt_FMT, m, mv);
  PetscCheck(nv == n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Preconditioner number of local columns %" PetscInt_FMT " does not equal output vector size %" PetscInt_FMT, n, nv);
  PetscCall(VecSetErrorIfLocked(y, 3));

  PetscCall(PCSetUp(pc));
  /* guard x against modification by the type-specific apply */
  PetscCall(VecLockReadPush(x));
  PetscCall(PetscLogEventBegin(PC_Apply, pc, x, y, 0));
  PetscUseTypeMethod(pc, apply, x, y);
  PetscCall(PetscLogEventEnd(PC_Apply, pc, x, y, 0));
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscCall(VecLockReadPop(x));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Shared implementation of PCMatApply() and PCMatApplyTranspose(): checks the dense blocks of
   vectors for compatible layouts, then uses the type's matapply/matapplytranspose method when
   available, otherwise falls back to applying the PC one column at a time.
*/
static PetscErrorCode PCMatApplyTranspose_Private(PC pc, Mat X, Mat Y, PetscBool transpose)
{
  Mat       A;
  Vec       cy, cx;
  PetscInt  m1, M1, m2, M2, n1, N1, n2, N2, m3, M3, n3, N3;
  PetscBool match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(X, MAT_CLASSID, 2);
  PetscValidHeaderSpecific(Y, MAT_CLASSID, 3);
  PetscCheckSameComm(pc, 1, X, 2);
  PetscCheckSameComm(pc, 1, Y, 3);
  PetscCheck(Y != X, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "Y and X must be different matrices");
  PetscCall(PCGetOperators(pc, NULL, &A));
  PetscCall(MatGetLocalSize(A, &m3, &n3));
  PetscCall(MatGetLocalSize(X, &m2, &n2));
  PetscCall(MatGetLocalSize(Y, &m1, &n1));
  PetscCall(MatGetSize(A, &M3, &N3));
  PetscCall(MatGetSize(X, &M2, &N2));
  PetscCall(MatGetSize(Y, &M1, &N1));
  PetscCheck(n1 == n2 && N1 == N2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of input vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and block of output vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")", n2, N2, n1, N1);
  PetscCheck(m2 == m3 && M2 == M3, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of input vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m2, M2, m3, M3, n3, N3);
  PetscCheck(m1 == n3 && M1 == N3, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of output vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m1, M1, m3, M3, n3, N3);
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)Y, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of output vectors not stored in a dense Mat");
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of input vectors not stored in a dense Mat");
  PetscCall(PCSetUp(pc));
  if (!transpose && pc->ops->matapply) {
    PetscCall(PetscLogEventBegin(PC_MatApply, pc, X, Y, 0));
    PetscUseTypeMethod(pc, matapply, X, Y);
    PetscCall(PetscLogEventEnd(PC_MatApply, pc, X, Y, 0));
  } else if (transpose && pc->ops->matapplytranspose) {
    PetscCall(PetscLogEventBegin(PC_MatApply, pc, X, Y, 0));
    PetscUseTypeMethod(pc, matapplytranspose, X, Y);
    PetscCall(PetscLogEventEnd(PC_MatApply, pc, X, Y, 0));
  } else {
    /* fallback: no block apply available for this PC type, apply column by column */
    PetscCall(PetscInfo(pc, "PC type %s applying column by column\n", ((PetscObject)pc)->type_name));
    for (n1 = 0; n1 < N1; ++n1) {
      PetscCall(MatDenseGetColumnVecRead(X, n1, &cx));
      PetscCall(MatDenseGetColumnVecWrite(Y, n1, &cy));
      if (!transpose) PetscCall(PCApply(pc, cx, cy));
      else PetscCall(PCApplyTranspose(pc, cx, cy));
      PetscCall(MatDenseRestoreColumnVecWrite(Y, n1, &cy));
      PetscCall(MatDenseRestoreColumnVecRead(X, n1, &cx));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMatApply - Applies the preconditioner to multiple vectors stored as a `MATDENSE`. Like `PCApply()`, `Y` and `X` must be different matrices.

  Collective

  Input Parameters:
+ pc - the `PC` preconditioner context
- X  - block of input vectors

  Output Parameter:
. Y - block of output vectors

  Level: developer

.seealso: [](ch_ksp), `PC`, `PCApply()`, `KSPMatSolve()`
@*/
PetscErrorCode PCMatApply(PC pc, Mat X, Mat Y)
{
  PetscFunctionBegin;
  /* thin wrapper; validation and fallback logic live in PCMatApplyTranspose_Private() */
  PetscCall(PCMatApplyTranspose_Private(pc, X, Y, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMatApplyTranspose - Applies the transpose of preconditioner to multiple vectors stored as a `MATDENSE`. Like `PCApplyTranspose()`, `Y` and `X` must be different matrices.

  Collective

  Input Parameters:
+ pc - the `PC` preconditioner context
- X  - block of input vectors

  Output Parameter:
. Y - block of output vectors

  Level: developer

.seealso: [](ch_ksp), `PC`, `PCApplyTranspose()`, `KSPMatSolveTranspose()`
@*/
PetscErrorCode PCMatApplyTranspose(PC pc, Mat X, Mat Y)
{
  PetscFunctionBegin;
  PetscCall(PCMatApplyTranspose_Private(pc, X, Y, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.

  Collective

  Input Parameters:
+ pc - the `PC` preconditioner context
- x  - input vector

  Output Parameter:
. y - output vector

  Level: developer

  Note:
  Currently, this routine is implemented only for `PCICC` and `PCJACOBI` preconditioners.

.seealso: [](ch_ksp), `PC`, `PCApply()`, `PCApplySymmetricRight()`
@*/
PetscErrorCode PCApplySymmetricLeft(PC pc, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  /* guard x against modification by the type-specific apply */
  PetscCall(VecLockReadPush(x));
  PetscCall(PetscLogEventBegin(PC_ApplySymmetricLeft, pc, x, y, 0));
  PetscUseTypeMethod(pc, applysymmetricleft, x, y);
  PetscCall(PetscLogEventEnd(PC_ApplySymmetricLeft, pc, x, y, 0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.

  Collective

  Input Parameters:
+ pc - the `PC` preconditioner context
- x  - input vector

  Output Parameter:
. y - output vector

  Level: developer

  Note:
  Currently, this routine is implemented only for `PCICC` and `PCJACOBI` preconditioners.

.seealso: [](ch_ksp), `PC`, `PCApply()`, `PCApplySymmetricLeft()`
@*/
PetscErrorCode PCApplySymmetricRight(PC pc, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  PetscCall(VecLockReadPush(x));
  PetscCall(PetscLogEventBegin(PC_ApplySymmetricRight, pc, x, y, 0));
  PetscUseTypeMethod(pc, applysymmetricright, x, y);
  PetscCall(PetscLogEventEnd(PC_ApplySymmetricRight, pc, x, y, 0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplyTranspose - Applies the transpose of preconditioner to a vector.

  Collective

  Input Parameters:
+ pc - the `PC` preconditioner context
- x  - input vector

  Output Parameter:
. y - output vector

  Level: developer

  Note:
  For complex numbers this applies the non-Hermitian transpose.

  Developer Note:
  We need to implement a `PCApplyHermitianTranspose()`

.seealso: [](ch_ksp), `PC`, `PCApply()`, `PCApplyBAorAB()`, `PCApplyBAorABTranspose()`, `PCApplyTransposeExists()`
@*/
PetscErrorCode PCApplyTranspose(PC pc, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  PetscCall(VecLockReadPush(x));
  /* logged under the same PC_Apply event as PCApply() */
  PetscCall(PetscLogEventBegin(PC_Apply, pc, x, y, 0));
  PetscUseTypeMethod(pc, applytranspose, x, y);
  PetscCall(PetscLogEventEnd(PC_Apply, pc, x, y, 0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation

  Collective

  Input Parameter:
. pc - the `PC` preconditioner context

  Output Parameter:
. flg - `PETSC_TRUE` if a transpose operation is defined

  Level: developer

.seealso: [](ch_ksp), `PC`, `PCApplyTranspose()`
@*/
PetscErrorCode PCApplyTransposeExists(PC pc, PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(flg, 2);
  if (pc->ops->applytranspose) *flg = PETSC_TRUE;
  else *flg = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplyBAorAB - Applies the preconditioner and operator to a vector. $y = B*A*x $ or $ y = A*B*x$.

  Collective

  Input Parameters:
+ pc   - the `PC` preconditioner context
. side - indicates the preconditioner side, one of `PC_LEFT`, `PC_RIGHT`, or `PC_SYMMETRIC`
. x    - input vector
- work - work vector

  Output Parameter:
. y - output vector

  Level: developer

  Note:
  If the `PC` has had `PCSetDiagonalScale()` set then $ D M A D^{-1} $ for left preconditioning or $ D A M D^{-1} $ is actually applied.
  The specific `KSPSolve()` method must also be written to handle the post-solve "correction" for the diagonal scaling.

.seealso: [](ch_ksp), `PC`, `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorABTranspose()`
@*/
PetscErrorCode PCApplyBAorAB(PC pc, PCSide side, Vec x, Vec y, Vec work)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, side, 2);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 3);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 4);
  PetscValidHeaderSpecific(work, VEC_CLASSID, 5);
  PetscCheckSameComm(pc, 1, x, 3);
  PetscCheckSameComm(pc, 1, y, 4);
  PetscCheckSameComm(pc, 1, work, 5);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  PetscCheck(side == PC_LEFT || side == PC_SYMMETRIC || side == PC_RIGHT, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Side must be right, left, or symmetric");
  PetscCheck(!pc->diagonalscale || side != PC_SYMMETRIC, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot include diagonal scaling with symmetric preconditioner application");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 3, PETSC_TRUE));

  PetscCall(PCSetUp(pc));
  if (pc->diagonalscale) {
    if (pc->ops->applyBA) {
      Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
      PetscCall(VecDuplicate(x, &work2));
      PetscCall(PCDiagonalScaleRight(pc, x, work2));
      PetscUseTypeMethod(pc, applyBA, side, work2, y, work);
      PetscCall(PCDiagonalScaleLeft(pc, y, y));
      PetscCall(VecDestroy(&work2));
    } else if (side == PC_RIGHT) {
      /* y = D A B D^{-1} x, built stage by stage reusing y */
      PetscCall(PCDiagonalScaleRight(pc, x, y));
      PetscCall(PCApply(pc, y, work));
      PetscCall(MatMult(pc->mat, work, y));
      PetscCall(PCDiagonalScaleLeft(pc, y, y));
    } else if (side == PC_LEFT) {
      /* y = D B A D^{-1} x */
      PetscCall(PCDiagonalScaleRight(pc, x, y));
      PetscCall(MatMult(pc->mat, y, work));
      PetscCall(PCApply(pc, work, y));
      PetscCall(PCDiagonalScaleLeft(pc, y, y));
    } else PetscCheck(side != PC_SYMMETRIC, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot provide diagonal scaling with symmetric application of preconditioner");
  } else {
    if (pc->ops->applyBA) {
      PetscUseTypeMethod(pc, applyBA, side, x, y, work);
    } else if (side == PC_RIGHT) {
      PetscCall(PCApply(pc, x, work));
      PetscCall(MatMult(pc->mat, work, y));
    } else if (side == PC_LEFT) {
      PetscCall(MatMult(pc->mat, x, work));
      PetscCall(PCApply(pc, work, y));
    } else if (side == PC_SYMMETRIC) {
      /* There's an extra copy here; maybe should provide 2 work vectors instead? */
      PetscCall(PCApplySymmetricRight(pc, x, work));
      PetscCall(MatMult(pc->mat, work, y));
      PetscCall(VecCopy(y, work));
      PetscCall(PCApplySymmetricLeft(pc, work, y));
    }
  }
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 4, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplyBAorABTranspose - Applies the transpose of the preconditioner
  and operator to a vector. That is, applies $B^T * A^T$ with left preconditioning,
  NOT $(B*A)^T = A^T*B^T$.

  Collective

  Input Parameters:
+ pc   - the `PC` preconditioner context
. side - indicates the preconditioner side, one of `PC_LEFT`, `PC_RIGHT`, or `PC_SYMMETRIC`
. x    - input vector
- work - work vector

  Output Parameter:
.
y - output vector 865 866 Level: developer 867 868 Note: 869 This routine is used internally so that the same Krylov code can be used to solve $A x = b$ and $A^T x = b$, with a preconditioner 870 defined by $B^T$. This is why this has the funny form that it computes $B^T * A^T$ 871 872 .seealso: [](ch_ksp), `PC`, `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorAB()` 873 @*/ 874 PetscErrorCode PCApplyBAorABTranspose(PC pc, PCSide side, Vec x, Vec y, Vec work) 875 { 876 PetscFunctionBegin; 877 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 878 PetscValidHeaderSpecific(x, VEC_CLASSID, 3); 879 PetscValidHeaderSpecific(y, VEC_CLASSID, 4); 880 PetscValidHeaderSpecific(work, VEC_CLASSID, 5); 881 PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors"); 882 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 3, PETSC_TRUE)); 883 if (pc->ops->applyBAtranspose) { 884 PetscUseTypeMethod(pc, applyBAtranspose, side, x, y, work); 885 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 4, PETSC_FALSE)); 886 PetscFunctionReturn(PETSC_SUCCESS); 887 } 888 PetscCheck(side == PC_LEFT || side == PC_RIGHT, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Side must be right or left"); 889 890 PetscCall(PCSetUp(pc)); 891 if (side == PC_RIGHT) { 892 PetscCall(PCApplyTranspose(pc, x, work)); 893 PetscCall(MatMultTranspose(pc->mat, work, y)); 894 } else if (side == PC_LEFT) { 895 PetscCall(MatMultTranspose(pc->mat, x, work)); 896 PetscCall(PCApplyTranspose(pc, work, y)); 897 } 898 /* add support for PC_SYMMETRIC */ 899 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 4, PETSC_FALSE)); 900 PetscFunctionReturn(PETSC_SUCCESS); 901 } 902 903 /*@ 904 PCApplyRichardsonExists - Determines whether a particular preconditioner has a 905 built-in fast application of Richardson's method. 906 907 Not Collective 908 909 Input Parameter: 910 . pc - the preconditioner 911 912 Output Parameter: 913 . 
exists - `PETSC_TRUE` or `PETSC_FALSE` 914 915 Level: developer 916 917 .seealso: [](ch_ksp), `PC`, `KSPRICHARDSON`, `PCApplyRichardson()` 918 @*/ 919 PetscErrorCode PCApplyRichardsonExists(PC pc, PetscBool *exists) 920 { 921 PetscFunctionBegin; 922 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 923 PetscAssertPointer(exists, 2); 924 if (pc->ops->applyrichardson) *exists = PETSC_TRUE; 925 else *exists = PETSC_FALSE; 926 PetscFunctionReturn(PETSC_SUCCESS); 927 } 928 929 /*@ 930 PCApplyRichardson - Applies several steps of Richardson iteration with 931 the particular preconditioner. This routine is usually used by the 932 Krylov solvers and not the application code directly. 933 934 Collective 935 936 Input Parameters: 937 + pc - the `PC` preconditioner context 938 . b - the right-hand side 939 . w - one work vector 940 . rtol - relative decrease in residual norm convergence criteria 941 . abstol - absolute residual norm convergence criteria 942 . dtol - divergence residual norm increase criteria 943 . its - the number of iterations to apply. 944 - guesszero - if the input x contains nonzero initial guess 945 946 Output Parameters: 947 + outits - number of iterations actually used (for SOR this always equals its) 948 . reason - the reason the apply terminated 949 - y - the solution (also contains initial guess if guesszero is `PETSC_FALSE` 950 951 Level: developer 952 953 Notes: 954 Most preconditioners do not support this function. Use the command 955 `PCApplyRichardsonExists()` to determine if one does. 

  Except for the `PCMG` this routine ignores the convergence tolerances
  and always runs for the number of iterations

.seealso: [](ch_ksp), `PC`, `PCApplyRichardsonExists()`
@*/
PetscErrorCode PCApplyRichardson(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(b, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscValidHeaderSpecific(w, VEC_CLASSID, 4);
  PetscCheck(b != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "b and y must be different vectors");
  PetscCall(PCSetUp(pc));
  /* errors (not merely returns) if the implementation does not provide applyrichardson */
  PetscUseTypeMethod(pc, applyrichardson, b, y, w, rtol, abstol, dtol, its, guesszero, outits, reason);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCSetFailedReason - Sets the reason a `PCSetUp()` failed or `PC_NOERROR` if it did not fail

  Logically Collective

  Input Parameters:
+ pc     - the `PC` preconditioner context
- reason - the reason it failed

  Level: advanced

.seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCFailedReason`
@*/
PetscErrorCode PCSetFailedReason(PC pc, PCFailedReason reason)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* stored locally on each process; PCReduceFailedReason() synchronizes across the communicator */
  pc->failedreason = reason;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetFailedReason - Gets the reason a `PCSetUp()` failed or `PC_NOERROR` if it did not fail

  Not Collective

  Input Parameter:
. pc - the `PC` preconditioner context

  Output Parameter:
. reason - the reason it failed

  Level: advanced

  Note:
  After a call to `KSPCheckDot()` or `KSPCheckNorm()` inside a `KSPSolve()` or a call to `PCReduceFailedReason()`
  this is the maximum reason over all MPI processes in the `PC` communicator and hence logically collective.
  Otherwise it returns the local value.

.seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCSetFailedReason()`, `PCFailedReason`
@*/
PetscErrorCode PCGetFailedReason(PC pc, PCFailedReason *reason)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* a negative setupcalled encodes the failure reason of an attempted PCSetUp(); see the comment above PCSetUp() */
  if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
  else *reason = pc->failedreason;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCReduceFailedReason - Reduce the failed reason among the MPI processes that share the `PC`

  Collective

  Input Parameter:
. pc - the `PC` preconditioner context

  Level: advanced

  Note:
  Different MPI processes may have different reasons or no reason, see `PCGetFailedReason()`. This routine
  makes them have a common value (failure if any MPI process had a failure).

.seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReason()`, `PCSetFailedReason()`, `PCFailedReason`
@*/
PetscErrorCode PCReduceFailedReason(PC pc)
{
  PetscInt buf;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* MPI_MAX works because PC_NOERROR is the smallest value of the enum */
  buf = (PetscInt)pc->failedreason;
  PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &buf, 1, MPIU_INT, MPI_MAX, PetscObjectComm((PetscObject)pc)));
  pc->failedreason = (PCFailedReason)buf;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
      a setupcall of 0 indicates never setup,
                     1 indicates has been previously setup
                    -1 indicates a PCSetUp() was attempted and failed
*/
/*@
  PCSetUp - Prepares for the use of a preconditioner.
Performs all the one-time operations needed before the preconditioner 1060 can be used with `PCApply()` 1061 1062 Collective 1063 1064 Input Parameter: 1065 . pc - the `PC` preconditioner context 1066 1067 Level: developer 1068 1069 Notes: 1070 For example, for `PCLU` this will compute the factorization. 1071 1072 This is called automatically by `KSPSetUp()` or `PCApply()` so rarely needs to be called directly. 1073 1074 For nested preconditioners, such as `PCFIELDSPLIT` or `PCBJACOBI` this may not finish the construction of the preconditioner 1075 on the inner levels, the routine `PCSetUpOnBlocks()` may compute more of the preconditioner in those situations. 1076 1077 .seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `KSPSetUp()`, `PCSetUpOnBlocks()` 1078 @*/ 1079 PetscErrorCode PCSetUp(PC pc) 1080 { 1081 const char *def; 1082 PetscObjectState matstate, matnonzerostate; 1083 1084 PetscFunctionBegin; 1085 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1086 PetscCheck(pc->mat, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be set first"); 1087 1088 if (pc->setupcalled && pc->reusepreconditioner) { 1089 PetscCall(PetscInfo(pc, "Leaving PC with identical preconditioner since reuse preconditioner is set\n")); 1090 PetscFunctionReturn(PETSC_SUCCESS); 1091 } 1092 1093 PetscCall(PetscObjectStateGet((PetscObject)pc->pmat, &matstate)); 1094 PetscCall(MatGetNonzeroState(pc->pmat, &matnonzerostate)); 1095 if (!pc->setupcalled) { 1096 //PetscCall(PetscInfo(pc, "Setting up PC for first time\n")); 1097 pc->flag = DIFFERENT_NONZERO_PATTERN; 1098 } else if (matstate == pc->matstate) PetscFunctionReturn(PETSC_SUCCESS); 1099 else { 1100 if (matnonzerostate != pc->matnonzerostate) { 1101 PetscCall(PetscInfo(pc, "Setting up PC with different nonzero pattern\n")); 1102 pc->flag = DIFFERENT_NONZERO_PATTERN; 1103 } else { 1104 //PetscCall(PetscInfo(pc, "Setting up PC with same nonzero pattern\n")); 1105 pc->flag = SAME_NONZERO_PATTERN; 1106 } 
1107 } 1108 pc->matstate = matstate; 1109 pc->matnonzerostate = matnonzerostate; 1110 1111 if (!((PetscObject)pc)->type_name) { 1112 PetscCall(PCGetDefaultType_Private(pc, &def)); 1113 PetscCall(PCSetType(pc, def)); 1114 } 1115 1116 PetscCall(MatSetErrorIfFailure(pc->pmat, pc->erroriffailure)); 1117 PetscCall(MatSetErrorIfFailure(pc->mat, pc->erroriffailure)); 1118 PetscCall(PetscLogEventBegin(PC_SetUp, pc, 0, 0, 0)); 1119 if (pc->ops->setup) { 1120 PetscCall(PCLogEventsDeactivatePush()); 1121 PetscUseTypeMethod(pc, setup); 1122 PetscCall(PCLogEventsDeactivatePop()); 1123 } 1124 PetscCall(PetscLogEventEnd(PC_SetUp, pc, 0, 0, 0)); 1125 if (!pc->setupcalled) pc->setupcalled = 1; 1126 PetscFunctionReturn(PETSC_SUCCESS); 1127 } 1128 1129 /*@ 1130 PCSetUpOnBlocks - Sets up the preconditioner for each block in 1131 the block Jacobi, overlapping Schwarz, and fieldsplit methods. 1132 1133 Collective 1134 1135 Input Parameter: 1136 . pc - the `PC` preconditioner context 1137 1138 Level: developer 1139 1140 Notes: 1141 For nested preconditioners such as `PCBJACOBI`, `PCSetUp()` is not called on each sub-`KSP` when `PCSetUp()` is 1142 called on the outer `PC`, this routine ensures it is called. 1143 1144 It calls `PCSetUp()` if not yet called. 

.seealso: [](ch_ksp), `PC`, `PCSetUp()`, `PCCreate()`, `PCApply()`, `PCDestroy()`
@*/
PetscErrorCode PCSetUpOnBlocks(PC pc)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (!pc->setupcalled) PetscCall(PCSetUp(pc)); /* "if" to prevent -info extra prints */
  /* only block-based implementations (e.g. PCBJACOBI, PCASM) provide setuponblocks */
  if (!pc->ops->setuponblocks) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(MatSetErrorIfFailure(pc->pmat, pc->erroriffailure));
  PetscCall(PetscLogEventBegin(PC_SetUpOnBlocks, pc, 0, 0, 0));
  /* inner KSP/PC events are not logged while constructing the preconditioner */
  PetscCall(PCLogEventsDeactivatePush());
  PetscUseTypeMethod(pc, setuponblocks);
  PetscCall(PCLogEventsDeactivatePop());
  PetscCall(PetscLogEventEnd(PC_SetUpOnBlocks, pc, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCSetModifySubMatrices - Sets a user-defined routine for modifying the
  submatrices that arise within certain subdomain-based preconditioners such as `PCASM`

  Logically Collective

  Input Parameters:
+ pc   - the `PC` preconditioner context
. func - routine for modifying the submatrices, see `PCModifySubMatricesFn`
- ctx  - optional user-defined context (may be `NULL`)

  Level: advanced

  Notes:
  The basic submatrices are extracted from the matrix used to construct the preconditioner as
  usual; the user can then alter these (for example, to set different boundary
  conditions for each submatrix) before they are used for the local solves.

  `PCSetModifySubMatrices()` MUST be called before `KSPSetUp()` and
  `KSPSolve()`.

  A routine set by `PCSetModifySubMatrices()` is currently called within
  the block Jacobi (`PCBJACOBI`) and additive Schwarz (`PCASM`)
  preconditioners. All other preconditioners ignore this routine.

.seealso: [](ch_ksp), `PC`, `PCModifySubMatricesFn`, `PCBJACOBI`, `PCASM`, `PCModifySubMatrices()`
@*/
PetscErrorCode PCSetModifySubMatrices(PC pc, PCModifySubMatricesFn *func, void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* stored on the PC; invoked later by PCModifySubMatrices() from the implementations that support it */
  pc->modifysubmatrices  = func;
  pc->modifysubmatricesP = ctx;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCModifySubMatrices - Calls an optional user-defined routine within
  certain preconditioners if one has been set with `PCSetModifySubMatrices()`.

  Collective

  Input Parameters:
+ pc     - the `PC` preconditioner context
. nsub   - the number of local submatrices
. row    - an array of index sets that contain the global row numbers
           that comprise each local submatrix
. col    - an array of index sets that contain the global column numbers
           that comprise each local submatrix
. submat - array of local submatrices
- ctx    - optional user-defined context for private data for the
           user-defined routine (may be `NULL`)

  Output Parameter:
. submat - array of local submatrices (the entries of which may
           have been modified)

  Level: developer

  Note:
  The user should NOT generally call this routine, as it will
  automatically be called within certain preconditioners.

.seealso: [](ch_ksp), `PC`, `PCModifySubMatricesFn`, `PCSetModifySubMatrices()`
@*/
PetscErrorCode PCModifySubMatrices(PC pc, PetscInt nsub, const IS row[], const IS col[], Mat submat[], void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* no-op unless a callback was registered with PCSetModifySubMatrices() */
  if (!pc->modifysubmatrices) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(PetscLogEventBegin(PC_ModifySubMatrices, pc, 0, 0, 0));
  PetscCall((*pc->modifysubmatrices)(pc, nsub, row, col, submat, ctx));
  PetscCall(PetscLogEventEnd(PC_ModifySubMatrices, pc, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCSetOperators - Sets the matrix associated with the linear system and
  a (possibly) different one associated with the preconditioner.

  Logically Collective

  Input Parameters:
+ pc   - the `PC` preconditioner context
. Amat - the matrix that defines the linear system
- Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat.

  Level: intermediate

  Notes:
  Passing a `NULL` for `Amat` or `Pmat` removes the matrix that is currently used.

  If you wish to replace either `Amat` or `Pmat` but leave the other one untouched then
  first call `KSPGetOperators()` to get the one you wish to keep, call `PetscObjectReference()`
  on it and then pass it back in in your call to `KSPSetOperators()`.

  More Notes about Repeated Solution of Linear Systems:
  PETSc does NOT reset the matrix entries of either `Amat` or `Pmat`
  to zero after a linear solve; the user is completely responsible for
  matrix assembly. See the routine `MatZeroEntries()` if desiring to
  zero all elements of a matrix.

.seealso: [](ch_ksp), `PC`, `PCGetOperators()`, `MatZeroEntries()`
@*/
PetscErrorCode PCSetOperators(PC pc, Mat Amat, Mat Pmat)
{
  PetscInt m1, n1, m2, n2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (Amat) PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2);
  if (Pmat) PetscValidHeaderSpecific(Pmat, MAT_CLASSID, 3);
  if (Amat) PetscCheckSameComm(pc, 1, Amat, 2);
  if (Pmat) PetscCheckSameComm(pc, 1, Pmat, 3);
  /* once set up, replacement matrices must keep the same local sizes */
  if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) {
    PetscCall(MatGetLocalSize(Amat, &m1, &n1));
    PetscCall(MatGetLocalSize(pc->mat, &m2, &n2));
    PetscCheck(m1 == m2 && n1 == n2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot change local size of Amat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT, m2, n2, m1, n1);
    PetscCall(MatGetLocalSize(Pmat, &m1, &n1));
    PetscCall(MatGetLocalSize(pc->pmat, &m2, &n2));
    PetscCheck(m1 == m2 && n1 == n2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot change local size of Pmat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT, m2, n2, m1, n1);
  }

  if (Pmat != pc->pmat) {
    /* changing the operator that defines the preconditioner, thus we need to clear the current states so a new preconditioner is built */
    pc->matnonzerostate = -1;
    pc->matstate        = -1;
  }

  /* reference first in case the matrices are the same */
  if (Amat) PetscCall(PetscObjectReference((PetscObject)Amat));
  PetscCall(MatDestroy(&pc->mat));
  if (Pmat) PetscCall(PetscObjectReference((PetscObject)Pmat));
  PetscCall(MatDestroy(&pc->pmat));
  pc->mat  = Amat;
  pc->pmat = Pmat;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner `PC` has changed.

  Logically Collective

  Input Parameters:
+ pc   - the `PC` preconditioner context
- flag - `PETSC_TRUE` do not compute a new preconditioner, `PETSC_FALSE` do compute a new preconditioner

  Level: intermediate

  Note:
  Normally if a matrix inside a `PC` changes the `PC` automatically updates itself using information from the changed matrix. This option
  prevents this.

.seealso: [](ch_ksp), `PC`, `PCGetOperators()`, `MatZeroEntries()`, `PCGetReusePreconditioner()`, `KSPSetReusePreconditioner()`
@*/
PetscErrorCode PCSetReusePreconditioner(PC pc, PetscBool flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flag, 2);
  /* consulted at the top of PCSetUp() to skip rebuilding */
  pc->reusepreconditioner = flag;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetReusePreconditioner - Determines if the `PC` reuses the current preconditioner even if the operator in the preconditioner has changed.

  Not Collective

  Input Parameter:
. pc - the `PC` preconditioner context

  Output Parameter:
. flag - `PETSC_TRUE` do not compute a new preconditioner, `PETSC_FALSE` do compute a new preconditioner

  Level: intermediate

.seealso: [](ch_ksp), `PC`, `PCGetOperators()`, `MatZeroEntries()`, `PCSetReusePreconditioner()`
@*/
PetscErrorCode PCGetReusePreconditioner(PC pc, PetscBool *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(flag, 2);
  *flag = pc->reusepreconditioner;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetOperators - Gets the matrix associated with the linear system and
  possibly a different one which is used to construct the preconditioner.

  Not Collective, though parallel `Mat`s are returned if `pc` is parallel

  Input Parameter:
. pc - the `PC` preconditioner context

  Output Parameters:
+ Amat - the matrix defining the linear system
- Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat.

  Level: intermediate

  Note:
  Does not increase the reference count of the matrices, so you should not destroy them

  Alternative usage: If the operators have NOT been set with `KSPSetOperators()`/`PCSetOperators()` then the operators
  are created in `PC` and returned to the user. In this case, if both operators
  mat and pmat are requested, two DIFFERENT operators will be returned. If
  only one is requested both operators in the PC will be the same (i.e. as
  if one had called `KSPSetOperators()`/`PCSetOperators()` with the same argument for both Mats).
  The user must set the sizes of the returned matrices and their type etc just
  as if the user created them with `MatCreate()`. For example,

.vb
  KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to
    set size, type, etc of Amat

  MatCreate(comm,&Amat);
  KSP/PCSetOperators(ksp/pc,Amat,Amat);
  PetscObjectDereference((PetscObject)Amat);
  set size, type, etc of Amat
.ve

  and

.vb
  KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to
    set size, type, etc of Amat and Pmat

  MatCreate(comm,&Amat);
  MatCreate(comm,&Pmat);
  KSP/PCSetOperators(ksp/pc,Amat,Pmat);
  PetscObjectDereference((PetscObject)Amat);
  PetscObjectDereference((PetscObject)Pmat);
  set size, type, etc of Amat and Pmat
.ve

  The rationale for this support is so that when creating a `TS`, `SNES`, or `KSP` the hierarchy
  of underlying objects (i.e. `SNES`, `KSP`, `PC`, `Mat`) and their lifespans can be completely
  managed by the top most level object (i.e. the `TS`, `SNES`, or `KSP`). Another way to look
  at this is when you create a `SNES` you do not NEED to create a `KSP` and attach it to
  the `SNES` object (the `SNES` object manages it for you). Similarly when you create a KSP
  you do not need to attach a `PC` to it (the `KSP` object manages the `PC` object for you).
  Thus, why should YOU have to create the `Mat` and attach it to the `SNES`/`KSP`/`PC`, when
  it can be created for you?

.seealso: [](ch_ksp), `PC`, `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperatorsSet()`
@*/
PetscErrorCode PCGetOperators(PC pc, Mat *Amat, Mat *Pmat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (Amat) {
    if (!pc->mat) {
      if (pc->pmat && !Pmat) { /* Pmat has been set, but user did not request it, so use for Amat */
        pc->mat = pc->pmat;
        PetscCall(PetscObjectReference((PetscObject)pc->mat));
      } else { /* both Amat and Pmat are empty */
        PetscCall(MatCreate(PetscObjectComm((PetscObject)pc), &pc->mat));
        if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */
          pc->pmat = pc->mat;
          PetscCall(PetscObjectReference((PetscObject)pc->pmat));
        }
      }
    }
    *Amat = pc->mat;
  }
  if (Pmat) {
    if (!pc->pmat) {
      if (pc->mat && !Amat) { /* Amat has been set but was not requested, so use for pmat */
        pc->pmat = pc->mat;
        PetscCall(PetscObjectReference((PetscObject)pc->pmat));
      } else {
        PetscCall(MatCreate(PetscObjectComm((PetscObject)pc), &pc->pmat));
        if (!Amat) { /* user did NOT request Amat, so make same as Pmat */
          pc->mat = pc->pmat;
          PetscCall(PetscObjectReference((PetscObject)pc->mat));
        }
      }
    }
    *Pmat = pc->pmat;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetOperatorsSet - Determines if the matrix associated with the linear system and
  possibly a different one associated with the preconditioner have been set
  in the `PC`.

  Not Collective, though the results on all processes should be the same

  Input Parameter:
. pc - the `PC` preconditioner context

  Output Parameters:
+ mat  - the matrix associated with the linear system was set
- pmat - matrix associated with the preconditioner was set, usually the same

  Level: intermediate

.seealso: [](ch_ksp), `PC`, `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperators()`
@*/
PetscErrorCode PCGetOperatorsSet(PC pc, PetscBool *mat, PetscBool *pmat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* either output may be NULL if the caller does not care about it */
  if (mat) *mat = (pc->mat) ? PETSC_TRUE : PETSC_FALSE;
  if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCFactorGetMatrix - Gets the factored matrix from the
  preconditioner context. This routine is valid only for the `PCLU`,
  `PCILU`, `PCCHOLESKY`, and `PCICC` methods.

  Not Collective though `mat` is parallel if `pc` is parallel

  Input Parameter:
. pc - the `PC` preconditioner context

  Output Parameter:
. mat - the factored matrix

  Level: advanced

  Note:
  Does not increase the reference count for `mat` so DO NOT destroy it

.seealso: [](ch_ksp), `PC`, `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC`
@*/
PetscErrorCode PCFactorGetMatrix(PC pc, Mat *mat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(mat, 2);
  /* make sure the solver package is chosen before asking for the factor */
  PetscCall(PCFactorSetUpMatSolverType(pc));
  PetscUseTypeMethod(pc, getfactoredmatrix, mat);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCSetOptionsPrefix - Sets the prefix used for searching for all
  `PC` options in the database.

  Logically Collective

  Input Parameters:
+ pc     - the `PC` preconditioner context
- prefix - the prefix string to prepend to all `PC` option requests

  Note:
  A hyphen (-) must NOT be given at the beginning of the prefix name.
  The first character of all runtime options is AUTOMATICALLY the
  hyphen.

  Level: advanced

.seealso: [](ch_ksp), `PC`, `PCSetFromOptions`, `PCAppendOptionsPrefix()`, `PCGetOptionsPrefix()`
@*/
PetscErrorCode PCSetOptionsPrefix(PC pc, const char prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* thin wrapper over the generic PetscObject options-prefix mechanism */
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)pc, prefix));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCAppendOptionsPrefix - Appends to the prefix used for searching for all
  `PC` options in the database.

  Logically Collective

  Input Parameters:
+ pc     - the `PC` preconditioner context
- prefix - the prefix string to prepend to all `PC` option requests

  Note:
  A hyphen (-) must NOT be given at the beginning of the prefix name.
  The first character of all runtime options is AUTOMATICALLY the
  hyphen.

  Level: advanced

.seealso: [](ch_ksp), `PC`, `PCSetFromOptions`, `PCSetOptionsPrefix()`, `PCGetOptionsPrefix()`
@*/
PetscErrorCode PCAppendOptionsPrefix(PC pc, const char prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* thin wrapper over the generic PetscObject options-prefix mechanism */
  PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)pc, prefix));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetOptionsPrefix - Gets the prefix used for searching for all
  PC options in the database.

  Not Collective

  Input Parameter:
. pc - the `PC` preconditioner context

  Output Parameter:
. prefix - pointer to the prefix string used, is returned

  Level: advanced

.seealso: [](ch_ksp), `PC`, `PCSetFromOptions`, `PCSetOptionsPrefix()`, `PCAppendOptionsPrefix()`
@*/
PetscErrorCode PCGetOptionsPrefix(PC pc, const char *prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(prefix, 2);
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc, prefix));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Indicates the right-hand side will be changed by KSPSolve(), this occurs for a few
   preconditioners including BDDC and Eisenstat that transform the equations before applying
   the Krylov methods
*/
PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC pc, PetscBool *change)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(change, 2);
  /* defaults to PETSC_FALSE; implementations that transform the RHS override via composed method */
  *change = PETSC_FALSE;
  PetscTryMethod(pc, "PCPreSolveChangeRHS_C", (PC, PetscBool *), (pc, change));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCPreSolve - Optional pre-solve phase, intended for any preconditioner-specific actions that must be performed before
  the iterative solve itself. Used in conjunction with `PCPostSolve()`

  Collective

  Input Parameters:
+ pc  - the `PC` preconditioner context
- ksp - the Krylov subspace context

  Level: developer

  Notes:
  `KSPSolve()` calls this directly, so is rarely called by the user.

  Certain preconditioners, such as the `PCType` of `PCEISENSTAT`, change the formulation of the linear system to be solved iteratively.
  This function performs that transformation. `PCPostSolve()` then transforms the system back to its original form after the solve.
  `PCPostSolve()` also transforms the resulting solution of the transformed system to the solution of the original problem.

  `KSPSetPreSolve()` and `KSPSetPostSolve()` provide an alternative way to provide such transformations.

.seealso: [](ch_ksp), `PC`, `PCPostSolve()`, `KSP`, `PCSetPreSolve()`, `KSPSetPreSolve()`, `KSPSetPostSolve()`
@*/
PetscErrorCode PCPreSolve(PC pc, KSP ksp)
{
  Vec x, rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(ksp, KSP_CLASSID, 2);
  /* nesting counter; matched by the decrement in PCPostSolve() */
  pc->presolvedone++;
  PetscCheck(pc->presolvedone <= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot embed PCPreSolve() more than twice");
  PetscCall(KSPGetSolution(ksp, &x));
  PetscCall(KSPGetRhs(ksp, &rhs));

  /* the type's presolve method takes precedence over a user callback set with PCSetPreSolve() */
  if (pc->ops->presolve) PetscUseTypeMethod(pc, presolve, ksp, rhs, x);
  else if (pc->presolve) PetscCall(pc->presolve(pc, ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCSetPreSolve - Sets function used by `PCPreSolve()` which is intended for any
  preconditioner-specific actions that must be performed before
  the iterative solve itself.

  Logically Collective

  Input Parameters:
+ pc       - the preconditioner object
- presolve - the function to call before the solve

  Calling sequence of `presolve`:
+ pc  - the `PC` context
- ksp - the `KSP` context

  Level: developer

.seealso: [](ch_ksp), `PC`, `PCSetUp()`, `PCPreSolve()`
@*/
PetscErrorCode PCSetPreSolve(PC pc, PetscErrorCode (*presolve)(PC pc, KSP ksp))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  pc->presolve = presolve;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCPostSolve - Optional post-solve phase, intended for any
  preconditioner-specific actions that must be performed after
  the iterative solve itself.

  Collective

  Input Parameters:
+ pc  - the `PC` preconditioner context
- ksp - the `KSP` Krylov subspace context

  Example Usage:
.vb
  PCPreSolve(pc,ksp);
  KSPSolve(ksp,b,x);
  PCPostSolve(pc,ksp);
.ve

  Level: developer

  Note:
  `KSPSolve()` calls this routine directly, so it is rarely called by the user.

.seealso: [](ch_ksp), `PC`, `PCSetPreSolve()`, `KSPSetPostSolve()`, `KSPSetPreSolve()`, `PCPreSolve()`, `KSPSolve()`
@*/
PetscErrorCode PCPostSolve(PC pc, KSP ksp)
{
  Vec x, rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(ksp, KSP_CLASSID, 2);
  /* undo the nesting increment performed in PCPreSolve() */
  pc->presolvedone--;
  PetscCall(KSPGetSolution(ksp, &x));
  PetscCall(KSPGetRhs(ksp, &rhs));
  PetscTryTypeMethod(pc, postsolve, ksp, rhs, x);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCLoad - Loads a `PC` that has been stored in binary with `PCView()`.

  Collective

  Input Parameters:
+ newdm  - the newly loaded `PC`, this needs to have been created with `PCCreate()` or
           some related function before a call to `PCLoad()`.
- viewer - binary file viewer `PETSCVIEWERBINARY`, obtained from `PetscViewerBinaryOpen()`

  Level: intermediate

  Note:
  The type is determined by the data in the file, any `PCType` set into the `PC` before this call is ignored.

.seealso: [](ch_ksp), `PC`, `PetscViewerBinaryOpen()`, `PCView()`, `MatLoad()`, `VecLoad()`, `PETSCVIEWERBINARY`
@*/
PetscErrorCode PCLoad(PC newdm, PetscViewer viewer)
{
  PetscBool isbinary;
  PetscInt  classid;
  char      type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm, PC_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
  PetscCheck(isbinary, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* the stream begins with the class id followed by the fixed-width type name (see PCView() binary branch) */
  PetscCall(PetscViewerBinaryRead(viewer, &classid, 1, NULL, PETSC_INT));
  PetscCheck(classid == PC_FILE_CLASSID, PetscObjectComm((PetscObject)newdm), PETSC_ERR_ARG_WRONG, "Not PC next in file");
  PetscCall(PetscViewerBinaryRead(viewer, type, 256, NULL, PETSC_CHAR));
  PetscCall(PCSetType(newdm, type));
  PetscTryTypeMethod(newdm, load, viewer);
  PetscFunctionReturn(PETSC_SUCCESS);
}

#include <petscdraw.h>
#if defined(PETSC_HAVE_SAWS)
  #include <petscviewersaws.h>
#endif

/*@
  PCViewFromOptions - View (print or provide information about) the `PC`, based on options in the options database

  Collective

  Input Parameters:
+ A - the `PC` context
. obj  - Optional object that provides the options prefix
- name - command line option name

  Level: developer

.seealso: [](ch_ksp), `PC`, `PCView`, `PetscObjectViewFromOptions()`, `PCCreate()`
@*/
PetscErrorCode PCViewFromOptions(PC A, PetscObject obj, const char name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, PC_CLASSID, 1);
  /* thin wrapper over the generic PetscObject viewing mechanism */
  PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCView - Prints information about the `PC`

  Collective

  Input Parameters:
+ pc     - the `PC` preconditioner context
- viewer - optional `PetscViewer` visualization context

  Level: intermediate

  Notes:
  The available visualization contexts include
+ `PETSC_VIEWER_STDOUT_SELF`  - standard output (default)
- `PETSC_VIEWER_STDOUT_WORLD` - synchronized standard
  output where only the first processor opens
  the file. All other processors send their
  data to the first processor to print.

  The user can open alternative visualization contexts with
  `PetscViewerASCIIOpen()` (output to a specified file).
.seealso: [](ch_ksp), `PC`, `PetscViewer`, `PetscViewerType`, `KSPView()`, `PetscViewerASCIIOpen()`
@*/
PetscErrorCode PCView(PC pc, PetscViewer viewer)
{
  PCType            cstr;
  PetscViewerFormat format;
  PetscBool         iascii, isstring, isbinary, isdraw, pop = PETSC_FALSE;
#if defined(PETSC_HAVE_SAWS)
  PetscBool issaws;
#endif

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* default to stdout on the PC's communicator when no viewer is supplied */
  if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc), &viewer));
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCheckSameComm(pc, 1, viewer, 2);

  /* dispatch on the concrete viewer type; unsupported viewer types are silently ignored */
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
#if defined(PETSC_HAVE_SAWS)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
#endif

  if (iascii) {
    /* ASCII: class/prefix/type header, then type-specific view, then the operator matrices */
    PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)pc, viewer));
    if (!pc->setupcalled) PetscCall(PetscViewerASCIIPrintf(viewer, " PC has not been set up so information may be incomplete\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    PetscTryTypeMethod(pc, view, viewer);
    PetscCall(PetscViewerASCIIPopTab(viewer));
    if (pc->mat) {
      /* print matrices in INFO format unless a detailed format was explicitly requested;
         pop tracks whether we pushed a format that must be popped afterwards */
      PetscCall(PetscViewerGetFormat(viewer, &format));
      if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
        PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_INFO));
        pop = PETSC_TRUE;
      }
      if (pc->pmat == pc->mat) {
        /* same matrix used for the operator and to build the preconditioner: print once */
        PetscCall(PetscViewerASCIIPrintf(viewer, " linear system matrix = precond matrix:\n"));
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(MatView(pc->mat, viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      } else {
        if (pc->pmat) {
          PetscCall(PetscViewerASCIIPrintf(viewer, " linear system matrix followed by preconditioner matrix:\n"));
        } else {
          PetscCall(PetscViewerASCIIPrintf(viewer, " linear system matrix:\n"));
        }
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(MatView(pc->mat, viewer));
        if (pc->pmat) PetscCall(MatView(pc->pmat, viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      }
      if (pop) PetscCall(PetscViewerPopFormat(viewer));
    }
  } else if (isstring) {
    /* string viewer: compact one-line summary */
    PetscCall(PCGetType(pc, &cstr));
    PetscCall(PetscViewerStringSPrintf(viewer, " PCType: %-7.7s", cstr));
    PetscTryTypeMethod(pc, view, viewer);
    if (pc->mat) PetscCall(MatView(pc->mat, viewer));
    if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat, viewer));
  } else if (isbinary) {
    PetscInt    classid = PC_FILE_CLASSID;
    MPI_Comm    comm;
    PetscMPIInt rank;
    char        type[256];

    /* binary: rank 0 writes the class id and a fixed 256-byte type name (matches PCLoad()) */
    PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
    PetscCallMPI(MPI_Comm_rank(comm, &rank));
    if (rank == 0) {
      PetscCall(PetscViewerBinaryWrite(viewer, &classid, 1, PETSC_INT));
      PetscCall(PetscStrncpy(type, ((PetscObject)pc)->type_name, 256));
      PetscCall(PetscViewerBinaryWrite(viewer, type, 256, PETSC_CHAR));
    }
    PetscTryTypeMethod(pc, view, viewer);
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x, y, bottom, h;
    PetscInt  n;

    /* draw: boxed label at the current point, then let the implementation draw below it */
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (pc->mat) {
      PetscCall(MatGetSize(pc->mat, &n, NULL));
      PetscCall(PetscSNPrintf(str, 25, "PC: %s (%" PetscInt_FMT ")", ((PetscObject)pc)->type_name, n));
    } else {
      PetscCall(PetscSNPrintf(str, 25, "PC: %s", ((PetscObject)pc)->type_name));
    }
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    bottom = y - h;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom));
    PetscTryTypeMethod(pc, view, viewer);
    PetscCall(PetscDrawPopCurrentPoint(draw));
#if defined(PETSC_HAVE_SAWS)
  } else if (issaws) {
    PetscMPIInt rank;

    /* SAWs: publish the object once from rank 0 */
    PetscCall(PetscObjectName((PetscObject)pc));
    PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
    if (!((PetscObject)pc)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)pc, viewer));
    if (pc->mat) PetscCall(MatView(pc->mat, viewer));
    if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat, viewer));
#endif
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCRegister - Adds a method (`PCType`) to the PETSc preconditioner package.

  Not collective. No Fortran Support

  Input Parameters:
+ sname    - name of a new user-defined solver
- function - routine to create the method context which will be stored in a `PC` when `PCSetType()` is called

  Example Usage:
.vb
   PCRegister("my_solver", MySolverCreate);
.ve

  Then, your solver can be chosen with the procedural interface via
.vb
   PCSetType(pc, "my_solver")
.ve
  or at runtime via the option
.vb
   -pc_type my_solver
.ve

  Level: advanced

  Note:
  A simpler alternative to using `PCRegister()` for an application specific preconditioner is to use a `PC` of `PCType` `PCSHELL` and
  provide your customizations with `PCShellSetContext()` and `PCShellSetApply()`

  `PCRegister()` may be called multiple times to add several user-defined preconditioners.
1930 1931 .seealso: [](ch_ksp), `PC`, `PCType`, `PCRegisterAll()`, `PCSetType()`, `PCShellSetContext()`, `PCShellSetApply()`, `PCSHELL` 1932 @*/ 1933 PetscErrorCode PCRegister(const char sname[], PetscErrorCode (*function)(PC)) 1934 { 1935 PetscFunctionBegin; 1936 PetscCall(PCInitializePackage()); 1937 PetscCall(PetscFunctionListAdd(&PCList, sname, function)); 1938 PetscFunctionReturn(PETSC_SUCCESS); 1939 } 1940 1941 static PetscErrorCode MatMult_PC(Mat A, Vec X, Vec Y) 1942 { 1943 PC pc; 1944 1945 PetscFunctionBegin; 1946 PetscCall(MatShellGetContext(A, &pc)); 1947 PetscCall(PCApply(pc, X, Y)); 1948 PetscFunctionReturn(PETSC_SUCCESS); 1949 } 1950 1951 /*@ 1952 PCComputeOperator - Computes the explicit preconditioned operator as a matrix `Mat`. 1953 1954 Collective 1955 1956 Input Parameters: 1957 + pc - the `PC` preconditioner object 1958 - mattype - the `MatType` to be used for the operator 1959 1960 Output Parameter: 1961 . mat - the explicit preconditioned operator 1962 1963 Level: advanced 1964 1965 Note: 1966 This computation is done by applying the operators to columns of the identity matrix. 1967 This routine is costly in general, and is recommended for use only with relatively small systems. 
1968 Currently, this routine uses a dense matrix format when `mattype` == `NULL` 1969 1970 Developer Note: 1971 This should be called `PCCreateExplicitOperator()` 1972 1973 .seealso: [](ch_ksp), `PC`, `KSPComputeOperator()`, `MatType` 1974 @*/ 1975 PetscErrorCode PCComputeOperator(PC pc, MatType mattype, Mat *mat) 1976 { 1977 PetscInt N, M, m, n; 1978 Mat A, Apc; 1979 1980 PetscFunctionBegin; 1981 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1982 PetscAssertPointer(mat, 3); 1983 PetscCall(PCGetOperators(pc, &A, NULL)); 1984 PetscCall(MatGetLocalSize(A, &m, &n)); 1985 PetscCall(MatGetSize(A, &M, &N)); 1986 PetscCall(MatCreateShell(PetscObjectComm((PetscObject)pc), m, n, M, N, pc, &Apc)); 1987 PetscCall(MatShellSetOperation(Apc, MATOP_MULT, (void (*)(void))MatMult_PC)); 1988 PetscCall(MatComputeOperator(Apc, mattype, mat)); 1989 PetscCall(MatDestroy(&Apc)); 1990 PetscFunctionReturn(PETSC_SUCCESS); 1991 } 1992 1993 /*@ 1994 PCSetCoordinates - sets the coordinates of all the nodes (degrees of freedom in the vector) on the local process 1995 1996 Collective 1997 1998 Input Parameters: 1999 + pc - the `PC` preconditioner context 2000 . dim - the dimension of the coordinates 1, 2, or 3 2001 . nloc - the blocked size of the coordinates array 2002 - coords - the coordinates array 2003 2004 Level: intermediate 2005 2006 Notes: 2007 `coords` is an array of the dim coordinates for the nodes on 2008 the local processor, of size `dim`*`nloc`. 2009 If there are 108 equations (dofs) on a processor 2010 for a 3d displacement finite element discretization of elasticity (so 2011 that there are nloc = 36 = 108/3 nodes) then the array must have 108 2012 double precision values (ie, 3 * 36). These x y z coordinates 2013 should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x, 2014 ... , N-1.z ]. 2015 2016 The information provided here can be used by some preconditioners, such as `PCGAMG`, to produce a better preconditioner. 2017 See also `MatSetNearNullSpace()`. 
2018 2019 .seealso: [](ch_ksp), `PC`, `MatSetNearNullSpace()` 2020 @*/ 2021 PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[]) 2022 { 2023 PetscFunctionBegin; 2024 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2025 PetscValidLogicalCollectiveInt(pc, dim, 2); 2026 PetscTryMethod(pc, "PCSetCoordinates_C", (PC, PetscInt, PetscInt, PetscReal[]), (pc, dim, nloc, coords)); 2027 PetscFunctionReturn(PETSC_SUCCESS); 2028 } 2029 2030 /*@ 2031 PCGetInterpolations - Gets interpolation matrices for all levels (except level 0) 2032 2033 Logically Collective 2034 2035 Input Parameter: 2036 . pc - the precondition context 2037 2038 Output Parameters: 2039 + num_levels - the number of levels 2040 - interpolations - the interpolation matrices (size of `num_levels`-1) 2041 2042 Level: advanced 2043 2044 Developer Note: 2045 Why is this here instead of in `PCMG` etc? 2046 2047 .seealso: [](ch_ksp), `PC`, `PCMG`, `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetInterpolation()`, `PCGetCoarseOperators()` 2048 @*/ 2049 PetscErrorCode PCGetInterpolations(PC pc, PetscInt *num_levels, Mat *interpolations[]) 2050 { 2051 PetscFunctionBegin; 2052 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2053 PetscAssertPointer(num_levels, 2); 2054 PetscAssertPointer(interpolations, 3); 2055 PetscUseMethod(pc, "PCGetInterpolations_C", (PC, PetscInt *, Mat *[]), (pc, num_levels, interpolations)); 2056 PetscFunctionReturn(PETSC_SUCCESS); 2057 } 2058 2059 /*@ 2060 PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level) 2061 2062 Logically Collective 2063 2064 Input Parameter: 2065 . pc - the precondition context 2066 2067 Output Parameters: 2068 + num_levels - the number of levels 2069 - coarseOperators - the coarse operator matrices (size of `num_levels`-1) 2070 2071 Level: advanced 2072 2073 Developer Note: 2074 Why is this here instead of in `PCMG` etc? 
2075 2076 .seealso: [](ch_ksp), `PC`, `PCMG`, `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetRScale()`, `PCMGGetInterpolation()`, `PCGetInterpolations()` 2077 @*/ 2078 PetscErrorCode PCGetCoarseOperators(PC pc, PetscInt *num_levels, Mat *coarseOperators[]) 2079 { 2080 PetscFunctionBegin; 2081 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2082 PetscAssertPointer(num_levels, 2); 2083 PetscAssertPointer(coarseOperators, 3); 2084 PetscUseMethod(pc, "PCGetCoarseOperators_C", (PC, PetscInt *, Mat *[]), (pc, num_levels, coarseOperators)); 2085 PetscFunctionReturn(PETSC_SUCCESS); 2086 } 2087