1 2 /* 3 The PC (preconditioner) interface routines, callable by users. 4 */ 5 #include <petsc/private/pcimpl.h> /*I "petscksp.h" I*/ 6 #include <petscdm.h> 7 8 /* Logging support */ 9 PetscClassId PC_CLASSID; 10 PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_MatApply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft; 11 PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks; 12 PetscInt PetscMGLevelId; 13 14 PetscErrorCode PCGetDefaultType_Private(PC pc, const char *type[]) { 15 PetscMPIInt size; 16 PetscBool hasop, flg1, flg2, set, flg3, isnormal; 17 18 PetscFunctionBegin; 19 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size)); 20 if (pc->pmat) { 21 PetscCall(MatHasOperation(pc->pmat, MATOP_GET_DIAGONAL_BLOCK, &hasop)); 22 if (size == 1) { 23 PetscCall(MatGetFactorAvailable(pc->pmat, "petsc", MAT_FACTOR_ICC, &flg1)); 24 PetscCall(MatGetFactorAvailable(pc->pmat, "petsc", MAT_FACTOR_ILU, &flg2)); 25 PetscCall(MatIsSymmetricKnown(pc->pmat, &set, &flg3)); 26 PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &isnormal, MATNORMAL, MATNORMALHERMITIAN, NULL)); 27 if (flg1 && (!flg2 || (set && flg3))) { 28 *type = PCICC; 29 } else if (flg2) { 30 *type = PCILU; 31 } else if (isnormal) { 32 *type = PCNONE; 33 } else if (hasop) { /* likely is a parallel matrix run on one processor */ 34 *type = PCBJACOBI; 35 } else { 36 *type = PCNONE; 37 } 38 } else { 39 if (hasop) { 40 *type = PCBJACOBI; 41 } else { 42 *type = PCNONE; 43 } 44 } 45 } else { 46 if (size == 1) { 47 *type = PCILU; 48 } else { 49 *type = PCBJACOBI; 50 } 51 } 52 PetscFunctionReturn(0); 53 } 54 55 /*@ 56 PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated `Vec`s and `Mat`s 57 58 Collective on pc 59 60 Input Parameter: 61 . 
pc - the preconditioner context 62 63 Level: developer 64 65 Note: 66 This allows a `PC` to be reused for a different sized linear system but using the same options that have been previously set in the PC 67 68 .seealso: `PC`, `PCCreate()`, `PCSetUp()` 69 @*/ 70 PetscErrorCode PCReset(PC pc) { 71 PetscFunctionBegin; 72 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 73 PetscTryTypeMethod(pc, reset); 74 PetscCall(VecDestroy(&pc->diagonalscaleright)); 75 PetscCall(VecDestroy(&pc->diagonalscaleleft)); 76 PetscCall(MatDestroy(&pc->pmat)); 77 PetscCall(MatDestroy(&pc->mat)); 78 79 pc->setupcalled = 0; 80 PetscFunctionReturn(0); 81 } 82 83 /*@C 84 PCDestroy - Destroys `PC` context that was created with `PCCreate()`. 85 86 Collective on pc 87 88 Input Parameter: 89 . pc - the preconditioner context 90 91 Level: developer 92 93 .seealso: `PC`, `PCCreate()`, `PCSetUp()` 94 @*/ 95 PetscErrorCode PCDestroy(PC *pc) { 96 PetscFunctionBegin; 97 if (!*pc) PetscFunctionReturn(0); 98 PetscValidHeaderSpecific((*pc), PC_CLASSID, 1); 99 if (--((PetscObject)(*pc))->refct > 0) { 100 *pc = NULL; 101 PetscFunctionReturn(0); 102 } 103 104 PetscCall(PCReset(*pc)); 105 106 /* if memory was published with SAWs then destroy it */ 107 PetscCall(PetscObjectSAWsViewOff((PetscObject)*pc)); 108 PetscTryTypeMethod((*pc), destroy); 109 PetscCall(DMDestroy(&(*pc)->dm)); 110 PetscCall(PetscHeaderDestroy(pc)); 111 PetscFunctionReturn(0); 112 } 113 114 /*@C 115 PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right 116 scaling as needed by certain time-stepping codes. 117 118 Logically Collective on pc 119 120 Input Parameter: 121 . pc - the preconditioner context 122 123 Output Parameter: 124 . 
flag - `PETSC_TRUE` if it applies the scaling 125 126 Level: developer 127 128 Note: 129 If this returns `PETSC_TRUE` then the system solved via the Krylov method is 130 .vb 131 D M A D^{-1} y = D M b for left preconditioning or 132 D A M D^{-1} z = D b for right preconditioning 133 .ve 134 135 .seealso: `PC`, `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCSetDiagonalScale()` 136 @*/ 137 PetscErrorCode PCGetDiagonalScale(PC pc, PetscBool *flag) { 138 PetscFunctionBegin; 139 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 140 PetscValidBoolPointer(flag, 2); 141 *flag = pc->diagonalscale; 142 PetscFunctionReturn(0); 143 } 144 145 /*@ 146 PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right 147 scaling as needed by certain time-stepping codes. 148 149 Logically Collective on pc 150 151 Input Parameters: 152 + pc - the preconditioner context 153 - s - scaling vector 154 155 Level: intermediate 156 157 Notes: 158 The system solved via the Krylov method is 159 .vb 160 D M A D^{-1} y = D M b for left preconditioning or 161 D A M D^{-1} z = D b for right preconditioning 162 .ve 163 164 `PCDiagonalScaleLeft()` scales a vector by D. `PCDiagonalScaleRight()` scales a vector by D^{-1}. 
165 166 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCGetDiagonalScale()` 167 @*/ 168 PetscErrorCode PCSetDiagonalScale(PC pc, Vec s) { 169 PetscFunctionBegin; 170 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 171 PetscValidHeaderSpecific(s, VEC_CLASSID, 2); 172 pc->diagonalscale = PETSC_TRUE; 173 174 PetscCall(PetscObjectReference((PetscObject)s)); 175 PetscCall(VecDestroy(&pc->diagonalscaleleft)); 176 177 pc->diagonalscaleleft = s; 178 179 PetscCall(VecDuplicate(s, &pc->diagonalscaleright)); 180 PetscCall(VecCopy(s, pc->diagonalscaleright)); 181 PetscCall(VecReciprocal(pc->diagonalscaleright)); 182 PetscFunctionReturn(0); 183 } 184 185 /*@ 186 PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes. 187 188 Logically Collective on pc 189 190 Input Parameters: 191 + pc - the preconditioner context 192 . in - input vector 193 - out - scaled vector (maybe the same as in) 194 195 Level: intermediate 196 197 Notes: 198 The system solved via the Krylov method is 199 .vb 200 D M A D^{-1} y = D M b for left preconditioning or 201 D A M D^{-1} z = D b for right preconditioning 202 .ve 203 204 `PCDiagonalScaleLeft()` scales a vector by D. `PCDiagonalScaleRight()` scales a vector by D^{-1}. 
205 206 If diagonal scaling is turned off and in is not out then in is copied to out 207 208 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleSet()`, `PCDiagonalScaleRight()`, `PCDiagonalScale()` 209 @*/ 210 PetscErrorCode PCDiagonalScaleLeft(PC pc, Vec in, Vec out) { 211 PetscFunctionBegin; 212 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 213 PetscValidHeaderSpecific(in, VEC_CLASSID, 2); 214 PetscValidHeaderSpecific(out, VEC_CLASSID, 3); 215 if (pc->diagonalscale) { 216 PetscCall(VecPointwiseMult(out, pc->diagonalscaleleft, in)); 217 } else if (in != out) { 218 PetscCall(VecCopy(in, out)); 219 } 220 PetscFunctionReturn(0); 221 } 222 223 /*@ 224 PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes. 225 226 Logically Collective on pc 227 228 Input Parameters: 229 + pc - the preconditioner context 230 . in - input vector 231 - out - scaled vector (maybe the same as in) 232 233 Level: intermediate 234 235 Notes: 236 The system solved via the Krylov method is 237 .vb 238 D M A D^{-1} y = D M b for left preconditioning or 239 D A M D^{-1} z = D b for right preconditioning 240 .ve 241 242 `PCDiagonalScaleLeft()` scales a vector by D. `PCDiagonalScaleRight()` scales a vector by D^{-1}. 
243 244 If diagonal scaling is turned off and in is not out then in is copied to out 245 246 .seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleSet()`, `PCDiagonalScale()` 247 @*/ 248 PetscErrorCode PCDiagonalScaleRight(PC pc, Vec in, Vec out) { 249 PetscFunctionBegin; 250 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 251 PetscValidHeaderSpecific(in, VEC_CLASSID, 2); 252 PetscValidHeaderSpecific(out, VEC_CLASSID, 3); 253 if (pc->diagonalscale) { 254 PetscCall(VecPointwiseMult(out, pc->diagonalscaleright, in)); 255 } else if (in != out) { 256 PetscCall(VecCopy(in, out)); 257 } 258 PetscFunctionReturn(0); 259 } 260 261 /*@ 262 PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the 263 operator during the preconditioning process it applies the Amat provided to `TSSetRHSJacobian()`, 264 `TSSetIJacobian()`, `SNESSetJacobian()`, `KSPSetOperators()` or `PCSetOperators()` not the Pmat. 265 266 Logically Collective on pc 267 268 Input Parameters: 269 + pc - the preconditioner context 270 - flg - `PETSC_TRUE` to use the Amat, `PETSC_FALSE` to use the Pmat (default is false) 271 272 Options Database Key: 273 . -pc_use_amat <true,false> - use the amat to apply the operator 274 275 Note: 276 For the common case in which the linear system matrix and the matrix used to construct the 277 preconditioner are identical, this routine is does nothing. 278 279 Level: intermediate 280 281 .seealso: `PC`, `PCGetUseAmat()`, `PCBJACOBI`, `PGMG`, `PCFIELDSPLIT`, `PCCOMPOSITE` 282 @*/ 283 PetscErrorCode PCSetUseAmat(PC pc, PetscBool flg) { 284 PetscFunctionBegin; 285 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 286 pc->useAmat = flg; 287 PetscFunctionReturn(0); 288 } 289 290 /*@ 291 PCSetErrorIfFailure - Causes `PC` to generate an error if a FPE, for example a zero pivot, is detected. 
292 293 Logically Collective on pc 294 295 Input Parameters: 296 + pc - iterative context obtained from PCCreate() 297 - flg - `PETSC_TRUE` indicates you want the error generated 298 299 Level: advanced 300 301 Notes: 302 Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner, you can call `KSPGetConvergedReason()` after a `KSPSolve()` 303 to determine if it has converged or failed. Or use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is 304 detected. 305 306 This is propagated into KSPs used by this PC, which then propagate it into PCs used by those KSPs 307 308 .seealso: `PC`, `KSPSetErrorIfNotConverged()`, `PCGetInitialGuessNonzero()`, `PCSetInitialGuessKnoll()`, `PCGetInitialGuessKnoll()` 309 @*/ 310 PetscErrorCode PCSetErrorIfFailure(PC pc, PetscBool flg) { 311 PetscFunctionBegin; 312 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 313 PetscValidLogicalCollectiveBool(pc, flg, 2); 314 pc->erroriffailure = flg; 315 PetscFunctionReturn(0); 316 } 317 318 /*@ 319 PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the 320 operator during the preconditioning process it applies the Amat provided to `TSSetRHSJacobian()`, 321 `TSSetIJacobian()`, `SNESSetJacobian()`, `KSPSetOperators()` or `PCSetOperators()` not the Pmat. 322 323 Logically Collective on pc 324 325 Input Parameter: 326 . pc - the preconditioner context 327 328 Output Parameter: 329 . flg - `PETSC_TRUE` to use the Amat, `PETSC_FALSE` to use the Pmat (default is false) 330 331 Note: 332 For the common case in which the linear system matrix and the matrix used to construct the 333 preconditioner are identical, this routine is does nothing. 
334 335 Level: intermediate 336 337 .seealso: `PC`, `PCSetUseAmat()`, `PCBJACOBI`, `PGMG`, `PCFIELDSPLIT`, `PCCOMPOSITE` 338 @*/ 339 PetscErrorCode PCGetUseAmat(PC pc, PetscBool *flg) { 340 PetscFunctionBegin; 341 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 342 *flg = pc->useAmat; 343 PetscFunctionReturn(0); 344 } 345 346 /*@ 347 PCCreate - Creates a preconditioner context, `PC` 348 349 Collective 350 351 Input Parameter: 352 . comm - MPI communicator 353 354 Output Parameter: 355 . pc - location to put the preconditioner context 356 357 Note: 358 The default preconditioner for sparse matrices is `PCILU` or `PCICC` with 0 fill on one process and block Jacobi (`PCBJACOBI`) with `PCILU` or `PCICC` 359 in parallel. For dense matrices it is always `PCNONE`. 360 361 Level: developer 362 363 .seealso: `PC`, `PCSetUp()`, `PCApply()`, `PCDestroy()` 364 @*/ 365 PetscErrorCode PCCreate(MPI_Comm comm, PC *newpc) { 366 PC pc; 367 368 PetscFunctionBegin; 369 PetscValidPointer(newpc, 2); 370 *newpc = NULL; 371 PetscCall(PCInitializePackage()); 372 373 PetscCall(PetscHeaderCreate(pc, PC_CLASSID, "PC", "Preconditioner", "PC", comm, PCDestroy, PCView)); 374 375 pc->mat = NULL; 376 pc->pmat = NULL; 377 pc->setupcalled = 0; 378 pc->setfromoptionscalled = 0; 379 pc->data = NULL; 380 pc->diagonalscale = PETSC_FALSE; 381 pc->diagonalscaleleft = NULL; 382 pc->diagonalscaleright = NULL; 383 384 pc->modifysubmatrices = NULL; 385 pc->modifysubmatricesP = NULL; 386 387 *newpc = pc; 388 PetscFunctionReturn(0); 389 } 390 391 /*@ 392 PCApply - Applies the preconditioner to a vector. 393 394 Collective on pc 395 396 Input Parameters: 397 + pc - the preconditioner context 398 - x - input vector 399 400 Output Parameter: 401 . 
y - output vector 402 403 Level: developer 404 405 .seealso: `PC`, `PCApplyTranspose()`, `PCApplyBAorAB()` 406 @*/ 407 PetscErrorCode PCApply(PC pc, Vec x, Vec y) { 408 PetscInt m, n, mv, nv; 409 410 PetscFunctionBegin; 411 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 412 PetscValidHeaderSpecific(x, VEC_CLASSID, 2); 413 PetscValidHeaderSpecific(y, VEC_CLASSID, 3); 414 PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors"); 415 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE)); 416 /* use pmat to check vector sizes since for KSPLSQR the pmat may be of a different size than mat */ 417 PetscCall(MatGetLocalSize(pc->pmat, &m, &n)); 418 PetscCall(VecGetLocalSize(x, &mv)); 419 PetscCall(VecGetLocalSize(y, &nv)); 420 /* check pmat * y = x is feasible */ 421 PetscCheck(mv == m, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Preconditioner number of local rows %" PetscInt_FMT " does not equal input vector size %" PetscInt_FMT, m, mv); 422 PetscCheck(nv == n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Preconditioner number of local columns %" PetscInt_FMT " does not equal output vector size %" PetscInt_FMT, n, nv); 423 PetscCall(VecSetErrorIfLocked(y, 3)); 424 425 PetscCall(PCSetUp(pc)); 426 PetscCall(VecLockReadPush(x)); 427 PetscCall(PetscLogEventBegin(PC_Apply, pc, x, y, 0)); 428 PetscUseTypeMethod(pc, apply, x, y); 429 PetscCall(PetscLogEventEnd(PC_Apply, pc, x, y, 0)); 430 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE)); 431 PetscCall(VecLockReadPop(x)); 432 PetscFunctionReturn(0); 433 } 434 435 /*@ 436 PCMatApply - Applies the preconditioner to multiple vectors stored as a `MATDENSE`. Like `PCApply()`, Y and X must be different matrices. 437 438 Collective on pc 439 440 Input Parameters: 441 + pc - the preconditioner context 442 - X - block of input vectors 443 444 Output Parameter: 445 . 
Y - block of output vectors 446 447 Level: developer 448 449 .seealso: `PC`, `PCApply()`, `KSPMatSolve()` 450 @*/ 451 PetscErrorCode PCMatApply(PC pc, Mat X, Mat Y) { 452 Mat A; 453 Vec cy, cx; 454 PetscInt m1, M1, m2, M2, n1, N1, n2, N2, m3, M3, n3, N3; 455 PetscBool match; 456 457 PetscFunctionBegin; 458 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 459 PetscValidHeaderSpecific(X, MAT_CLASSID, 2); 460 PetscValidHeaderSpecific(Y, MAT_CLASSID, 3); 461 PetscCheckSameComm(pc, 1, X, 2); 462 PetscCheckSameComm(pc, 1, Y, 3); 463 PetscCheck(Y != X, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "Y and X must be different matrices"); 464 PetscCall(PCGetOperators(pc, NULL, &A)); 465 PetscCall(MatGetLocalSize(A, &m3, &n3)); 466 PetscCall(MatGetLocalSize(X, &m2, &n2)); 467 PetscCall(MatGetLocalSize(Y, &m1, &n1)); 468 PetscCall(MatGetSize(A, &M3, &N3)); 469 PetscCall(MatGetSize(X, &M2, &N2)); 470 PetscCall(MatGetSize(Y, &M1, &N1)); 471 PetscCheck(n1 == n2 && N1 == N2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of input vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and block of output vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")", n2, N2, n1, N1); 472 PetscCheck(m2 == m3 && M2 == M3, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of input vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m2, M2, m3, M3, n3, N3); 473 PetscCheck(m1 == n3 && M1 == N3, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of output vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m1, M1, m3, M3, n3, N3); 474 PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)Y, &match, MATSEQDENSE, MATMPIDENSE, "")); 475 PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of output 
vectors not stored in a dense Mat"); 476 PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, "")); 477 PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of input vectors not stored in a dense Mat"); 478 PetscCall(PCSetUp(pc)); 479 if (pc->ops->matapply) { 480 PetscCall(PetscLogEventBegin(PC_MatApply, pc, X, Y, 0)); 481 PetscUseTypeMethod(pc, matapply, X, Y); 482 PetscCall(PetscLogEventEnd(PC_MatApply, pc, X, Y, 0)); 483 } else { 484 PetscCall(PetscInfo(pc, "PC type %s applying column by column\n", ((PetscObject)pc)->type_name)); 485 for (n1 = 0; n1 < N1; ++n1) { 486 PetscCall(MatDenseGetColumnVecRead(X, n1, &cx)); 487 PetscCall(MatDenseGetColumnVecWrite(Y, n1, &cy)); 488 PetscCall(PCApply(pc, cx, cy)); 489 PetscCall(MatDenseRestoreColumnVecWrite(Y, n1, &cy)); 490 PetscCall(MatDenseRestoreColumnVecRead(X, n1, &cx)); 491 } 492 } 493 PetscFunctionReturn(0); 494 } 495 496 /*@ 497 PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector. 498 499 Collective on pc 500 501 Input Parameters: 502 + pc - the preconditioner context 503 - x - input vector 504 505 Output Parameter: 506 . y - output vector 507 508 Note: 509 Currently, this routine is implemented only for `PCICC` and `PCJACOBI` preconditioners. 
510 511 Level: developer 512 513 .seealso: `PC`, `PCApply()`, `PCApplySymmetricRight()` 514 @*/ 515 PetscErrorCode PCApplySymmetricLeft(PC pc, Vec x, Vec y) { 516 PetscFunctionBegin; 517 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 518 PetscValidHeaderSpecific(x, VEC_CLASSID, 2); 519 PetscValidHeaderSpecific(y, VEC_CLASSID, 3); 520 PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors"); 521 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE)); 522 PetscCall(PCSetUp(pc)); 523 PetscCall(VecLockReadPush(x)); 524 PetscCall(PetscLogEventBegin(PC_ApplySymmetricLeft, pc, x, y, 0)); 525 PetscUseTypeMethod(pc, applysymmetricleft, x, y); 526 PetscCall(PetscLogEventEnd(PC_ApplySymmetricLeft, pc, x, y, 0)); 527 PetscCall(VecLockReadPop(x)); 528 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE)); 529 PetscFunctionReturn(0); 530 } 531 532 /*@ 533 PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector. 534 535 Collective on pc 536 537 Input Parameters: 538 + pc - the preconditioner context 539 - x - input vector 540 541 Output Parameter: 542 . y - output vector 543 544 Level: developer 545 546 Note: 547 Currently, this routine is implemented only for `PCICC` and `PCJACOBI` preconditioners. 
548 549 .seealso: `PC`, `PCApply()`, `PCApplySymmetricLeft()` 550 @*/ 551 PetscErrorCode PCApplySymmetricRight(PC pc, Vec x, Vec y) { 552 PetscFunctionBegin; 553 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 554 PetscValidHeaderSpecific(x, VEC_CLASSID, 2); 555 PetscValidHeaderSpecific(y, VEC_CLASSID, 3); 556 PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors"); 557 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE)); 558 PetscCall(PCSetUp(pc)); 559 PetscCall(VecLockReadPush(x)); 560 PetscCall(PetscLogEventBegin(PC_ApplySymmetricRight, pc, x, y, 0)); 561 PetscUseTypeMethod(pc, applysymmetricright, x, y); 562 PetscCall(PetscLogEventEnd(PC_ApplySymmetricRight, pc, x, y, 0)); 563 PetscCall(VecLockReadPop(x)); 564 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE)); 565 PetscFunctionReturn(0); 566 } 567 568 /*@ 569 PCApplyTranspose - Applies the transpose of preconditioner to a vector. 570 571 Collective on pc 572 573 Input Parameters: 574 + pc - the preconditioner context 575 - x - input vector 576 577 Output Parameter: 578 . y - output vector 579 580 Note: 581 For complex numbers this applies the non-Hermitian transpose. 
582 583 Developer Note: 584 We need to implement a `PCApplyHermitianTranspose()` 585 586 Level: developer 587 588 .seealso: `PC`, `PCApply()`, `PCApplyBAorAB()`, `PCApplyBAorABTranspose()`, `PCApplyTransposeExists()` 589 @*/ 590 PetscErrorCode PCApplyTranspose(PC pc, Vec x, Vec y) { 591 PetscFunctionBegin; 592 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 593 PetscValidHeaderSpecific(x, VEC_CLASSID, 2); 594 PetscValidHeaderSpecific(y, VEC_CLASSID, 3); 595 PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors"); 596 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE)); 597 PetscCall(PCSetUp(pc)); 598 PetscCall(VecLockReadPush(x)); 599 PetscCall(PetscLogEventBegin(PC_Apply, pc, x, y, 0)); 600 PetscUseTypeMethod(pc, applytranspose, x, y); 601 PetscCall(PetscLogEventEnd(PC_Apply, pc, x, y, 0)); 602 PetscCall(VecLockReadPop(x)); 603 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE)); 604 PetscFunctionReturn(0); 605 } 606 607 /*@ 608 PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation 609 610 Collective on pc 611 612 Input Parameter: 613 . pc - the preconditioner context 614 615 Output Parameter: 616 . flg - `PETSC_TRUE` if a transpose operation is defined 617 618 Level: developer 619 620 .seealso: `PC`, `PCApplyTranspose()` 621 @*/ 622 PetscErrorCode PCApplyTransposeExists(PC pc, PetscBool *flg) { 623 PetscFunctionBegin; 624 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 625 PetscValidBoolPointer(flg, 2); 626 if (pc->ops->applytranspose) *flg = PETSC_TRUE; 627 else *flg = PETSC_FALSE; 628 PetscFunctionReturn(0); 629 } 630 631 /*@ 632 PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x. 633 634 Collective on pc 635 636 Input Parameters: 637 + pc - the preconditioner context 638 . side - indicates the preconditioner side, one of `PC_LEFT`, `PC_RIGHT`, or `PC_SYMMETRIC` 639 . 
x - input vector 640 - work - work vector 641 642 Output Parameter: 643 . y - output vector 644 645 Level: developer 646 647 Note: 648 If the `PC` has had `PCSetDiagonalScale()` set then D M A D^{-1} for left preconditioning or D A M D^{-1} is actually applied. Note that the 649 specific `KSPSolve()` method must also be written to handle the post-solve "correction" for the diagonal scaling. 650 651 .seealso: `PC`, `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorABTranspose()` 652 @*/ 653 PetscErrorCode PCApplyBAorAB(PC pc, PCSide side, Vec x, Vec y, Vec work) { 654 PetscFunctionBegin; 655 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 656 PetscValidLogicalCollectiveEnum(pc, side, 2); 657 PetscValidHeaderSpecific(x, VEC_CLASSID, 3); 658 PetscValidHeaderSpecific(y, VEC_CLASSID, 4); 659 PetscValidHeaderSpecific(work, VEC_CLASSID, 5); 660 PetscCheckSameComm(pc, 1, x, 3); 661 PetscCheckSameComm(pc, 1, y, 4); 662 PetscCheckSameComm(pc, 1, work, 5); 663 PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors"); 664 PetscCheck(side == PC_LEFT || side == PC_SYMMETRIC || side == PC_RIGHT, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Side must be right, left, or symmetric"); 665 PetscCheck(!pc->diagonalscale || side != PC_SYMMETRIC, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot include diagonal scaling with symmetric preconditioner application"); 666 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 3, PETSC_TRUE)); 667 668 PetscCall(PCSetUp(pc)); 669 if (pc->diagonalscale) { 670 if (pc->ops->applyBA) { 671 Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */ 672 PetscCall(VecDuplicate(x, &work2)); 673 PetscCall(PCDiagonalScaleRight(pc, x, work2)); 674 PetscUseTypeMethod(pc, applyBA, side, work2, y, work); 675 PetscCall(PCDiagonalScaleLeft(pc, y, y)); 676 PetscCall(VecDestroy(&work2)); 677 } else if (side == PC_RIGHT) { 678 
PetscCall(PCDiagonalScaleRight(pc, x, y)); 679 PetscCall(PCApply(pc, y, work)); 680 PetscCall(MatMult(pc->mat, work, y)); 681 PetscCall(PCDiagonalScaleLeft(pc, y, y)); 682 } else if (side == PC_LEFT) { 683 PetscCall(PCDiagonalScaleRight(pc, x, y)); 684 PetscCall(MatMult(pc->mat, y, work)); 685 PetscCall(PCApply(pc, work, y)); 686 PetscCall(PCDiagonalScaleLeft(pc, y, y)); 687 } else PetscCheck(side != PC_SYMMETRIC, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot provide diagonal scaling with symmetric application of preconditioner"); 688 } else { 689 if (pc->ops->applyBA) { 690 PetscUseTypeMethod(pc, applyBA, side, x, y, work); 691 } else if (side == PC_RIGHT) { 692 PetscCall(PCApply(pc, x, work)); 693 PetscCall(MatMult(pc->mat, work, y)); 694 } else if (side == PC_LEFT) { 695 PetscCall(MatMult(pc->mat, x, work)); 696 PetscCall(PCApply(pc, work, y)); 697 } else if (side == PC_SYMMETRIC) { 698 /* There's an extra copy here; maybe should provide 2 work vectors instead? */ 699 PetscCall(PCApplySymmetricRight(pc, x, work)); 700 PetscCall(MatMult(pc->mat, work, y)); 701 PetscCall(VecCopy(y, work)); 702 PetscCall(PCApplySymmetricLeft(pc, work, y)); 703 } 704 } 705 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 4, PETSC_FALSE)); 706 PetscFunctionReturn(0); 707 } 708 709 /*@ 710 PCApplyBAorABTranspose - Applies the transpose of the preconditioner 711 and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning, 712 NOT tr(B*A) = tr(A)*tr(B). 713 714 Collective on pc 715 716 Input Parameters: 717 + pc - the preconditioner context 718 . side - indicates the preconditioner side, one of `PC_LEFT`, `PC_RIGHT`, or `PC_SYMMETRIC` 719 . x - input vector 720 - work - work vector 721 722 Output Parameter: 723 . y - output vector 724 725 Note: 726 This routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner 727 defined by B'. 
This is why this has the funny form that it computes tr(B) * tr(A) 728 729 Level: developer 730 731 .seealso: `PC`, `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorAB()` 732 @*/ 733 PetscErrorCode PCApplyBAorABTranspose(PC pc, PCSide side, Vec x, Vec y, Vec work) { 734 PetscFunctionBegin; 735 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 736 PetscValidHeaderSpecific(x, VEC_CLASSID, 3); 737 PetscValidHeaderSpecific(y, VEC_CLASSID, 4); 738 PetscValidHeaderSpecific(work, VEC_CLASSID, 5); 739 PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors"); 740 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 3, PETSC_TRUE)); 741 if (pc->ops->applyBAtranspose) { 742 PetscUseTypeMethod(pc, applyBAtranspose, side, x, y, work); 743 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 4, PETSC_FALSE)); 744 PetscFunctionReturn(0); 745 } 746 PetscCheck(side == PC_LEFT || side == PC_RIGHT, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Side must be right or left"); 747 748 PetscCall(PCSetUp(pc)); 749 if (side == PC_RIGHT) { 750 PetscCall(PCApplyTranspose(pc, x, work)); 751 PetscCall(MatMultTranspose(pc->mat, work, y)); 752 } else if (side == PC_LEFT) { 753 PetscCall(MatMultTranspose(pc->mat, x, work)); 754 PetscCall(PCApplyTranspose(pc, work, y)); 755 } 756 /* add support for PC_SYMMETRIC */ 757 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 4, PETSC_FALSE)); 758 PetscFunctionReturn(0); 759 } 760 761 /*@ 762 PCApplyRichardsonExists - Determines whether a particular preconditioner has a 763 built-in fast application of Richardson's method. 764 765 Not Collective 766 767 Input Parameter: 768 . pc - the preconditioner 769 770 Output Parameter: 771 . 
exists - `PETSC_TRUE` or `PETSC_FALSE` 772 773 Level: developer 774 775 .seealso: `PC`, `PCRICHARDSON`, `PCApplyRichardson()` 776 @*/ 777 PetscErrorCode PCApplyRichardsonExists(PC pc, PetscBool *exists) { 778 PetscFunctionBegin; 779 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 780 PetscValidBoolPointer(exists, 2); 781 if (pc->ops->applyrichardson) *exists = PETSC_TRUE; 782 else *exists = PETSC_FALSE; 783 PetscFunctionReturn(0); 784 } 785 786 /*@ 787 PCApplyRichardson - Applies several steps of Richardson iteration with 788 the particular preconditioner. This routine is usually used by the 789 Krylov solvers and not the application code directly. 790 791 Collective on pc 792 793 Input Parameters: 794 + pc - the preconditioner context 795 . b - the right hand side 796 . w - one work vector 797 . rtol - relative decrease in residual norm convergence criteria 798 . abstol - absolute residual norm convergence criteria 799 . dtol - divergence residual norm increase criteria 800 . its - the number of iterations to apply. 801 - guesszero - if the input x contains nonzero initial guess 802 803 Output Parameters: 804 + outits - number of iterations actually used (for SOR this always equals its) 805 . reason - the reason the apply terminated 806 - y - the solution (also contains initial guess if guesszero is `PETSC_FALSE` 807 808 Notes: 809 Most preconditioners do not support this function. Use the command 810 `PCApplyRichardsonExists()` to determine if one does. 
811 812 Except for the `PCMG` this routine ignores the convergence tolerances 813 and always runs for the number of iterations 814 815 Level: developer 816 817 .seealso: `PC`, `PCApplyRichardsonExists()` 818 @*/ 819 PetscErrorCode PCApplyRichardson(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason) { 820 PetscFunctionBegin; 821 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 822 PetscValidHeaderSpecific(b, VEC_CLASSID, 2); 823 PetscValidHeaderSpecific(y, VEC_CLASSID, 3); 824 PetscValidHeaderSpecific(w, VEC_CLASSID, 4); 825 PetscCheck(b != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "b and y must be different vectors"); 826 PetscCall(PCSetUp(pc)); 827 PetscUseTypeMethod(pc, applyrichardson, b, y, w, rtol, abstol, dtol, its, guesszero, outits, reason); 828 PetscFunctionReturn(0); 829 } 830 831 /*@ 832 PCSetFailedReason - Sets the reason a `PCSetUp()` failed or `PC_NOERROR` if it did not fail 833 834 Logically Collective on pc 835 836 Input Parameters: 837 + pc - the preconditioner context 838 - reason - the reason it failedx 839 840 Level: advanced 841 842 .seealso: `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCFailedReason` 843 @*/ 844 PetscErrorCode PCSetFailedReason(PC pc, PCFailedReason reason) { 845 PetscFunctionBegin; 846 pc->failedreason = reason; 847 PetscFunctionReturn(0); 848 } 849 850 /*@ 851 PCGetFailedReason - Gets the reason a `PCSetUp()` failed or `PC_NOERROR` if it did not fail 852 853 Logically Collective on pc 854 855 Input Parameter: 856 . pc - the preconditioner context 857 858 Output Parameter: 859 . reason - the reason it failed 860 861 Level: advanced 862 863 Note: 864 This is the maximum over reason over all ranks in the PC communicator. It is only valid after 865 a call `KSPCheckDot()` or `KSPCheckNorm()` inside a `KSPSolve()`. 
It is not valid immediately after a `PCSetUp()` 866 or `PCApply()`, then use `PCGetFailedReasonRank()` 867 868 .seealso: PC`, ``PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReasonRank()`, `PCSetFailedReason()` 869 @*/ 870 PetscErrorCode PCGetFailedReason(PC pc, PCFailedReason *reason) { 871 PetscFunctionBegin; 872 if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled; 873 else *reason = pc->failedreason; 874 PetscFunctionReturn(0); 875 } 876 877 /*@ 878 PCGetFailedReasonRank - Gets the reason a `PCSetUp()` failed or `PC_NOERROR` if it did not fail on this MPI rank 879 880 Not Collective 881 882 Input Parameter: 883 . pc - the preconditioner context 884 885 Output Parameter: 886 . reason - the reason it failed 887 888 Note: 889 Different ranks may have different reasons or no reason, see `PCGetFailedReason()` 890 891 Level: advanced 892 893 .seealso: `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReason()`, `PCSetFailedReason()` 894 @*/ 895 PetscErrorCode PCGetFailedReasonRank(PC pc, PCFailedReason *reason) { 896 PetscFunctionBegin; 897 if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled; 898 else *reason = pc->failedreason; 899 PetscFunctionReturn(0); 900 } 901 902 /* Next line needed to deactivate KSP_Solve logging */ 903 #include <petsc/private/kspimpl.h> 904 905 /* 906 a setupcall of 0 indicates never setup, 907 1 indicates has been previously setup 908 -1 indicates a PCSetUp() was attempted and failed 909 */ 910 /*@ 911 PCSetUp - Prepares for the use of a preconditioner. 912 913 Collective on pc 914 915 Input Parameter: 916 . 
  pc - the preconditioner context

   Level: developer

.seealso: `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`
@*/
PetscErrorCode PCSetUp(PC pc) {
  const char      *def;
  PetscObjectState matstate, matnonzerostate;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCheck(pc->mat, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be set first");

  if (pc->setupcalled && pc->reusepreconditioner) {
    PetscCall(PetscInfo(pc, "Leaving PC with identical preconditioner since reuse preconditioner is set\n"));
    PetscFunctionReturn(0);
  }

  /* compare the matrix state counters with those recorded at the previous setup to decide
     whether a rebuild is needed and whether the nonzero pattern changed */
  PetscCall(PetscObjectStateGet((PetscObject)pc->pmat, &matstate));
  PetscCall(MatGetNonzeroState(pc->pmat, &matnonzerostate));
  if (!pc->setupcalled) {
    PetscCall(PetscInfo(pc, "Setting up PC for first time\n"));
    pc->flag = DIFFERENT_NONZERO_PATTERN;
  } else if (matstate == pc->matstate) {
    PetscCall(PetscInfo(pc, "Leaving PC with identical preconditioner since operator is unchanged\n"));
    PetscFunctionReturn(0);
  } else {
    if (matnonzerostate > pc->matnonzerostate) {
      PetscCall(PetscInfo(pc, "Setting up PC with different nonzero pattern\n"));
      pc->flag = DIFFERENT_NONZERO_PATTERN;
    } else {
      PetscCall(PetscInfo(pc, "Setting up PC with same nonzero pattern\n"));
      pc->flag = SAME_NONZERO_PATTERN;
    }
  }
  pc->matstate        = matstate;
  pc->matnonzerostate = matnonzerostate;

  /* no type was chosen by the user; pick a default appropriate for the matrix */
  if (!((PetscObject)pc)->type_name) {
    PetscCall(PCGetDefaultType_Private(pc, &def));
    PetscCall(PCSetType(pc, def));
  }

  PetscCall(MatSetErrorIfFailure(pc->pmat, pc->erroriffailure));
  PetscCall(MatSetErrorIfFailure(pc->mat, pc->erroriffailure));
  PetscCall(PetscLogEventBegin(PC_SetUp, pc, 0, 0, 0));
  if (pc->ops->setup) {
    /* do not log solves and applications of preconditioners while constructing preconditioners; perhaps they should be logged separately from the regular solves */
    PetscCall(KSPInitializePackage());
    PetscCall(PetscLogEventDeactivatePush(KSP_Solve));
    PetscCall(PetscLogEventDeactivatePush(PC_Apply));
    PetscUseTypeMethod(pc, setup);
    /* NOTE(review): pops are issued in push order rather than reverse; presumably the
       deactivation depth is tracked per event so the ordering is irrelevant -- confirm */
    PetscCall(PetscLogEventDeactivatePop(KSP_Solve));
    PetscCall(PetscLogEventDeactivatePop(PC_Apply));
  }
  PetscCall(PetscLogEventEnd(PC_SetUp, pc, 0, 0, 0));
  if (!pc->setupcalled) pc->setupcalled = 1;
  PetscFunctionReturn(0);
}

/*@
   PCSetUpOnBlocks - Sets up the preconditioner for each block in
   the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
   methods.

   Collective on pc

   Input Parameter:
.  pc - the preconditioner context

   Level: developer

   Note:
   For nested preconditioners such as `PCBJACOBI` `PCSetUp()` is not called on each sub-`KSP` when `PCSetUp()` is
   called on the outer `PC`, this routine ensures it is called.

.seealso: `PC`, `PCSetUp()`, `PCCreate()`, `PCApply()`, `PCDestroy()`
@*/
PetscErrorCode PCSetUpOnBlocks(PC pc) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* silently a no-op for types that have no per-block setup */
  if (!pc->ops->setuponblocks) PetscFunctionReturn(0);
  PetscCall(PetscLogEventBegin(PC_SetUpOnBlocks, pc, 0, 0, 0));
  PetscUseTypeMethod(pc, setuponblocks);
  PetscCall(PetscLogEventEnd(PC_SetUpOnBlocks, pc, 0, 0, 0));
  PetscFunctionReturn(0);
}

/*@C
   PCSetModifySubMatrices - Sets a user-defined routine for modifying the
   submatrices that arise within certain subdomain-based preconditioners.
   The basic submatrices are extracted from the preconditioner matrix as
   usual; the user can then alter these (for example, to set different boundary
   conditions for each submatrix) before they are used for the local solves.

   Logically Collective on pc

   Input Parameters:
+  pc - the preconditioner context
.
  func - routine for modifying the submatrices
-  ctx - optional user-defined context (may be null)

   Calling sequence of func:
$     func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx);

+  row - an array of index sets that contain the global row numbers
         that comprise each local submatrix
.  col - an array of index sets that contain the global column numbers
         that comprise each local submatrix
.  submat - array of local submatrices
-  ctx - optional user-defined context for private data for the
         user-defined func routine (may be null)

   Notes:
   `PCSetModifySubMatrices()` MUST be called before `KSPSetUp()` and
   `KSPSolve()`.

   A routine set by `PCSetModifySubMatrices()` is currently called within
   the block Jacobi (`PCBJACOBI`) and additive Schwarz (`PCASM`)
   preconditioners. All other preconditioners ignore this routine.

   Level: advanced

.seealso: `PC`, `PCBJACOBI`, `PCASM`, `PCModifySubMatrices()`
@*/
PetscErrorCode PCSetModifySubMatrices(PC pc, PetscErrorCode (*func)(PC, PetscInt, const IS[], const IS[], Mat[], void *), void *ctx) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* stored here, invoked later by PCModifySubMatrices() */
  pc->modifysubmatrices  = func;
  pc->modifysubmatricesP = ctx;
  PetscFunctionReturn(0);
}

/*@C
   PCModifySubMatrices - Calls an optional user-defined routine within
   certain preconditioners if one has been set with `PCSetModifySubMatrices()`.

   Collective on pc

   Input Parameters:
+  pc - the preconditioner context
.  nsub - the number of local submatrices
.  row - an array of index sets that contain the global row numbers
         that comprise each local submatrix
.  col - an array of index sets that contain the global column numbers
         that comprise each local submatrix
.
  submat - array of local submatrices
-  ctx - optional user-defined context for private data for the
         user-defined routine (may be null)

   Output Parameter:
.  submat - array of local submatrices (the entries of which may
            have been modified)

   Notes:
   The user should NOT generally call this routine, as it will
   automatically be called within certain preconditioners (currently
   block Jacobi, additive Schwarz) if set.

   The basic submatrices are extracted from the preconditioner matrix
   as usual; the user can then alter these (for example, to set different
   boundary conditions for each submatrix) before they are used for the
   local solves.

   Level: developer

.seealso: `PC`, `PCSetModifySubMatrices()`
@*/
PetscErrorCode PCModifySubMatrices(PC pc, PetscInt nsub, const IS row[], const IS col[], Mat submat[], void *ctx) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* no-op unless the user registered a callback with PCSetModifySubMatrices() */
  if (!pc->modifysubmatrices) PetscFunctionReturn(0);
  PetscCall(PetscLogEventBegin(PC_ModifySubMatrices, pc, 0, 0, 0));
  PetscCall((*pc->modifysubmatrices)(pc, nsub, row, col, submat, ctx));
  PetscCall(PetscLogEventEnd(PC_ModifySubMatrices, pc, 0, 0, 0));
  PetscFunctionReturn(0);
}

/*@
   PCSetOperators - Sets the matrix associated with the linear system and
   a (possibly) different one associated with the preconditioner.

   Logically Collective on pc

   Input Parameters:
+  pc - the preconditioner context
.  Amat - the matrix that defines the linear system
-  Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat.

   Notes:
   Passing a NULL for Amat or Pmat removes the matrix that is currently used.
   If you wish to replace either Amat or Pmat but leave the other one untouched then
   first call `KSPGetOperators()` to get the one you wish to keep, call `PetscObjectReference()`
   on it and then pass it back in in your call to `KSPSetOperators()`.

   More Notes about Repeated Solution of Linear Systems:
   PETSc does NOT reset the matrix entries of either Amat or Pmat
   to zero after a linear solve; the user is completely responsible for
   matrix assembly.  See the routine `MatZeroEntries()` if desiring to
   zero all elements of a matrix.

   Level: intermediate

.seealso: `PC`, `PCGetOperators()`, `MatZeroEntries()`
@*/
PetscErrorCode PCSetOperators(PC pc, Mat Amat, Mat Pmat) {
  PetscInt m1, n1, m2, n2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (Amat) PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2);
  if (Pmat) PetscValidHeaderSpecific(Pmat, MAT_CLASSID, 3);
  if (Amat) PetscCheckSameComm(pc, 1, Amat, 2);
  if (Pmat) PetscCheckSameComm(pc, 1, Pmat, 3);
  if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) {
    /* once the PC has been set up the local sizes of replacement operators may not change */
    PetscCall(MatGetLocalSize(Amat, &m1, &n1));
    PetscCall(MatGetLocalSize(pc->mat, &m2, &n2));
    PetscCheck(m1 == m2 && n1 == n2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot change local size of Amat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT, m2, n2, m1, n1);
    PetscCall(MatGetLocalSize(Pmat, &m1, &n1));
    PetscCall(MatGetLocalSize(pc->pmat, &m2, &n2));
    PetscCheck(m1 == m2 && n1 == n2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot change local size of Pmat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT, m2, n2, m1, n1);
  }

  if (Pmat != pc->pmat) {
    /* changing the operator that defines the preconditioner, thus we need to clear the current states so a new preconditioner is built */
    pc->matnonzerostate = -1;
    pc->matstate        = -1;
  }

  /* reference first in case the matrices are the same */
  if (Amat) PetscCall(PetscObjectReference((PetscObject)Amat));
  PetscCall(MatDestroy(&pc->mat));
  if (Pmat) PetscCall(PetscObjectReference((PetscObject)Pmat));
  PetscCall(MatDestroy(&pc->pmat));
  pc->mat  = Amat;
  pc->pmat = Pmat;
  PetscFunctionReturn(0);
}

/*@
   PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner has changed.

   Logically Collective on pc

   Input Parameters:
+  pc - the preconditioner context
-  flag - `PETSC_TRUE` do not compute a new preconditioner, `PETSC_FALSE` do compute a new preconditioner

   Level: intermediate

   Note:
   Normally if a matrix inside a `PC` changes the `PC` automatically updates itself using information from the changed matrix. This option
   prevents this.

.seealso: `PC`, `PCGetOperators()`, `MatZeroEntries()`, `PCGetReusePreconditioner()`, `KSPSetReusePreconditioner()`
@*/
PetscErrorCode PCSetReusePreconditioner(PC pc, PetscBool flag) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flag, 2);
  pc->reusepreconditioner = flag;
  PetscFunctionReturn(0);
}

/*@
   PCGetReusePreconditioner - Determines if the `PC` reuses the current preconditioner even if the operator in the preconditioner has changed.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.
  flag - `PETSC_TRUE` do not compute a new preconditioner, `PETSC_FALSE` do compute a new preconditioner

   Level: intermediate

.seealso: `PC`, `PCGetOperators()`, `MatZeroEntries()`, `PCSetReusePreconditioner()`
@*/
PetscErrorCode PCGetReusePreconditioner(PC pc, PetscBool *flag) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidBoolPointer(flag, 2);
  *flag = pc->reusepreconditioner;
  PetscFunctionReturn(0);
}

/*@
   PCGetOperators - Gets the matrix associated with the linear system and
   possibly a different one associated with the preconditioner.

   Not collective, though parallel mats are returned if pc is parallel

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  Amat - the matrix defining the linear system
-  Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat.

   Level: intermediate

   Note:
   Does not increase the reference count of the matrices, so you should not destroy them

   Alternative usage: If the operators have NOT been set with `KSPSetOperators()`/`PCSetOperators()` then the operators
   are created in `PC` and returned to the user. In this case, if both operators
   mat and pmat are requested, two DIFFERENT operators will be returned. If
   only one is requested both operators in the PC will be the same (i.e. as
   if one had called `KSPSetOperators()`/`PCSetOperators()` with the same argument for both Mats).
   The user must set the sizes of the returned matrices and their type etc just
   as if the user created them with `MatCreate()`. For example,

.vb
     KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to
       set size, type, etc of Amat

     MatCreate(comm,&Amat);
     KSP/PCSetOperators(ksp/pc,Amat,Amat);
     PetscObjectDereference((PetscObject)Amat);
       set size, type, etc of Amat
.ve

     and

.vb
     KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to
       set size, type, etc of Amat and Pmat

     MatCreate(comm,&Amat);
     MatCreate(comm,&Pmat);
     KSP/PCSetOperators(ksp/pc,Amat,Pmat);
     PetscObjectDereference((PetscObject)Amat);
     PetscObjectDereference((PetscObject)Pmat);
       set size, type, etc of Amat and Pmat
.ve

   The rationale for this support is so that when creating a `TS`, `SNES`, or `KSP` the hierarchy
   of underlying objects (i.e. `SNES`, `KSP`, `PC`, `Mat`) and their lifespans can be completely
   managed by the top most level object (i.e. the `TS`, `SNES`, or `KSP`). Another way to look
   at this is when you create a `SNES` you do not NEED to create a `KSP` and attach it to
   the `SNES` object (the `SNES` object manages it for you). Similarly when you create a KSP
   you do not need to attach a `PC` to it (the `KSP` object manages the `PC` object for you).
   Thus, why should YOU have to create the `Mat` and attach it to the `SNES`/`KSP`/`PC`, when
   it can be created for you?
.seealso: `PC`, `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperatorsSet()`
@*/
PetscErrorCode PCGetOperators(PC pc, Mat *Amat, Mat *Pmat) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (Amat) {
    if (!pc->mat) {
      if (pc->pmat && !Pmat) { /* Pmat has been set, but user did not request it, so use for Amat */
        pc->mat = pc->pmat;
        PetscCall(PetscObjectReference((PetscObject)pc->mat));
      } else { /* both Amat and Pmat are empty */
        PetscCall(MatCreate(PetscObjectComm((PetscObject)pc), &pc->mat));
        if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */
          pc->pmat = pc->mat;
          PetscCall(PetscObjectReference((PetscObject)pc->pmat));
        }
      }
    }
    *Amat = pc->mat;
  }
  if (Pmat) {
    if (!pc->pmat) {
      if (pc->mat && !Amat) { /* Amat has been set but was not requested, so use for pmat */
        pc->pmat = pc->mat;
        PetscCall(PetscObjectReference((PetscObject)pc->pmat));
      } else {
        PetscCall(MatCreate(PetscObjectComm((PetscObject)pc), &pc->pmat));
        if (!Amat) { /* user did NOT request Amat, so make same as Pmat */
          pc->mat = pc->pmat;
          PetscCall(PetscObjectReference((PetscObject)pc->mat));
        }
      }
    }
    *Pmat = pc->pmat;
  }
  PetscFunctionReturn(0);
}

/*@C
   PCGetOperatorsSet - Determines if the matrix associated with the linear system and
   possibly a different one associated with the preconditioner have been set in the `PC`.

   Not collective, though the results on all processes should be the same

   Input Parameter:
.
  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix associated with the linear system was set
-  pmat - matrix associated with the preconditioner was set, usually the same

   Level: intermediate

.seealso: `PC`, `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperators()`
@*/
PetscErrorCode PCGetOperatorsSet(PC pc, PetscBool *mat, PetscBool *pmat) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* either output may be NULL if the caller is not interested in it */
  if (mat) *mat = (pc->mat) ? PETSC_TRUE : PETSC_FALSE;
  if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}

/*@
   PCFactorGetMatrix - Gets the factored matrix from the
   preconditioner context. This routine is valid only for the `PCLU`,
   `PCILU`, `PCCHOLESKY`, and `PCICC` methods.

   Not Collective on pc though mat is parallel if pc is parallel

   Input Parameter:
.  pc - the preconditioner context

   Output parameters:
.  mat - the factored matrix

   Level: advanced

   Note:
   Does not increase the reference count for the matrix so DO NOT destroy it

.seealso: `PC`, `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC`
@*/
PetscErrorCode PCFactorGetMatrix(PC pc, Mat *mat) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(mat, 2);
  /* errors if the PC type does not provide a factored matrix */
  PetscUseTypeMethod(pc, getfactoredmatrix, mat);
  PetscFunctionReturn(0);
}

/*@C
   PCSetOptionsPrefix - Sets the prefix used for searching for all
   `PC` options in the database.

   Logically Collective on pc

   Input Parameters:
+  pc - the preconditioner context
-  prefix - the prefix string to prepend to all `PC` option requests

   Note:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the
   hyphen.
   Level: advanced

.seealso: `PC`, `PCSetFromOptions`, `PCAppendOptionsPrefix()`, `PCGetOptionsPrefix()`
@*/
PetscErrorCode PCSetOptionsPrefix(PC pc, const char prefix[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)pc, prefix));
  PetscFunctionReturn(0);
}

/*@C
   PCAppendOptionsPrefix - Appends to the prefix used for searching for all
   `PC` options in the database.

   Logically Collective on pc

   Input Parameters:
+  pc - the preconditioner context
-  prefix - the prefix string to append to all `PC` option requests

   Note:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the
   hyphen.

   Level: advanced

.seealso: `PC`, `PCSetFromOptions`, `PCSetOptionsPrefix()`, `PCGetOptionsPrefix()`
@*/
PetscErrorCode PCAppendOptionsPrefix(PC pc, const char prefix[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)pc, prefix));
  PetscFunctionReturn(0);
}

/*@C
   PCGetOptionsPrefix - Gets the prefix used for searching for all
   PC options in the database.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  prefix - pointer to the prefix string used, is returned

   Fortran Note:
   The user should pass in a string 'prefix' of
   sufficient length to hold the prefix.
   Level: advanced

.seealso: `PC`, `PCSetFromOptions`, `PCSetOptionsPrefix()`, `PCAppendOptionsPrefix()`
@*/
PetscErrorCode PCGetOptionsPrefix(PC pc, const char *prefix[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(prefix, 2);
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc, prefix));
  PetscFunctionReturn(0);
}

/*
   Indicates the right hand side will be changed by KSPSolve(), this occurs for a few
   preconditioners including BDDC and Eisenstat that transform the equations before applying
   the Krylov methods
*/
PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC pc, PetscBool *change) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(change, 2);
  /* defaults to PETSC_FALSE; PC implementations that transform the RHS override via composed method */
  *change = PETSC_FALSE;
  PetscTryMethod(pc, "PCPreSolveChangeRHS_C", (PC, PetscBool *), (pc, change));
  PetscFunctionReturn(0);
}

/*@
   PCPreSolve - Optional pre-solve phase, intended for any
   preconditioner-specific actions that must be performed before
   the iterative solve itself.

   Collective on pc

   Input Parameters:
+  pc - the preconditioner context
-  ksp - the Krylov subspace context

   Level: developer

   Sample of Usage:
.vb
    PCPreSolve(pc,ksp);
    KSPSolve(ksp,b,x);
    PCPostSolve(pc,ksp);
.ve

   Notes:
   The pre-solve phase is distinct from the `PCSetUp()` phase.

   `KSPSolve()` calls this directly, so is rarely called by the user.
.seealso: `PC`, `PCPostSolve()`
@*/
PetscErrorCode PCPreSolve(PC pc, KSP ksp) {
  Vec x, rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(ksp, KSP_CLASSID, 2);
  /* track nesting depth; PCPostSolve() decrements it */
  pc->presolvedone++;
  PetscCheck(pc->presolvedone <= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot embed PCPreSolve() more than twice");
  PetscCall(KSPGetSolution(ksp, &x));
  PetscCall(KSPGetRhs(ksp, &rhs));

  /* the type's presolve method takes precedence over a user-set callback */
  if (pc->ops->presolve) PetscUseTypeMethod(pc, presolve, ksp, rhs, x);
  else if (pc->presolve) PetscCall((pc->presolve)(pc, ksp));
  PetscFunctionReturn(0);
}

/*@C
   PCSetPreSolve - Sets function used by `PCPreSolve()` which is intended for any
   preconditioner-specific actions that must be performed before
   the iterative solve itself.

   Logically Collective on pc

   Input Parameters:
+   pc - the preconditioner object
-   presolve - the function to call before the solve

   Calling sequence of presolve:
$  func(PC pc,KSP ksp)

+  pc - the PC context
-  ksp - the KSP context

   Level: developer

.seealso: `PC`, `PCSetUp()`, `PCPreSolve()`
@*/
PetscErrorCode PCSetPreSolve(PC pc, PetscErrorCode (*presolve)(PC, KSP)) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  pc->presolve = presolve;
  PetscFunctionReturn(0);
}

/*@
   PCPostSolve - Optional post-solve phase, intended for any
   preconditioner-specific actions that must be performed after
   the iterative solve itself.

   Collective on pc

   Input Parameters:
+  pc - the preconditioner context
-  ksp - the Krylov subspace context

   Sample of Usage:
.vb
    PCPreSolve(pc,ksp);
    KSPSolve(ksp,b,x);
    PCPostSolve(pc,ksp);
.ve

   Note:
   `KSPSolve()` calls this routine directly, so it is rarely called by the user.
   Level: developer

.seealso: `PC`, `PCSetPostSolve()`, `PCSetPresolve()`, `PCPreSolve()`, `KSPSolve()`
@*/
PetscErrorCode PCPostSolve(PC pc, KSP ksp) {
  Vec x, rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(ksp, KSP_CLASSID, 2);
  /* undo the nesting count incremented by the matching PCPreSolve() */
  pc->presolvedone--;
  PetscCall(KSPGetSolution(ksp, &x));
  PetscCall(KSPGetRhs(ksp, &rhs));
  PetscTryTypeMethod(pc, postsolve, ksp, rhs, x);
  PetscFunctionReturn(0);
}

/*@C
   PCLoad - Loads a `PC` that has been stored in binary with `PCView()`.

   Collective on newdm

   Input Parameters:
+  newdm - the newly loaded `PC`, this needs to have been created with `PCCreate()` or
           some related function before a call to `PCLoad()`.
-  viewer - binary file viewer, obtained from `PetscViewerBinaryOpen()`

   Level: intermediate

   Note:
   The type is determined by the data in the file, any type set into the PC before this call is ignored.
.seealso: `PC`, `PetscViewerBinaryOpen()`, `PCView()`, `MatLoad()`, `VecLoad()`
@*/
PetscErrorCode PCLoad(PC newdm, PetscViewer viewer) {
  PetscBool isbinary;
  PetscInt  classid;
  char      type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm, PC_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
  PetscCheck(isbinary, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* the binary stream begins with the PC classid followed by the type name written by PCView() */
  PetscCall(PetscViewerBinaryRead(viewer, &classid, 1, NULL, PETSC_INT));
  PetscCheck(classid == PC_FILE_CLASSID, PetscObjectComm((PetscObject)newdm), PETSC_ERR_ARG_WRONG, "Not PC next in file");
  PetscCall(PetscViewerBinaryRead(viewer, type, 256, NULL, PETSC_CHAR));
  PetscCall(PCSetType(newdm, type));
  PetscTryTypeMethod(newdm, load, viewer);
  PetscFunctionReturn(0);
}

#include <petscdraw.h>
#if defined(PETSC_HAVE_SAWS)
#include <petscviewersaws.h>
#endif

/*@C
   PCViewFromOptions - View from the `PC` based on options in the database

   Collective on pc

   Input Parameters:
+  A - the PC context
.
  obj - Optional object that provides the options prefix
-  name - command line option

   Level: intermediate

.seealso: `PC`, `PCView`, `PetscObjectViewFromOptions()`, `PCCreate()`
@*/
PetscErrorCode PCViewFromOptions(PC A, PetscObject obj, const char name[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, PC_CLASSID, 1);
  PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
  PetscFunctionReturn(0);
}

/*@C
   PCView - Prints information about the `PC`

   Collective on pc

   Input Parameters:
+  pc - the `PC` context
-  viewer - optional visualization context

   Notes:
   The available visualization contexts include
+     `PETSC_VIEWER_STDOUT_SELF` - standard output (default)
-     `PETSC_VIEWER_STDOUT_WORLD` - synchronized standard
         output where only the first processor opens
         the file. All other processors send their
         data to the first processor to print.

   The user can open an alternative visualization contexts with
   `PetscViewerASCIIOpen()` (output to a specified file).
   Level: developer

.seealso: `PC`, `PetscViewer`, `KSPView()`, `PetscViewerASCIIOpen()`
@*/
PetscErrorCode PCView(PC pc, PetscViewer viewer) {
  PCType    cstr;
  PetscBool iascii, isstring, isbinary, isdraw;
#if defined(PETSC_HAVE_SAWS)
  PetscBool issaws;
#endif

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* default to stdout on the PC's communicator when no viewer is supplied */
  if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc), &viewer));
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCheckSameComm(pc, 1, viewer, 2);

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
#if defined(PETSC_HAVE_SAWS)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
#endif

  if (iascii) {
    PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)pc, viewer));
    if (!pc->setupcalled) PetscCall(PetscViewerASCIIPrintf(viewer, "  PC has not been set up so information may be incomplete\n"));
    PetscCall(PetscViewerASCIIPushTab(viewer));
    PetscTryTypeMethod(pc, view, viewer);
    PetscCall(PetscViewerASCIIPopTab(viewer));
    if (pc->mat) {
      PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_INFO));
      if (pc->pmat == pc->mat) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  linear system matrix = precond matrix:\n"));
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(MatView(pc->mat, viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      } else {
        if (pc->pmat) {
          PetscCall(PetscViewerASCIIPrintf(viewer, "  linear system matrix followed by preconditioner matrix:\n"));
        } else {
          PetscCall(PetscViewerASCIIPrintf(viewer, "  linear system matrix:\n"));
        }
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(MatView(pc->mat, viewer));
        if (pc->pmat) PetscCall(MatView(pc->pmat, viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      }
      PetscCall(PetscViewerPopFormat(viewer));
    }
  } else if (isstring) {
    PetscCall(PCGetType(pc, &cstr));
    PetscCall(PetscViewerStringSPrintf(viewer, " PCType: %-7.7s", cstr));
    PetscTryTypeMethod(pc, view, viewer);
    if (pc->mat) PetscCall(MatView(pc->mat, viewer));
    if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat, viewer));
  } else if (isbinary) {
    PetscInt    classid = PC_FILE_CLASSID;
    MPI_Comm    comm;
    PetscMPIInt rank;
    char        type[256];

    /* only rank 0 writes the header (classid + type name) that PCLoad() reads back */
    PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
    PetscCallMPI(MPI_Comm_rank(comm, &rank));
    if (rank == 0) {
      PetscCall(PetscViewerBinaryWrite(viewer, &classid, 1, PETSC_INT));
      PetscCall(PetscStrncpy(type, ((PetscObject)pc)->type_name, 256));
      PetscCall(PetscViewerBinaryWrite(viewer, type, 256, PETSC_CHAR));
    }
    PetscTryTypeMethod(pc, view, viewer);
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x, y, bottom, h;
    PetscInt  n;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
    if (pc->mat) {
      PetscCall(MatGetSize(pc->mat, &n, NULL));
      PetscCall(PetscSNPrintf(str, 25, "PC: %s (%" PetscInt_FMT ")", ((PetscObject)pc)->type_name, n));
    } else {
      PetscCall(PetscSNPrintf(str, 25, "PC: %s", ((PetscObject)pc)->type_name));
    }
    PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
    bottom = y - h;
    PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom));
    PetscTryTypeMethod(pc, view, viewer);
    PetscCall(PetscDrawPopCurrentPoint(draw));
#if defined(PETSC_HAVE_SAWS)
  } else if (issaws) {
    PetscMPIInt rank;

    PetscCall(PetscObjectName((PetscObject)pc));
    PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
    if (!((PetscObject)pc)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)pc, viewer));
    if (pc->mat) PetscCall(MatView(pc->mat, viewer));
    if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat, viewer));
#endif
  }
  PetscFunctionReturn(0);
}

/*@C
   PCRegister - Adds a method to the preconditioner package.

   Not collective

   Input Parameters:
+  sname - name of a new user-defined solver
-  function - routine to create method context

   Note:
   `PCRegister()` may be called multiple times to add several user-defined preconditioners.

   Sample usage:
.vb
   PCRegister("my_solver", MySolverCreate);
.ve

   Then, your solver can be chosen with the procedural interface via
$     PCSetType(pc,"my_solver")
   or at runtime via the option
$     -pc_type my_solver

   Level: advanced

.seealso: `PC`, `PCType`, `PCRegisterAll()`
@*/
PetscErrorCode PCRegister(const char sname[], PetscErrorCode (*function)(PC)) {
  PetscFunctionBegin;
  PetscCall(PCInitializePackage());
  PetscCall(PetscFunctionListAdd(&PCList, sname, function));
  PetscFunctionReturn(0);
}

/* shell-matrix multiply that applies the preconditioner; used by PCComputeOperator() */
static PetscErrorCode MatMult_PC(Mat A, Vec X, Vec Y) {
  PC pc;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(A, &pc));
  PetscCall(PCApply(pc, X, Y));
  PetscFunctionReturn(0);
}

/*@
   PCComputeOperator - Computes the explicit preconditioned operator.

   Collective on pc

   Input Parameters:
+  pc - the preconditioner object
-  mattype - the matrix type to be used for the operator

   Output Parameter:
.
mat - the explicit preconditioned operator

  Note:
  This computation is done by applying the operators to columns of the identity matrix.
  This routine is costly in general, and is recommended for use only with relatively small systems.
  Currently, this routine uses a dense matrix format when mattype == NULL

  Level: advanced

.seealso: `PC`, `KSPComputeOperator()`, `MatType`

@*/
PetscErrorCode PCComputeOperator(PC pc, MatType mattype, Mat *mat) {
  PetscInt N, M, m, n;
  Mat A, Apc;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(mat, 3);
  /* size the shell matrix like the PC's system matrix A */
  PetscCall(PCGetOperators(pc, &A, NULL));
  PetscCall(MatGetLocalSize(A, &m, &n));
  PetscCall(MatGetSize(A, &M, &N));
  /* wrap the PC in a shell matrix whose MatMult applies the preconditioner (MatMult_PC above) */
  PetscCall(MatCreateShell(PetscObjectComm((PetscObject)pc), m, n, M, N, pc, &Apc));
  PetscCall(MatShellSetOperation(Apc, MATOP_MULT, (void (*)(void))MatMult_PC));
  /* builds the explicit matrix by applying the shell to columns of the identity */
  PetscCall(MatComputeOperator(Apc, mattype, mat));
  PetscCall(MatDestroy(&Apc));
  PetscFunctionReturn(0);
}

/*@
  PCSetCoordinates - sets the coordinates of all the nodes on the local process

  Collective on pc

  Input Parameters:
+ pc - the solver context
. dim - the dimension of the coordinates 1, 2, or 3
. nloc - the blocked size of the coordinates array
- coords - the coordinates array

  Level: intermediate

  Note:
  coords is an array of the dim coordinates for the nodes on
  the local processor, of size dim*nloc.
  If there are 108 equations on a processor
  for a displacement finite element discretization of elasticity (so
  that there are nloc = 36 = 108/3 nodes) then the array must have 108
  double precision values (ie, 3 * 36). These x y z coordinates
  should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x,
  ... , N-1.z ].

.seealso: `PC`, `MatSetNearNullSpace()`
@*/
PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, dim, 2);
  /* forwarded to the implementation's PCSetCoordinates_C if it provides one; PetscTryMethod makes
     this a no-op for implementations that do not */
  PetscTryMethod(pc, "PCSetCoordinates_C", (PC, PetscInt, PetscInt, PetscReal *), (pc, dim, nloc, coords));
  PetscFunctionReturn(0);
}

/*@
  PCGetInterpolations - Gets interpolation matrices for all levels (except level 0)

  Logically Collective on pc

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ num_levels - the number of levels
- interpolations - the interpolation matrices (size of num_levels-1)

  Level: advanced

  Developer Note:
  Why is this here instead of in `PCMG` etc?

.seealso: `PC`, `PCMG`, `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetInterpolation()`, `PCGetCoarseOperators()`
@*/
PetscErrorCode PCGetInterpolations(PC pc, PetscInt *num_levels, Mat *interpolations[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidIntPointer(num_levels, 2);
  PetscValidPointer(interpolations, 3);
  /* PetscUseMethod: errors if the PC implementation does not provide PCGetInterpolations_C */
  PetscUseMethod(pc, "PCGetInterpolations_C", (PC, PetscInt *, Mat *[]), (pc, num_levels, interpolations));
  PetscFunctionReturn(0);
}

/*@
  PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level)

  Logically Collective on pc

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ num_levels - the number of levels
- coarseOperators - the coarse operator matrices (size of num_levels-1)

  Level: advanced

  Developer Note:
  Why is this here instead of in `PCMG` etc?

.seealso: `PC`, `PCMG`, `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetRScale()`, `PCMGGetInterpolation()`, `PCGetInterpolations()`
@*/
PetscErrorCode PCGetCoarseOperators(PC pc, PetscInt *num_levels, Mat *coarseOperators[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidIntPointer(num_levels, 2);
  PetscValidPointer(coarseOperators, 3);
  /* PetscUseMethod: errors if the PC implementation does not provide PCGetCoarseOperators_C */
  PetscUseMethod(pc, "PCGetCoarseOperators_C", (PC, PetscInt *, Mat *[]), (pc, num_levels, coarseOperators));
  PetscFunctionReturn(0);
}