1 2 /* 3 The PC (preconditioner) interface routines, callable by users. 4 */ 5 #include <petsc/private/pcimpl.h> /*I "petscksp.h" I*/ 6 #include <petscdm.h> 7 8 /* Logging support */ 9 PetscClassId PC_CLASSID; 10 PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_MatApply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft; 11 PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks; 12 PetscInt PetscMGLevelId; 13 14 PetscErrorCode PCGetDefaultType_Private(PC pc,const char *type[]) 15 { 16 PetscErrorCode ierr; 17 PetscMPIInt size; 18 PetscBool hasop,flg1,flg2,set,flg3; 19 20 PetscFunctionBegin; 21 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRMPI(ierr); 22 if (pc->pmat) { 23 ierr = MatHasOperation(pc->pmat,MATOP_GET_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr); 24 if (size == 1) { 25 ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1);CHKERRQ(ierr); 26 ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2);CHKERRQ(ierr); 27 ierr = MatIsSymmetricKnown(pc->pmat,&set,&flg3);CHKERRQ(ierr); 28 if (flg1 && (!flg2 || (set && flg3))) { 29 *type = PCICC; 30 } else if (flg2) { 31 *type = PCILU; 32 } else if (hasop) { /* likely is a parallel matrix run on one processor */ 33 *type = PCBJACOBI; 34 } else { 35 *type = PCNONE; 36 } 37 } else { 38 if (hasop) { 39 *type = PCBJACOBI; 40 } else { 41 *type = PCNONE; 42 } 43 } 44 } else { 45 if (size == 1) { 46 *type = PCILU; 47 } else { 48 *type = PCBJACOBI; 49 } 50 } 51 PetscFunctionReturn(0); 52 } 53 54 /*@ 55 PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats 56 57 Collective on PC 58 59 Input Parameter: 60 . 
pc - the preconditioner context 61 62 Level: developer 63 64 Notes: 65 This allows a PC to be reused for a different sized linear system but using the same options that have been previously set in the PC 66 67 .seealso: PCCreate(), PCSetUp() 68 @*/ 69 PetscErrorCode PCReset(PC pc) 70 { 71 PetscErrorCode ierr; 72 73 PetscFunctionBegin; 74 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 75 if (pc->ops->reset) { 76 ierr = (*pc->ops->reset)(pc);CHKERRQ(ierr); 77 } 78 ierr = VecDestroy(&pc->diagonalscaleright);CHKERRQ(ierr); 79 ierr = VecDestroy(&pc->diagonalscaleleft);CHKERRQ(ierr); 80 ierr = MatDestroy(&pc->pmat);CHKERRQ(ierr); 81 ierr = MatDestroy(&pc->mat);CHKERRQ(ierr); 82 83 pc->setupcalled = 0; 84 PetscFunctionReturn(0); 85 } 86 87 /*@C 88 PCDestroy - Destroys PC context that was created with PCCreate(). 89 90 Collective on PC 91 92 Input Parameter: 93 . pc - the preconditioner context 94 95 Level: developer 96 97 .seealso: PCCreate(), PCSetUp() 98 @*/ 99 PetscErrorCode PCDestroy(PC *pc) 100 { 101 PetscErrorCode ierr; 102 103 PetscFunctionBegin; 104 if (!*pc) PetscFunctionReturn(0); 105 PetscValidHeaderSpecific((*pc),PC_CLASSID,1); 106 if (--((PetscObject)(*pc))->refct > 0) {*pc = NULL; PetscFunctionReturn(0);} 107 108 ierr = PCReset(*pc);CHKERRQ(ierr); 109 110 /* if memory was published with SAWs then destroy it */ 111 ierr = PetscObjectSAWsViewOff((PetscObject)*pc);CHKERRQ(ierr); 112 if ((*pc)->ops->destroy) {ierr = (*(*pc)->ops->destroy)((*pc));CHKERRQ(ierr);} 113 ierr = DMDestroy(&(*pc)->dm);CHKERRQ(ierr); 114 ierr = PetscHeaderDestroy(pc);CHKERRQ(ierr); 115 PetscFunctionReturn(0); 116 } 117 118 /*@C 119 PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right 120 scaling as needed by certain time-stepping codes. 121 122 Logically Collective on PC 123 124 Input Parameter: 125 . pc - the preconditioner context 126 127 Output Parameter: 128 . 
flag - PETSC_TRUE if it applies the scaling 129 130 Level: developer 131 132 Notes: 133 If this returns PETSC_TRUE then the system solved via the Krylov method is 134 $ D M A D^{-1} y = D M b for left preconditioning or 135 $ D A M D^{-1} z = D b for right preconditioning 136 137 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCSetDiagonalScale() 138 @*/ 139 PetscErrorCode PCGetDiagonalScale(PC pc,PetscBool *flag) 140 { 141 PetscFunctionBegin; 142 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 143 PetscValidBoolPointer(flag,2); 144 *flag = pc->diagonalscale; 145 PetscFunctionReturn(0); 146 } 147 148 /*@ 149 PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right 150 scaling as needed by certain time-stepping codes. 151 152 Logically Collective on PC 153 154 Input Parameters: 155 + pc - the preconditioner context 156 - s - scaling vector 157 158 Level: intermediate 159 160 Notes: 161 The system solved via the Krylov method is 162 $ D M A D^{-1} y = D M b for left preconditioning or 163 $ D A M D^{-1} z = D b for right preconditioning 164 165 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 
166 167 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCGetDiagonalScale() 168 @*/ 169 PetscErrorCode PCSetDiagonalScale(PC pc,Vec s) 170 { 171 PetscErrorCode ierr; 172 173 PetscFunctionBegin; 174 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 175 PetscValidHeaderSpecific(s,VEC_CLASSID,2); 176 pc->diagonalscale = PETSC_TRUE; 177 178 ierr = PetscObjectReference((PetscObject)s);CHKERRQ(ierr); 179 ierr = VecDestroy(&pc->diagonalscaleleft);CHKERRQ(ierr); 180 181 pc->diagonalscaleleft = s; 182 183 ierr = VecDuplicate(s,&pc->diagonalscaleright);CHKERRQ(ierr); 184 ierr = VecCopy(s,pc->diagonalscaleright);CHKERRQ(ierr); 185 ierr = VecReciprocal(pc->diagonalscaleright);CHKERRQ(ierr); 186 PetscFunctionReturn(0); 187 } 188 189 /*@ 190 PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes. 191 192 Logically Collective on PC 193 194 Input Parameters: 195 + pc - the preconditioner context 196 . in - input vector 197 - out - scaled vector (maybe the same as in) 198 199 Level: intermediate 200 201 Notes: 202 The system solved via the Krylov method is 203 $ D M A D^{-1} y = D M b for left preconditioning or 204 $ D A M D^{-1} z = D b for right preconditioning 205 206 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 
207 208 If diagonal scaling is turned off and in is not out then in is copied to out 209 210 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleSet(), PCDiagonalScaleRight(), PCDiagonalScale() 211 @*/ 212 PetscErrorCode PCDiagonalScaleLeft(PC pc,Vec in,Vec out) 213 { 214 PetscErrorCode ierr; 215 216 PetscFunctionBegin; 217 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 218 PetscValidHeaderSpecific(in,VEC_CLASSID,2); 219 PetscValidHeaderSpecific(out,VEC_CLASSID,3); 220 if (pc->diagonalscale) { 221 ierr = VecPointwiseMult(out,pc->diagonalscaleleft,in);CHKERRQ(ierr); 222 } else if (in != out) { 223 ierr = VecCopy(in,out);CHKERRQ(ierr); 224 } 225 PetscFunctionReturn(0); 226 } 227 228 /*@ 229 PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes. 230 231 Logically Collective on PC 232 233 Input Parameters: 234 + pc - the preconditioner context 235 . in - input vector 236 - out - scaled vector (maybe the same as in) 237 238 Level: intermediate 239 240 Notes: 241 The system solved via the Krylov method is 242 $ D M A D^{-1} y = D M b for left preconditioning or 243 $ D A M D^{-1} z = D b for right preconditioning 244 245 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 
246 247 If diagonal scaling is turned off and in is not out then in is copied to out 248 249 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleSet(), PCDiagonalScale() 250 @*/ 251 PetscErrorCode PCDiagonalScaleRight(PC pc,Vec in,Vec out) 252 { 253 PetscErrorCode ierr; 254 255 PetscFunctionBegin; 256 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 257 PetscValidHeaderSpecific(in,VEC_CLASSID,2); 258 PetscValidHeaderSpecific(out,VEC_CLASSID,3); 259 if (pc->diagonalscale) { 260 ierr = VecPointwiseMult(out,pc->diagonalscaleright,in);CHKERRQ(ierr); 261 } else if (in != out) { 262 ierr = VecCopy(in,out);CHKERRQ(ierr); 263 } 264 PetscFunctionReturn(0); 265 } 266 267 /*@ 268 PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the 269 operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(), 270 TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators() not the Pmat. 271 272 Logically Collective on PC 273 274 Input Parameters: 275 + pc - the preconditioner context 276 - flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false) 277 278 Options Database Key: 279 . -pc_use_amat <true,false> - use the amat to apply the operator 280 281 Notes: 282 For the common case in which the linear system matrix and the matrix used to construct the 283 preconditioner are identical, this routine is does nothing. 284 285 Level: intermediate 286 287 .seealso: PCGetUseAmat(), PCBJACOBI, PGMG, PCFIELDSPLIT, PCCOMPOSITE 288 @*/ 289 PetscErrorCode PCSetUseAmat(PC pc,PetscBool flg) 290 { 291 PetscFunctionBegin; 292 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 293 pc->useAmat = flg; 294 PetscFunctionReturn(0); 295 } 296 297 /*@ 298 PCSetErrorIfFailure - Causes PC to generate an error if a FPE, for example a zero pivot, is detected. 
299 300 Logically Collective on PC 301 302 Input Parameters: 303 + pc - iterative context obtained from PCCreate() 304 - flg - PETSC_TRUE indicates you want the error generated 305 306 Level: advanced 307 308 Notes: 309 Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner, you can call KSPGetConvergedReason() after a KSPSolve() 310 to determine if it has converged or failed. Or use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is 311 detected. 312 313 This is propagated into KSPs used by this PC, which then propagate it into PCs used by those KSPs 314 315 .seealso: PCGetInitialGuessNonzero(), PCSetInitialGuessKnoll(), PCGetInitialGuessKnoll() 316 @*/ 317 PetscErrorCode PCSetErrorIfFailure(PC pc,PetscBool flg) 318 { 319 PetscFunctionBegin; 320 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 321 PetscValidLogicalCollectiveBool(pc,flg,2); 322 pc->erroriffailure = flg; 323 PetscFunctionReturn(0); 324 } 325 326 /*@ 327 PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the 328 operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(), 329 TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators() not the Pmat. 330 331 Logically Collective on PC 332 333 Input Parameter: 334 . pc - the preconditioner context 335 336 Output Parameter: 337 . flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false) 338 339 Notes: 340 For the common case in which the linear system matrix and the matrix used to construct the 341 preconditioner are identical, this routine is does nothing. 
342 343 Level: intermediate 344 345 .seealso: PCSetUseAmat(), PCBJACOBI, PGMG, PCFIELDSPLIT, PCCOMPOSITE 346 @*/ 347 PetscErrorCode PCGetUseAmat(PC pc,PetscBool *flg) 348 { 349 PetscFunctionBegin; 350 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 351 *flg = pc->useAmat; 352 PetscFunctionReturn(0); 353 } 354 355 /*@ 356 PCCreate - Creates a preconditioner context. 357 358 Collective 359 360 Input Parameter: 361 . comm - MPI communicator 362 363 Output Parameter: 364 . pc - location to put the preconditioner context 365 366 Notes: 367 The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi with PCILU or PCICC 368 in parallel. For dense matrices it is always PCNONE. 369 370 Level: developer 371 372 .seealso: PCSetUp(), PCApply(), PCDestroy() 373 @*/ 374 PetscErrorCode PCCreate(MPI_Comm comm,PC *newpc) 375 { 376 PC pc; 377 PetscErrorCode ierr; 378 379 PetscFunctionBegin; 380 PetscValidPointer(newpc,2); 381 *newpc = NULL; 382 ierr = PCInitializePackage();CHKERRQ(ierr); 383 384 ierr = PetscHeaderCreate(pc,PC_CLASSID,"PC","Preconditioner","PC",comm,PCDestroy,PCView);CHKERRQ(ierr); 385 386 pc->mat = NULL; 387 pc->pmat = NULL; 388 pc->setupcalled = 0; 389 pc->setfromoptionscalled = 0; 390 pc->data = NULL; 391 pc->diagonalscale = PETSC_FALSE; 392 pc->diagonalscaleleft = NULL; 393 pc->diagonalscaleright = NULL; 394 395 pc->modifysubmatrices = NULL; 396 pc->modifysubmatricesP = NULL; 397 398 *newpc = pc; 399 PetscFunctionReturn(0); 400 401 } 402 403 /* -------------------------------------------------------------------------------*/ 404 405 /*@ 406 PCApply - Applies the preconditioner to a vector. 407 408 Collective on PC 409 410 Input Parameters: 411 + pc - the preconditioner context 412 - x - input vector 413 414 Output Parameter: 415 . 
y - output vector

   Level: developer

.seealso: PCApplyTranspose(), PCApplyBAorAB()
@*/
PetscErrorCode PCApply(PC pc,Vec x,Vec y)
{
  PetscErrorCode ierr;
  PetscInt       m,n,mv,nv;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheckFalse(x == y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);}
  /* use pmat to check vector sizes since for KSPLSQR the pmat may be of a different size than mat */
  ierr = MatGetLocalSize(pc->pmat,&m,&n);CHKERRQ(ierr);
  ierr = VecGetLocalSize(x,&mv);CHKERRQ(ierr);
  ierr = VecGetLocalSize(y,&nv);CHKERRQ(ierr);
  /* check pmat * y = x is feasible */
  PetscCheckFalse(mv != m,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local rows %D does not equal input vector size %D",m,mv);
  PetscCheckFalse(nv != n,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local columns %D does not equal output vector size %D",n,nv);
  /* y is written below; error out now if some caller holds a read lock on it */
  ierr = VecSetErrorIfLocked(y,3);CHKERRQ(ierr);

  ierr = PCSetUp(pc);CHKERRQ(ierr);
  PetscCheckFalse(!pc->ops->apply,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply");
  /* guard x against modification by the implementation while it is applied */
  ierr = VecLockReadPush(x);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  ierr = (*pc->ops->apply)(pc,x,y);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);}
  ierr = VecLockReadPop(x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCMatApply - Applies the preconditioner to multiple vectors stored as a MATDENSE. Like PCApply(), Y and X must be different matrices.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  X - block of input vectors

   Output Parameter:
.  Y - block of output vectors

   Level: developer

.seealso: PCApply(), KSPMatSolve()
@*/
PetscErrorCode PCMatApply(PC pc,Mat X,Mat Y)
{
  Mat            A;
  Vec            cy, cx;
  PetscInt       m1, M1, m2, M2, n1, N1, n2, N2, m3, M3, n3, N3;
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(X, MAT_CLASSID, 2);
  PetscValidHeaderSpecific(Y, MAT_CLASSID, 3);
  PetscCheckSameComm(pc, 1, X, 2);
  PetscCheckSameComm(pc, 1, Y, 3);
  PetscCheckFalse(Y == X,PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "Y and X must be different matrices");
  /* the layouts of X and Y are checked against the Pmat (second operator) */
  ierr = PCGetOperators(pc, NULL, &A);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A, &m3, &n3);CHKERRQ(ierr);
  ierr = MatGetLocalSize(X, &m2, &n2);CHKERRQ(ierr);
  ierr = MatGetLocalSize(Y, &m1, &n1);CHKERRQ(ierr);
  ierr = MatGetSize(A, &M3, &N3);CHKERRQ(ierr);
  ierr = MatGetSize(X, &M2, &N2);CHKERRQ(ierr);
  ierr = MatGetSize(Y, &M1, &N1);CHKERRQ(ierr);
  PetscCheckFalse(n1 != n2 || N1 != N2,PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of input vectors (n,N) = (%D,%D) and block of output vectors (n,N) = (%D,%D)", n2, N2, n1, N1);
  PetscCheckFalse(m2 != m3 || M2 != M3,PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of input vectors (m,M) = (%D,%D) and Pmat (m,M)x(n,N) = (%D,%D)x(%D,%D)", m2, M2, m3, M3, n3, N3);
  PetscCheckFalse(m1 != n3 || M1 != N3,PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of output vectors (m,M) = (%D,%D) and Pmat (m,M)x(n,N) = (%D,%D)x(%D,%D)", m1, M1, m3, M3, n3, N3);
  ierr = PetscObjectBaseTypeCompareAny((PetscObject)Y, &match, MATSEQDENSE, MATMPIDENSE, "");CHKERRQ(ierr);
  PetscCheckFalse(!match,PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of output vectors not stored in a dense Mat");
  ierr = PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, "");CHKERRQ(ierr);
  PetscCheckFalse(!match,PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of input vectors not stored in a dense Mat");
  ierr = PCSetUp(pc);CHKERRQ(ierr);
  if (pc->ops->matapply) {
    /* the implementation provides a native block apply */
    ierr = PetscLogEventBegin(PC_MatApply, pc, X, Y, 0);CHKERRQ(ierr);
    ierr = (*pc->ops->matapply)(pc, X, Y);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_MatApply, pc, X, Y, 0);CHKERRQ(ierr);
  } else {
    /* fall back to applying the preconditioner one column at a time */
    ierr = PetscInfo(pc, "PC type %s applying column by column\n", ((PetscObject)pc)->type_name);CHKERRQ(ierr);
    for (n1 = 0; n1 < N1; ++n1) {
      ierr = MatDenseGetColumnVecRead(X, n1, &cx);CHKERRQ(ierr);
      ierr = MatDenseGetColumnVecWrite(Y, n1, &cy);CHKERRQ(ierr);
      ierr = PCApply(pc, cx, cy);CHKERRQ(ierr);
      ierr = MatDenseRestoreColumnVecWrite(Y, n1, &cy);CHKERRQ(ierr);
      ierr = MatDenseRestoreColumnVecRead(X, n1, &cx);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

/*@
   PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector

   Notes:
   Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.
529 530 Level: developer 531 532 .seealso: PCApply(), PCApplySymmetricRight() 533 @*/ 534 PetscErrorCode PCApplySymmetricLeft(PC pc,Vec x,Vec y) 535 { 536 PetscErrorCode ierr; 537 538 PetscFunctionBegin; 539 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 540 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 541 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 542 PetscCheckFalse(x == y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 543 if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);} 544 ierr = PCSetUp(pc);CHKERRQ(ierr); 545 PetscCheckFalse(!pc->ops->applysymmetricleft,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply"); 546 ierr = VecLockReadPush(x);CHKERRQ(ierr); 547 ierr = PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr); 548 ierr = (*pc->ops->applysymmetricleft)(pc,x,y);CHKERRQ(ierr); 549 ierr = PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr); 550 ierr = VecLockReadPop(x);CHKERRQ(ierr); 551 if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);} 552 PetscFunctionReturn(0); 553 } 554 555 /*@ 556 PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector. 557 558 Collective on PC 559 560 Input Parameters: 561 + pc - the preconditioner context 562 - x - input vector 563 564 Output Parameter: 565 . y - output vector 566 567 Level: developer 568 569 Notes: 570 Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners. 
571 572 .seealso: PCApply(), PCApplySymmetricLeft() 573 @*/ 574 PetscErrorCode PCApplySymmetricRight(PC pc,Vec x,Vec y) 575 { 576 PetscErrorCode ierr; 577 578 PetscFunctionBegin; 579 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 580 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 581 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 582 PetscCheckFalse(x == y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 583 if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);} 584 ierr = PCSetUp(pc);CHKERRQ(ierr); 585 PetscCheckFalse(!pc->ops->applysymmetricright,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply"); 586 ierr = VecLockReadPush(x);CHKERRQ(ierr); 587 ierr = PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 588 ierr = (*pc->ops->applysymmetricright)(pc,x,y);CHKERRQ(ierr); 589 ierr = PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 590 ierr = VecLockReadPop(x);CHKERRQ(ierr); 591 if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);} 592 PetscFunctionReturn(0); 593 } 594 595 /*@ 596 PCApplyTranspose - Applies the transpose of preconditioner to a vector. 597 598 Collective on PC 599 600 Input Parameters: 601 + pc - the preconditioner context 602 - x - input vector 603 604 Output Parameter: 605 . y - output vector 606 607 Notes: 608 For complex numbers this applies the non-Hermitian transpose. 
Developer Notes:
   We need to implement a PCApplyHermitianTranspose()

   Level: developer

.seealso: PCApply(), PCApplyBAorAB(), PCApplyBAorABTranspose(), PCApplyTransposeExists()
@*/
PetscErrorCode PCApplyTranspose(PC pc,Vec x,Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheckFalse(x == y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);}
  ierr = PCSetUp(pc);CHKERRQ(ierr);
  PetscCheckFalse(!pc->ops->applytranspose,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply transpose");
  ierr = VecLockReadPush(x);CHKERRQ(ierr);
  /* logged under the same event as PCApply() */
  ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  ierr = (*pc->ops->applytranspose)(pc,x,y);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  ierr = VecLockReadPop(x);CHKERRQ(ierr);
  if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/*@
   PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation

   Collective on PC

   Input Parameters:
.  pc - the preconditioner context

   Output Parameter:
.  flg - PETSC_TRUE if a transpose operation is defined

   Level: developer

.seealso: PCApplyTranspose()
@*/
PetscErrorCode PCApplyTransposeExists(PC pc,PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidBoolPointer(flg,2);
  if (pc->ops->applytranspose) *flg = PETSC_TRUE;
  else *flg = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/*@
   PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
.  x - input vector
-  work - work vector

   Output Parameter:
.  y - output vector

   Level: developer

   Notes:
   If the PC has had PCSetDiagonalScale() set then D M A D^{-1} for left preconditioning or D A M D^{-1} is actually applied. Note that the
   specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling.

.seealso: PCApply(), PCApplyTranspose(), PCApplyBAorABTranspose()
@*/
PetscErrorCode PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,side,2);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscValidHeaderSpecific(y,VEC_CLASSID,4);
  PetscValidHeaderSpecific(work,VEC_CLASSID,5);
  PetscCheckSameComm(pc,1,x,3);
  PetscCheckSameComm(pc,1,y,4);
  PetscCheckSameComm(pc,1,work,5);
  PetscCheckFalse(x == y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  PetscCheckFalse(side != PC_LEFT && side != PC_SYMMETRIC && side != PC_RIGHT,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric");
  PetscCheckFalse(pc->diagonalscale && side == PC_SYMMETRIC,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application");
  if (pc->erroriffailure) {ierr = VecValidValues(x,3,PETSC_TRUE);CHKERRQ(ierr);}

  ierr = PCSetUp(pc);CHKERRQ(ierr);
  if (pc->diagonalscale) {
    /* with diagonal scaling the operator applied is D (B A or A B) D^{-1} */
    if (pc->ops->applyBA) {
      Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
      ierr = VecDuplicate(x,&work2);CHKERRQ(ierr);
      ierr = PCDiagonalScaleRight(pc,x,work2);CHKERRQ(ierr);
      ierr = (*pc->ops->applyBA)(pc,side,work2,y,work);CHKERRQ(ierr);
      ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
      ierr = VecDestroy(&work2);CHKERRQ(ierr);
    } else if (side == PC_RIGHT) {
      ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr);
      ierr = PCApply(pc,y,work);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
      ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
    } else if (side == PC_LEFT) {
      ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,y,work);CHKERRQ(ierr);
      ierr = PCApply(pc,work,y);CHKERRQ(ierr);
      ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
    } else PetscCheckFalse(side == PC_SYMMETRIC,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner");
  } else {
    if (pc->ops->applyBA) {
      ierr = (*pc->ops->applyBA)(pc,side,x,y,work);CHKERRQ(ierr);
    } else if (side == PC_RIGHT) {
      /* y = A (B x) */
      ierr = PCApply(pc,x,work);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
    } else if (side == PC_LEFT) {
      /* y = B (A x) */
      ierr = MatMult(pc->mat,x,work);CHKERRQ(ierr);
      ierr = PCApply(pc,work,y);CHKERRQ(ierr);
    } else if (side == PC_SYMMETRIC) {
      /* There's an extra copy here; maybe should provide 2 work vectors instead? */
      ierr = PCApplySymmetricRight(pc,x,work);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
      ierr = VecCopy(y,work);CHKERRQ(ierr);
      ierr = PCApplySymmetricLeft(pc,work,y);CHKERRQ(ierr);
    }
  }
  if (pc->erroriffailure) {ierr = VecValidValues(y,4,PETSC_FALSE);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/*@
   PCApplyBAorABTranspose - Applies the transpose of the preconditioner
   and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning,
   NOT tr(B*A) = tr(A)*tr(B).

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.
side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
.  x - input vector
-  work - work vector

   Output Parameter:
.  y - output vector

   Notes:
   this routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner
   defined by B'. This is why this has the funny form that it computes tr(B) * tr(A)

   Level: developer

.seealso: PCApply(), PCApplyTranspose(), PCApplyBAorAB()
@*/
PetscErrorCode PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscValidHeaderSpecific(y,VEC_CLASSID,4);
  PetscValidHeaderSpecific(work,VEC_CLASSID,5);
  PetscCheckFalse(x == y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) {ierr = VecValidValues(x,3,PETSC_TRUE);CHKERRQ(ierr);}
  /* a native implementation, when available, handles all sides itself */
  if (pc->ops->applyBAtranspose) {
    ierr = (*pc->ops->applyBAtranspose)(pc,side,x,y,work);CHKERRQ(ierr);
    if (pc->erroriffailure) {ierr = VecValidValues(y,4,PETSC_FALSE);CHKERRQ(ierr);}
    PetscFunctionReturn(0);
  }
  PetscCheckFalse(side != PC_LEFT && side != PC_RIGHT,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left");

  ierr = PCSetUp(pc);CHKERRQ(ierr);
  if (side == PC_RIGHT) {
    /* y = tr(A) (tr(B) x) */
    ierr = PCApplyTranspose(pc,x,work);CHKERRQ(ierr);
    ierr = MatMultTranspose(pc->mat,work,y);CHKERRQ(ierr);
  } else if (side == PC_LEFT) {
    /* y = tr(B) (tr(A) x) */
    ierr = MatMultTranspose(pc->mat,x,work);CHKERRQ(ierr);
    ierr = PCApplyTranspose(pc,work,y);CHKERRQ(ierr);
  }
  /* add support for PC_SYMMETRIC */
  if (pc->erroriffailure) {ierr = VecValidValues(y,4,PETSC_FALSE);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------*/

/*@
   PCApplyRichardsonExists - Determines whether a particular preconditioner has a
   built-in fast application of Richardson's method.

   Not Collective

   Input Parameter:
.  pc - the preconditioner

   Output Parameter:
.  exists - PETSC_TRUE or PETSC_FALSE

   Level: developer

.seealso: PCApplyRichardson()
@*/
PetscErrorCode PCApplyRichardsonExists(PC pc,PetscBool *exists)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(exists,2);
  if (pc->ops->applyrichardson) *exists = PETSC_TRUE;
  else *exists = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/*@
   PCApplyRichardson - Applies several steps of Richardson iteration with
   the particular preconditioner. This routine is usually used by the
   Krylov solvers and not the application code directly.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  b - the right hand side
.  w - one work vector
.  rtol - relative decrease in residual norm convergence criteria
.  abstol - absolute residual norm convergence criteria
.  dtol - divergence residual norm increase criteria
.  its - the number of iterations to apply.
-  guesszero - if the input x contains nonzero initial guess

   Output Parameters:
+  outits - number of iterations actually used (for SOR this always equals its)
.  reason - the reason the apply terminated
-  y - the solution (also contains initial guess if guesszero is PETSC_FALSE

   Notes:
   Most preconditioners do not support this function. Use the command
   PCApplyRichardsonExists() to determine if one does.
852 853 Except for the multigrid PC this routine ignores the convergence tolerances 854 and always runs for the number of iterations 855 856 Level: developer 857 858 .seealso: PCApplyRichardsonExists() 859 @*/ 860 PetscErrorCode PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason) 861 { 862 PetscErrorCode ierr; 863 864 PetscFunctionBegin; 865 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 866 PetscValidHeaderSpecific(b,VEC_CLASSID,2); 867 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 868 PetscValidHeaderSpecific(w,VEC_CLASSID,4); 869 PetscCheckFalse(b == y,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"b and y must be different vectors"); 870 ierr = PCSetUp(pc);CHKERRQ(ierr); 871 PetscCheckFalse(!pc->ops->applyrichardson,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply richardson"); 872 ierr = (*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason);CHKERRQ(ierr); 873 PetscFunctionReturn(0); 874 } 875 876 /*@ 877 PCSetFailedReason - Sets the reason a PCSetUp() failed or PC_NOERROR if it did not fail 878 879 Logically Collective on PC 880 881 Input Parameters: 882 + pc - the preconditioner context 883 - reason - the reason it failedx 884 885 Level: advanced 886 887 .seealso: PCCreate(), PCApply(), PCDestroy(), PCFailedReason 888 @*/ 889 PetscErrorCode PCSetFailedReason(PC pc,PCFailedReason reason) 890 { 891 PetscFunctionBegin; 892 pc->failedreason = reason; 893 PetscFunctionReturn(0); 894 } 895 896 /*@ 897 PCGetFailedReason - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail 898 899 Logically Collective on PC 900 901 Input Parameter: 902 . pc - the preconditioner context 903 904 Output Parameter: 905 . reason - the reason it failed 906 907 Level: advanced 908 909 Notes: This is the maximum over reason over all ranks in the PC communicator. 
It is only valid after 910 a call KSPCheckDot() or KSPCheckNorm() inside a KSPSolve(). It is not valid immediately after a PCSetUp() 911 or PCApply(), then use PCGetFailedReasonRank() 912 913 .seealso: PCCreate(), PCApply(), PCDestroy(), PCGetFailedReasonRank(), PCSetFailedReason() 914 @*/ 915 PetscErrorCode PCGetFailedReason(PC pc,PCFailedReason *reason) 916 { 917 PetscFunctionBegin; 918 if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled; 919 else *reason = pc->failedreason; 920 PetscFunctionReturn(0); 921 } 922 923 /*@ 924 PCGetFailedReasonRank - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail on this MPI rank 925 926 Not Collective on PC 927 928 Input Parameter: 929 . pc - the preconditioner context 930 931 Output Parameter: 932 . reason - the reason it failed 933 934 Notes: 935 Different ranks may have different reasons or no reason, see PCGetFailedReason() 936 937 Level: advanced 938 939 .seealso: PCCreate(), PCApply(), PCDestroy(), PCGetFailedReason(), PCSetFailedReason() 940 @*/ 941 PetscErrorCode PCGetFailedReasonRank(PC pc,PCFailedReason *reason) 942 { 943 PetscFunctionBegin; 944 if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled; 945 else *reason = pc->failedreason; 946 PetscFunctionReturn(0); 947 } 948 949 /* Next line needed to deactivate KSP_Solve logging */ 950 #include <petsc/private/kspimpl.h> 951 952 /* 953 a setupcall of 0 indicates never setup, 954 1 indicates has been previously setup 955 -1 indicates a PCSetUp() was attempted and failed 956 */ 957 /*@ 958 PCSetUp - Prepares for the use of a preconditioner. 959 960 Collective on PC 961 962 Input Parameter: 963 . 
pc - the preconditioner context

   Level: developer

.seealso: PCCreate(), PCApply(), PCDestroy()
@*/
PetscErrorCode PCSetUp(PC pc)
{
  PetscErrorCode   ierr;
  const char       *def;
  PetscObjectState matstate, matnonzerostate;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscCheckFalse(!pc->mat,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first");

  /* honor an explicit user request to keep the existing preconditioner */
  if (pc->setupcalled && pc->reusepreconditioner) {
    ierr = PetscInfo(pc,"Leaving PC with identical preconditioner since reuse preconditioner is set\n");CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* compare the current state of the preconditioner matrix against the state cached at
     the previous setup to decide whether (and how much of) the setup must be redone */
  ierr = PetscObjectStateGet((PetscObject)pc->pmat,&matstate);CHKERRQ(ierr);
  ierr = MatGetNonzeroState(pc->pmat,&matnonzerostate);CHKERRQ(ierr);
  if (!pc->setupcalled) {
    ierr = PetscInfo(pc,"Setting up PC for first time\n");CHKERRQ(ierr);
    pc->flag = DIFFERENT_NONZERO_PATTERN;
  } else if (matstate == pc->matstate) {
    /* the operator has not changed since the last setup: nothing to do */
    ierr = PetscInfo(pc,"Leaving PC with identical preconditioner since operator is unchanged\n");CHKERRQ(ierr);
    PetscFunctionReturn(0);
  } else {
    if (matnonzerostate > pc->matnonzerostate) {
      ierr = PetscInfo(pc,"Setting up PC with different nonzero pattern\n");CHKERRQ(ierr);
      pc->flag = DIFFERENT_NONZERO_PATTERN;
    } else {
      ierr = PetscInfo(pc,"Setting up PC with same nonzero pattern\n");CHKERRQ(ierr);
      pc->flag = SAME_NONZERO_PATTERN;
    }
  }
  pc->matstate        = matstate;
  pc->matnonzerostate = matnonzerostate;

  /* pick a sensible default implementation if the user never set a type */
  if (!((PetscObject)pc)->type_name) {
    ierr = PCGetDefaultType_Private(pc,&def);CHKERRQ(ierr);
    ierr = PCSetType(pc,def);CHKERRQ(ierr);
  }

  ierr = MatSetErrorIfFailure(pc->pmat,pc->erroriffailure);CHKERRQ(ierr);
  ierr = MatSetErrorIfFailure(pc->mat,pc->erroriffailure);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PC_SetUp,pc,0,0,0);CHKERRQ(ierr);
  if (pc->ops->setup) {
    /* do not log solves and applications of preconditioners while constructing preconditioners;
       perhaps they should be logged separately from the regular solves */
    ierr = KSPInitializePackage();CHKERRQ(ierr);
    ierr = PetscLogEventDeactivatePush(KSP_Solve);CHKERRQ(ierr);
    ierr = PetscLogEventDeactivatePush(PC_Apply);CHKERRQ(ierr);
    ierr = (*pc->ops->setup)(pc);CHKERRQ(ierr);
    ierr = PetscLogEventDeactivatePop(KSP_Solve);CHKERRQ(ierr);
    ierr = PetscLogEventDeactivatePop(PC_Apply);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(PC_SetUp,pc,0,0,0);CHKERRQ(ierr);
  if (!pc->setupcalled) pc->setupcalled = 1;
  PetscFunctionReturn(0);
}

/*@
   PCSetUpOnBlocks - Sets up the preconditioner for each block in
   the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
   methods.

   Collective on PC

   Input Parameters:
.  pc - the preconditioner context

   Level: developer

.seealso: PCCreate(), PCApply(), PCDestroy(), PCSetUp()
@*/
PetscErrorCode PCSetUpOnBlocks(PC pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* silently a no-op for preconditioner types without block structure */
  if (!pc->ops->setuponblocks) PetscFunctionReturn(0);
  ierr = PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr);
  ierr = (*pc->ops->setuponblocks)(pc);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCSetModifySubMatrices - Sets a user-defined routine for modifying the
   submatrices that arise within certain subdomain-based preconditioners.
   The basic submatrices are extracted from the preconditioner matrix as
   usual; the user can then alter these (for example, to set different boundary
   conditions for each submatrix) before they are used for the local solves.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.
func - routine for modifying the submatrices
-  ctx - optional user-defined context (may be null)

   Calling sequence of func:
$     func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx);

+  row - an array of index sets that contain the global row numbers
         that comprise each local submatrix
.  col - an array of index sets that contain the global column numbers
         that comprise each local submatrix
.  submat - array of local submatrices
-  ctx - optional user-defined context for private data for the
         user-defined func routine (may be null)

   Notes:
   PCSetModifySubMatrices() MUST be called before KSPSetUp() and
   KSPSolve().

   A routine set by PCSetModifySubMatrices() is currently called within
   the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM)
   preconditioners.  All other preconditioners ignore this routine.

   Level: advanced

.seealso: PCModifySubMatrices()
@*/
PetscErrorCode PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* stash the callback and its context; they are invoked later by PCModifySubMatrices() */
  pc->modifysubmatrices  = func;
  pc->modifysubmatricesP = ctx;
  PetscFunctionReturn(0);
}

/*@C
   PCModifySubMatrices - Calls an optional user-defined routine within
   certain preconditioners if one has been set with PCSetModifySubMatrices().

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  nsub - the number of local submatrices
.  row - an array of index sets that contain the global row numbers
         that comprise each local submatrix
.  col - an array of index sets that contain the global column numbers
         that comprise each local submatrix
.  submat - array of local submatrices
-  ctx - optional user-defined context for private data for the
         user-defined routine (may be null)

   Output Parameter:
.  submat - array of local submatrices (the entries of which may
            have been modified)

   Notes:
   The user should NOT generally call this routine, as it will
   automatically be called within certain preconditioners (currently
   block Jacobi, additive Schwarz) if set.

   The basic submatrices are extracted from the preconditioner matrix
   as usual; the user can then alter these (for example, to set different
   boundary conditions for each submatrix) before they are used for the
   local solves.

   Level: developer

.seealso: PCSetModifySubMatrices()
@*/
PetscErrorCode PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* silently succeed when no user callback has been registered */
  if (!pc->modifysubmatrices) PetscFunctionReturn(0);
  ierr = PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr);
  ierr = (*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCSetOperators - Sets the matrix associated with the linear system and
   a (possibly) different one associated with the preconditioner.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  Amat - the matrix that defines the linear system
-  Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat.

   Notes:
   Passing a NULL for Amat or Pmat removes the matrix that is currently used.
   If you wish to replace either Amat or Pmat but leave the other one untouched then
   first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
   on it and then pass it back in in your call to KSPSetOperators().

   More Notes about Repeated Solution of Linear Systems:
   PETSc does NOT reset the matrix entries of either Amat or Pmat
   to zero after a linear solve; the user is completely responsible for
   matrix assembly.  See the routine MatZeroEntries() if desiring to
   zero all elements of a matrix.

   Level: intermediate

.seealso: PCGetOperators(), MatZeroEntries()
@*/
PetscErrorCode PCSetOperators(PC pc,Mat Amat,Mat Pmat)
{
  PetscErrorCode ierr;
  PetscInt       m1,n1,m2,n2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (Amat) PetscValidHeaderSpecific(Amat,MAT_CLASSID,2);
  if (Pmat) PetscValidHeaderSpecific(Pmat,MAT_CLASSID,3);
  if (Amat) PetscCheckSameComm(pc,1,Amat,2);
  if (Pmat) PetscCheckSameComm(pc,1,Pmat,3);
  /* once the PC has been set up, the local sizes of replacement operators may not change */
  if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) {
    ierr = MatGetLocalSize(Amat,&m1,&n1);CHKERRQ(ierr);
    ierr = MatGetLocalSize(pc->mat,&m2,&n2);CHKERRQ(ierr);
    PetscCheckFalse(m1 != m2 || n1 != n2,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
    ierr = MatGetLocalSize(Pmat,&m1,&n1);CHKERRQ(ierr);
    ierr = MatGetLocalSize(pc->pmat,&m2,&n2);CHKERRQ(ierr);
    PetscCheckFalse(m1 != m2 || n1 != n2,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
  }

  if (Pmat != pc->pmat) {
    /* changing the operator that defines the preconditioner, so the cached states
       must be cleared so a new preconditioner is built at the next PCSetUp() */
    pc->matnonzerostate = -1;
    pc->matstate        = -1;
  }

  /* reference first in case the matrices are the same as the ones currently held */
  if (Amat) {ierr = PetscObjectReference((PetscObject)Amat);CHKERRQ(ierr);}
  ierr = MatDestroy(&pc->mat);CHKERRQ(ierr);
  if (Pmat) {ierr = PetscObjectReference((PetscObject)Pmat);CHKERRQ(ierr);}
  ierr = MatDestroy(&pc->pmat);CHKERRQ(ierr);
  pc->mat  = Amat;
  pc->pmat = Pmat;
  PetscFunctionReturn(0);
}

/*@
   PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner has changed.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner

   Level: intermediate

.seealso: PCGetOperators(), MatZeroEntries(), PCGetReusePreconditioner(), KSPSetReusePreconditioner()
@*/
PetscErrorCode PCSetReusePreconditioner(PC pc,PetscBool flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,flag,2);
  pc->reusepreconditioner = flag;
  PetscFunctionReturn(0);
}

/*@
   PCGetReusePreconditioner - Determines if the PC reuses the current preconditioner even if the operator in the preconditioner has changed.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.
flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner

   Level: intermediate

.seealso: PCGetOperators(), MatZeroEntries(), PCSetReusePreconditioner()
@*/
PetscErrorCode PCGetReusePreconditioner(PC pc,PetscBool *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(flag,2);
  *flag = pc->reusepreconditioner;
  PetscFunctionReturn(0);
}

/*@
   PCGetOperators - Gets the matrix associated with the linear system and
   possibly a different one associated with the preconditioner.

   Not collective, though parallel Mats are returned if the PC is parallel

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  Amat - the matrix defining the linear system
-  Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat.

   Level: intermediate

   Notes:
    Does not increase the reference count of the matrices, so you should not destroy them

   Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators
      are created in PC and returned to the user. In this case, if both operators
      mat and pmat are requested, two DIFFERENT operators will be returned. If
      only one is requested both operators in the PC will be the same (i.e. as
      if one had called KSP/PCSetOperators() with the same argument for both Mats).
      The user must set the sizes of the returned matrices and their type etc just
      as if the user created them with MatCreate(). For example,

$         KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to
$           set size, type, etc of Amat

$         MatCreate(comm,&mat);
$         KSP/PCSetOperators(ksp/pc,Amat,Amat);
$         PetscObjectDereference((PetscObject)mat);
$           set size, type, etc of Amat

     and

$         KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to
$           set size, type, etc of Amat and Pmat

$         MatCreate(comm,&Amat);
$         MatCreate(comm,&Pmat);
$         KSP/PCSetOperators(ksp/pc,Amat,Pmat);
$         PetscObjectDereference((PetscObject)Amat);
$         PetscObjectDereference((PetscObject)Pmat);
$           set size, type, etc of Amat and Pmat

   The rationale for this support is so that when creating a TS, SNES, or KSP the hierarchy
   of underlying objects (i.e. SNES, KSP, PC, Mat) and their lifespans can be completely
   managed by the top most level object (i.e. the TS, SNES, or KSP). Another way to look
   at this is when you create a SNES you do not NEED to create a KSP and attach it to
   the SNES object (the SNES object manages it for you). Similarly when you create a KSP
   you do not need to attach a PC to it (the KSP object manages the PC object for you).
   Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when
   it can be created for you?

.seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperatorsSet()
@*/
PetscErrorCode PCGetOperators(PC pc,Mat *Amat,Mat *Pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (Amat) {
    if (!pc->mat) {
      if (pc->pmat && !Pmat) {  /* Pmat has been set, but user did not request it, so use for Amat */
        pc->mat = pc->pmat;
        ierr    = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr);
      } else {                  /* both Amat and Pmat are empty */
        ierr = MatCreate(PetscObjectComm((PetscObject)pc),&pc->mat);CHKERRQ(ierr);
        if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */
          pc->pmat = pc->mat;
          ierr     = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr);
        }
      }
    }
    *Amat = pc->mat;
  }
  if (Pmat) {
    if (!pc->pmat) {
      if (pc->mat && !Amat) {  /* Amat has been set but was not requested, so use for pmat */
        pc->pmat = pc->mat;
        ierr     = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr);
      } else {
        ierr = MatCreate(PetscObjectComm((PetscObject)pc),&pc->pmat);CHKERRQ(ierr);
        if (!Amat) { /* user did NOT request Amat, so make same as Pmat */
          pc->mat = pc->pmat;
          ierr    = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr);
        }
      }
    }
    *Pmat = pc->pmat;
  }
  PetscFunctionReturn(0);
}

/*@C
   PCGetOperatorsSet - Determines if the matrix associated with the linear system and
   possibly a different one associated with the preconditioner have been set in the PC.

   Not collective, though the results on all processes should be the same

   Input Parameter:
.
pc - the preconditioner context 1361 1362 Output Parameters: 1363 + mat - the matrix associated with the linear system was set 1364 - pmat - matrix associated with the preconditioner was set, usually the same 1365 1366 Level: intermediate 1367 1368 .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperators() 1369 @*/ 1370 PetscErrorCode PCGetOperatorsSet(PC pc,PetscBool *mat,PetscBool *pmat) 1371 { 1372 PetscFunctionBegin; 1373 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1374 if (mat) *mat = (pc->mat) ? PETSC_TRUE : PETSC_FALSE; 1375 if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE; 1376 PetscFunctionReturn(0); 1377 } 1378 1379 /*@ 1380 PCFactorGetMatrix - Gets the factored matrix from the 1381 preconditioner context. This routine is valid only for the LU, 1382 incomplete LU, Cholesky, and incomplete Cholesky methods. 1383 1384 Not Collective on PC though Mat is parallel if PC is parallel 1385 1386 Input Parameters: 1387 . pc - the preconditioner context 1388 1389 Output parameters: 1390 . mat - the factored matrix 1391 1392 Level: advanced 1393 1394 Notes: 1395 Does not increase the reference count for the matrix so DO NOT destroy it 1396 1397 @*/ 1398 PetscErrorCode PCFactorGetMatrix(PC pc,Mat *mat) 1399 { 1400 PetscErrorCode ierr; 1401 1402 PetscFunctionBegin; 1403 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1404 PetscValidPointer(mat,2); 1405 if (pc->ops->getfactoredmatrix) { 1406 ierr = (*pc->ops->getfactoredmatrix)(pc,mat);CHKERRQ(ierr); 1407 } else SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC type does not support getting factor matrix"); 1408 PetscFunctionReturn(0); 1409 } 1410 1411 /*@C 1412 PCSetOptionsPrefix - Sets the prefix used for searching for all 1413 PC options in the database. 

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  prefix - the prefix string to prepend to all PC option requests

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the
   hyphen.

   Level: advanced

.seealso: PCAppendOptionsPrefix(), PCGetOptionsPrefix()
@*/
PetscErrorCode PCSetOptionsPrefix(PC pc,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* prefix storage is delegated to the base PetscObject implementation */
  ierr = PetscObjectSetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCAppendOptionsPrefix - Appends to the prefix used for searching for all
   PC options in the database.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  prefix - the prefix string to prepend to all PC option requests

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the
   hyphen.

   Level: advanced

.seealso: PCSetOptionsPrefix(), PCGetOptionsPrefix()
@*/
PetscErrorCode PCAppendOptionsPrefix(PC pc,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* prefix concatenation is delegated to the base PetscObject implementation */
  ierr = PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCGetOptionsPrefix - Gets the prefix used for searching for all
   PC options in the database.

   Not Collective

   Input Parameters:
.  pc - the preconditioner context

   Output Parameters:
.
prefix - pointer to the prefix string used, is returned

   Notes:
    On the fortran side, the user should pass in a string 'prefix' of
    sufficient length to hold the prefix.

   Level: advanced

.seealso: PCSetOptionsPrefix(), PCAppendOptionsPrefix()
@*/
PetscErrorCode PCGetOptionsPrefix(PC pc,const char *prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(prefix,2);
  ierr = PetscObjectGetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   Indicates the right hand side will be changed by KSPSolve(), this occurs for a few
   preconditioners including BDDC and Eisenstat that transform the equations before applying
   the Krylov methods
*/
PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC pc,PetscBool *change)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(change,2);
  /* default to "no change"; implementations override via the composed method below */
  *change = PETSC_FALSE;
  ierr = PetscTryMethod(pc,"PCPreSolveChangeRHS_C",(PC,PetscBool*),(pc,change));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCPreSolve - Optional pre-solve phase, intended for any
   preconditioner-specific actions that must be performed before
   the iterative solve itself.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  ksp - the Krylov subspace context

   Level: developer

   Sample of Usage:
.vb
    PCPreSolve(pc,ksp);
    KSPSolve(ksp,b,x);
    PCPostSolve(pc,ksp);
.ve

   Notes:
   The pre-solve phase is distinct from the PCSetUp() phase.

   KSPSolve() calls this directly, so is rarely called by the user.

.seealso: PCPostSolve()
@*/
PetscErrorCode PCPreSolve(PC pc,KSP ksp)
{
  PetscErrorCode ierr;
  Vec            x,rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
  /* track nesting depth; the matching PCPostSolve() decrements this counter */
  pc->presolvedone++;
  PetscCheckFalse(pc->presolvedone > 2,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot embed PCPreSolve() more than twice");
  ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr);
  ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr);

  /* the type-specific presolve takes precedence over a user-supplied callback */
  if (pc->ops->presolve) {
    ierr = (*pc->ops->presolve)(pc,ksp,rhs,x);CHKERRQ(ierr);
  } else if (pc->presolve) {
    ierr = (pc->presolve)(pc,ksp);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@C
   PCSetPreSolve - Sets function PCPreSolve() which is intended for any
   preconditioner-specific actions that must be performed before
   the iterative solve itself.

   Logically Collective on pc

   Input Parameters:
+   pc - the preconditioner object
-   presolve - the function to call before the solve

   Calling sequence of presolve:
$  func(PC pc,KSP ksp)

+  pc - the PC context
-  ksp - the KSP context

   Level: developer

.seealso: PC, PCSetUp(), PCPreSolve()
@*/
PetscErrorCode PCSetPreSolve(PC pc,PetscErrorCode (*presolve)(PC,KSP))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* used by PCPreSolve() only when the PC type provides no presolve of its own */
  pc->presolve = presolve;
  PetscFunctionReturn(0);
}

/*@
   PCPostSolve - Optional post-solve phase, intended for any
   preconditioner-specific actions that must be performed after
   the iterative solve itself.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  ksp - the Krylov subspace context

   Sample of Usage:
.vb
    PCPreSolve(pc,ksp);
    KSPSolve(ksp,b,x);
    PCPostSolve(pc,ksp);
.ve

   Note:
   KSPSolve() calls this routine directly, so it is rarely called by the user.

   Level: developer

.seealso: PCPreSolve(), KSPSolve()
@*/
PetscErrorCode PCPostSolve(PC pc,KSP ksp)
{
  PetscErrorCode ierr;
  Vec            x,rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
  /* undo the nesting count incremented by the matching PCPreSolve() */
  pc->presolvedone--;
  ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr);
  ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr);
  if (pc->ops->postsolve) {
    ierr = (*pc->ops->postsolve)(pc,ksp,rhs,x);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@C
  PCLoad - Loads a PC that has been stored in binary with PCView().

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded PC, this needs to have been created with PCCreate() or
           some related function before a call to PCLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen()

   Level: intermediate

  Notes:
   The type is determined by the data in the file, any type set into the PC before this call is ignored.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since PCLoad() and PCView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), PCView(), MatLoad(), VecLoad()
@*/
PetscErrorCode PCLoad(PC newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,PC_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  PetscCheckFalse(!isbinary,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* the stream must contain the PC class id followed by the type name; see the
     matching writes in the binary branch of PCView() */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,NULL,PETSC_INT);CHKERRQ(ierr);
  PetscCheckFalse(classid != PC_FILE_CLASSID,PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not PC next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,NULL,PETSC_CHAR);CHKERRQ(ierr);
  ierr = PCSetType(newdm, type);CHKERRQ(ierr);
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#include <petscdraw.h>
#if defined(PETSC_HAVE_SAWS)
#include <petscviewersaws.h>
#endif

/*@C
   PCViewFromOptions - View from Options

   Collective on PC

   Input Parameters:
+  A - the PC context
.
obj - Optional object
-  name - command line option

   Level: intermediate
.seealso: PC, PCView, PetscObjectViewFromOptions(), PCCreate()
@*/
PetscErrorCode PCViewFromOptions(PC A,PetscObject obj,const char name[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,PC_CLASSID,1);
  ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCView - Prints the PC data structure.

   Collective on PC

   Input Parameters:
+  PC - the PC context
-  viewer - optional visualization context

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
         output where only the first processor opens
         the file.  All other processors send their
         data to the first processor to print.

   The user can open an alternative visualization contexts with
   PetscViewerASCIIOpen() (output to a specified file).

   Level: developer

.seealso: KSPView(), PetscViewerASCIIOpen()
@*/
PetscErrorCode PCView(PC pc,PetscViewer viewer)
{
  PCType         cstr;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring,isbinary,isdraw;
#if defined(PETSC_HAVE_SAWS)
  PetscBool      issaws;
#endif

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* default to stdout on the PC's communicator when no viewer is supplied */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  PetscCheckSameComm(pc,1,viewer,2);

  /* dispatch on the concrete viewer type */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
#if defined(PETSC_HAVE_SAWS)
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSAWS,&issaws);CHKERRQ(ierr);
#endif

  if (iascii) {
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = PetscViewerASCIIPrintf(viewer,"  PC has not been set up so information may be incomplete\n");CHKERRQ(ierr);
    }
    if (pc->ops->view) {
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
    }
    if (pc->mat) {
      ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);CHKERRQ(ierr);
      if (pc->pmat == pc->mat) {
        ierr = PetscViewerASCIIPrintf(viewer,"  linear system matrix = precond matrix:\n");CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      } else {
        if (pc->pmat) {
          ierr = PetscViewerASCIIPrintf(viewer,"  linear system matrix followed by preconditioner matrix:\n");CHKERRQ(ierr);
        } else {
          ierr = PetscViewerASCIIPrintf(viewer,"  linear system matrix:\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);
        if (pc->pmat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);}
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PCGetType(pc,&cstr);CHKERRQ(ierr);
    ierr = PetscViewerStringSPrintf(viewer," PCType: %-7.7s",cstr);CHKERRQ(ierr);
    if (pc->ops->view) {ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);}
    if (pc->mat) {ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);}
    if (pc->pmat && pc->pmat != pc->mat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);}
  } else if (isbinary) {
    PetscInt    classid = PC_FILE_CLASSID;
    MPI_Comm    comm;
    PetscMPIInt rank;
    char        type[256];

    /* only rank 0 writes the header; see the matching reads in PCLoad() */
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
    if (rank == 0) {
      ierr = PetscViewerBinaryWrite(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
      ierr = PetscStrncpy(type,((PetscObject)pc)->type_name,256);CHKERRQ(ierr);
      ierr = PetscViewerBinaryWrite(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
    }
    if (pc->ops->view) {
      ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);
    }
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x,y,bottom,h;
    PetscInt  n;

    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawGetCurrentPoint(draw,&x,&y);CHKERRQ(ierr);
    if (pc->mat) {
      ierr = MatGetSize(pc->mat,&n,NULL);CHKERRQ(ierr);
      ierr = PetscSNPrintf(str,25,"PC: %s (%D)",((PetscObject)pc)->type_name,n);CHKERRQ(ierr);
    } else {
      ierr = PetscSNPrintf(str,25,"PC: %s",((PetscObject)pc)->type_name);CHKERRQ(ierr);
    }
    ierr = PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);CHKERRQ(ierr);
    bottom = y - h;
    ierr = PetscDrawPushCurrentPoint(draw,x,bottom);CHKERRQ(ierr);
    if (pc->ops->view) {
      ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);
    }
    ierr = PetscDrawPopCurrentPoint(draw);CHKERRQ(ierr);
#if defined(PETSC_HAVE_SAWS)
  } else if (issaws) {
    PetscMPIInt rank;

    ierr = PetscObjectName((PetscObject)pc);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRMPI(ierr);
    if (!((PetscObject)pc)->amsmem && rank == 0) {
      ierr = PetscObjectViewSAWs((PetscObject)pc,viewer);CHKERRQ(ierr);
    }
    if (pc->mat) {ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);}
    if (pc->pmat && pc->pmat != pc->mat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);}
#endif
  }
  PetscFunctionReturn(0);
}

/*@C
   PCRegister - Adds a method to the preconditioner package.

   Not collective

   Input Parameters:
+  name_solver - name of a new user-defined solver
-  routine_create - routine to create method context

   Notes:
   PCRegister() may be called multiple times to add several user-defined preconditioners.
1862 1863 Sample usage: 1864 .vb 1865 PCRegister("my_solver", MySolverCreate); 1866 .ve 1867 1868 Then, your solver can be chosen with the procedural interface via 1869 $ PCSetType(pc,"my_solver") 1870 or at runtime via the option 1871 $ -pc_type my_solver 1872 1873 Level: advanced 1874 1875 .seealso: PCRegisterAll() 1876 @*/ 1877 PetscErrorCode PCRegister(const char sname[],PetscErrorCode (*function)(PC)) 1878 { 1879 PetscErrorCode ierr; 1880 1881 PetscFunctionBegin; 1882 ierr = PCInitializePackage();CHKERRQ(ierr); 1883 ierr = PetscFunctionListAdd(&PCList,sname,function);CHKERRQ(ierr); 1884 PetscFunctionReturn(0); 1885 } 1886 1887 static PetscErrorCode MatMult_PC(Mat A,Vec X,Vec Y) 1888 { 1889 PC pc; 1890 PetscErrorCode ierr; 1891 1892 PetscFunctionBegin; 1893 ierr = MatShellGetContext(A,&pc);CHKERRQ(ierr); 1894 ierr = PCApply(pc,X,Y);CHKERRQ(ierr); 1895 PetscFunctionReturn(0); 1896 } 1897 1898 /*@ 1899 PCComputeOperator - Computes the explicit preconditioned operator. 1900 1901 Collective on PC 1902 1903 Input Parameters: 1904 + pc - the preconditioner object 1905 - mattype - the matrix type to be used for the operator 1906 1907 Output Parameter: 1908 . mat - the explicit preconditioned operator 1909 1910 Notes: 1911 This computation is done by applying the operators to columns of the identity matrix. 1912 This routine is costly in general, and is recommended for use only with relatively small systems. 
1913 Currently, this routine uses a dense matrix format when mattype == NULL 1914 1915 Level: advanced 1916 1917 .seealso: KSPComputeOperator(), MatType 1918 1919 @*/ 1920 PetscErrorCode PCComputeOperator(PC pc,MatType mattype,Mat *mat) 1921 { 1922 PetscErrorCode ierr; 1923 PetscInt N,M,m,n; 1924 Mat A,Apc; 1925 1926 PetscFunctionBegin; 1927 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1928 PetscValidPointer(mat,3); 1929 ierr = PCGetOperators(pc,&A,NULL);CHKERRQ(ierr); 1930 ierr = MatGetLocalSize(A,&m,&n);CHKERRQ(ierr); 1931 ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr); 1932 ierr = MatCreateShell(PetscObjectComm((PetscObject)pc),m,n,M,N,pc,&Apc);CHKERRQ(ierr); 1933 ierr = MatShellSetOperation(Apc,MATOP_MULT,(void (*)(void))MatMult_PC);CHKERRQ(ierr); 1934 ierr = MatComputeOperator(Apc,mattype,mat);CHKERRQ(ierr); 1935 ierr = MatDestroy(&Apc);CHKERRQ(ierr); 1936 PetscFunctionReturn(0); 1937 } 1938 1939 /*@ 1940 PCSetCoordinates - sets the coordinates of all the nodes on the local process 1941 1942 Collective on PC 1943 1944 Input Parameters: 1945 + pc - the solver context 1946 . dim - the dimension of the coordinates 1, 2, or 3 1947 . nloc - the blocked size of the coordinates array 1948 - coords - the coordinates array 1949 1950 Level: intermediate 1951 1952 Notes: 1953 coords is an array of the dim coordinates for the nodes on 1954 the local processor, of size dim*nloc. 1955 If there are 108 equation on a processor 1956 for a displacement finite element discretization of elasticity (so 1957 that there are nloc = 36 = 108/3 nodes) then the array must have 108 1958 double precision values (ie, 3 * 36). These x y z coordinates 1959 should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x, 1960 ... , N-1.z ]. 
1961 1962 .seealso: MatSetNearNullSpace() 1963 @*/ 1964 PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[]) 1965 { 1966 PetscErrorCode ierr; 1967 1968 PetscFunctionBegin; 1969 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1970 PetscValidLogicalCollectiveInt(pc,dim,2); 1971 ierr = PetscTryMethod(pc,"PCSetCoordinates_C",(PC,PetscInt,PetscInt,PetscReal*),(pc,dim,nloc,coords));CHKERRQ(ierr); 1972 PetscFunctionReturn(0); 1973 } 1974 1975 /*@ 1976 PCGetInterpolations - Gets interpolation matrices for all levels (except level 0) 1977 1978 Logically Collective on PC 1979 1980 Input Parameter: 1981 . pc - the precondition context 1982 1983 Output Parameters: 1984 + num_levels - the number of levels 1985 - interpolations - the interpolation matrices (size of num_levels-1) 1986 1987 Level: advanced 1988 1989 .keywords: MG, GAMG, BoomerAMG, multigrid, interpolation, level 1990 1991 .seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetInterpolation(), PCGetCoarseOperators() 1992 @*/ 1993 PetscErrorCode PCGetInterpolations(PC pc,PetscInt *num_levels,Mat *interpolations[]) 1994 { 1995 PetscErrorCode ierr; 1996 1997 PetscFunctionBegin; 1998 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1999 PetscValidPointer(num_levels,2); 2000 PetscValidPointer(interpolations,3); 2001 ierr = PetscUseMethod(pc,"PCGetInterpolations_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,interpolations));CHKERRQ(ierr); 2002 PetscFunctionReturn(0); 2003 } 2004 2005 /*@ 2006 PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level) 2007 2008 Logically Collective on PC 2009 2010 Input Parameter: 2011 . 
pc - the precondition context 2012 2013 Output Parameters: 2014 + num_levels - the number of levels 2015 - coarseOperators - the coarse operator matrices (size of num_levels-1) 2016 2017 Level: advanced 2018 2019 .keywords: MG, GAMG, BoomerAMG, get, multigrid, interpolation, level 2020 2021 .seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetRScale(), PCMGGetInterpolation(), PCGetInterpolations() 2022 @*/ 2023 PetscErrorCode PCGetCoarseOperators(PC pc,PetscInt *num_levels,Mat *coarseOperators[]) 2024 { 2025 PetscErrorCode ierr; 2026 2027 PetscFunctionBegin; 2028 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 2029 PetscValidPointer(num_levels,2); 2030 PetscValidPointer(coarseOperators,3); 2031 ierr = PetscUseMethod(pc,"PCGetCoarseOperators_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,coarseOperators));CHKERRQ(ierr); 2032 PetscFunctionReturn(0); 2033 } 2034