1 2 /* 3 The PC (preconditioner) interface routines, callable by users. 4 */ 5 #include <petsc/private/pcimpl.h> /*I "petscksp.h" I*/ 6 #include <petscdm.h> 7 8 /* Logging support */ 9 PetscClassId PC_CLASSID; 10 PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_MatApply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft; 11 PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks; 12 PetscInt PetscMGLevelId; 13 14 PetscErrorCode PCGetDefaultType_Private(PC pc,const char *type[]) 15 { 16 PetscErrorCode ierr; 17 PetscMPIInt size; 18 PetscBool hasop,flg1,flg2,set,flg3; 19 20 PetscFunctionBegin; 21 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr); 22 if (pc->pmat) { 23 ierr = MatHasOperation(pc->pmat,MATOP_GET_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr); 24 if (size == 1) { 25 ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1);CHKERRQ(ierr); 26 ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2);CHKERRQ(ierr); 27 ierr = MatIsSymmetricKnown(pc->pmat,&set,&flg3);CHKERRQ(ierr); 28 if (flg1 && (!flg2 || (set && flg3))) { 29 *type = PCICC; 30 } else if (flg2) { 31 *type = PCILU; 32 } else if (hasop) { /* likely is a parallel matrix run on one processor */ 33 *type = PCBJACOBI; 34 } else { 35 *type = PCNONE; 36 } 37 } else { 38 if (hasop) { 39 *type = PCBJACOBI; 40 } else { 41 *type = PCNONE; 42 } 43 } 44 } else { 45 if (size == 1) { 46 *type = PCILU; 47 } else { 48 *type = PCBJACOBI; 49 } 50 } 51 PetscFunctionReturn(0); 52 } 53 54 /*@ 55 PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats 56 57 Collective on PC 58 59 Input Parameter: 60 . 
pc - the preconditioner context 61 62 Level: developer 63 64 Notes: 65 This allows a PC to be reused for a different sized linear system but using the same options that have been previously set in the PC 66 67 .seealso: PCCreate(), PCSetUp() 68 @*/ 69 PetscErrorCode PCReset(PC pc) 70 { 71 PetscErrorCode ierr; 72 73 PetscFunctionBegin; 74 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 75 if (pc->ops->reset) { 76 ierr = (*pc->ops->reset)(pc);CHKERRQ(ierr); 77 } 78 ierr = VecDestroy(&pc->diagonalscaleright);CHKERRQ(ierr); 79 ierr = VecDestroy(&pc->diagonalscaleleft);CHKERRQ(ierr); 80 ierr = MatDestroy(&pc->pmat);CHKERRQ(ierr); 81 ierr = MatDestroy(&pc->mat);CHKERRQ(ierr); 82 83 pc->setupcalled = 0; 84 PetscFunctionReturn(0); 85 } 86 87 /*@ 88 PCDestroy - Destroys PC context that was created with PCCreate(). 89 90 Collective on PC 91 92 Input Parameter: 93 . pc - the preconditioner context 94 95 Level: developer 96 97 .seealso: PCCreate(), PCSetUp() 98 @*/ 99 PetscErrorCode PCDestroy(PC *pc) 100 { 101 PetscErrorCode ierr; 102 103 PetscFunctionBegin; 104 if (!*pc) PetscFunctionReturn(0); 105 PetscValidHeaderSpecific((*pc),PC_CLASSID,1); 106 if (--((PetscObject)(*pc))->refct > 0) {*pc = NULL; PetscFunctionReturn(0);} 107 108 ierr = PCReset(*pc);CHKERRQ(ierr); 109 110 /* if memory was published with SAWs then destroy it */ 111 ierr = PetscObjectSAWsViewOff((PetscObject)*pc);CHKERRQ(ierr); 112 if ((*pc)->ops->destroy) {ierr = (*(*pc)->ops->destroy)((*pc));CHKERRQ(ierr);} 113 ierr = DMDestroy(&(*pc)->dm);CHKERRQ(ierr); 114 ierr = PetscHeaderDestroy(pc);CHKERRQ(ierr); 115 PetscFunctionReturn(0); 116 } 117 118 /*@C 119 PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right 120 scaling as needed by certain time-stepping codes. 121 122 Logically Collective on PC 123 124 Input Parameter: 125 . pc - the preconditioner context 126 127 Output Parameter: 128 . 
flag - PETSC_TRUE if it applies the scaling 129 130 Level: developer 131 132 Notes: 133 If this returns PETSC_TRUE then the system solved via the Krylov method is 134 $ D M A D^{-1} y = D M b for left preconditioning or 135 $ D A M D^{-1} z = D b for right preconditioning 136 137 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCSetDiagonalScale() 138 @*/ 139 PetscErrorCode PCGetDiagonalScale(PC pc,PetscBool *flag) 140 { 141 PetscFunctionBegin; 142 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 143 PetscValidBoolPointer(flag,2); 144 *flag = pc->diagonalscale; 145 PetscFunctionReturn(0); 146 } 147 148 /*@ 149 PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right 150 scaling as needed by certain time-stepping codes. 151 152 Logically Collective on PC 153 154 Input Parameters: 155 + pc - the preconditioner context 156 - s - scaling vector 157 158 Level: intermediate 159 160 Notes: 161 The system solved via the Krylov method is 162 $ D M A D^{-1} y = D M b for left preconditioning or 163 $ D A M D^{-1} z = D b for right preconditioning 164 165 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 
166 167 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCGetDiagonalScale() 168 @*/ 169 PetscErrorCode PCSetDiagonalScale(PC pc,Vec s) 170 { 171 PetscErrorCode ierr; 172 173 PetscFunctionBegin; 174 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 175 PetscValidHeaderSpecific(s,VEC_CLASSID,2); 176 pc->diagonalscale = PETSC_TRUE; 177 178 ierr = PetscObjectReference((PetscObject)s);CHKERRQ(ierr); 179 ierr = VecDestroy(&pc->diagonalscaleleft);CHKERRQ(ierr); 180 181 pc->diagonalscaleleft = s; 182 183 ierr = VecDuplicate(s,&pc->diagonalscaleright);CHKERRQ(ierr); 184 ierr = VecCopy(s,pc->diagonalscaleright);CHKERRQ(ierr); 185 ierr = VecReciprocal(pc->diagonalscaleright);CHKERRQ(ierr); 186 PetscFunctionReturn(0); 187 } 188 189 /*@ 190 PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes. 191 192 Logically Collective on PC 193 194 Input Parameters: 195 + pc - the preconditioner context 196 . in - input vector 197 - out - scaled vector (maybe the same as in) 198 199 Level: intermediate 200 201 Notes: 202 The system solved via the Krylov method is 203 $ D M A D^{-1} y = D M b for left preconditioning or 204 $ D A M D^{-1} z = D b for right preconditioning 205 206 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 
207 208 If diagonal scaling is turned off and in is not out then in is copied to out 209 210 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleSet(), PCDiagonalScaleRight(), PCDiagonalScale() 211 @*/ 212 PetscErrorCode PCDiagonalScaleLeft(PC pc,Vec in,Vec out) 213 { 214 PetscErrorCode ierr; 215 216 PetscFunctionBegin; 217 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 218 PetscValidHeaderSpecific(in,VEC_CLASSID,2); 219 PetscValidHeaderSpecific(out,VEC_CLASSID,3); 220 if (pc->diagonalscale) { 221 ierr = VecPointwiseMult(out,pc->diagonalscaleleft,in);CHKERRQ(ierr); 222 } else if (in != out) { 223 ierr = VecCopy(in,out);CHKERRQ(ierr); 224 } 225 PetscFunctionReturn(0); 226 } 227 228 /*@ 229 PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes. 230 231 Logically Collective on PC 232 233 Input Parameters: 234 + pc - the preconditioner context 235 . in - input vector 236 - out - scaled vector (maybe the same as in) 237 238 Level: intermediate 239 240 Notes: 241 The system solved via the Krylov method is 242 $ D M A D^{-1} y = D M b for left preconditioning or 243 $ D A M D^{-1} z = D b for right preconditioning 244 245 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 
246 247 If diagonal scaling is turned off and in is not out then in is copied to out 248 249 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleSet(), PCDiagonalScale() 250 @*/ 251 PetscErrorCode PCDiagonalScaleRight(PC pc,Vec in,Vec out) 252 { 253 PetscErrorCode ierr; 254 255 PetscFunctionBegin; 256 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 257 PetscValidHeaderSpecific(in,VEC_CLASSID,2); 258 PetscValidHeaderSpecific(out,VEC_CLASSID,3); 259 if (pc->diagonalscale) { 260 ierr = VecPointwiseMult(out,pc->diagonalscaleright,in);CHKERRQ(ierr); 261 } else if (in != out) { 262 ierr = VecCopy(in,out);CHKERRQ(ierr); 263 } 264 PetscFunctionReturn(0); 265 } 266 267 /*@ 268 PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the 269 operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(), 270 TSSetIJacobian(), SNESSetJacobian(), KSPSetOperator() or PCSetOperator() not the Pmat. 271 272 Logically Collective on PC 273 274 Input Parameters: 275 + pc - the preconditioner context 276 - flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false) 277 278 Options Database Key: 279 . -pc_use_amat <true,false> 280 281 Notes: 282 For the common case in which the linear system matrix and the matrix used to construct the 283 preconditioner are identical, this routine is does nothing. 284 285 Level: intermediate 286 287 .seealso: PCGetUseAmat(), PCBJACOBI, PGMG, PCFIELDSPLIT, PCCOMPOSITE 288 @*/ 289 PetscErrorCode PCSetUseAmat(PC pc,PetscBool flg) 290 { 291 PetscFunctionBegin; 292 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 293 pc->useAmat = flg; 294 PetscFunctionReturn(0); 295 } 296 297 /*@ 298 PCSetErrorIfFailure - Causes PC to generate an error if a FPE, for example a zero pivot, is detected. 
299 300 Logically Collective on PC 301 302 Input Parameters: 303 + pc - iterative context obtained from PCCreate() 304 - flg - PETSC_TRUE indicates you want the error generated 305 306 Level: advanced 307 308 Notes: 309 Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner, you can call KSPGetConvergedReason() after a KSPSolve() 310 to determine if it has converged or failed. Or use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is 311 detected. 312 313 This is propagated into KSPs used by this PC, which then propagate it into PCs used by those KSPs 314 315 .seealso: PCGetInitialGuessNonzero(), PCSetInitialGuessKnoll(), PCGetInitialGuessKnoll() 316 @*/ 317 PetscErrorCode PCSetErrorIfFailure(PC pc,PetscBool flg) 318 { 319 PetscFunctionBegin; 320 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 321 PetscValidLogicalCollectiveBool(pc,flg,2); 322 pc->erroriffailure = flg; 323 PetscFunctionReturn(0); 324 } 325 326 /*@ 327 PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the 328 operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(), 329 TSSetIJacobian(), SNESSetJacobian(), KSPSetOperator() or PCSetOperator() not the Pmat. 330 331 Logically Collective on PC 332 333 Input Parameter: 334 . pc - the preconditioner context 335 336 Output Parameter: 337 . flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false) 338 339 Notes: 340 For the common case in which the linear system matrix and the matrix used to construct the 341 preconditioner are identical, this routine is does nothing. 
342 343 Level: intermediate 344 345 .seealso: PCSetUseAmat(), PCBJACOBI, PGMG, PCFIELDSPLIT, PCCOMPOSITE 346 @*/ 347 PetscErrorCode PCGetUseAmat(PC pc,PetscBool *flg) 348 { 349 PetscFunctionBegin; 350 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 351 *flg = pc->useAmat; 352 PetscFunctionReturn(0); 353 } 354 355 /*@ 356 PCCreate - Creates a preconditioner context. 357 358 Collective 359 360 Input Parameter: 361 . comm - MPI communicator 362 363 Output Parameter: 364 . pc - location to put the preconditioner context 365 366 Notes: 367 The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi with PCILU or PCICC 368 in parallel. For dense matrices it is always PCNONE. 369 370 Level: developer 371 372 .seealso: PCSetUp(), PCApply(), PCDestroy() 373 @*/ 374 PetscErrorCode PCCreate(MPI_Comm comm,PC *newpc) 375 { 376 PC pc; 377 PetscErrorCode ierr; 378 379 PetscFunctionBegin; 380 PetscValidPointer(newpc,1); 381 *newpc = NULL; 382 ierr = PCInitializePackage();CHKERRQ(ierr); 383 384 ierr = PetscHeaderCreate(pc,PC_CLASSID,"PC","Preconditioner","PC",comm,PCDestroy,PCView);CHKERRQ(ierr); 385 386 pc->mat = NULL; 387 pc->pmat = NULL; 388 pc->setupcalled = 0; 389 pc->setfromoptionscalled = 0; 390 pc->data = NULL; 391 pc->diagonalscale = PETSC_FALSE; 392 pc->diagonalscaleleft = NULL; 393 pc->diagonalscaleright = NULL; 394 395 pc->modifysubmatrices = NULL; 396 pc->modifysubmatricesP = NULL; 397 398 *newpc = pc; 399 PetscFunctionReturn(0); 400 401 } 402 403 /* -------------------------------------------------------------------------------*/ 404 405 /*@ 406 PCApply - Applies the preconditioner to a vector. 407 408 Collective on PC 409 410 Input Parameters: 411 + pc - the preconditioner context 412 - x - input vector 413 414 Output Parameter: 415 . 
y - output vector 416 417 Level: developer 418 419 .seealso: PCApplyTranspose(), PCApplyBAorAB() 420 @*/ 421 PetscErrorCode PCApply(PC pc,Vec x,Vec y) 422 { 423 PetscErrorCode ierr; 424 PetscInt m,n,mv,nv; 425 426 PetscFunctionBegin; 427 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 428 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 429 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 430 if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 431 if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);} 432 /* use pmat to check vector sizes since for KSPLQR the pmat may be of a different size than mat */ 433 ierr = MatGetLocalSize(pc->pmat,&m,&n);CHKERRQ(ierr); 434 ierr = VecGetLocalSize(x,&nv);CHKERRQ(ierr); 435 ierr = VecGetLocalSize(y,&mv);CHKERRQ(ierr); 436 if (mv != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local rows %D does not equal resulting vector number of rows %D",m,mv); 437 if (nv != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local columns %D does not equal resulting vector number of rows %D",n,nv); 438 ierr = VecSetErrorIfLocked(y,3);CHKERRQ(ierr); 439 440 ierr = PCSetUp(pc);CHKERRQ(ierr); 441 if (!pc->ops->apply) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply"); 442 ierr = VecLockReadPush(x);CHKERRQ(ierr); 443 ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 444 ierr = (*pc->ops->apply)(pc,x,y);CHKERRQ(ierr); 445 ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 446 if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);} 447 ierr = VecLockReadPop(x);CHKERRQ(ierr); 448 PetscFunctionReturn(0); 449 } 450 451 /*@ 452 PCMatApply - Applies the preconditioner to multiple vectors stored as a MATDENSE. Like PCApply(), Y and X must be different matrices. 
453 Collective on PC 454 455 Input Parameters: 456 + pc - the preconditioner context 457 - X - block of input vectors 458 459 Output Parameter: 460 . Y - block of output vectors 461 462 Level: developer 463 464 .seealso: PCApply(), KSPMatSolve() 465 @*/ 466 PetscErrorCode PCMatApply(PC pc,Mat X,Mat Y) 467 { 468 Mat A; 469 Vec cy, cx; 470 PetscInt m1, M1, m2, M2, n1, N1, n2, N2; 471 PetscBool match; 472 PetscErrorCode ierr; 473 474 PetscFunctionBegin; 475 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 476 PetscValidHeaderSpecific(Y, MAT_CLASSID, 2); 477 PetscValidHeaderSpecific(X, MAT_CLASSID, 3); 478 PetscCheckSameComm(pc, 1, Y, 2); 479 PetscCheckSameComm(pc, 1, X, 3); 480 if (Y == X) SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "Y and X must be different matrices"); 481 ierr = PCGetOperators(pc, NULL, &A);CHKERRQ(ierr); 482 ierr = MatGetLocalSize(A, &m1, NULL);CHKERRQ(ierr); 483 ierr = MatGetLocalSize(Y, &m2, &n2);CHKERRQ(ierr); 484 ierr = MatGetSize(A, &M1, NULL);CHKERRQ(ierr); 485 ierr = MatGetSize(X, &M2, &N2);CHKERRQ(ierr); 486 if (m1 != m2 || M1 != M2) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot use a block of input vectors with (m2,M2) = (%D,%D) for a preconditioner with (m1,M1) = (%D,%D)", m2, M2, m1, M1); 487 ierr = MatGetLocalSize(Y, &m1, &n1);CHKERRQ(ierr); 488 ierr = MatGetSize(Y, &M1, &N1);CHKERRQ(ierr); 489 if (m1 != m2 || M1 != M2 || n1 != n2 || N1 != N2) SETERRQ8(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible block of input vectors (m2,M2)x(n2,N2) = (%D,%D)x(%D,%D) and output vectors (m1,M1)x(n1,N1) = (%D,%D)x(%D,%D)", m2, M2, n2, N2, m1, M1, n1, N1); 490 ierr = PetscObjectBaseTypeCompareAny((PetscObject)Y, &match, MATSEQDENSE, MATMPIDENSE, "");CHKERRQ(ierr); 491 if (!match) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of output vectors not stored in a dense Mat"); 492 ierr = PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, "");CHKERRQ(ierr); 493 if (!match) 
SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of input vectors not stored in a dense Mat"); 494 ierr = PCSetUp(pc);CHKERRQ(ierr); 495 if (pc->ops->matapply) { 496 ierr = PetscLogEventBegin(PC_MatApply, pc, X, Y, 0);CHKERRQ(ierr); 497 ierr = (*pc->ops->matapply)(pc, X, Y);CHKERRQ(ierr); 498 ierr = PetscLogEventEnd(PC_MatApply, pc, X, Y, 0);CHKERRQ(ierr); 499 } else { 500 ierr = PetscInfo1(pc, "PC type %s applying column by column\n", ((PetscObject)pc)->type_name);CHKERRQ(ierr); 501 for (n2 = 0; n2 < N2; ++n2) { 502 ierr = MatDenseGetColumnVecRead(X, n2, &cx);CHKERRQ(ierr); 503 ierr = MatDenseGetColumnVecWrite(Y, n2, &cy);CHKERRQ(ierr); 504 ierr = PCApply(pc, cx, cy);CHKERRQ(ierr); 505 ierr = MatDenseRestoreColumnVecWrite(Y, n2, &cy);CHKERRQ(ierr); 506 ierr = MatDenseRestoreColumnVecRead(X, n2, &cx);CHKERRQ(ierr); 507 } 508 } 509 PetscFunctionReturn(0); 510 } 511 512 /*@ 513 PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector. 514 515 Collective on PC 516 517 Input Parameters: 518 + pc - the preconditioner context 519 - x - input vector 520 521 Output Parameter: 522 . y - output vector 523 524 Notes: 525 Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners. 
526 527 Level: developer 528 529 .seealso: PCApply(), PCApplySymmetricRight() 530 @*/ 531 PetscErrorCode PCApplySymmetricLeft(PC pc,Vec x,Vec y) 532 { 533 PetscErrorCode ierr; 534 535 PetscFunctionBegin; 536 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 537 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 538 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 539 if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 540 if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);} 541 ierr = PCSetUp(pc);CHKERRQ(ierr); 542 if (!pc->ops->applysymmetricleft) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply"); 543 ierr = VecLockReadPush(x);CHKERRQ(ierr); 544 ierr = PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr); 545 ierr = (*pc->ops->applysymmetricleft)(pc,x,y);CHKERRQ(ierr); 546 ierr = PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr); 547 ierr = VecLockReadPop(x);CHKERRQ(ierr); 548 if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);} 549 PetscFunctionReturn(0); 550 } 551 552 /*@ 553 PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector. 554 555 Collective on PC 556 557 Input Parameters: 558 + pc - the preconditioner context 559 - x - input vector 560 561 Output Parameter: 562 . y - output vector 563 564 Level: developer 565 566 Notes: 567 Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners. 
568 569 .seealso: PCApply(), PCApplySymmetricLeft() 570 @*/ 571 PetscErrorCode PCApplySymmetricRight(PC pc,Vec x,Vec y) 572 { 573 PetscErrorCode ierr; 574 575 PetscFunctionBegin; 576 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 577 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 578 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 579 if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 580 if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);} 581 ierr = PCSetUp(pc);CHKERRQ(ierr); 582 if (!pc->ops->applysymmetricright) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply"); 583 ierr = VecLockReadPush(x);CHKERRQ(ierr); 584 ierr = PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 585 ierr = (*pc->ops->applysymmetricright)(pc,x,y);CHKERRQ(ierr); 586 ierr = PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 587 ierr = VecLockReadPop(x);CHKERRQ(ierr); 588 if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);} 589 PetscFunctionReturn(0); 590 } 591 592 /*@ 593 PCApplyTranspose - Applies the transpose of preconditioner to a vector. 594 595 Collective on PC 596 597 Input Parameters: 598 + pc - the preconditioner context 599 - x - input vector 600 601 Output Parameter: 602 . y - output vector 603 604 Notes: 605 For complex numbers this applies the non-Hermitian transpose. 
606 607 Developer Notes: 608 We need to implement a PCApplyHermitianTranspose() 609 610 Level: developer 611 612 .seealso: PCApply(), PCApplyBAorAB(), PCApplyBAorABTranspose(), PCApplyTransposeExists() 613 @*/ 614 PetscErrorCode PCApplyTranspose(PC pc,Vec x,Vec y) 615 { 616 PetscErrorCode ierr; 617 618 PetscFunctionBegin; 619 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 620 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 621 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 622 if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 623 if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);} 624 ierr = PCSetUp(pc);CHKERRQ(ierr); 625 if (!pc->ops->applytranspose) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply transpose"); 626 ierr = VecLockReadPush(x);CHKERRQ(ierr); 627 ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 628 ierr = (*pc->ops->applytranspose)(pc,x,y);CHKERRQ(ierr); 629 ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 630 ierr = VecLockReadPop(x);CHKERRQ(ierr); 631 if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);} 632 PetscFunctionReturn(0); 633 } 634 635 /*@ 636 PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation 637 638 Collective on PC 639 640 Input Parameters: 641 . pc - the preconditioner context 642 643 Output Parameter: 644 . flg - PETSC_TRUE if a transpose operation is defined 645 646 Level: developer 647 648 .seealso: PCApplyTranspose() 649 @*/ 650 PetscErrorCode PCApplyTransposeExists(PC pc,PetscBool *flg) 651 { 652 PetscFunctionBegin; 653 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 654 PetscValidBoolPointer(flg,2); 655 if (pc->ops->applytranspose) *flg = PETSC_TRUE; 656 else *flg = PETSC_FALSE; 657 PetscFunctionReturn(0); 658 } 659 660 /*@ 661 PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x. 
662 663 Collective on PC 664 665 Input Parameters: 666 + pc - the preconditioner context 667 . side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC 668 . x - input vector 669 - work - work vector 670 671 Output Parameter: 672 . y - output vector 673 674 Level: developer 675 676 Notes: 677 If the PC has had PCSetDiagonalScale() set then D M A D^{-1} for left preconditioning or D A M D^{-1} is actually applied. Note that the 678 specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling. 679 680 .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorABTranspose() 681 @*/ 682 PetscErrorCode PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work) 683 { 684 PetscErrorCode ierr; 685 686 PetscFunctionBegin; 687 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 688 PetscValidLogicalCollectiveEnum(pc,side,2); 689 PetscValidHeaderSpecific(x,VEC_CLASSID,3); 690 PetscValidHeaderSpecific(y,VEC_CLASSID,4); 691 PetscValidHeaderSpecific(work,VEC_CLASSID,5); 692 PetscCheckSameComm(pc,1,x,3); 693 PetscCheckSameComm(pc,1,y,4); 694 PetscCheckSameComm(pc,1,work,5); 695 if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 696 if (side != PC_LEFT && side != PC_SYMMETRIC && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric"); 697 if (pc->diagonalscale && side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application"); 698 if (pc->erroriffailure) {ierr = VecValidValues(x,3,PETSC_TRUE);CHKERRQ(ierr);} 699 700 ierr = PCSetUp(pc);CHKERRQ(ierr); 701 if (pc->diagonalscale) { 702 if (pc->ops->applyBA) { 703 Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */ 704 ierr = VecDuplicate(x,&work2);CHKERRQ(ierr); 705 ierr = 
PCDiagonalScaleRight(pc,x,work2);CHKERRQ(ierr); 706 ierr = (*pc->ops->applyBA)(pc,side,work2,y,work);CHKERRQ(ierr); 707 ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr); 708 ierr = VecDestroy(&work2);CHKERRQ(ierr); 709 } else if (side == PC_RIGHT) { 710 ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr); 711 ierr = PCApply(pc,y,work);CHKERRQ(ierr); 712 ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr); 713 ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr); 714 } else if (side == PC_LEFT) { 715 ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr); 716 ierr = MatMult(pc->mat,y,work);CHKERRQ(ierr); 717 ierr = PCApply(pc,work,y);CHKERRQ(ierr); 718 ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr); 719 } else if (side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner"); 720 } else { 721 if (pc->ops->applyBA) { 722 ierr = (*pc->ops->applyBA)(pc,side,x,y,work);CHKERRQ(ierr); 723 } else if (side == PC_RIGHT) { 724 ierr = PCApply(pc,x,work);CHKERRQ(ierr); 725 ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr); 726 } else if (side == PC_LEFT) { 727 ierr = MatMult(pc->mat,x,work);CHKERRQ(ierr); 728 ierr = PCApply(pc,work,y);CHKERRQ(ierr); 729 } else if (side == PC_SYMMETRIC) { 730 /* There's an extra copy here; maybe should provide 2 work vectors instead? */ 731 ierr = PCApplySymmetricRight(pc,x,work);CHKERRQ(ierr); 732 ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr); 733 ierr = VecCopy(y,work);CHKERRQ(ierr); 734 ierr = PCApplySymmetricLeft(pc,work,y);CHKERRQ(ierr); 735 } 736 } 737 if (pc->erroriffailure) {ierr = VecValidValues(y,4,PETSC_FALSE);CHKERRQ(ierr);} 738 PetscFunctionReturn(0); 739 } 740 741 /*@ 742 PCApplyBAorABTranspose - Applies the transpose of the preconditioner 743 and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning, 744 NOT tr(B*A) = tr(A)*tr(B). 745 746 Collective on PC 747 748 Input Parameters: 749 + pc - the preconditioner context 750 . 
side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC 751 . x - input vector 752 - work - work vector 753 754 Output Parameter: 755 . y - output vector 756 757 758 Notes: 759 this routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner 760 defined by B'. This is why this has the funny form that it computes tr(B) * tr(A) 761 762 Level: developer 763 764 .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorAB() 765 @*/ 766 PetscErrorCode PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work) 767 { 768 PetscErrorCode ierr; 769 770 PetscFunctionBegin; 771 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 772 PetscValidHeaderSpecific(x,VEC_CLASSID,3); 773 PetscValidHeaderSpecific(y,VEC_CLASSID,4); 774 PetscValidHeaderSpecific(work,VEC_CLASSID,5); 775 if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 776 if (pc->erroriffailure) {ierr = VecValidValues(x,3,PETSC_TRUE);CHKERRQ(ierr);} 777 if (pc->ops->applyBAtranspose) { 778 ierr = (*pc->ops->applyBAtranspose)(pc,side,x,y,work);CHKERRQ(ierr); 779 if (pc->erroriffailure) {ierr = VecValidValues(y,4,PETSC_FALSE);CHKERRQ(ierr);} 780 PetscFunctionReturn(0); 781 } 782 if (side != PC_LEFT && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left"); 783 784 ierr = PCSetUp(pc);CHKERRQ(ierr); 785 if (side == PC_RIGHT) { 786 ierr = PCApplyTranspose(pc,x,work);CHKERRQ(ierr); 787 ierr = MatMultTranspose(pc->mat,work,y);CHKERRQ(ierr); 788 } else if (side == PC_LEFT) { 789 ierr = MatMultTranspose(pc->mat,x,work);CHKERRQ(ierr); 790 ierr = PCApplyTranspose(pc,work,y);CHKERRQ(ierr); 791 } 792 /* add support for PC_SYMMETRIC */ 793 if (pc->erroriffailure) {ierr = VecValidValues(y,4,PETSC_FALSE);CHKERRQ(ierr);} 794 PetscFunctionReturn(0); 795 } 796 797 /* -------------------------------------------------------------------------------*/ 
798 799 /*@ 800 PCApplyRichardsonExists - Determines whether a particular preconditioner has a 801 built-in fast application of Richardson's method. 802 803 Not Collective 804 805 Input Parameter: 806 . pc - the preconditioner 807 808 Output Parameter: 809 . exists - PETSC_TRUE or PETSC_FALSE 810 811 Level: developer 812 813 .seealso: PCApplyRichardson() 814 @*/ 815 PetscErrorCode PCApplyRichardsonExists(PC pc,PetscBool *exists) 816 { 817 PetscFunctionBegin; 818 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 819 PetscValidPointer(exists,2); 820 if (pc->ops->applyrichardson) *exists = PETSC_TRUE; 821 else *exists = PETSC_FALSE; 822 PetscFunctionReturn(0); 823 } 824 825 /*@ 826 PCApplyRichardson - Applies several steps of Richardson iteration with 827 the particular preconditioner. This routine is usually used by the 828 Krylov solvers and not the application code directly. 829 830 Collective on PC 831 832 Input Parameters: 833 + pc - the preconditioner context 834 . b - the right hand side 835 . w - one work vector 836 . rtol - relative decrease in residual norm convergence criteria 837 . abstol - absolute residual norm convergence criteria 838 . dtol - divergence residual norm increase criteria 839 . its - the number of iterations to apply. 840 - guesszero - if the input x contains nonzero initial guess 841 842 Output Parameter: 843 + outits - number of iterations actually used (for SOR this always equals its) 844 . reason - the reason the apply terminated 845 - y - the solution (also contains initial guess if guesszero is PETSC_FALSE 846 847 Notes: 848 Most preconditioners do not support this function. Use the command 849 PCApplyRichardsonExists() to determine if one does. 
850 851 Except for the multigrid PC this routine ignores the convergence tolerances 852 and always runs for the number of iterations 853 854 Level: developer 855 856 .seealso: PCApplyRichardsonExists() 857 @*/ 858 PetscErrorCode PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason) 859 { 860 PetscErrorCode ierr; 861 862 PetscFunctionBegin; 863 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 864 PetscValidHeaderSpecific(b,VEC_CLASSID,2); 865 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 866 PetscValidHeaderSpecific(w,VEC_CLASSID,4); 867 if (b == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"b and y must be different vectors"); 868 ierr = PCSetUp(pc);CHKERRQ(ierr); 869 if (!pc->ops->applyrichardson) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply richardson"); 870 ierr = (*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason);CHKERRQ(ierr); 871 PetscFunctionReturn(0); 872 } 873 874 /*@ 875 PCGetFailedReason - Gets the reason a PCSetUp() failed or 0 if it did not fail 876 877 Logically Collective on PC 878 879 Input Parameter: 880 . pc - the preconditioner context 881 882 Output Parameter: 883 . reason - the reason it failed, currently only -1 884 885 Level: advanced 886 887 .seealso: PCCreate(), PCApply(), PCDestroy() 888 @*/ 889 PetscErrorCode PCGetFailedReason(PC pc,PCFailedReason *reason) 890 { 891 PetscFunctionBegin; 892 if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled; 893 else *reason = pc->failedreason; 894 PetscFunctionReturn(0); 895 } 896 897 898 /* 899 a setupcall of 0 indicates never setup, 900 1 indicates has been previously setup 901 -1 indicates a PCSetUp() was attempted and failed 902 */ 903 /*@ 904 PCSetUp - Prepares for the use of a preconditioner. 905 906 Collective on PC 907 908 Input Parameter: 909 . 
  pc - the preconditioner context

   Level: developer

.seealso: PCCreate(), PCApply(), PCDestroy()
@*/
PetscErrorCode PCSetUp(PC pc)
{
  PetscErrorCode   ierr;
  const char       *def;
  PetscObjectState matstate, matnonzerostate;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (!pc->mat) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first");

  if (pc->setupcalled && pc->reusepreconditioner) {
    ierr = PetscInfo(pc,"Leaving PC with identical preconditioner since reuse preconditioner is set\n");CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* compare the cached state of pmat with its current state to decide whether the
     preconditioner must be rebuilt, and if so whether the nonzero pattern changed */
  ierr = PetscObjectStateGet((PetscObject)pc->pmat,&matstate);CHKERRQ(ierr);
  ierr = MatGetNonzeroState(pc->pmat,&matnonzerostate);CHKERRQ(ierr);
  if (!pc->setupcalled) {
    ierr     = PetscInfo(pc,"Setting up PC for first time\n");CHKERRQ(ierr);
    pc->flag = DIFFERENT_NONZERO_PATTERN;
  } else if (matstate == pc->matstate) {
    ierr = PetscInfo(pc,"Leaving PC with identical preconditioner since operator is unchanged\n");CHKERRQ(ierr);
    PetscFunctionReturn(0);
  } else {
    if (matnonzerostate > pc->matnonzerostate) {
      ierr = PetscInfo(pc,"Setting up PC with different nonzero pattern\n");CHKERRQ(ierr);
      pc->flag = DIFFERENT_NONZERO_PATTERN;
    } else {
      ierr = PetscInfo(pc,"Setting up PC with same nonzero pattern\n");CHKERRQ(ierr);
      pc->flag = SAME_NONZERO_PATTERN;
    }
  }
  pc->matstate        = matstate;
  pc->matnonzerostate = matnonzerostate;

  /* no type was ever set: pick a sensible default based on the matrix and communicator size */
  if (!((PetscObject)pc)->type_name) {
    ierr = PCGetDefaultType_Private(pc,&def);CHKERRQ(ierr);
    ierr = PCSetType(pc,def);CHKERRQ(ierr);
  }

  ierr = MatSetErrorIfFailure(pc->pmat,pc->erroriffailure);CHKERRQ(ierr);
  ierr = MatSetErrorIfFailure(pc->mat,pc->erroriffailure);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PC_SetUp,pc,0,0,0);CHKERRQ(ierr);
  if (pc->ops->setup) {
    ierr = (*pc->ops->setup)(pc);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(PC_SetUp,pc,0,0,0);CHKERRQ(ierr);
  if (!pc->setupcalled) pc->setupcalled = 1;
  PetscFunctionReturn(0);
}

/*@
   PCSetUpOnBlocks - Sets up the preconditioner for each block in
   the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
   methods.

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Level: developer

.seealso: PCCreate(), PCApply(), PCDestroy(), PCSetUp()
@*/
PetscErrorCode PCSetUpOnBlocks(PC pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* no-op for PC types that have no block-level setup */
  if (!pc->ops->setuponblocks) PetscFunctionReturn(0);
  ierr = PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr);
  ierr = (*pc->ops->setuponblocks)(pc);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCSetModifySubMatrices - Sets a user-defined routine for modifying the
   submatrices that arise within certain subdomain-based preconditioners.
   The basic submatrices are extracted from the preconditioner matrix as
   usual; the user can then alter these (for example, to set different boundary
   conditions for each submatrix) before they are used for the local solves.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  func - routine for modifying the submatrices
-  ctx - optional user-defined context (may be null)

   Calling sequence of func:
$     func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx);

+  row - an array of index sets that contain the global row numbers
         that comprise each local submatrix
.  col - an array of index sets that contain the global column numbers
         that comprise each local submatrix
.
  submat - array of local submatrices
-  ctx - optional user-defined context for private data for the
         user-defined func routine (may be null)

   Notes:
   PCSetModifySubMatrices() MUST be called before KSPSetUp() and
   KSPSolve().

   A routine set by PCSetModifySubMatrices() is currently called within
   the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM)
   preconditioners.  All other preconditioners ignore this routine.

   Level: advanced

.seealso: PCModifySubMatrices()
@*/
PetscErrorCode PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* stored on the PC; invoked later by PCModifySubMatrices() from supporting PC types */
  pc->modifysubmatrices  = func;
  pc->modifysubmatricesP = ctx;
  PetscFunctionReturn(0);
}

/*@C
   PCModifySubMatrices - Calls an optional user-defined routine within
   certain preconditioners if one has been set with PCSetModifySubMatrices().

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  nsub - the number of local submatrices
.  row - an array of index sets that contain the global row numbers
         that comprise each local submatrix
.  col - an array of index sets that contain the global column numbers
         that comprise each local submatrix
.  submat - array of local submatrices
-  ctx - optional user-defined context for private data for the
         user-defined routine (may be null)

   Output Parameter:
.  submat - array of local submatrices (the entries of which may
            have been modified)

   Notes:
   The user should NOT generally call this routine, as it will
   automatically be called within certain preconditioners (currently
   block Jacobi, additive Schwarz) if set.

   The basic submatrices are extracted from the preconditioner matrix
   as usual; the user can then alter these (for example, to set different
   boundary conditions for each submatrix) before they are used for the
   local solves.

   Level: developer

.seealso: PCSetModifySubMatrices()
@*/
PetscErrorCode PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* silently succeed when the user never registered a modification routine */
  if (!pc->modifysubmatrices) PetscFunctionReturn(0);
  ierr = PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr);
  ierr = (*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCSetOperators - Sets the matrix associated with the linear system and
   a (possibly) different one associated with the preconditioner.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  Amat - the matrix that defines the linear system
-  Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat.

   Notes:
   Passing a NULL for Amat or Pmat removes the matrix that is currently used.

   If you wish to replace either Amat or Pmat but leave the other one untouched then
   first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
   on it and then pass it back in in your call to KSPSetOperators().

   More Notes about Repeated Solution of Linear Systems:
   PETSc does NOT reset the matrix entries of either Amat or Pmat
   to zero after a linear solve; the user is completely responsible for
   matrix assembly.  See the routine MatZeroEntries() if desiring to
   zero all elements of a matrix.

   Level: intermediate

.seealso: PCGetOperators(), MatZeroEntries()
@*/
PetscErrorCode PCSetOperators(PC pc,Mat Amat,Mat Pmat)
{
  PetscErrorCode ierr;
  PetscInt       m1,n1,m2,n2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (Amat) PetscValidHeaderSpecific(Amat,MAT_CLASSID,2);
  if (Pmat) PetscValidHeaderSpecific(Pmat,MAT_CLASSID,3);
  if (Amat) PetscCheckSameComm(pc,1,Amat,2);
  if (Pmat) PetscCheckSameComm(pc,1,Pmat,3);
  if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) {
    /* once the PC has been set up the local sizes of the operators may not change */
    ierr = MatGetLocalSize(Amat,&m1,&n1);CHKERRQ(ierr);
    ierr = MatGetLocalSize(pc->mat,&m2,&n2);CHKERRQ(ierr);
    if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
    ierr = MatGetLocalSize(Pmat,&m1,&n1);CHKERRQ(ierr);
    ierr = MatGetLocalSize(pc->pmat,&m2,&n2);CHKERRQ(ierr);
    if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
  }

  if (Pmat != pc->pmat) {
    /* changing the operator that defines the preconditioner thus need to clear current states so new preconditioner is built */
    pc->matnonzerostate = -1;
    pc->matstate        = -1;
  }

  /* reference first in case the matrices are the same */
  if (Amat) {ierr = PetscObjectReference((PetscObject)Amat);CHKERRQ(ierr);}
  ierr = MatDestroy(&pc->mat);CHKERRQ(ierr);
  if (Pmat) {ierr = PetscObjectReference((PetscObject)Pmat);CHKERRQ(ierr);}
  ierr = MatDestroy(&pc->pmat);CHKERRQ(ierr);
  pc->mat  = Amat;
  pc->pmat = Pmat;
  PetscFunctionReturn(0);
}

/*@
   PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner has changed.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner

   Level: intermediate

.seealso: PCGetOperators(), MatZeroEntries(), PCGetReusePreconditioner(), KSPSetReusePreconditioner()
@*/
PetscErrorCode PCSetReusePreconditioner(PC pc,PetscBool flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,flag,2);
  /* consulted at the top of PCSetUp(): when true and already set up, setup is skipped */
  pc->reusepreconditioner = flag;
  PetscFunctionReturn(0);
}

/*@
   PCGetReusePreconditioner - Determines if the PC reuses the current preconditioner even if the operator in the preconditioner has changed.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner

   Level: intermediate

.seealso: PCGetOperators(), MatZeroEntries(), PCSetReusePreconditioner()
@*/
PetscErrorCode PCGetReusePreconditioner(PC pc,PetscBool *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(flag,2);
  *flag = pc->reusepreconditioner;
  PetscFunctionReturn(0);
}

/*@
   PCGetOperators - Gets the matrix associated with the linear system and
   possibly a different one associated with the preconditioner.

   Not collective, though parallel Mats are returned if the PC is parallel

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  Amat - the matrix defining the linear system
-  Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat.

   Level: intermediate

   Notes:
   Does not increase the reference count of the matrices, so you should not destroy them

   Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators
   are created in the PC and returned to the user. In this case, if both operators
   mat and pmat are requested, two DIFFERENT operators will be returned. If
   only one is requested both operators in the PC will be the same (i.e. as
   if one had called KSP/PCSetOperators() with the same argument for both Mats).
   The user must set the sizes of the returned matrices and their type etc just
   as if the user created them with MatCreate(). For example,

$           KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to
$           set size, type, etc of Amat

$           MatCreate(comm,&mat);
$           KSP/PCSetOperators(ksp/pc,Amat,Amat);
$           PetscObjectDereference((PetscObject)mat);
$           set size, type, etc of Amat

     and

$           KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to
$           set size, type, etc of Amat and Pmat

$           MatCreate(comm,&Amat);
$           MatCreate(comm,&Pmat);
$           KSP/PCSetOperators(ksp/pc,Amat,Pmat);
$           PetscObjectDereference((PetscObject)Amat);
$           PetscObjectDereference((PetscObject)Pmat);
$           set size, type, etc of Amat and Pmat

   The rationale for this support is so that when creating a TS, SNES, or KSP the hierarchy
   of underlying objects (i.e. SNES, KSP, PC, Mat) and their lifespans can be completely
   managed by the topmost level object (i.e. the TS, SNES, or KSP). Another way to look
   at this is when you create a SNES you do not NEED to create a KSP and attach it to
   the SNES object (the SNES object manages it for you). Similarly when you create a KSP
   you do not need to attach a PC to it (the KSP object manages the PC object for you).
   Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when
   it can be created for you?

.seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperatorsSet()
@*/
PetscErrorCode PCGetOperators(PC pc,Mat *Amat,Mat *Pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (Amat) {
    if (!pc->mat) {
      if (pc->pmat && !Pmat) {  /* Pmat has been set, but user did not request it, so use for Amat */
        pc->mat = pc->pmat;
        ierr    = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr);
      } else {                  /* both Amat and Pmat are empty */
        ierr = MatCreate(PetscObjectComm((PetscObject)pc),&pc->mat);CHKERRQ(ierr);
        if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */
          pc->pmat = pc->mat;
          ierr     = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr);
        }
      }
    }
    *Amat = pc->mat;
  }
  if (Pmat) {
    if (!pc->pmat) {
      if (pc->mat && !Amat) {  /* Amat has been set but was not requested, so use for pmat */
        pc->pmat = pc->mat;
        ierr     = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr);
      } else {
        ierr = MatCreate(PetscObjectComm((PetscObject)pc),&pc->pmat);CHKERRQ(ierr);
        if (!Amat) { /* user did NOT request Amat, so make same as Pmat */
          pc->mat = pc->pmat;
          ierr    = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr);
        }
      }
    }
    *Pmat = pc->pmat;
  }
  PetscFunctionReturn(0);
}

/*@C
   PCGetOperatorsSet - Determines if the matrix associated with the linear system and
   possibly a different one associated with the preconditioner have been set in the PC.

   Not collective, though the results on all processes should be the same

   Input Parameter:
.
  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix associated with the linear system was set
-  pmat - matrix associated with the preconditioner was set, usually the same

   Level: intermediate

.seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperators()
@*/
PetscErrorCode PCGetOperatorsSet(PC pc,PetscBool *mat,PetscBool *pmat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat) *mat = (pc->mat) ? PETSC_TRUE : PETSC_FALSE;
  if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}

/*@
   PCFactorGetMatrix - Gets the factored matrix from the
   preconditioner context.  This routine is valid only for the LU,
   incomplete LU, Cholesky, and incomplete Cholesky methods.

   Not Collective on PC though Mat is parallel if PC is parallel

   Input Parameter:
.  pc - the preconditioner context

   Output parameters:
.  mat - the factored matrix

   Level: advanced

   Notes:
    Does not increase the reference count for the matrix so DO NOT destroy it

@*/
PetscErrorCode PCFactorGetMatrix(PC pc,Mat *mat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(mat,2);
  if (pc->ops->getfactoredmatrix) {
    ierr = (*pc->ops->getfactoredmatrix)(pc,mat);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC type does not support getting factor matrix");
  PetscFunctionReturn(0);
}

/*@C
   PCSetOptionsPrefix - Sets the prefix used for searching for all
   PC options in the database.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  prefix - the prefix string to prepend to all PC option requests

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the
   hyphen.

   Level: advanced

.seealso: PCAppendOptionsPrefix(), PCGetOptionsPrefix()
@*/
PetscErrorCode PCSetOptionsPrefix(PC pc,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscObjectSetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCAppendOptionsPrefix - Appends to the prefix used for searching for all
   PC options in the database.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  prefix - the prefix string to append to all PC option requests

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the
   hyphen.

   Level: advanced

.seealso: PCSetOptionsPrefix(), PCGetOptionsPrefix()
@*/
PetscErrorCode PCAppendOptionsPrefix(PC pc,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCGetOptionsPrefix - Gets the prefix used for searching for all
   PC options in the database.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.
  prefix - pointer to the prefix string used, is returned

   Notes:
   On the Fortran side, the user should pass in a string 'prefix' of
   sufficient length to hold the prefix.

   Level: advanced

.seealso: PCSetOptionsPrefix(), PCAppendOptionsPrefix()
@*/
PetscErrorCode PCGetOptionsPrefix(PC pc,const char *prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(prefix,2);
  ierr = PetscObjectGetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   Indicates the right hand side will be changed by KSPSolve(), this occurs for a few
   preconditioners including BDDC and Eisenstat that transform the equations before applying
   the Krylov methods
*/
PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC pc,PetscBool *change)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(change,2);
  /* defaults to PETSC_FALSE; a PC type may override via its PCPreSolveChangeRHS_C method */
  *change = PETSC_FALSE;
  ierr = PetscTryMethod(pc,"PCPreSolveChangeRHS_C",(PC,PetscBool*),(pc,change));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCPreSolve - Optional pre-solve phase, intended for any
   preconditioner-specific actions that must be performed before
   the iterative solve itself.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  ksp - the Krylov subspace context

   Level: developer

   Sample of Usage:
.vb
    PCPreSolve(pc,ksp);
    KSPSolve(ksp,b,x);
    PCPostSolve(pc,ksp);
.ve

   Notes:
   The pre-solve phase is distinct from the PCSetUp() phase.

   KSPSolve() calls this directly, so is rarely called by the user.

.seealso: PCPostSolve()
@*/
PetscErrorCode PCPreSolve(PC pc,KSP ksp)
{
  PetscErrorCode ierr;
  Vec            x,rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
  /* presolvedone counts nesting of PCPreSolve()/PCPostSolve() pairs; more than
     two levels of embedding is not supported */
  pc->presolvedone++;
  if (pc->presolvedone > 2) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot embed PCPreSolve() more than twice");
  ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr);
  ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr);

  if (pc->ops->presolve) {
    ierr = (*pc->ops->presolve)(pc,ksp,rhs,x);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
   PCPostSolve - Optional post-solve phase, intended for any
   preconditioner-specific actions that must be performed after
   the iterative solve itself.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  ksp - the Krylov subspace context

   Sample of Usage:
.vb
    PCPreSolve(pc,ksp);
    KSPSolve(ksp,b,x);
    PCPostSolve(pc,ksp);
.ve

   Note:
   KSPSolve() calls this routine directly, so it is rarely called by the user.

   Level: developer

.seealso: PCPreSolve(), KSPSolve()
@*/
PetscErrorCode PCPostSolve(PC pc,KSP ksp)
{
  PetscErrorCode ierr;
  Vec            x,rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
  pc->presolvedone--;
  ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr);
  ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr);
  if (pc->ops->postsolve) {
    ierr = (*pc->ops->postsolve)(pc,ksp,rhs,x);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@C
  PCLoad - Loads a PC that has been stored in binary with PCView().

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded PC, this needs to have been created with PCCreate() or
           some related function before a call to PCLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen()

   Level: intermediate

  Notes:
   The type is determined by the data in the file, any type set into the PC before this call is ignored.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since PCLoad() and PCView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), PCView(), MatLoad(), VecLoad()
@*/
PetscErrorCode PCLoad(PC newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,PC_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* the file must contain the PC classid followed by the type name written by PCView() */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,NULL,PETSC_INT);CHKERRQ(ierr);
  if (classid != PC_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not PC next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,NULL,PETSC_CHAR);CHKERRQ(ierr);
  ierr = PCSetType(newdm, type);CHKERRQ(ierr);
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#include <petscdraw.h>
#if defined(PETSC_HAVE_SAWS)
#include <petscviewersaws.h>
#endif

/*@C
  PCViewFromOptions -
View from Options

  Collective on PC

  Input Parameters:
+ A - the PC context
. obj - Optional object
- name - command line option

  Level: intermediate
.seealso: PC, PCView, PetscObjectViewFromOptions(), PCCreate()
@*/
PetscErrorCode PCViewFromOptions(PC A,PetscObject obj,const char name[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,PC_CLASSID,1);
  ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCView - Prints the PC data structure.

   Collective on PC

   Input Parameters:
+  PC - the PC context
-  viewer - optional visualization context

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
         output where only the first processor opens
         the file.  All other processors send their
         data to the first processor to print.

   The user can open an alternative visualization contexts with
   PetscViewerASCIIOpen() (output to a specified file).

   Level: developer

.seealso: KSPView(), PetscViewerASCIIOpen()
@*/
PetscErrorCode PCView(PC pc,PetscViewer viewer)
{
  PCType         cstr;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring,isbinary,isdraw;
#if defined(PETSC_HAVE_SAWS)
  PetscBool      issaws;
#endif

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* default to stdout on the PC's communicator when no viewer is supplied */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  PetscCheckSameComm(pc,1,viewer,2);

  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
#if defined(PETSC_HAVE_SAWS)
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSAWS,&issaws);CHKERRQ(ierr);
#endif

  if (iascii) {
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = PetscViewerASCIIPrintf(viewer,"  PC has not been set up so information may be incomplete\n");CHKERRQ(ierr);
    }
    if (pc->ops->view) {
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
    }
    if (pc->mat) {
      ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);CHKERRQ(ierr);
      if (pc->pmat == pc->mat) {
        ierr = PetscViewerASCIIPrintf(viewer,"  linear system matrix = precond matrix:\n");CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);
        ierr =
PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      } else {
        if (pc->pmat) {
          ierr = PetscViewerASCIIPrintf(viewer,"  linear system matrix followed by preconditioner matrix:\n");CHKERRQ(ierr);
        } else {
          ierr = PetscViewerASCIIPrintf(viewer,"  linear system matrix:\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);
        if (pc->pmat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);}
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PCGetType(pc,&cstr);CHKERRQ(ierr);
    ierr = PetscViewerStringSPrintf(viewer," PCType: %-7.7s",cstr);CHKERRQ(ierr);
    if (pc->ops->view) {ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);}
    if (pc->mat) {ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);}
    if (pc->pmat && pc->pmat != pc->mat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);}
  } else if (isbinary) {
    PetscInt    classid = PC_FILE_CLASSID;
    MPI_Comm    comm;
    PetscMPIInt rank;
    char        type[256];

    /* only rank 0 writes the header (classid + type name) that PCLoad() reads back */
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    if (!rank) {
      ierr = PetscViewerBinaryWrite(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
      ierr = PetscStrncpy(type,((PetscObject)pc)->type_name,256);CHKERRQ(ierr);
      ierr = PetscViewerBinaryWrite(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
    }
    if (pc->ops->view) {
      ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);
    }
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x,y,bottom,h;
    PetscInt  n;

    /* draw a labeled box for this PC, then let the type-specific viewer draw below it */
    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawGetCurrentPoint(draw,&x,&y);CHKERRQ(ierr);
    if (pc->mat) {
      ierr = MatGetSize(pc->mat,&n,NULL);CHKERRQ(ierr);
      ierr = PetscSNPrintf(str,25,"PC: %s (%D)",((PetscObject)pc)->type_name,n);CHKERRQ(ierr);
    } else {
      ierr = PetscSNPrintf(str,25,"PC: %s",((PetscObject)pc)->type_name);CHKERRQ(ierr);
    }
    ierr = PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);CHKERRQ(ierr);
    bottom = y - h;
    ierr = PetscDrawPushCurrentPoint(draw,x,bottom);CHKERRQ(ierr);
    if (pc->ops->view) {
      ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);
    }
    ierr = PetscDrawPopCurrentPoint(draw);CHKERRQ(ierr);
#if defined(PETSC_HAVE_SAWS)
  } else if (issaws) {
    PetscMPIInt rank;

    ierr = PetscObjectName((PetscObject)pc);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
    if (!((PetscObject)pc)->amsmem && !rank) {
      ierr = PetscObjectViewSAWs((PetscObject)pc,viewer);CHKERRQ(ierr);
    }
    if (pc->mat) {ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);}
    if (pc->pmat && pc->pmat != pc->mat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);}
#endif
  }
  PetscFunctionReturn(0);
}

/*@C
  PCRegister - Adds a method to the preconditioner package.

   Not collective

   Input Parameters:
+  name_solver - name of a new user-defined solver
-  routine_create - routine to create method context

   Notes:
   PCRegister() may be called multiple times to add several user-defined preconditioners.

   Sample usage:
.vb
   PCRegister("my_solver", MySolverCreate);
.ve

   Then, your solver can be chosen with the procedural interface via
$     PCSetType(pc,"my_solver")
   or at runtime via the option
$     -pc_type my_solver

   Level: advanced

.seealso: PCRegisterAll()
@*/
PetscErrorCode PCRegister(const char sname[],PetscErrorCode (*function)(PC))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCInitializePackage();CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&PCList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* MatShell callback used by PCComputeOperator(): applies the PC stored in the
   shell context, so that Y = P(X) where P is the preconditioner application */
static PetscErrorCode MatMult_PC(Mat A,Vec X,Vec Y)
{
  PC             pc;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatShellGetContext(A,&pc);CHKERRQ(ierr);
  ierr = PCApply(pc,X,Y);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
    PCComputeOperator - Computes the explicit preconditioned operator.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner object
-   mattype - the matrix type to be used for the operator

    Output Parameter:
.   mat - the explicit preconditioned operator

    Notes:
    This computation is done by applying the operators to columns of the identity matrix.
    This routine is costly in general, and is recommended for use only with relatively small systems.
1823 Currently, this routine uses a dense matrix format when mattype == NULL 1824 1825 Level: advanced 1826 1827 .seealso: KSPComputeOperator(), MatType 1828 1829 @*/ 1830 PetscErrorCode PCComputeOperator(PC pc,MatType mattype,Mat *mat) 1831 { 1832 PetscErrorCode ierr; 1833 PetscInt N,M,m,n; 1834 Mat A,Apc; 1835 1836 PetscFunctionBegin; 1837 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1838 PetscValidPointer(mat,3); 1839 ierr = PCGetOperators(pc,&A,NULL);CHKERRQ(ierr); 1840 ierr = MatGetLocalSize(A,&m,&n);CHKERRQ(ierr); 1841 ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr); 1842 ierr = MatCreateShell(PetscObjectComm((PetscObject)pc),m,n,M,N,pc,&Apc);CHKERRQ(ierr); 1843 ierr = MatShellSetOperation(Apc,MATOP_MULT,(void (*)(void))MatMult_PC);CHKERRQ(ierr); 1844 ierr = MatComputeOperator(Apc,mattype,mat);CHKERRQ(ierr); 1845 ierr = MatDestroy(&Apc);CHKERRQ(ierr); 1846 PetscFunctionReturn(0); 1847 } 1848 1849 /*@ 1850 PCSetCoordinates - sets the coordinates of all the nodes on the local process 1851 1852 Collective on PC 1853 1854 Input Parameters: 1855 + pc - the solver context 1856 . dim - the dimension of the coordinates 1, 2, or 3 1857 . nloc - the blocked size of the coordinates array 1858 - coords - the coordinates array 1859 1860 Level: intermediate 1861 1862 Notes: 1863 coords is an array of the dim coordinates for the nodes on 1864 the local processor, of size dim*nloc. 1865 If there are 108 equation on a processor 1866 for a displacement finite element discretization of elasticity (so 1867 that there are nloc = 36 = 108/3 nodes) then the array must have 108 1868 double precision values (ie, 3 * 36). These x y z coordinates 1869 should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x, 1870 ... , N-1.z ]. 
.seealso: MatSetNearNullSpace()
@*/
PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,dim,2);
  /* Forwarded to the PC implementation if it composed a PCSetCoordinates_C method;
     a no-op (no error) for implementations that do not use coordinates */
  ierr = PetscTryMethod(pc,"PCSetCoordinates_C",(PC,PetscInt,PetscInt,PetscReal*),(pc,dim,nloc,coords));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCGetInterpolations - Gets interpolation matrices for all levels (except level 0)

   Logically Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  num_levels - the number of levels
-  interpolations - the interpolation matrices (size of num_levels-1)

   Level: advanced

.keywords: MG, GAMG, BoomerAMG, multigrid, interpolation, level

.seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetInterpolation(), PCGetCoarseOperators()
@*/
PetscErrorCode PCGetInterpolations(PC pc,PetscInt *num_levels,Mat *interpolations[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(num_levels,2);
  PetscValidPointer(interpolations,3);
  /* Unlike PetscTryMethod(), PetscUseMethod() errors if the implementation
     does not provide PCGetInterpolations_C (only multilevel PCs do) */
  ierr = PetscUseMethod(pc,"PCGetInterpolations_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,interpolations));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level)

   Logically Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  num_levels - the number of levels
-  coarseOperators - the coarse operator matrices (size of num_levels-1)

   Level: advanced

.keywords: MG, GAMG, BoomerAMG, get, multigrid, interpolation, level

.seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetRScale(), PCMGGetInterpolation(), PCGetInterpolations()
@*/
PetscErrorCode PCGetCoarseOperators(PC pc,PetscInt *num_levels,Mat *coarseOperators[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(num_levels,2);
  PetscValidPointer(coarseOperators,3);
  /* Errors if the PC implementation does not compose PCGetCoarseOperators_C */
  ierr = PetscUseMethod(pc,"PCGetCoarseOperators_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,coarseOperators));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}