
/*
     The PC (preconditioner) interface routines, callable by users.
*/
#include <petsc/private/pcimpl.h>            /*I "petscksp.h" I*/
#include <petscdm.h>

/* Logging support */
PetscClassId  PC_CLASSID;
PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_MatApply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks;
PetscInt      PetscMGLevelId;

/*
   Selects a reasonable default PC type from the communicator size, the
   preconditioner matrix (if set), and which PETSc factorizations it supports.
   Called by PCSetUp() when the user never chose a type.
*/
PetscErrorCode PCGetDefaultType_Private(PC pc,const char *type[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size;
  PetscBool      hasop,flg1,flg2,set,flg3;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
  if (pc->pmat) {
    ierr = MatHasOperation(pc->pmat,MATOP_GET_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr);
    if (size == 1) {
      /* prefer ICC only when ILU is unavailable or the matrix is known symmetric */
      ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1);CHKERRQ(ierr);
      ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2);CHKERRQ(ierr);
      ierr = MatIsSymmetricKnown(pc->pmat,&set,&flg3);CHKERRQ(ierr);
      if (flg1 && (!flg2 || (set && flg3))) {
        *type = PCICC;
      } else if (flg2) {
        *type = PCILU;
      } else if (hasop) { /* likely is a parallel matrix run on one processor */
        *type = PCBJACOBI;
      } else {
        *type = PCNONE;
      }
    } else {
      if (hasop) {
        *type = PCBJACOBI;
      } else {
        *type = PCNONE;
      }
    }
  } else {
    /* no preconditioner matrix yet; pick by communicator size alone */
    if (size == 1) {
      *type = PCILU;
    } else {
      *type = PCBJACOBI;
    }
  }
  PetscFunctionReturn(0);
}

/*@
   PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Level: developer

   Notes:
    This allows a PC to be reused for a different sized linear system but using the same options that have been previously set in the PC

.seealso: PCCreate(), PCSetUp()
@*/
PetscErrorCode PCReset(PC pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* let the implementation release its own data first */
  if (pc->ops->reset) {
    ierr = (*pc->ops->reset)(pc);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&pc->diagonalscaleright);CHKERRQ(ierr);
  ierr = VecDestroy(&pc->diagonalscaleleft);CHKERRQ(ierr);
  ierr = MatDestroy(&pc->pmat);CHKERRQ(ierr);
  ierr = MatDestroy(&pc->mat);CHKERRQ(ierr);

  pc->setupcalled = 0;
  PetscFunctionReturn(0);
}

/*@
   PCDestroy - Destroys PC context that was created with PCCreate().

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Level: developer

.seealso: PCCreate(), PCSetUp()
@*/
PetscErrorCode PCDestroy(PC *pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*pc) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*pc),PC_CLASSID,1);
  /* only actually destroy once the last reference is dropped */
  if (--((PetscObject)(*pc))->refct > 0) {*pc = NULL; PetscFunctionReturn(0);}

  ierr = PCReset(*pc);CHKERRQ(ierr);

  /* if memory was published with SAWs then destroy it */
  ierr = PetscObjectSAWsViewOff((PetscObject)*pc);CHKERRQ(ierr);
  if ((*pc)->ops->destroy) {ierr = (*(*pc)->ops->destroy)((*pc));CHKERRQ(ierr);}
  ierr = DMDestroy(&(*pc)->dm);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right
      scaling as needed by certain time-stepping codes.

   Logically Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.
flag - PETSC_TRUE if it applies the scaling 129 130 Level: developer 131 132 Notes: 133 If this returns PETSC_TRUE then the system solved via the Krylov method is 134 $ D M A D^{-1} y = D M b for left preconditioning or 135 $ D A M D^{-1} z = D b for right preconditioning 136 137 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCSetDiagonalScale() 138 @*/ 139 PetscErrorCode PCGetDiagonalScale(PC pc,PetscBool *flag) 140 { 141 PetscFunctionBegin; 142 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 143 PetscValidBoolPointer(flag,2); 144 *flag = pc->diagonalscale; 145 PetscFunctionReturn(0); 146 } 147 148 /*@ 149 PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right 150 scaling as needed by certain time-stepping codes. 151 152 Logically Collective on PC 153 154 Input Parameters: 155 + pc - the preconditioner context 156 - s - scaling vector 157 158 Level: intermediate 159 160 Notes: 161 The system solved via the Krylov method is 162 $ D M A D^{-1} y = D M b for left preconditioning or 163 $ D A M D^{-1} z = D b for right preconditioning 164 165 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 
166 167 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCGetDiagonalScale() 168 @*/ 169 PetscErrorCode PCSetDiagonalScale(PC pc,Vec s) 170 { 171 PetscErrorCode ierr; 172 173 PetscFunctionBegin; 174 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 175 PetscValidHeaderSpecific(s,VEC_CLASSID,2); 176 pc->diagonalscale = PETSC_TRUE; 177 178 ierr = PetscObjectReference((PetscObject)s);CHKERRQ(ierr); 179 ierr = VecDestroy(&pc->diagonalscaleleft);CHKERRQ(ierr); 180 181 pc->diagonalscaleleft = s; 182 183 ierr = VecDuplicate(s,&pc->diagonalscaleright);CHKERRQ(ierr); 184 ierr = VecCopy(s,pc->diagonalscaleright);CHKERRQ(ierr); 185 ierr = VecReciprocal(pc->diagonalscaleright);CHKERRQ(ierr); 186 PetscFunctionReturn(0); 187 } 188 189 /*@ 190 PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes. 191 192 Logically Collective on PC 193 194 Input Parameters: 195 + pc - the preconditioner context 196 . in - input vector 197 - out - scaled vector (maybe the same as in) 198 199 Level: intermediate 200 201 Notes: 202 The system solved via the Krylov method is 203 $ D M A D^{-1} y = D M b for left preconditioning or 204 $ D A M D^{-1} z = D b for right preconditioning 205 206 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 
207 208 If diagonal scaling is turned off and in is not out then in is copied to out 209 210 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleSet(), PCDiagonalScaleRight(), PCDiagonalScale() 211 @*/ 212 PetscErrorCode PCDiagonalScaleLeft(PC pc,Vec in,Vec out) 213 { 214 PetscErrorCode ierr; 215 216 PetscFunctionBegin; 217 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 218 PetscValidHeaderSpecific(in,VEC_CLASSID,2); 219 PetscValidHeaderSpecific(out,VEC_CLASSID,3); 220 if (pc->diagonalscale) { 221 ierr = VecPointwiseMult(out,pc->diagonalscaleleft,in);CHKERRQ(ierr); 222 } else if (in != out) { 223 ierr = VecCopy(in,out);CHKERRQ(ierr); 224 } 225 PetscFunctionReturn(0); 226 } 227 228 /*@ 229 PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes. 230 231 Logically Collective on PC 232 233 Input Parameters: 234 + pc - the preconditioner context 235 . in - input vector 236 - out - scaled vector (maybe the same as in) 237 238 Level: intermediate 239 240 Notes: 241 The system solved via the Krylov method is 242 $ D M A D^{-1} y = D M b for left preconditioning or 243 $ D A M D^{-1} z = D b for right preconditioning 244 245 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 
   If diagonal scaling is turned off and in is not out then in is copied to out

.seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleSet(), PCDiagonalScale()
@*/
PetscErrorCode PCDiagonalScaleRight(PC pc,Vec in,Vec out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_CLASSID,3);
  if (pc->diagonalscale) {
    ierr = VecPointwiseMult(out,pc->diagonalscaleright,in);CHKERRQ(ierr);
  } else if (in != out) {
    /* scaling disabled: behave as the identity */
    ierr = VecCopy(in,out);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
   PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the
   operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
   TSSetIJacobian(), SNESSetJacobian(), KSPSetOperator() or PCSetOperator() not the Pmat.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)

   Options Database Key:
.  -pc_use_amat <true,false>

   Notes:
   For the common case in which the linear system matrix and the matrix used to construct the
   preconditioner are identical, this routine does nothing.

   Level: intermediate

.seealso: PCGetUseAmat(), PCBJACOBI, PCMG, PCFIELDSPLIT, PCCOMPOSITE
@*/
PetscErrorCode PCSetUseAmat(PC pc,PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  pc->useAmat = flg;
  PetscFunctionReturn(0);
}

/*@
   PCSetErrorIfFailure - Causes PC to generate an error if a FPE, for example a zero pivot, is detected.

   Logically Collective on PC

   Input Parameters:
+  pc - iterative context obtained from PCCreate()
-  flg - PETSC_TRUE indicates you want the error generated

   Level: advanced

   Notes:
    Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner, you can call KSPGetConvergedReason() after a KSPSolve()
    to determine if it has converged or failed. Or use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is
    detected.

    This is propagated into KSPs used by this PC, which then propagate it into PCs used by those KSPs

.seealso: PCGetInitialGuessNonzero(), PCSetInitialGuessKnoll(), PCGetInitialGuessKnoll()
@*/
PetscErrorCode PCSetErrorIfFailure(PC pc,PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,flg,2);
  pc->erroriffailure = flg;
  PetscFunctionReturn(0);
}

/*@
   PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the
   operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
   TSSetIJacobian(), SNESSetJacobian(), KSPSetOperator() or PCSetOperator() not the Pmat.

   Logically Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)

   Notes:
   For the common case in which the linear system matrix and the matrix used to construct the
   preconditioner are identical, this routine does nothing.

   Level: intermediate

.seealso: PCSetUseAmat(), PCBJACOBI, PCMG, PCFIELDSPLIT, PCCOMPOSITE
@*/
PetscErrorCode PCGetUseAmat(PC pc,PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  *flg = pc->useAmat;
  PetscFunctionReturn(0);
}

/*@
   PCCreate - Creates a preconditioner context.

   Collective

   Input Parameter:
.  comm - MPI communicator

   Output Parameter:
.  newpc - location to put the preconditioner context

   Notes:
   The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi with PCILU or PCICC
   in parallel. For dense matrices it is always PCNONE.

   Level: developer

.seealso: PCSetUp(), PCApply(), PCDestroy()
@*/
PetscErrorCode PCCreate(MPI_Comm comm,PC *newpc)
{
  PC             pc;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(newpc,1);
  *newpc = NULL;
  ierr = PCInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(pc,PC_CLASSID,"PC","Preconditioner","PC",comm,PCDestroy,PCView);CHKERRQ(ierr);

  pc->mat                  = NULL;
  pc->pmat                 = NULL;
  pc->setupcalled          = 0;
  pc->setfromoptionscalled = 0;
  pc->data                 = NULL;
  pc->diagonalscale        = PETSC_FALSE;
  pc->diagonalscaleleft    = NULL;
  pc->diagonalscaleright   = NULL;

  pc->modifysubmatrices  = NULL;
  pc->modifysubmatricesP = NULL;

  *newpc = pc;
  PetscFunctionReturn(0);

}

/* -------------------------------------------------------------------------------*/

/*@
   PCApply - Applies the preconditioner to a vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.
y - output vector 416 417 Level: developer 418 419 .seealso: PCApplyTranspose(), PCApplyBAorAB() 420 @*/ 421 PetscErrorCode PCApply(PC pc,Vec x,Vec y) 422 { 423 PetscErrorCode ierr; 424 PetscInt m,n,mv,nv; 425 426 PetscFunctionBegin; 427 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 428 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 429 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 430 if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 431 if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);} 432 /* use pmat to check vector sizes since for KSPLQR the pmat may be of a different size than mat */ 433 ierr = MatGetLocalSize(pc->pmat,&m,&n);CHKERRQ(ierr); 434 ierr = VecGetLocalSize(x,&nv);CHKERRQ(ierr); 435 ierr = VecGetLocalSize(y,&mv);CHKERRQ(ierr); 436 if (mv != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local rows %D does not equal resulting vector number of rows %D",m,mv); 437 if (nv != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local columns %D does not equal resulting vector number of rows %D",n,nv); 438 ierr = VecSetErrorIfLocked(y,3);CHKERRQ(ierr); 439 440 ierr = PCSetUp(pc);CHKERRQ(ierr); 441 if (!pc->ops->apply) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply"); 442 ierr = VecLockReadPush(x);CHKERRQ(ierr); 443 ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 444 ierr = (*pc->ops->apply)(pc,x,y);CHKERRQ(ierr); 445 ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 446 if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);} 447 ierr = VecLockReadPop(x);CHKERRQ(ierr); 448 PetscFunctionReturn(0); 449 } 450 451 /*@ 452 PCMatApply - Applies the preconditioner to multiple vectors stored as a MATDENSE. Like PCApply(), Y and X must be different matrices. 
453 454 Collective on PC 455 456 Input Parameters: 457 + pc - the preconditioner context 458 - X - block of input vectors 459 460 Output Parameter: 461 . Y - block of output vectors 462 463 Level: developer 464 465 .seealso: PCApply(), KSPMatSolve() 466 @*/ 467 PetscErrorCode PCMatApply(PC pc,Mat X,Mat Y) 468 { 469 Mat A; 470 Vec cy, cx; 471 PetscInt m1, M1, m2, M2, n1, N1, n2, N2; 472 PetscBool match; 473 PetscErrorCode ierr; 474 475 PetscFunctionBegin; 476 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 477 PetscValidHeaderSpecific(Y, MAT_CLASSID, 2); 478 PetscValidHeaderSpecific(X, MAT_CLASSID, 3); 479 PetscCheckSameComm(pc, 1, Y, 2); 480 PetscCheckSameComm(pc, 1, X, 3); 481 if (Y == X) SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "Y and X must be different matrices"); 482 ierr = PCGetOperators(pc, NULL, &A);CHKERRQ(ierr); 483 ierr = MatGetLocalSize(A, &m1, NULL);CHKERRQ(ierr); 484 ierr = MatGetLocalSize(Y, &m2, &n2);CHKERRQ(ierr); 485 ierr = MatGetSize(A, &M1, NULL);CHKERRQ(ierr); 486 ierr = MatGetSize(X, &M2, &N2);CHKERRQ(ierr); 487 if (m1 != m2 || M1 != M2) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot use a block of input vectors with (m2,M2) = (%D,%D) for a preconditioner with (m1,M1) = (%D,%D)", m2, M2, m1, M1); 488 ierr = MatGetLocalSize(Y, &m1, &n1);CHKERRQ(ierr); 489 ierr = MatGetSize(Y, &M1, &N1);CHKERRQ(ierr); 490 if (m1 != m2 || M1 != M2 || n1 != n2 || N1 != N2) SETERRQ8(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible block of input vectors (m2,M2)x(n2,N2) = (%D,%D)x(%D,%D) and output vectors (m1,M1)x(n1,N1) = (%D,%D)x(%D,%D)", m2, M2, n2, N2, m1, M1, n1, N1); 491 ierr = PetscObjectBaseTypeCompareAny((PetscObject)Y, &match, MATSEQDENSE, MATMPIDENSE, "");CHKERRQ(ierr); 492 if (!match) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of output vectors not stored in a dense Mat"); 493 ierr = PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, "");CHKERRQ(ierr); 494 if (!match) 
SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of input vectors not stored in a dense Mat"); 495 ierr = PCSetUp(pc);CHKERRQ(ierr); 496 if (pc->ops->matapply) { 497 ierr = PetscLogEventBegin(PC_MatApply, pc, X, Y, 0);CHKERRQ(ierr); 498 ierr = (*pc->ops->matapply)(pc, X, Y);CHKERRQ(ierr); 499 ierr = PetscLogEventEnd(PC_MatApply, pc, X, Y, 0);CHKERRQ(ierr); 500 } else { 501 ierr = PetscInfo1(pc, "PC type %s applying column by column\n", ((PetscObject)pc)->type_name);CHKERRQ(ierr); 502 for (n2 = 0; n2 < N2; ++n2) { 503 ierr = MatDenseGetColumnVecRead(X, n2, &cx);CHKERRQ(ierr); 504 ierr = MatDenseGetColumnVecWrite(Y, n2, &cy);CHKERRQ(ierr); 505 ierr = PCApply(pc, cx, cy);CHKERRQ(ierr); 506 ierr = MatDenseRestoreColumnVecWrite(Y, n2, &cy);CHKERRQ(ierr); 507 ierr = MatDenseRestoreColumnVecRead(X, n2, &cx);CHKERRQ(ierr); 508 } 509 } 510 PetscFunctionReturn(0); 511 } 512 513 /*@ 514 PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector. 515 516 Collective on PC 517 518 Input Parameters: 519 + pc - the preconditioner context 520 - x - input vector 521 522 Output Parameter: 523 . y - output vector 524 525 Notes: 526 Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners. 
527 528 Level: developer 529 530 .seealso: PCApply(), PCApplySymmetricRight() 531 @*/ 532 PetscErrorCode PCApplySymmetricLeft(PC pc,Vec x,Vec y) 533 { 534 PetscErrorCode ierr; 535 536 PetscFunctionBegin; 537 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 538 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 539 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 540 if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 541 if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);} 542 ierr = PCSetUp(pc);CHKERRQ(ierr); 543 if (!pc->ops->applysymmetricleft) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply"); 544 ierr = VecLockReadPush(x);CHKERRQ(ierr); 545 ierr = PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr); 546 ierr = (*pc->ops->applysymmetricleft)(pc,x,y);CHKERRQ(ierr); 547 ierr = PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr); 548 ierr = VecLockReadPop(x);CHKERRQ(ierr); 549 if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);} 550 PetscFunctionReturn(0); 551 } 552 553 /*@ 554 PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector. 555 556 Collective on PC 557 558 Input Parameters: 559 + pc - the preconditioner context 560 - x - input vector 561 562 Output Parameter: 563 . y - output vector 564 565 Level: developer 566 567 Notes: 568 Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners. 
569 570 .seealso: PCApply(), PCApplySymmetricLeft() 571 @*/ 572 PetscErrorCode PCApplySymmetricRight(PC pc,Vec x,Vec y) 573 { 574 PetscErrorCode ierr; 575 576 PetscFunctionBegin; 577 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 578 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 579 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 580 if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 581 if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);} 582 ierr = PCSetUp(pc);CHKERRQ(ierr); 583 if (!pc->ops->applysymmetricright) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply"); 584 ierr = VecLockReadPush(x);CHKERRQ(ierr); 585 ierr = PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 586 ierr = (*pc->ops->applysymmetricright)(pc,x,y);CHKERRQ(ierr); 587 ierr = PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 588 ierr = VecLockReadPop(x);CHKERRQ(ierr); 589 if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);} 590 PetscFunctionReturn(0); 591 } 592 593 /*@ 594 PCApplyTranspose - Applies the transpose of preconditioner to a vector. 595 596 Collective on PC 597 598 Input Parameters: 599 + pc - the preconditioner context 600 - x - input vector 601 602 Output Parameter: 603 . y - output vector 604 605 Notes: 606 For complex numbers this applies the non-Hermitian transpose. 
607 608 Developer Notes: 609 We need to implement a PCApplyHermitianTranspose() 610 611 Level: developer 612 613 .seealso: PCApply(), PCApplyBAorAB(), PCApplyBAorABTranspose(), PCApplyTransposeExists() 614 @*/ 615 PetscErrorCode PCApplyTranspose(PC pc,Vec x,Vec y) 616 { 617 PetscErrorCode ierr; 618 619 PetscFunctionBegin; 620 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 621 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 622 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 623 if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 624 if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);} 625 ierr = PCSetUp(pc);CHKERRQ(ierr); 626 if (!pc->ops->applytranspose) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply transpose"); 627 ierr = VecLockReadPush(x);CHKERRQ(ierr); 628 ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 629 ierr = (*pc->ops->applytranspose)(pc,x,y);CHKERRQ(ierr); 630 ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 631 ierr = VecLockReadPop(x);CHKERRQ(ierr); 632 if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);} 633 PetscFunctionReturn(0); 634 } 635 636 /*@ 637 PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation 638 639 Collective on PC 640 641 Input Parameters: 642 . pc - the preconditioner context 643 644 Output Parameter: 645 . flg - PETSC_TRUE if a transpose operation is defined 646 647 Level: developer 648 649 .seealso: PCApplyTranspose() 650 @*/ 651 PetscErrorCode PCApplyTransposeExists(PC pc,PetscBool *flg) 652 { 653 PetscFunctionBegin; 654 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 655 PetscValidBoolPointer(flg,2); 656 if (pc->ops->applytranspose) *flg = PETSC_TRUE; 657 else *flg = PETSC_FALSE; 658 PetscFunctionReturn(0); 659 } 660 661 /*@ 662 PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x. 
   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
.  x - input vector
-  work - work vector

   Output Parameter:
.  y - output vector

   Level: developer

   Notes:
    If the PC has had PCSetDiagonalScale() set then D M A D^{-1} for left preconditioning or D A M D^{-1} is actually applied. Note that the
   specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling.

.seealso: PCApply(), PCApplyTranspose(), PCApplyBAorABTranspose()
@*/
PetscErrorCode PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,side,2);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscValidHeaderSpecific(y,VEC_CLASSID,4);
  PetscValidHeaderSpecific(work,VEC_CLASSID,5);
  PetscCheckSameComm(pc,1,x,3);
  PetscCheckSameComm(pc,1,y,4);
  PetscCheckSameComm(pc,1,work,5);
  if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (side != PC_LEFT && side != PC_SYMMETRIC && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric");
  if (pc->diagonalscale && side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application");
  if (pc->erroriffailure) {ierr = VecValidValues(x,3,PETSC_TRUE);CHKERRQ(ierr);}

  ierr = PCSetUp(pc);CHKERRQ(ierr);
  if (pc->diagonalscale) {
    /* diagonal scaling: compute D (M A or A M) D^{-1} x */
    if (pc->ops->applyBA) {
      Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
      ierr = VecDuplicate(x,&work2);CHKERRQ(ierr);
      ierr = PCDiagonalScaleRight(pc,x,work2);CHKERRQ(ierr);
      ierr = (*pc->ops->applyBA)(pc,side,work2,y,work);CHKERRQ(ierr);
      ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
      ierr = VecDestroy(&work2);CHKERRQ(ierr);
    } else if (side == PC_RIGHT) {
      ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr);
      ierr = PCApply(pc,y,work);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
      ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
    } else if (side == PC_LEFT) {
      ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,y,work);CHKERRQ(ierr);
      ierr = PCApply(pc,work,y);CHKERRQ(ierr);
      ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
    } else if (side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner");
  } else {
    /* no diagonal scaling: prefer a fused applyBA implementation when available */
    if (pc->ops->applyBA) {
      ierr = (*pc->ops->applyBA)(pc,side,x,y,work);CHKERRQ(ierr);
    } else if (side == PC_RIGHT) {
      ierr = PCApply(pc,x,work);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
    } else if (side == PC_LEFT) {
      ierr = MatMult(pc->mat,x,work);CHKERRQ(ierr);
      ierr = PCApply(pc,work,y);CHKERRQ(ierr);
    } else if (side == PC_SYMMETRIC) {
      /* There's an extra copy here; maybe should provide 2 work vectors instead? */
      ierr = PCApplySymmetricRight(pc,x,work);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
      ierr = VecCopy(y,work);CHKERRQ(ierr);
      ierr = PCApplySymmetricLeft(pc,work,y);CHKERRQ(ierr);
    }
  }
  if (pc->erroriffailure) {ierr = VecValidValues(y,4,PETSC_FALSE);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/*@
   PCApplyBAorABTranspose - Applies the transpose of the preconditioner
   and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning,
   NOT tr(B*A) = tr(A)*tr(B).

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.
side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC 752 . x - input vector 753 - work - work vector 754 755 Output Parameter: 756 . y - output vector 757 758 759 Notes: 760 this routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner 761 defined by B'. This is why this has the funny form that it computes tr(B) * tr(A) 762 763 Level: developer 764 765 .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorAB() 766 @*/ 767 PetscErrorCode PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work) 768 { 769 PetscErrorCode ierr; 770 771 PetscFunctionBegin; 772 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 773 PetscValidHeaderSpecific(x,VEC_CLASSID,3); 774 PetscValidHeaderSpecific(y,VEC_CLASSID,4); 775 PetscValidHeaderSpecific(work,VEC_CLASSID,5); 776 if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 777 if (pc->erroriffailure) {ierr = VecValidValues(x,3,PETSC_TRUE);CHKERRQ(ierr);} 778 if (pc->ops->applyBAtranspose) { 779 ierr = (*pc->ops->applyBAtranspose)(pc,side,x,y,work);CHKERRQ(ierr); 780 if (pc->erroriffailure) {ierr = VecValidValues(y,4,PETSC_FALSE);CHKERRQ(ierr);} 781 PetscFunctionReturn(0); 782 } 783 if (side != PC_LEFT && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left"); 784 785 ierr = PCSetUp(pc);CHKERRQ(ierr); 786 if (side == PC_RIGHT) { 787 ierr = PCApplyTranspose(pc,x,work);CHKERRQ(ierr); 788 ierr = MatMultTranspose(pc->mat,work,y);CHKERRQ(ierr); 789 } else if (side == PC_LEFT) { 790 ierr = MatMultTranspose(pc->mat,x,work);CHKERRQ(ierr); 791 ierr = PCApplyTranspose(pc,work,y);CHKERRQ(ierr); 792 } 793 /* add support for PC_SYMMETRIC */ 794 if (pc->erroriffailure) {ierr = VecValidValues(y,4,PETSC_FALSE);CHKERRQ(ierr);} 795 PetscFunctionReturn(0); 796 } 797 798 /* -------------------------------------------------------------------------------*/ 

/*@
   PCApplyRichardsonExists - Determines whether a particular preconditioner has a
   built-in fast application of Richardson's method.

   Not Collective

   Input Parameter:
.  pc - the preconditioner

   Output Parameter:
.  exists - PETSC_TRUE or PETSC_FALSE

   Level: developer

.seealso: PCApplyRichardson()
@*/
PetscErrorCode PCApplyRichardsonExists(PC pc,PetscBool *exists)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(exists,2);
  if (pc->ops->applyrichardson) *exists = PETSC_TRUE;
  else *exists = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/*@
   PCApplyRichardson - Applies several steps of Richardson iteration with
   the particular preconditioner. This routine is usually used by the
   Krylov solvers and not the application code directly.

   Collective on PC

   Input Parameters:
+  pc  - the preconditioner context
.  b   - the right hand side
.  w   - one work vector
.  rtol - relative decrease in residual norm convergence criteria
.  abstol - absolute residual norm convergence criteria
.  dtol - divergence residual norm increase criteria
.  its - the number of iterations to apply.
-  guesszero - if the input x contains nonzero initial guess

   Output Parameters:
+  outits - number of iterations actually used (for SOR this always equals its)
.  reason - the reason the apply terminated
-  y - the solution (also contains initial guess if guesszero is PETSC_FALSE)

   Notes:
   Most preconditioners do not support this function. Use the command
   PCApplyRichardsonExists() to determine if one does.

   Except for the multigrid PC this routine ignores the convergence tolerances
   and always runs for the number of iterations

   Level: developer

.seealso: PCApplyRichardsonExists()
@*/
PetscErrorCode PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(b,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscValidHeaderSpecific(w,VEC_CLASSID,4);
  if (b == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"b and y must be different vectors");
  ierr = PCSetUp(pc);CHKERRQ(ierr);
  if (!pc->ops->applyrichardson) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply richardson");
  ierr = (*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCGetFailedReason - Gets the reason a PCSetUp() failed or 0 if it did not fail

   Logically Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  reason - the reason it failed, currently only -1

   Level: advanced

.seealso: PCCreate(), PCApply(), PCDestroy()
@*/
PetscErrorCode PCGetFailedReason(PC pc,PCFailedReason *reason)
{
  PetscFunctionBegin;
  /* a negative setupcalled value encodes the failure reason from a failed PCSetUp() */
  if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
  else *reason = pc->failedreason;
  PetscFunctionReturn(0);
}


/*
      a setupcall of 0 indicates never setup,
                     1 indicates has been previously setup
                    -1 indicates a PCSetUp() was attempted and failed
*/
/*@
   PCSetUp - Prepares for the use of a preconditioner.

   Collective on PC

   Input Parameter:
.
pc - the preconditioner context

   Level: developer

.seealso: PCCreate(), PCApply(), PCDestroy()
@*/
PetscErrorCode PCSetUp(PC pc)
{
  PetscErrorCode   ierr;
  const char       *def;
  PetscObjectState matstate, matnonzerostate;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (!pc->mat) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first");

  /* user explicitly asked to keep the existing preconditioner: nothing to do */
  if (pc->setupcalled && pc->reusepreconditioner) {
    ierr = PetscInfo(pc,"Leaving PC with identical preconditioner since reuse preconditioner is set\n");CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* compare the current state of pmat against the state cached at the last setup to
     decide whether (and how thoroughly) the preconditioner must be rebuilt */
  ierr = PetscObjectStateGet((PetscObject)pc->pmat,&matstate);CHKERRQ(ierr);
  ierr = MatGetNonzeroState(pc->pmat,&matnonzerostate);CHKERRQ(ierr);
  if (!pc->setupcalled) {
    ierr     = PetscInfo(pc,"Setting up PC for first time\n");CHKERRQ(ierr);
    pc->flag = DIFFERENT_NONZERO_PATTERN;
  } else if (matstate == pc->matstate) {
    /* operator unchanged since last setup: keep the current preconditioner */
    ierr = PetscInfo(pc,"Leaving PC with identical preconditioner since operator is unchanged\n");CHKERRQ(ierr);
    PetscFunctionReturn(0);
  } else {
    if (matnonzerostate > pc->matnonzerostate) {
      ierr     = PetscInfo(pc,"Setting up PC with different nonzero pattern\n");CHKERRQ(ierr);
      pc->flag = DIFFERENT_NONZERO_PATTERN;
    } else {
      /* values changed but sparsity did not: implementations may reuse symbolic work */
      ierr     = PetscInfo(pc,"Setting up PC with same nonzero pattern\n");CHKERRQ(ierr);
      pc->flag = SAME_NONZERO_PATTERN;
    }
  }
  pc->matstate        = matstate;
  pc->matnonzerostate = matnonzerostate;

  /* no type chosen yet: pick a default based on the matrix and communicator size */
  if (!((PetscObject)pc)->type_name) {
    ierr = PCGetDefaultType_Private(pc,&def);CHKERRQ(ierr);
    ierr = PCSetType(pc,def);CHKERRQ(ierr);
  }

  ierr = MatSetErrorIfFailure(pc->pmat,pc->erroriffailure);CHKERRQ(ierr);
  ierr = MatSetErrorIfFailure(pc->mat,pc->erroriffailure);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PC_SetUp,pc,0,0,0);CHKERRQ(ierr);
  if (pc->ops->setup) {
    ierr = (*pc->ops->setup)(pc);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(PC_SetUp,pc,0,0,0);CHKERRQ(ierr);
  /* do not overwrite a negative (failure) state set by the implementation */
  if (!pc->setupcalled) pc->setupcalled = 1;
  PetscFunctionReturn(0);
}

/*@
   PCSetUpOnBlocks - Sets up the preconditioner for each block in
   the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
   methods.

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Level: developer

.seealso: PCCreate(), PCApply(), PCDestroy(), PCSetUp()
@*/
PetscErrorCode PCSetUpOnBlocks(PC pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* not all PC types have block-level setup; those without it are a no-op here */
  if (!pc->ops->setuponblocks) PetscFunctionReturn(0);
  ierr = PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr);
  ierr = (*pc->ops->setuponblocks)(pc);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCSetModifySubMatrices - Sets a user-defined routine for modifying the
   submatrices that arise within certain subdomain-based preconditioners.
   The basic submatrices are extracted from the preconditioner matrix as
   usual; the user can then alter these (for example, to set different boundary
   conditions for each submatrix) before they are used for the local solves.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  func - routine for modifying the submatrices
-  ctx - optional user-defined context (may be null)

   Calling sequence of func:
$     func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx);

+  row - an array of index sets that contain the global row numbers
         that comprise each local submatrix
.  col - an array of index sets that contain the global column numbers
         that comprise each local submatrix
.
submat - array of local submatrices
-  ctx - optional user-defined context for private data for the
         user-defined func routine (may be null)

   Notes:
   PCSetModifySubMatrices() MUST be called before KSPSetUp() and
   KSPSolve().

   A routine set by PCSetModifySubMatrices() is currently called within
   the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM)
   preconditioners. All other preconditioners ignore this routine.

   Level: advanced

.seealso: PCModifySubMatrices()
@*/
PetscErrorCode PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* stash the callback and its context; both are consulted later by PCModifySubMatrices() */
  pc->modifysubmatricesP = ctx;
  pc->modifysubmatrices  = func;
  PetscFunctionReturn(0);
}

/*@C
   PCModifySubMatrices - Calls an optional user-defined routine within
   certain preconditioners if one has been set with PCSetModifySubMatrices().

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  nsub - the number of local submatrices
.  row - an array of index sets that contain the global row numbers
         that comprise each local submatrix
.  col - an array of index sets that contain the global column numbers
         that comprise each local submatrix
.  submat - array of local submatrices
-  ctx - optional user-defined context for private data for the
         user-defined routine (may be null)

   Output Parameter:
.  submat - array of local submatrices (the entries of which may
            have been modified)

   Notes:
   The user should NOT generally call this routine, as it will
   automatically be called within certain preconditioners (currently
   block Jacobi, additive Schwarz) if set.
The basic submatrices are extracted from the preconditioner matrix
   as usual; the user can then alter these (for example, to set different
   boundary conditions for each submatrix) before they are used for the
   local solves.

   Level: developer

.seealso: PCSetModifySubMatrices()
@*/
PetscErrorCode PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* no user callback was registered with PCSetModifySubMatrices(): nothing to do */
  if (!pc->modifysubmatrices) PetscFunctionReturn(0);
  ierr = PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr);
  ierr = (*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCSetOperators - Sets the matrix associated with the linear system and
   a (possibly) different one associated with the preconditioner.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  Amat - the matrix that defines the linear system
-  Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat.

   Notes:
   Passing a NULL for Amat or Pmat removes the matrix that is currently used.

   If you wish to replace either Amat or Pmat but leave the other one untouched then
   first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
   on it and then pass it back in in your call to KSPSetOperators().

   More Notes about Repeated Solution of Linear Systems:
   PETSc does NOT reset the matrix entries of either Amat or Pmat
   to zero after a linear solve; the user is completely responsible for
   matrix assembly.  See the routine MatZeroEntries() if desiring to
   zero all elements of a matrix.
   Level: intermediate

.seealso: PCGetOperators(), MatZeroEntries()
@*/
PetscErrorCode PCSetOperators(PC pc,Mat Amat,Mat Pmat)
{
  PetscErrorCode ierr;
  PetscInt       m1,n1,m2,n2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (Amat) PetscValidHeaderSpecific(Amat,MAT_CLASSID,2);
  if (Pmat) PetscValidHeaderSpecific(Pmat,MAT_CLASSID,3);
  if (Amat) PetscCheckSameComm(pc,1,Amat,2);
  if (Pmat) PetscCheckSameComm(pc,1,Pmat,3);
  /* once the PC has been set up the local sizes of replacement operators may not change */
  if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) {
    ierr = MatGetLocalSize(Amat,&m1,&n1);CHKERRQ(ierr);
    ierr = MatGetLocalSize(pc->mat,&m2,&n2);CHKERRQ(ierr);
    if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
    ierr = MatGetLocalSize(Pmat,&m1,&n1);CHKERRQ(ierr);
    ierr = MatGetLocalSize(pc->pmat,&m2,&n2);CHKERRQ(ierr);
    if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
  }

  if (Pmat != pc->pmat) {
    /* the operator that defines the preconditioner changed; invalidate the cached
       states so the next PCSetUp() rebuilds the preconditioner */
    pc->matnonzerostate = -1;
    pc->matstate        = -1;
  }

  /* reference first in case the matrices are the same */
  if (Amat) {ierr = PetscObjectReference((PetscObject)Amat);CHKERRQ(ierr);}
  ierr = MatDestroy(&pc->mat);CHKERRQ(ierr);
  if (Pmat) {ierr = PetscObjectReference((PetscObject)Pmat);CHKERRQ(ierr);}
  ierr = MatDestroy(&pc->pmat);CHKERRQ(ierr);
  pc->mat  = Amat;
  pc->pmat = Pmat;
  PetscFunctionReturn(0);
}

/*@
   PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner has changed.
   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner

   Level: intermediate

.seealso: PCGetOperators(), MatZeroEntries(), PCGetReusePreconditioner(), KSPSetReusePreconditioner()
@*/
PetscErrorCode PCSetReusePreconditioner(PC pc,PetscBool flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,flag,2);
  /* honored by PCSetUp(): when true an already set up PC is returned untouched */
  pc->reusepreconditioner = flag;
  PetscFunctionReturn(0);
}

/*@
   PCGetReusePreconditioner - Determines if the PC reuses the current preconditioner even if the operator in the preconditioner has changed.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner

   Level: intermediate

.seealso: PCGetOperators(), MatZeroEntries(), PCSetReusePreconditioner()
@*/
PetscErrorCode PCGetReusePreconditioner(PC pc,PetscBool *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(flag,2);
  *flag = pc->reusepreconditioner;
  PetscFunctionReturn(0);
}

/*@
   PCGetOperators - Gets the matrix associated with the linear system and
   possibly a different one associated with the preconditioner.

   Not collective, though parallel Mats are returned if the PC is parallel

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  Amat - the matrix defining the linear system
-  Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat.
   Level: intermediate

   Notes:
   Does not increase the reference count of the matrices, so you should not destroy them

   Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators
   are created in PC and returned to the user. In this case, if both operators
   mat and pmat are requested, two DIFFERENT operators will be returned. If
   only one is requested both operators in the PC will be the same (i.e. as
   if one had called KSP/PCSetOperators() with the same argument for both Mats).
   The user must set the sizes of the returned matrices and their type etc just
   as if the user created them with MatCreate(). For example,

$           KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to
$             set size, type, etc of Amat

$           MatCreate(comm,&mat);
$           KSP/PCSetOperators(ksp/pc,Amat,Amat);
$           PetscObjectDereference((PetscObject)mat);
$             set size, type, etc of Amat

     and

$           KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to
$             set size, type, etc of Amat and Pmat

$           MatCreate(comm,&Amat);
$           MatCreate(comm,&Pmat);
$           KSP/PCSetOperators(ksp/pc,Amat,Pmat);
$           PetscObjectDereference((PetscObject)Amat);
$           PetscObjectDereference((PetscObject)Pmat);
$             set size, type, etc of Amat and Pmat

   The rationale for this support is so that when creating a TS, SNES, or KSP the hierarchy
   of underlying objects (i.e. SNES, KSP, PC, Mat) and their lifespans can be completely
   managed by the top most level object (i.e. the TS, SNES, or KSP). Another way to look
   at this is when you create a SNES you do not NEED to create a KSP and attach it to
   the SNES object (the SNES object manages it for you). Similarly when you create a KSP
   you do not need to attach a PC to it (the KSP object manages the PC object for you).
   Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when
   it can be created for you?


.seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperatorsSet()
@*/
PetscErrorCode PCGetOperators(PC pc,Mat *Amat,Mat *Pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (Amat) {
    if (!pc->mat) {
      if (pc->pmat && !Pmat) {  /* Pmat has been set, but user did not request it, so use for Amat */
        pc->mat = pc->pmat;
        ierr    = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr);
      } else {                  /* both Amat and Pmat are empty */
        ierr = MatCreate(PetscObjectComm((PetscObject)pc),&pc->mat);CHKERRQ(ierr);
        if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */
          pc->pmat = pc->mat;
          ierr     = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr);
        }
      }
    }
    *Amat = pc->mat;
  }
  if (Pmat) {
    if (!pc->pmat) {
      if (pc->mat && !Amat) {  /* Amat has been set but was not requested, so use for pmat */
        pc->pmat = pc->mat;
        ierr     = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr);
      } else {
        ierr = MatCreate(PetscObjectComm((PetscObject)pc),&pc->pmat);CHKERRQ(ierr);
        if (!Amat) { /* user did NOT request Amat, so make same as Pmat */
          pc->mat = pc->pmat;
          ierr    = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr);
        }
      }
    }
    *Pmat = pc->pmat;
  }
  PetscFunctionReturn(0);
}

/*@C
   PCGetOperatorsSet - Determines if the matrix associated with the linear system and
   possibly a different one associated with the preconditioner have been set in the PC.

   Not collective, though the results on all processes should be the same

   Input Parameter:
.
pc - the preconditioner context

   Output Parameters:
+  mat - the matrix associated with the linear system was set
-  pmat - matrix associated with the preconditioner was set, usually the same

   Level: intermediate

.seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperators()
@*/
PetscErrorCode PCGetOperatorsSet(PC pc,PetscBool *mat,PetscBool *pmat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* either output may be NULL when the caller is not interested in it */
  if (mat) *mat = (pc->mat) ? PETSC_TRUE : PETSC_FALSE;
  if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}

/*@
   PCFactorGetMatrix - Gets the factored matrix from the
   preconditioner context.  This routine is valid only for the LU,
   incomplete LU, Cholesky, and incomplete Cholesky methods.

   Not Collective on PC though Mat is parallel if PC is parallel

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  mat - the factored matrix

   Level: advanced

   Notes:
   Does not increase the reference count for the matrix so DO NOT destroy it

@*/
PetscErrorCode PCFactorGetMatrix(PC pc,Mat *mat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(mat,2);
  /* only factorization-based PC types provide this query */
  if (pc->ops->getfactoredmatrix) {
    ierr = (*pc->ops->getfactoredmatrix)(pc,mat);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC type does not support getting factor matrix");
  PetscFunctionReturn(0);
}

/*@C
   PCSetOptionsPrefix - Sets the prefix used for searching for all
   PC options in the database.
   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  prefix - the prefix string to prepend to all PC option requests

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the
   hyphen.

   Level: advanced

.seealso: PCAppendOptionsPrefix(), PCGetOptionsPrefix()
@*/
PetscErrorCode PCSetOptionsPrefix(PC pc,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* simply forwards to the generic PetscObject prefix handling */
  ierr = PetscObjectSetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCAppendOptionsPrefix - Appends to the prefix used for searching for all
   PC options in the database.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  prefix - the prefix string to append to all PC option requests

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the
   hyphen.

   Level: advanced

.seealso: PCSetOptionsPrefix(), PCGetOptionsPrefix()
@*/
PetscErrorCode PCAppendOptionsPrefix(PC pc,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCGetOptionsPrefix - Gets the prefix used for searching for all
   PC options in the database.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.
prefix - pointer to the prefix string used, is returned

   Notes:
   On the Fortran side, the user should pass in a string 'prefix' of
   sufficient length to hold the prefix.

   Level: advanced

.seealso: PCSetOptionsPrefix(), PCAppendOptionsPrefix()
@*/
PetscErrorCode PCGetOptionsPrefix(PC pc,const char *prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(prefix,2);
  ierr = PetscObjectGetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   Indicates the right hand side will be changed by KSPSolve(), this occurs for a few
  preconditioners including BDDC and Eisenstat that transform the equations before applying
  the Krylov methods
*/
PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC pc,PetscBool *change)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(change,2);
  *change = PETSC_FALSE;
  /* PC implementations that transform the system register PCPreSolveChangeRHS_C;
     absence of the method leaves *change as PETSC_FALSE */
  ierr = PetscTryMethod(pc,"PCPreSolveChangeRHS_C",(PC,PetscBool*),(pc,change));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCPreSolve - Optional pre-solve phase, intended for any
   preconditioner-specific actions that must be performed before
   the iterative solve itself.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  ksp - the Krylov subspace context

   Level: developer

   Sample of Usage:
.vb
    PCPreSolve(pc,ksp);
    KSPSolve(ksp,b,x);
    PCPostSolve(pc,ksp);
.ve

   Notes:
   The pre-solve phase is distinct from the PCSetUp() phase.

   KSPSolve() calls this directly, so is rarely called by the user.
.seealso: PCPostSolve()
@*/
PetscErrorCode PCPreSolve(PC pc,KSP ksp)
{
  PetscErrorCode ierr;
  Vec            x,rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
  /* presolvedone counts nesting depth; the matching PCPostSolve() decrements it */
  pc->presolvedone++;
  if (pc->presolvedone > 2) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot embed PCPreSolve() more than twice");
  ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr);
  ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr);

  if (pc->ops->presolve) {
    ierr = (*pc->ops->presolve)(pc,ksp,rhs,x);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
   PCPostSolve - Optional post-solve phase, intended for any
   preconditioner-specific actions that must be performed after
   the iterative solve itself.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  ksp - the Krylov subspace context

   Sample of Usage:
.vb
    PCPreSolve(pc,ksp);
    KSPSolve(ksp,b,x);
    PCPostSolve(pc,ksp);
.ve

   Note:
   KSPSolve() calls this routine directly, so it is rarely called by the user.

   Level: developer

.seealso: PCPreSolve(), KSPSolve()
@*/
PetscErrorCode PCPostSolve(PC pc,KSP ksp)
{
  PetscErrorCode ierr;
  Vec            x,rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
  /* undo the nesting count incremented by PCPreSolve() */
  pc->presolvedone--;
  ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr);
  ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr);
  if (pc->ops->postsolve) {
    ierr = (*pc->ops->postsolve)(pc,ksp,rhs,x);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@C
  PCLoad - Loads a PC that has been stored in binary  with PCView().
  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded PC, this needs to have been created with PCCreate() or
          some related function before a call to PCLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen()

   Level: intermediate

  Notes:
   The type is determined by the data in the file, any type set into the PC before this call is ignored.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since PCLoad() and PCView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), PCView(), MatLoad(), VecLoad()
@*/
PetscErrorCode PCLoad(PC newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,PC_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* the stream must begin with the PC class id followed by the type name */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,NULL,PETSC_INT);CHKERRQ(ierr);
  if (classid != PC_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not PC next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,NULL,PETSC_CHAR);CHKERRQ(ierr);
  ierr = PCSetType(newdm, type);CHKERRQ(ierr);
  /* delegate any implementation-specific state to the type's load routine, if provided */
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#include <petscdraw.h>
#if defined(PETSC_HAVE_SAWS)
#include <petscviewersaws.h>
#endif

/*@C
   PCViewFromOptions - 
View from Options

   Collective on PC

   Input Parameters:
+  A - the PC context
.  obj - Optional object
-  name - command line option

   Level: intermediate
.seealso: PC, PCView, PetscObjectViewFromOptions(), PCCreate()
@*/
PetscErrorCode PCViewFromOptions(PC A,PetscObject obj,const char name[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,PC_CLASSID,1);
  /* thin wrapper over the generic PetscObject option-driven viewing */
  ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCView - Prints the PC data structure.

   Collective on PC

   Input Parameters:
+  pc - the PC context
-  viewer - optional visualization context

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
      output where only the first processor opens
      the file.  All other processors send their
      data to the first processor to print.

   The user can open an alternative visualization contexts with
   PetscViewerASCIIOpen() (output to a specified file).
   Level: developer

.seealso: KSPView(), PetscViewerASCIIOpen()
@*/
PetscErrorCode PCView(PC pc,PetscViewer viewer)
{
  PCType         cstr;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring,isbinary,isdraw;
#if defined(PETSC_HAVE_SAWS)
  PetscBool      issaws;
#endif

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* default to stdout on the PC's communicator when no viewer was supplied */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  PetscCheckSameComm(pc,1,viewer,2);

  /* dispatch on the concrete viewer type */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
#if defined(PETSC_HAVE_SAWS)
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSAWS,&issaws);CHKERRQ(ierr);
#endif

  if (iascii) {
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = PetscViewerASCIIPrintf(viewer," PC has not been set up so information may be incomplete\n");CHKERRQ(ierr);
    }
    /* let the implementation print its own details, indented one tab */
    if (pc->ops->view) {
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
    }
    if (pc->mat) {
      ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);CHKERRQ(ierr);
      if (pc->pmat == pc->mat) {
        ierr = PetscViewerASCIIPrintf(viewer," linear system matrix = precond matrix:\n");CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      } else {
        if (pc->pmat) {
          ierr = PetscViewerASCIIPrintf(viewer," linear system matrix followed by preconditioner matrix:\n");CHKERRQ(ierr);
        } else {
          ierr = PetscViewerASCIIPrintf(viewer," linear system matrix:\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);
        if (pc->pmat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);}
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PCGetType(pc,&cstr);CHKERRQ(ierr);
    ierr = PetscViewerStringSPrintf(viewer," PCType: %-7.7s",cstr);CHKERRQ(ierr);
    if (pc->ops->view) {ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);}
    if (pc->mat) {ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);}
    if (pc->pmat && pc->pmat != pc->mat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);}
  } else if (isbinary) {
    PetscInt    classid = PC_FILE_CLASSID;
    MPI_Comm    comm;
    PetscMPIInt rank;
    char        type[256];

    /* only the first process writes the header (class id and type name) */
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    if (!rank) {
      ierr = PetscViewerBinaryWrite(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
      ierr = PetscStrncpy(type,((PetscObject)pc)->type_name,256);CHKERRQ(ierr);
      ierr = PetscViewerBinaryWrite(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
    }
    if (pc->ops->view) {
      ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);
    }
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x,y,bottom,h;
    PetscInt  n;

    /* draw a labelled box at the current point, then let the implementation draw below it */
    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawGetCurrentPoint(draw,&x,&y);CHKERRQ(ierr);
    if (pc->mat) {
      ierr = MatGetSize(pc->mat,&n,NULL);CHKERRQ(ierr);
      ierr = PetscSNPrintf(str,25,"PC: %s (%D)",((PetscObject)pc)->type_name,n);CHKERRQ(ierr);
    } else {
      ierr = PetscSNPrintf(str,25,"PC: %s",((PetscObject)pc)->type_name);CHKERRQ(ierr);
    }
    ierr   = PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);CHKERRQ(ierr);
    bottom = y - h;
    ierr   = PetscDrawPushCurrentPoint(draw,x,bottom);CHKERRQ(ierr);
    if (pc->ops->view) {
      ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);
    }
    ierr = PetscDrawPopCurrentPoint(draw);CHKERRQ(ierr);
#if defined(PETSC_HAVE_SAWS)
  } else if (issaws) {
    PetscMPIInt rank;

    ierr = PetscObjectName((PetscObject)pc);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
    /* publish only once, and only from the first process */
    if (!((PetscObject)pc)->amsmem && !rank) {
      ierr = PetscObjectViewSAWs((PetscObject)pc,viewer);CHKERRQ(ierr);
    }
    if (pc->mat) {ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);}
    if (pc->pmat && pc->pmat != pc->mat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);}
#endif
  }
  PetscFunctionReturn(0);
}

/*@C
  PCRegister -  Adds a method to the preconditioner package.

   Not collective

   Input Parameters:
+  name_solver - name of a new user-defined solver
-  routine_create - routine to create method context

   Notes:
   PCRegister() may be called multiple times to add several user-defined preconditioners.
   Sample usage:
.vb
   PCRegister("my_solver", MySolverCreate);
.ve

   Then, your solver can be chosen with the procedural interface via
$     PCSetType(pc,"my_solver")
   or at runtime via the option
$     -pc_type my_solver

   Level: advanced

.seealso: PCRegisterAll()
@*/
PetscErrorCode PCRegister(const char sname[],PetscErrorCode (*function)(PC))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* make sure the PC package (and its function list) is initialized before registering */
  ierr = PCInitializePackage();CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&PCList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* MatMult for the shell matrix built by PCComputeOperator(): applies the PC stored
   as the shell context, so the shell acts as the preconditioned operator */
static PetscErrorCode MatMult_PC(Mat A,Vec X,Vec Y)
{
  PC             pc;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatShellGetContext(A,&pc);CHKERRQ(ierr);
  ierr = PCApply(pc,X,Y);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCComputeOperator - Computes the explicit preconditioned operator.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner object
-  mattype - the matrix type to be used for the operator

   Output Parameter:
.  mat - the explicit preconditioned operator

   Notes:
   This computation is done by applying the operators to columns of the identity matrix.
   This routine is costly in general, and is recommended for use only with relatively small systems.
1824 Currently, this routine uses a dense matrix format when mattype == NULL 1825 1826 Level: advanced 1827 1828 .seealso: KSPComputeOperator(), MatType 1829 1830 @*/ 1831 PetscErrorCode PCComputeOperator(PC pc,MatType mattype,Mat *mat) 1832 { 1833 PetscErrorCode ierr; 1834 PetscInt N,M,m,n; 1835 Mat A,Apc; 1836 1837 PetscFunctionBegin; 1838 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1839 PetscValidPointer(mat,3); 1840 ierr = PCGetOperators(pc,&A,NULL);CHKERRQ(ierr); 1841 ierr = MatGetLocalSize(A,&m,&n);CHKERRQ(ierr); 1842 ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr); 1843 ierr = MatCreateShell(PetscObjectComm((PetscObject)pc),m,n,M,N,pc,&Apc);CHKERRQ(ierr); 1844 ierr = MatShellSetOperation(Apc,MATOP_MULT,(void (*)(void))MatMult_PC);CHKERRQ(ierr); 1845 ierr = MatComputeOperator(Apc,mattype,mat);CHKERRQ(ierr); 1846 ierr = MatDestroy(&Apc);CHKERRQ(ierr); 1847 PetscFunctionReturn(0); 1848 } 1849 1850 /*@ 1851 PCSetCoordinates - sets the coordinates of all the nodes on the local process 1852 1853 Collective on PC 1854 1855 Input Parameters: 1856 + pc - the solver context 1857 . dim - the dimension of the coordinates 1, 2, or 3 1858 . nloc - the blocked size of the coordinates array 1859 - coords - the coordinates array 1860 1861 Level: intermediate 1862 1863 Notes: 1864 coords is an array of the dim coordinates for the nodes on 1865 the local processor, of size dim*nloc. 1866 If there are 108 equation on a processor 1867 for a displacement finite element discretization of elasticity (so 1868 that there are nloc = 36 = 108/3 nodes) then the array must have 108 1869 double precision values (ie, 3 * 36). These x y z coordinates 1870 should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x, 1871 ... , N-1.z ]. 
1872 1873 .seealso: MatSetNearNullSpace() 1874 @*/ 1875 PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[]) 1876 { 1877 PetscErrorCode ierr; 1878 1879 PetscFunctionBegin; 1880 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1881 PetscValidLogicalCollectiveInt(pc,dim,2); 1882 ierr = PetscTryMethod(pc,"PCSetCoordinates_C",(PC,PetscInt,PetscInt,PetscReal*),(pc,dim,nloc,coords));CHKERRQ(ierr); 1883 PetscFunctionReturn(0); 1884 } 1885 1886 /*@ 1887 PCGetInterpolations - Gets interpolation matrices for all levels (except level 0) 1888 1889 Logically Collective on PC 1890 1891 Input Parameters: 1892 + pc - the precondition context 1893 1894 Output Parameter: 1895 - num_levels - the number of levels 1896 . interpolations - the interpolation matrices (size of num_levels-1) 1897 1898 Level: advanced 1899 1900 .keywords: MG, GAMG, BoomerAMG, multigrid, interpolation, level 1901 1902 .seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetInterpolation(), PCGetCoarseOperators() 1903 @*/ 1904 PetscErrorCode PCGetInterpolations(PC pc,PetscInt *num_levels,Mat *interpolations[]) 1905 { 1906 PetscErrorCode ierr; 1907 1908 PetscFunctionBegin; 1909 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1910 PetscValidPointer(num_levels,2); 1911 PetscValidPointer(interpolations,3); 1912 ierr = PetscUseMethod(pc,"PCGetInterpolations_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,interpolations));CHKERRQ(ierr); 1913 PetscFunctionReturn(0); 1914 } 1915 1916 /*@ 1917 PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level) 1918 1919 Logically Collective on PC 1920 1921 Input Parameters: 1922 + pc - the precondition context 1923 1924 Output Parameter: 1925 - num_levels - the number of levels 1926 . 
coarseOperators - the coarse operator matrices (size of num_levels-1) 1927 1928 Level: advanced 1929 1930 .keywords: MG, GAMG, BoomerAMG, get, multigrid, interpolation, level 1931 1932 .seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetRScale(), PCMGGetInterpolation(), PCGetInterpolations() 1933 @*/ 1934 PetscErrorCode PCGetCoarseOperators(PC pc,PetscInt *num_levels,Mat *coarseOperators[]) 1935 { 1936 PetscErrorCode ierr; 1937 1938 PetscFunctionBegin; 1939 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1940 PetscValidPointer(num_levels,2); 1941 PetscValidPointer(coarseOperators,3); 1942 ierr = PetscUseMethod(pc,"PCGetCoarseOperators_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,coarseOperators));CHKERRQ(ierr); 1943 PetscFunctionReturn(0); 1944 } 1945