/*
   The PC (preconditioner) interface routines, callable by users.
*/
#include <petsc/private/pcimpl.h>            /*I "petscksp.h" I*/
#include <petscdm.h>

/* Logging support */
PetscClassId  PC_CLASSID;
PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_MatApply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks;
PetscInt      PetscMGLevelId;

/*
   PCGetDefaultType_Private - Reports (in type[]) a reasonable default PC type name for
   the given PC, based on its pmat (if set) and the size of its communicator. It does not
   change the PC itself.

   Serial: prefer ICC when a PETSc ICC factorization is available and the matrix is known
   symmetric (or ILU is unavailable), otherwise ILU, otherwise block Jacobi when the
   matrix provides a diagonal block, otherwise none. Parallel: block Jacobi when a
   diagonal block is available, otherwise none. With no pmat, guess ILU (serial) or
   block Jacobi (parallel).
*/
PetscErrorCode PCGetDefaultType_Private(PC pc,const char *type[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size;
  PetscBool      hasop,flg1,flg2,set,flg3;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRMPI(ierr);
  if (pc->pmat) {
    ierr = MatHasOperation(pc->pmat,MATOP_GET_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr);
    if (size == 1) {
      /* flg1/flg2: PETSc ICC/ILU factorizations available; set/flg3: symmetry known/true */
      ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1);CHKERRQ(ierr);
      ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2);CHKERRQ(ierr);
      ierr = MatIsSymmetricKnown(pc->pmat,&set,&flg3);CHKERRQ(ierr);
      if (flg1 && (!flg2 || (set && flg3))) {
        *type = PCICC;
      } else if (flg2) {
        *type = PCILU;
      } else if (hasop) { /* likely is a parallel matrix run on one processor */
        *type = PCBJACOBI;
      } else {
        *type = PCNONE;
      }
    } else {
      if (hasop) {
        *type = PCBJACOBI;
      } else {
        *type = PCNONE;
      }
    }
  } else {
    /* no pmat attached yet: choose from the communicator size alone */
    if (size == 1) {
      *type = PCILU;
    } else {
      *type = PCBJACOBI;
    }
  }
  PetscFunctionReturn(0);
}

/*@
   PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Level: developer

   Notes:
   This allows a PC to be reused for a different sized linear system but using the same options that have been previously set in the PC

.seealso: PCCreate(), PCSetUp()
@*/
PetscErrorCode PCReset(PC pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* give the implementation a chance to release its own data first */
  if (pc->ops->reset) {
    ierr = (*pc->ops->reset)(pc);CHKERRQ(ierr);
  }
  /* drop the scaling vectors and the operators so a different sized system can be attached */
  ierr = VecDestroy(&pc->diagonalscaleright);CHKERRQ(ierr);
  ierr = VecDestroy(&pc->diagonalscaleleft);CHKERRQ(ierr);
  ierr = MatDestroy(&pc->pmat);CHKERRQ(ierr);
  ierr = MatDestroy(&pc->mat);CHKERRQ(ierr);

  pc->setupcalled = 0;
  PetscFunctionReturn(0);
}

/*@C
   PCDestroy - Destroys PC context that was created with PCCreate().

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Level: developer

.seealso: PCCreate(), PCSetUp()
@*/
PetscErrorCode PCDestroy(PC *pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*pc) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*pc),PC_CLASSID,1);
  /* only the last reference actually destroys the object */
  if (--((PetscObject)(*pc))->refct > 0) {*pc = NULL; PetscFunctionReturn(0);}

  ierr = PCReset(*pc);CHKERRQ(ierr);

  /* if memory was published with SAWs then destroy it */
  ierr = PetscObjectSAWsViewOff((PetscObject)*pc);CHKERRQ(ierr);
  if ((*pc)->ops->destroy) {ierr = (*(*pc)->ops->destroy)((*pc));CHKERRQ(ierr);}
  ierr = DMDestroy(&(*pc)->dm);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right
      scaling as needed by certain time-stepping codes.

   Logically Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.
  flag - PETSC_TRUE if it applies the scaling

   Level: developer

   Notes:
   If this returns PETSC_TRUE then the system solved via the Krylov method is
$          D M A D^{-1} y = D M b  for left preconditioning or
$          D A M D^{-1} z = D b for right preconditioning

.seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCSetDiagonalScale()
@*/
PetscErrorCode PCGetDiagonalScale(PC pc,PetscBool *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidBoolPointer(flag,2);
  *flag = pc->diagonalscale;
  PetscFunctionReturn(0);
}

/*@
   PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right
      scaling as needed by certain time-stepping codes.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  s - scaling vector

   Level: intermediate

   Notes:
   The system solved via the Krylov method is
$          D M A D^{-1} y = D M b  for left preconditioning or
$          D A M D^{-1} z = D b for right preconditioning

   PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
166 167 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCGetDiagonalScale() 168 @*/ 169 PetscErrorCode PCSetDiagonalScale(PC pc,Vec s) 170 { 171 PetscErrorCode ierr; 172 173 PetscFunctionBegin; 174 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 175 PetscValidHeaderSpecific(s,VEC_CLASSID,2); 176 pc->diagonalscale = PETSC_TRUE; 177 178 ierr = PetscObjectReference((PetscObject)s);CHKERRQ(ierr); 179 ierr = VecDestroy(&pc->diagonalscaleleft);CHKERRQ(ierr); 180 181 pc->diagonalscaleleft = s; 182 183 ierr = VecDuplicate(s,&pc->diagonalscaleright);CHKERRQ(ierr); 184 ierr = VecCopy(s,pc->diagonalscaleright);CHKERRQ(ierr); 185 ierr = VecReciprocal(pc->diagonalscaleright);CHKERRQ(ierr); 186 PetscFunctionReturn(0); 187 } 188 189 /*@ 190 PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes. 191 192 Logically Collective on PC 193 194 Input Parameters: 195 + pc - the preconditioner context 196 . in - input vector 197 - out - scaled vector (maybe the same as in) 198 199 Level: intermediate 200 201 Notes: 202 The system solved via the Krylov method is 203 $ D M A D^{-1} y = D M b for left preconditioning or 204 $ D A M D^{-1} z = D b for right preconditioning 205 206 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 
207 208 If diagonal scaling is turned off and in is not out then in is copied to out 209 210 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleSet(), PCDiagonalScaleRight(), PCDiagonalScale() 211 @*/ 212 PetscErrorCode PCDiagonalScaleLeft(PC pc,Vec in,Vec out) 213 { 214 PetscErrorCode ierr; 215 216 PetscFunctionBegin; 217 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 218 PetscValidHeaderSpecific(in,VEC_CLASSID,2); 219 PetscValidHeaderSpecific(out,VEC_CLASSID,3); 220 if (pc->diagonalscale) { 221 ierr = VecPointwiseMult(out,pc->diagonalscaleleft,in);CHKERRQ(ierr); 222 } else if (in != out) { 223 ierr = VecCopy(in,out);CHKERRQ(ierr); 224 } 225 PetscFunctionReturn(0); 226 } 227 228 /*@ 229 PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes. 230 231 Logically Collective on PC 232 233 Input Parameters: 234 + pc - the preconditioner context 235 . in - input vector 236 - out - scaled vector (maybe the same as in) 237 238 Level: intermediate 239 240 Notes: 241 The system solved via the Krylov method is 242 $ D M A D^{-1} y = D M b for left preconditioning or 243 $ D A M D^{-1} z = D b for right preconditioning 244 245 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 

   If diagonal scaling is turned off and in is not out then in is copied to out

.seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleSet(), PCDiagonalScale()
@*/
PetscErrorCode PCDiagonalScaleRight(PC pc,Vec in,Vec out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_CLASSID,3);
  if (pc->diagonalscale) {
    /* out = D^{-1} .* in; diagonalscaleright holds the reciprocal of the scaling vector */
    ierr = VecPointwiseMult(out,pc->diagonalscaleright,in);CHKERRQ(ierr);
  } else if (in != out) {
    /* scaling disabled: behave as a copy */
    ierr = VecCopy(in,out);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
   PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the
   operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
   TSSetIJacobian(), SNESSetJacobian(), KSPSetOperator() or PCSetOperator() not the Pmat.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)

   Options Database Key:
.  -pc_use_amat <true,false>

   Notes:
   For the common case in which the linear system matrix and the matrix used to construct the
   preconditioner are identical, this routine does nothing.

   Level: intermediate

.seealso: PCGetUseAmat(), PCBJACOBI, PCMG, PCFIELDSPLIT, PCCOMPOSITE
@*/
PetscErrorCode PCSetUseAmat(PC pc,PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  pc->useAmat = flg;
  PetscFunctionReturn(0);
}

/*@
   PCSetErrorIfFailure - Causes PC to generate an error if a FPE, for example a zero pivot, is detected.

   Logically Collective on PC

   Input Parameters:
+  pc - iterative context obtained from PCCreate()
-  flg - PETSC_TRUE indicates you want the error generated

   Level: advanced

   Notes:
   Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner, you can call KSPGetConvergedReason() after a KSPSolve()
   to determine if it has converged or failed. Or use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is
   detected.

   This is propagated into KSPs used by this PC, which then propagate it into PCs used by those KSPs

.seealso: PCGetInitialGuessNonzero(), PCSetInitialGuessKnoll(), PCGetInitialGuessKnoll()
@*/
PetscErrorCode PCSetErrorIfFailure(PC pc,PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,flg,2);
  pc->erroriffailure = flg;
  PetscFunctionReturn(0);
}

/*@
   PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the
   operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
   TSSetIJacobian(), SNESSetJacobian(), KSPSetOperator() or PCSetOperator() not the Pmat.

   Logically Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)

   Notes:
   For the common case in which the linear system matrix and the matrix used to construct the
   preconditioner are identical, this routine does nothing.
342 343 Level: intermediate 344 345 .seealso: PCSetUseAmat(), PCBJACOBI, PGMG, PCFIELDSPLIT, PCCOMPOSITE 346 @*/ 347 PetscErrorCode PCGetUseAmat(PC pc,PetscBool *flg) 348 { 349 PetscFunctionBegin; 350 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 351 *flg = pc->useAmat; 352 PetscFunctionReturn(0); 353 } 354 355 /*@ 356 PCCreate - Creates a preconditioner context. 357 358 Collective 359 360 Input Parameter: 361 . comm - MPI communicator 362 363 Output Parameter: 364 . pc - location to put the preconditioner context 365 366 Notes: 367 The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi with PCILU or PCICC 368 in parallel. For dense matrices it is always PCNONE. 369 370 Level: developer 371 372 .seealso: PCSetUp(), PCApply(), PCDestroy() 373 @*/ 374 PetscErrorCode PCCreate(MPI_Comm comm,PC *newpc) 375 { 376 PC pc; 377 PetscErrorCode ierr; 378 379 PetscFunctionBegin; 380 PetscValidPointer(newpc,2); 381 *newpc = NULL; 382 ierr = PCInitializePackage();CHKERRQ(ierr); 383 384 ierr = PetscHeaderCreate(pc,PC_CLASSID,"PC","Preconditioner","PC",comm,PCDestroy,PCView);CHKERRQ(ierr); 385 386 pc->mat = NULL; 387 pc->pmat = NULL; 388 pc->setupcalled = 0; 389 pc->setfromoptionscalled = 0; 390 pc->data = NULL; 391 pc->diagonalscale = PETSC_FALSE; 392 pc->diagonalscaleleft = NULL; 393 pc->diagonalscaleright = NULL; 394 395 pc->modifysubmatrices = NULL; 396 pc->modifysubmatricesP = NULL; 397 398 *newpc = pc; 399 PetscFunctionReturn(0); 400 401 } 402 403 /* -------------------------------------------------------------------------------*/ 404 405 /*@ 406 PCApply - Applies the preconditioner to a vector. 407 408 Collective on PC 409 410 Input Parameters: 411 + pc - the preconditioner context 412 - x - input vector 413 414 Output Parameter: 415 . 
y - output vector 416 417 Level: developer 418 419 .seealso: PCApplyTranspose(), PCApplyBAorAB() 420 @*/ 421 PetscErrorCode PCApply(PC pc,Vec x,Vec y) 422 { 423 PetscErrorCode ierr; 424 PetscInt m,n,mv,nv; 425 426 PetscFunctionBegin; 427 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 428 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 429 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 430 if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 431 if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);} 432 /* use pmat to check vector sizes since for KSPLQR the pmat may be of a different size than mat */ 433 ierr = MatGetLocalSize(pc->pmat,&m,&n);CHKERRQ(ierr); 434 ierr = VecGetLocalSize(x,&nv);CHKERRQ(ierr); 435 ierr = VecGetLocalSize(y,&mv);CHKERRQ(ierr); 436 if (mv != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local rows %D does not equal resulting vector number of rows %D",m,mv); 437 if (nv != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local columns %D does not equal resulting vector number of rows %D",n,nv); 438 ierr = VecSetErrorIfLocked(y,3);CHKERRQ(ierr); 439 440 ierr = PCSetUp(pc);CHKERRQ(ierr); 441 if (!pc->ops->apply) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply"); 442 ierr = VecLockReadPush(x);CHKERRQ(ierr); 443 ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 444 ierr = (*pc->ops->apply)(pc,x,y);CHKERRQ(ierr); 445 ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 446 if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);} 447 ierr = VecLockReadPop(x);CHKERRQ(ierr); 448 PetscFunctionReturn(0); 449 } 450 451 /*@ 452 PCMatApply - Applies the preconditioner to multiple vectors stored as a MATDENSE. Like PCApply(), Y and X must be different matrices. 
453 454 Collective on PC 455 456 Input Parameters: 457 + pc - the preconditioner context 458 - X - block of input vectors 459 460 Output Parameter: 461 . Y - block of output vectors 462 463 Level: developer 464 465 .seealso: PCApply(), KSPMatSolve() 466 @*/ 467 PetscErrorCode PCMatApply(PC pc,Mat X,Mat Y) 468 { 469 Mat A; 470 Vec cy, cx; 471 PetscInt m1, M1, m2, M2, n1, N1, n2, N2; 472 PetscBool match; 473 PetscErrorCode ierr; 474 475 PetscFunctionBegin; 476 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 477 PetscValidHeaderSpecific(X, MAT_CLASSID, 2); 478 PetscValidHeaderSpecific(Y, MAT_CLASSID, 3); 479 PetscCheckSameComm(pc, 1, X, 2); 480 PetscCheckSameComm(pc, 1, Y, 3); 481 if (Y == X) SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "Y and X must be different matrices"); 482 ierr = PCGetOperators(pc, NULL, &A);CHKERRQ(ierr); 483 ierr = MatGetLocalSize(A, &m1, NULL);CHKERRQ(ierr); 484 ierr = MatGetLocalSize(Y, &m2, &n2);CHKERRQ(ierr); 485 ierr = MatGetSize(A, &M1, NULL);CHKERRQ(ierr); 486 ierr = MatGetSize(X, &M2, &N2);CHKERRQ(ierr); 487 if (m1 != m2 || M1 != M2) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot use a block of input vectors with (m2,M2) = (%D,%D) for a preconditioner with (m1,M1) = (%D,%D)", m2, M2, m1, M1); 488 ierr = MatGetLocalSize(Y, &m1, &n1);CHKERRQ(ierr); 489 ierr = MatGetSize(Y, &M1, &N1);CHKERRQ(ierr); 490 if (m1 != m2 || M1 != M2 || n1 != n2 || N1 != N2) SETERRQ8(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible block of input vectors (m2,M2)x(n2,N2) = (%D,%D)x(%D,%D) and output vectors (m1,M1)x(n1,N1) = (%D,%D)x(%D,%D)", m2, M2, n2, N2, m1, M1, n1, N1); 491 ierr = PetscObjectBaseTypeCompareAny((PetscObject)Y, &match, MATSEQDENSE, MATMPIDENSE, "");CHKERRQ(ierr); 492 if (!match) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of output vectors not stored in a dense Mat"); 493 ierr = PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, "");CHKERRQ(ierr); 494 if (!match) 
SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of input vectors not stored in a dense Mat"); 495 ierr = PCSetUp(pc);CHKERRQ(ierr); 496 if (pc->ops->matapply) { 497 ierr = PetscLogEventBegin(PC_MatApply, pc, X, Y, 0);CHKERRQ(ierr); 498 ierr = (*pc->ops->matapply)(pc, X, Y);CHKERRQ(ierr); 499 ierr = PetscLogEventEnd(PC_MatApply, pc, X, Y, 0);CHKERRQ(ierr); 500 } else { 501 ierr = PetscInfo1(pc, "PC type %s applying column by column\n", ((PetscObject)pc)->type_name);CHKERRQ(ierr); 502 for (n2 = 0; n2 < N2; ++n2) { 503 ierr = MatDenseGetColumnVecRead(X, n2, &cx);CHKERRQ(ierr); 504 ierr = MatDenseGetColumnVecWrite(Y, n2, &cy);CHKERRQ(ierr); 505 ierr = PCApply(pc, cx, cy);CHKERRQ(ierr); 506 ierr = MatDenseRestoreColumnVecWrite(Y, n2, &cy);CHKERRQ(ierr); 507 ierr = MatDenseRestoreColumnVecRead(X, n2, &cx);CHKERRQ(ierr); 508 } 509 } 510 PetscFunctionReturn(0); 511 } 512 513 /*@ 514 PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector. 515 516 Collective on PC 517 518 Input Parameters: 519 + pc - the preconditioner context 520 - x - input vector 521 522 Output Parameter: 523 . y - output vector 524 525 Notes: 526 Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners. 

   Level: developer

.seealso: PCApply(), PCApplySymmetricRight()
@*/
PetscErrorCode PCApplySymmetricLeft(PC pc,Vec x,Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);}
  ierr = PCSetUp(pc);CHKERRQ(ierr);
  if (!pc->ops->applysymmetricleft) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply");
  /* x is read-only while the implementation runs */
  ierr = VecLockReadPush(x);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr);
  ierr = (*pc->ops->applysymmetricleft)(pc,x,y);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr);
  ierr = VecLockReadPop(x);CHKERRQ(ierr);
  if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/*@
   PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector

   Level: developer

   Notes:
   Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.
569 570 .seealso: PCApply(), PCApplySymmetricLeft() 571 @*/ 572 PetscErrorCode PCApplySymmetricRight(PC pc,Vec x,Vec y) 573 { 574 PetscErrorCode ierr; 575 576 PetscFunctionBegin; 577 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 578 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 579 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 580 if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 581 if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);} 582 ierr = PCSetUp(pc);CHKERRQ(ierr); 583 if (!pc->ops->applysymmetricright) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply"); 584 ierr = VecLockReadPush(x);CHKERRQ(ierr); 585 ierr = PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 586 ierr = (*pc->ops->applysymmetricright)(pc,x,y);CHKERRQ(ierr); 587 ierr = PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 588 ierr = VecLockReadPop(x);CHKERRQ(ierr); 589 if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);} 590 PetscFunctionReturn(0); 591 } 592 593 /*@ 594 PCApplyTranspose - Applies the transpose of preconditioner to a vector. 595 596 Collective on PC 597 598 Input Parameters: 599 + pc - the preconditioner context 600 - x - input vector 601 602 Output Parameter: 603 . y - output vector 604 605 Notes: 606 For complex numbers this applies the non-Hermitian transpose. 

   Developer Notes:
   We need to implement a PCApplyHermitianTranspose()

   Level: developer

.seealso: PCApply(), PCApplyBAorAB(), PCApplyBAorABTranspose(), PCApplyTransposeExists()
@*/
PetscErrorCode PCApplyTranspose(PC pc,Vec x,Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);}
  ierr = PCSetUp(pc);CHKERRQ(ierr);
  if (!pc->ops->applytranspose) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply transpose");
  /* logged under the same PC_Apply event as PCApply() */
  ierr = VecLockReadPush(x);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  ierr = (*pc->ops->applytranspose)(pc,x,y);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  ierr = VecLockReadPop(x);CHKERRQ(ierr);
  if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/*@
   PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation

   Collective on PC

   Input Parameters:
.  pc - the preconditioner context

   Output Parameter:
.  flg - PETSC_TRUE if a transpose operation is defined

   Level: developer

.seealso: PCApplyTranspose()
@*/
PetscErrorCode PCApplyTransposeExists(PC pc,PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidBoolPointer(flg,2);
  if (pc->ops->applytranspose) *flg = PETSC_TRUE;
  else *flg = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/*@
   PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
.  x - input vector
-  work - work vector

   Output Parameter:
.  y - output vector

   Level: developer

   Notes:
   If the PC has had PCSetDiagonalScale() set then D M A D^{-1} for left preconditioning or  D A M D^{-1} is actually applied. Note that the
   specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling.

.seealso: PCApply(), PCApplyTranspose(), PCApplyBAorABTranspose()
@*/
PetscErrorCode PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,side,2);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscValidHeaderSpecific(y,VEC_CLASSID,4);
  PetscValidHeaderSpecific(work,VEC_CLASSID,5);
  PetscCheckSameComm(pc,1,x,3);
  PetscCheckSameComm(pc,1,y,4);
  PetscCheckSameComm(pc,1,work,5);
  if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (side != PC_LEFT && side != PC_SYMMETRIC && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric");
  if (pc->diagonalscale && side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application");
  if (pc->erroriffailure) {ierr = VecValidValues(x,3,PETSC_TRUE);CHKERRQ(ierr);}

  ierr = PCSetUp(pc);CHKERRQ(ierr);
  if (pc->diagonalscale) {
    /* with diagonal scaling: apply D ... D^{-1} around the (fused or composed) B*A / A*B product */
    if (pc->ops->applyBA) {
      Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
      ierr = VecDuplicate(x,&work2);CHKERRQ(ierr);
      ierr = PCDiagonalScaleRight(pc,x,work2);CHKERRQ(ierr);
      ierr = (*pc->ops->applyBA)(pc,side,work2,y,work);CHKERRQ(ierr);
      ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
      ierr = VecDestroy(&work2);CHKERRQ(ierr);
    } else if (side == PC_RIGHT) {
      /* y = D A B D^{-1} x, built from the separate apply and mult */
      ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr);
      ierr = PCApply(pc,y,work);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
      ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
    } else if (side == PC_LEFT) {
      /* y = D B A D^{-1} x */
      ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,y,work);CHKERRQ(ierr);
      ierr = PCApply(pc,work,y);CHKERRQ(ierr);
      ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
    } else if (side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner");
  } else {
    if (pc->ops->applyBA) {
      ierr = (*pc->ops->applyBA)(pc,side,x,y,work);CHKERRQ(ierr);
    } else if (side == PC_RIGHT) {
      ierr = PCApply(pc,x,work);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
    } else if (side == PC_LEFT) {
      ierr = MatMult(pc->mat,x,work);CHKERRQ(ierr);
      ierr = PCApply(pc,work,y);CHKERRQ(ierr);
    } else if (side == PC_SYMMETRIC) {
      /* There's an extra copy here; maybe should provide 2 work vectors instead? */
      ierr = PCApplySymmetricRight(pc,x,work);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
      ierr = VecCopy(y,work);CHKERRQ(ierr);
      ierr = PCApplySymmetricLeft(pc,work,y);CHKERRQ(ierr);
    }
  }
  if (pc->erroriffailure) {ierr = VecValidValues(y,4,PETSC_FALSE);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/*@
   PCApplyBAorABTranspose - Applies the transpose of the preconditioner
   and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning,
   NOT tr(B*A) = tr(A)*tr(B).

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.
  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
.  x - input vector
-  work - work vector

   Output Parameter:
.  y - output vector

   Notes:
   This routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner
   defined by B'. This is why this has the funny form that it computes tr(B) * tr(A)

   Level: developer

.seealso: PCApply(), PCApplyTranspose(), PCApplyBAorAB()
@*/
PetscErrorCode PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscValidHeaderSpecific(y,VEC_CLASSID,4);
  PetscValidHeaderSpecific(work,VEC_CLASSID,5);
  if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) {ierr = VecValidValues(x,3,PETSC_TRUE);CHKERRQ(ierr);}
  /* a fused implementation, when available, takes precedence and returns early */
  if (pc->ops->applyBAtranspose) {
    ierr = (*pc->ops->applyBAtranspose)(pc,side,x,y,work);CHKERRQ(ierr);
    if (pc->erroriffailure) {ierr = VecValidValues(y,4,PETSC_FALSE);CHKERRQ(ierr);}
    PetscFunctionReturn(0);
  }
  if (side != PC_LEFT && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left");

  ierr = PCSetUp(pc);CHKERRQ(ierr);
  if (side == PC_RIGHT) {
    ierr = PCApplyTranspose(pc,x,work);CHKERRQ(ierr);
    ierr = MatMultTranspose(pc->mat,work,y);CHKERRQ(ierr);
  } else if (side == PC_LEFT) {
    ierr = MatMultTranspose(pc->mat,x,work);CHKERRQ(ierr);
    ierr = PCApplyTranspose(pc,work,y);CHKERRQ(ierr);
  }
  /* add support for PC_SYMMETRIC */
  if (pc->erroriffailure) {ierr = VecValidValues(y,4,PETSC_FALSE);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------*/
799 /*@ 800 PCApplyRichardsonExists - Determines whether a particular preconditioner has a 801 built-in fast application of Richardson's method. 802 803 Not Collective 804 805 Input Parameter: 806 . pc - the preconditioner 807 808 Output Parameter: 809 . exists - PETSC_TRUE or PETSC_FALSE 810 811 Level: developer 812 813 .seealso: PCApplyRichardson() 814 @*/ 815 PetscErrorCode PCApplyRichardsonExists(PC pc,PetscBool *exists) 816 { 817 PetscFunctionBegin; 818 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 819 PetscValidPointer(exists,2); 820 if (pc->ops->applyrichardson) *exists = PETSC_TRUE; 821 else *exists = PETSC_FALSE; 822 PetscFunctionReturn(0); 823 } 824 825 /*@ 826 PCApplyRichardson - Applies several steps of Richardson iteration with 827 the particular preconditioner. This routine is usually used by the 828 Krylov solvers and not the application code directly. 829 830 Collective on PC 831 832 Input Parameters: 833 + pc - the preconditioner context 834 . b - the right hand side 835 . w - one work vector 836 . rtol - relative decrease in residual norm convergence criteria 837 . abstol - absolute residual norm convergence criteria 838 . dtol - divergence residual norm increase criteria 839 . its - the number of iterations to apply. 840 - guesszero - if the input x contains nonzero initial guess 841 842 Output Parameter: 843 + outits - number of iterations actually used (for SOR this always equals its) 844 . reason - the reason the apply terminated 845 - y - the solution (also contains initial guess if guesszero is PETSC_FALSE 846 847 Notes: 848 Most preconditioners do not support this function. Use the command 849 PCApplyRichardsonExists() to determine if one does. 
850 851 Except for the multigrid PC this routine ignores the convergence tolerances 852 and always runs for the number of iterations 853 854 Level: developer 855 856 .seealso: PCApplyRichardsonExists() 857 @*/ 858 PetscErrorCode PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason) 859 { 860 PetscErrorCode ierr; 861 862 PetscFunctionBegin; 863 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 864 PetscValidHeaderSpecific(b,VEC_CLASSID,2); 865 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 866 PetscValidHeaderSpecific(w,VEC_CLASSID,4); 867 if (b == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"b and y must be different vectors"); 868 ierr = PCSetUp(pc);CHKERRQ(ierr); 869 if (!pc->ops->applyrichardson) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply richardson"); 870 ierr = (*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason);CHKERRQ(ierr); 871 PetscFunctionReturn(0); 872 } 873 874 /*@ 875 PCSetFailedReason - Sets the reason a PCSetUp() failed or PC_NOERROR if it did not fail 876 877 Logically Collective on PC 878 879 Input Parameter: 880 + pc - the preconditioner context 881 - reason - the reason it failedx 882 883 Level: advanced 884 885 .seealso: PCCreate(), PCApply(), PCDestroy(), PCFailedReason 886 @*/ 887 PetscErrorCode PCSetFailedReason(PC pc,PCFailedReason reason) 888 { 889 PetscFunctionBegin; 890 pc->failedreason = reason; 891 PetscFunctionReturn(0); 892 } 893 894 /*@ 895 PCGetFailedReason - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail 896 897 Logically Collective on PC 898 899 Input Parameter: 900 . pc - the preconditioner context 901 902 Output Parameter: 903 . reason - the reason it failed 904 905 Level: advanced 906 907 Notes: This is the maximum over reason over all ranks in the PC communicator. 
It is only valid after 908 a call KSPCheckDot() or KSPCheckNorm() inside a KSPSolve(). It is not valid immediately after a PCSetUp() 909 or PCApply(), then use PCGetFailedReasonRank() 910 911 .seealso: PCCreate(), PCApply(), PCDestroy(), PCGetFailedReasonRank(), PCSetFailedReason() 912 @*/ 913 PetscErrorCode PCGetFailedReason(PC pc,PCFailedReason *reason) 914 { 915 PetscFunctionBegin; 916 if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled; 917 else *reason = pc->failedreason; 918 PetscFunctionReturn(0); 919 } 920 921 /*@ 922 PCGetFailedReasonRank - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail on this MPI rank 923 924 Not Collective on PC 925 926 Input Parameter: 927 . pc - the preconditioner context 928 929 Output Parameter: 930 . reason - the reason it failed 931 932 Notes: 933 Different ranks may have different reasons or no reason, see PCGetFailedReason() 934 935 Level: advanced 936 937 .seealso: PCCreate(), PCApply(), PCDestroy(), PCGetFailedReason(), PCSetFailedReason() 938 @*/ 939 PetscErrorCode PCGetFailedReasonRank(PC pc,PCFailedReason *reason) 940 { 941 PetscFunctionBegin; 942 if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled; 943 else *reason = pc->failedreason; 944 PetscFunctionReturn(0); 945 } 946 947 /* Next line needed to deactivate KSP_Solve logging */ 948 #include <petsc/private/kspimpl.h> 949 950 /* 951 a setupcall of 0 indicates never setup, 952 1 indicates has been previously setup 953 -1 indicates a PCSetUp() was attempted and failed 954 */ 955 /*@ 956 PCSetUp - Prepares for the use of a preconditioner. 957 958 Collective on PC 959 960 Input Parameter: 961 . 
   pc - the preconditioner context

   Level: developer

.seealso: PCCreate(), PCApply(), PCDestroy()
@*/
PetscErrorCode PCSetUp(PC pc)
{
  PetscErrorCode   ierr;
  const char       *def;
  PetscObjectState matstate, matnonzerostate;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (!pc->mat) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first");

  if (pc->setupcalled && pc->reusepreconditioner) {
    ierr = PetscInfo(pc,"Leaving PC with identical preconditioner since reuse preconditioner is set\n");CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* compare the operator's current state with the state recorded at the previous setup
     to decide whether, and with what nonzero-pattern assumption, to rebuild */
  ierr = PetscObjectStateGet((PetscObject)pc->pmat,&matstate);CHKERRQ(ierr);
  ierr = MatGetNonzeroState(pc->pmat,&matnonzerostate);CHKERRQ(ierr);
  if (!pc->setupcalled) {
    ierr     = PetscInfo(pc,"Setting up PC for first time\n");CHKERRQ(ierr);
    pc->flag = DIFFERENT_NONZERO_PATTERN;
  } else if (matstate == pc->matstate) {
    ierr = PetscInfo(pc,"Leaving PC with identical preconditioner since operator is unchanged\n");CHKERRQ(ierr);
    PetscFunctionReturn(0);
  } else {
    if (matnonzerostate > pc->matnonzerostate) {
      ierr = PetscInfo(pc,"Setting up PC with different nonzero pattern\n");CHKERRQ(ierr);
      pc->flag = DIFFERENT_NONZERO_PATTERN;
    } else {
      ierr = PetscInfo(pc,"Setting up PC with same nonzero pattern\n");CHKERRQ(ierr);
      pc->flag = SAME_NONZERO_PATTERN;
    }
  }
  pc->matstate        = matstate;
  pc->matnonzerostate = matnonzerostate;

  /* choose a sensible default type when the user never selected one */
  if (!((PetscObject)pc)->type_name) {
    ierr = PCGetDefaultType_Private(pc,&def);CHKERRQ(ierr);
    ierr = PCSetType(pc,def);CHKERRQ(ierr);
  }

  ierr = MatSetErrorIfFailure(pc->pmat,pc->erroriffailure);CHKERRQ(ierr);
  ierr = MatSetErrorIfFailure(pc->mat,pc->erroriffailure);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PC_SetUp,pc,0,0,0);CHKERRQ(ierr);
  if (pc->ops->setup) {
    /* do not log solves and applications
       of preconditioners while constructing preconditioners; perhaps they should be logged separately from the regular solves */
    ierr = PetscLogEventDeactivatePush(KSP_Solve);CHKERRQ(ierr);
    ierr = PetscLogEventDeactivatePush(PC_Apply);CHKERRQ(ierr);
    ierr = (*pc->ops->setup)(pc);CHKERRQ(ierr);
    ierr = PetscLogEventDeactivatePop(KSP_Solve);CHKERRQ(ierr);
    ierr = PetscLogEventDeactivatePop(PC_Apply);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(PC_SetUp,pc,0,0,0);CHKERRQ(ierr);
  /* do not overwrite a negative (failed) state; only mark success on first setup */
  if (!pc->setupcalled) pc->setupcalled = 1;
  PetscFunctionReturn(0);
}

/*@
   PCSetUpOnBlocks - Sets up the preconditioner for each block in
   the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
   methods.

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Level: developer

.seealso: PCCreate(), PCApply(), PCDestroy(), PCSetUp()
@*/
PetscErrorCode PCSetUpOnBlocks(PC pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* a no-op for preconditioners that have no notion of blocks */
  if (!pc->ops->setuponblocks) PetscFunctionReturn(0);
  ierr = PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr);
  ierr = (*pc->ops->setuponblocks)(pc);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCSetModifySubMatrices - Sets a user-defined routine for modifying the
   submatrices that arise within certain subdomain-based preconditioners.
   The basic submatrices are extracted from the preconditioner matrix as
   usual; the user can then alter these (for example, to set different boundary
   conditions for each submatrix) before they are used for the local solves.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.
   func - routine for modifying the submatrices
-  ctx - optional user-defined context (may be null)

   Calling sequence of func:
$     func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx);

+  row - an array of index sets that contain the global row numbers
         that comprise each local submatrix
.  col - an array of index sets that contain the global column numbers
         that comprise each local submatrix
.  submat - array of local submatrices
-  ctx - optional user-defined context for private data for the
         user-defined func routine (may be null)

   Notes:
   PCSetModifySubMatrices() MUST be called before KSPSetUp() and
   KSPSolve().

   A routine set by PCSetModifySubMatrices() is currently called within
   the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM)
   preconditioners.  All other preconditioners ignore this routine.

   Level: advanced

.seealso: PCModifySubMatrices()
@*/
PetscErrorCode PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  pc->modifysubmatrices  = func;
  pc->modifysubmatricesP = ctx;
  PetscFunctionReturn(0);
}

/*@C
   PCModifySubMatrices - Calls an optional user-defined routine within
   certain preconditioners if one has been set with PCSetModifySubMatrices().

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  nsub - the number of local submatrices
.  row - an array of index sets that contain the global row numbers
         that comprise each local submatrix
.  col - an array of index sets that contain the global column numbers
         that comprise each local submatrix
.
   submat - array of local submatrices
-  ctx - optional user-defined context for private data for the
         user-defined routine (may be null)

   Output Parameter:
.  submat - array of local submatrices (the entries of which may
            have been modified)

   Notes:
   The user should NOT generally call this routine, as it will
   automatically be called within certain preconditioners (currently
   block Jacobi, additive Schwarz) if set.

   The basic submatrices are extracted from the preconditioner matrix
   as usual; the user can then alter these (for example, to set different
   boundary conditions for each submatrix) before they are used for the
   local solves.

   Level: developer

.seealso: PCSetModifySubMatrices()
@*/
PetscErrorCode PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* silently succeed when the user never registered a modification routine */
  if (!pc->modifysubmatrices) PetscFunctionReturn(0);
  ierr = PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr);
  ierr = (*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCSetOperators - Sets the matrix associated with the linear system and
   a (possibly) different one associated with the preconditioner.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  Amat - the matrix that defines the linear system
-  Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat.

   Notes:
   Passing a NULL for Amat or Pmat removes the matrix that is currently used.

   If you wish to replace either Amat or Pmat but leave the other one untouched then
   first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
   on it and then pass it back in in your call to KSPSetOperators().

   More Notes about Repeated Solution of Linear Systems:
   PETSc does NOT reset the matrix entries of either Amat or Pmat
   to zero after a linear solve; the user is completely responsible for
   matrix assembly.  See the routine MatZeroEntries() if desiring to
   zero all elements of a matrix.

   Level: intermediate

.seealso: PCGetOperators(), MatZeroEntries()
@*/
PetscErrorCode PCSetOperators(PC pc,Mat Amat,Mat Pmat)
{
  PetscErrorCode ierr;
  PetscInt       m1,n1,m2,n2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (Amat) PetscValidHeaderSpecific(Amat,MAT_CLASSID,2);
  if (Pmat) PetscValidHeaderSpecific(Pmat,MAT_CLASSID,3);
  if (Amat) PetscCheckSameComm(pc,1,Amat,2);
  if (Pmat) PetscCheckSameComm(pc,1,Pmat,3);
  /* once the PC has been set up the local sizes of the operators may not change */
  if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) {
    ierr = MatGetLocalSize(Amat,&m1,&n1);CHKERRQ(ierr);
    ierr = MatGetLocalSize(pc->mat,&m2,&n2);CHKERRQ(ierr);
    if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
    ierr = MatGetLocalSize(Pmat,&m1,&n1);CHKERRQ(ierr);
    ierr = MatGetLocalSize(pc->pmat,&m2,&n2);CHKERRQ(ierr);
    if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
  }

  if (Pmat != pc->pmat) {
    /* changing the operator that defines the preconditioner, so clear the recorded
       states so that a new preconditioner is built at the next PCSetUp() */
    pc->matnonzerostate = -1;
    pc->matstate        = -1;
  }

  /* reference first in case the matrices are the same */
  if (Amat) {ierr = PetscObjectReference((PetscObject)Amat);CHKERRQ(ierr);}
  ierr = MatDestroy(&pc->mat);CHKERRQ(ierr);
  if (Pmat) {ierr = PetscObjectReference((PetscObject)Pmat);CHKERRQ(ierr);}
  ierr = MatDestroy(&pc->pmat);CHKERRQ(ierr);
  pc->mat  = Amat;
  pc->pmat = Pmat;
  PetscFunctionReturn(0);
}

/*@
   PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner has changed.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner

   Level: intermediate

.seealso: PCGetOperators(), MatZeroEntries(), PCGetReusePreconditioner(), KSPSetReusePreconditioner()
@*/
PetscErrorCode PCSetReusePreconditioner(PC pc,PetscBool flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,flag,2);
  pc->reusepreconditioner = flag;
  PetscFunctionReturn(0);
}

/*@
   PCGetReusePreconditioner - Determines if the PC reuses the current preconditioner even if the operator in the preconditioner has changed.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.
   flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner

   Level: intermediate

.seealso: PCGetOperators(), MatZeroEntries(), PCSetReusePreconditioner()
@*/
PetscErrorCode PCGetReusePreconditioner(PC pc,PetscBool *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(flag,2);
  *flag = pc->reusepreconditioner;
  PetscFunctionReturn(0);
}

/*@
   PCGetOperators - Gets the matrix associated with the linear system and
   possibly a different one associated with the preconditioner.

   Not collective, though parallel Mats are returned if the PC is parallel

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  Amat - the matrix defining the linear system
-  Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat.

   Level: intermediate

   Notes:
   Does not increase the reference count of the matrices, so you should not destroy them

   Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators
      are created in PC and returned to the user. In this case, if both operators
      mat and pmat are requested, two DIFFERENT operators will be returned. If
      only one is requested both operators in the PC will be the same (i.e. as
      if one had called KSP/PCSetOperators() with the same argument for both Mats).
      The user must set the sizes of the returned matrices and their type etc just
      as if the user created them with MatCreate(). For example,

$         KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to
$           set size, type, etc of Amat

$         MatCreate(comm,&mat);
$         KSP/PCSetOperators(ksp/pc,Amat,Amat);
$         PetscObjectDereference((PetscObject)mat);
$           set size, type, etc of Amat

     and

$         KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to
$           set size, type, etc of Amat and Pmat

$         MatCreate(comm,&Amat);
$         MatCreate(comm,&Pmat);
$         KSP/PCSetOperators(ksp/pc,Amat,Pmat);
$         PetscObjectDereference((PetscObject)Amat);
$         PetscObjectDereference((PetscObject)Pmat);
$           set size, type, etc of Amat and Pmat

   The rationale for this support is so that when creating a TS, SNES, or KSP the hierarchy
   of underlying objects (i.e. SNES, KSP, PC, Mat) and their lifespans can be completely
   managed by the top most level object (i.e. the TS, SNES, or KSP). Another way to look
   at this is when you create a SNES you do not NEED to create a KSP and attach it to
   the SNES object (the SNES object manages it for you). Similarly when you create a KSP
   you do not need to attach a PC to it (the KSP object manages the PC object for you).
   Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when
   it can be created for you?

.seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperatorsSet()
@*/
PetscErrorCode PCGetOperators(PC pc,Mat *Amat,Mat *Pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (Amat) {
    if (!pc->mat) {
      if (pc->pmat && !Pmat) {  /* Pmat has been set, but user did not request it, so use for Amat */
        pc->mat = pc->pmat;
        ierr    = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr);
      } else {                  /* both Amat and Pmat are empty */
        ierr = MatCreate(PetscObjectComm((PetscObject)pc),&pc->mat);CHKERRQ(ierr);
        if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */
          pc->pmat = pc->mat;
          ierr     = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr);
        }
      }
    }
    *Amat = pc->mat;
  }
  if (Pmat) {
    if (!pc->pmat) {
      if (pc->mat && !Amat) {  /* Amat has been set but was not requested, so use for pmat */
        pc->pmat = pc->mat;
        ierr     = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr);
      } else {
        ierr = MatCreate(PetscObjectComm((PetscObject)pc),&pc->pmat);CHKERRQ(ierr);
        if (!Amat) { /* user did NOT request Amat, so make same as Pmat */
          pc->mat = pc->pmat;
          ierr    = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr);
        }
      }
    }
    *Pmat = pc->pmat;
  }
  PetscFunctionReturn(0);
}

/*@C
   PCGetOperatorsSet - Determines if the matrix associated with the linear system and
   possibly a different one associated with the preconditioner have been set in the PC.

   Not collective, though the results on all processes should be the same

   Input Parameter:
.
   pc - the preconditioner context

   Output Parameters:
+  mat - the matrix associated with the linear system was set
-  pmat - matrix associated with the preconditioner was set, usually the same

   Level: intermediate

.seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperators()
@*/
PetscErrorCode PCGetOperatorsSet(PC pc,PetscBool *mat,PetscBool *pmat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat) *mat = (pc->mat) ? PETSC_TRUE : PETSC_FALSE;
  if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}

/*@
   PCFactorGetMatrix - Gets the factored matrix from the
   preconditioner context.  This routine is valid only for the LU,
   incomplete LU, Cholesky, and incomplete Cholesky methods.

   Not Collective on PC though Mat is parallel if PC is parallel

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  mat - the factored matrix

   Level: advanced

   Notes:
   Does not increase the reference count for the matrix so DO NOT destroy it

@*/
PetscErrorCode PCFactorGetMatrix(PC pc,Mat *mat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(mat,2);
  if (pc->ops->getfactoredmatrix) {
    ierr = (*pc->ops->getfactoredmatrix)(pc,mat);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC type does not support getting factor matrix");
  PetscFunctionReturn(0);
}

/*@C
   PCSetOptionsPrefix - Sets the prefix used for searching for all
   PC options in the database.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  prefix - the prefix string to prepend to all PC option requests

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the
   hyphen.

   Level: advanced

.seealso: PCAppendOptionsPrefix(), PCGetOptionsPrefix()
@*/
PetscErrorCode PCSetOptionsPrefix(PC pc,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscObjectSetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCAppendOptionsPrefix - Appends to the prefix used for searching for all
   PC options in the database.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  prefix - the prefix string to prepend to all PC option requests

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the
   hyphen.

   Level: advanced

.seealso: PCSetOptionsPrefix(), PCGetOptionsPrefix()
@*/
PetscErrorCode PCAppendOptionsPrefix(PC pc,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCGetOptionsPrefix - Gets the prefix used for searching for all
   PC options in the database.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.
   prefix - pointer to the prefix string used, is returned

   Notes:
   On the Fortran side, the user should pass in a string 'prefix' of
   sufficient length to hold the prefix.

   Level: advanced

.seealso: PCSetOptionsPrefix(), PCAppendOptionsPrefix()
@*/
PetscErrorCode PCGetOptionsPrefix(PC pc,const char *prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(prefix,2);
  ierr = PetscObjectGetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   Indicates the right hand side will be changed by KSPSolve(), this occurs for a few
   preconditioners including BDDC and Eisenstat that transform the equations before applying
   the Krylov methods
*/
PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC pc,PetscBool *change)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(change,2);
  /* default to "does not change the RHS"; implementations override via composed method */
  *change = PETSC_FALSE;
  ierr = PetscTryMethod(pc,"PCPreSolveChangeRHS_C",(PC,PetscBool*),(pc,change));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCPreSolve - Optional pre-solve phase, intended for any
   preconditioner-specific actions that must be performed before
   the iterative solve itself.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  ksp - the Krylov subspace context

   Level: developer

   Sample of Usage:
.vb
    PCPreSolve(pc,ksp);
    KSPSolve(ksp,b,x);
    PCPostSolve(pc,ksp);
.ve

   Notes:
   The pre-solve phase is distinct from the PCSetUp() phase.

   KSPSolve() calls this directly, so is rarely called by the user.

.seealso: PCPostSolve()
@*/
PetscErrorCode PCPreSolve(PC pc,KSP ksp)
{
  PetscErrorCode ierr;
  Vec            x,rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
  /* track nesting depth; at most two levels of embedded pre-solves are supported */
  pc->presolvedone++;
  if (pc->presolvedone > 2) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot embed PCPreSolve() more than twice");
  ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr);
  ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr);

  if (pc->ops->presolve) {
    ierr = (*pc->ops->presolve)(pc,ksp,rhs,x);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
   PCPostSolve - Optional post-solve phase, intended for any
   preconditioner-specific actions that must be performed after
   the iterative solve itself.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  ksp - the Krylov subspace context

   Sample of Usage:
.vb
    PCPreSolve(pc,ksp);
    KSPSolve(ksp,b,x);
    PCPostSolve(pc,ksp);
.ve

   Note:
   KSPSolve() calls this routine directly, so it is rarely called by the user.

   Level: developer

.seealso: PCPreSolve(), KSPSolve()
@*/
PetscErrorCode PCPostSolve(PC pc,KSP ksp)
{
  PetscErrorCode ierr;
  Vec            x,rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
  /* undo the nesting count incremented by the matching PCPreSolve() */
  pc->presolvedone--;
  ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr);
  ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr);
  if (pc->ops->postsolve) {
    ierr = (*pc->ops->postsolve)(pc,ksp,rhs,x);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@C
   PCLoad - Loads a PC that has been stored in binary with PCView().

   Collective on PetscViewer

   Input Parameters:
+  newdm - the newly loaded PC, this needs to have been created with PCCreate() or
           some related function before a call to PCLoad().
-  viewer - binary file viewer, obtained from PetscViewerBinaryOpen()

   Level: intermediate

   Notes:
   The type is determined by the data in the file, any type set into the PC before this call is ignored.

   Notes for advanced users:
   Most users should not need to know the details of the binary storage
   format, since PCLoad() and PCView() completely hide these details.
   But for anyone who's interested, the standard binary matrix storage
   format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), PCView(), MatLoad(), VecLoad()
@*/
PetscErrorCode PCLoad(PC newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,PC_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* the file must contain the PC class id followed by the type name */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,NULL,PETSC_INT);CHKERRQ(ierr);
  if (classid != PC_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not PC next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,NULL,PETSC_CHAR);CHKERRQ(ierr);
  ierr = PCSetType(newdm, type);CHKERRQ(ierr);
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#include <petscdraw.h>
#if defined(PETSC_HAVE_SAWS)
#include <petscviewersaws.h>
#endif

/*@C
   PCViewFromOptions -
View from Options

   Collective on PC

   Input Parameters:
+  A - the PC context
.  obj - Optional object
-  name - command line option

   Level: intermediate
.seealso: PC, PCView, PetscObjectViewFromOptions(), PCCreate()
@*/
PetscErrorCode PCViewFromOptions(PC A,PetscObject obj,const char name[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,PC_CLASSID,1);
  ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCView - Prints the PC data structure.

   Collective on PC

   Input Parameters:
+  PC - the PC context
-  viewer - optional visualization context

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
         output where only the first processor opens
         the file.  All other processors send their
         data to the first processor to print.

   The user can open an alternative visualization contexts with
   PetscViewerASCIIOpen() (output to a specified file).

   Level: developer

.seealso: KSPView(), PetscViewerASCIIOpen()
@*/
PetscErrorCode PCView(PC pc,PetscViewer viewer)
{
  PCType         cstr;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring,isbinary,isdraw;
#if defined(PETSC_HAVE_SAWS)
  PetscBool      issaws;
#endif

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (!viewer) {
    /* default to stdout on the PC's communicator */
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  PetscCheckSameComm(pc,1,viewer,2);

  /* dispatch on the viewer type */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
#if defined(PETSC_HAVE_SAWS)
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSAWS,&issaws);CHKERRQ(ierr);
#endif

  if (iascii) {
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = PetscViewerASCIIPrintf(viewer,"  PC has not been set up so information may be incomplete\n");CHKERRQ(ierr);
    }
    if (pc->ops->view) {
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
    }
    if (pc->mat) {
      ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);CHKERRQ(ierr);
      if (pc->pmat == pc->mat) {
        ierr = PetscViewerASCIIPrintf(viewer,"  linear system matrix = precond matrix:\n");CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      } else {
        if (pc->pmat) {
          ierr = PetscViewerASCIIPrintf(viewer,"  linear system matrix followed by preconditioner matrix:\n");CHKERRQ(ierr);
        } else {
          ierr = PetscViewerASCIIPrintf(viewer,"  linear system matrix:\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);
        if (pc->pmat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);}
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PCGetType(pc,&cstr);CHKERRQ(ierr);
    ierr = PetscViewerStringSPrintf(viewer," PCType: %-7.7s",cstr);CHKERRQ(ierr);
    if (pc->ops->view) {ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);}
    if (pc->mat) {ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);}
    if (pc->pmat && pc->pmat != pc->mat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);}
  } else if (isbinary) {
    PetscInt    classid = PC_FILE_CLASSID;
    MPI_Comm    comm;
    PetscMPIInt rank;
    char        type[256];

    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
    if (!rank) {
      /* only the first rank writes the class id and type name header */
      ierr = PetscViewerBinaryWrite(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
      ierr = PetscStrncpy(type,((PetscObject)pc)->type_name,256);CHKERRQ(ierr);
      ierr = PetscViewerBinaryWrite(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
    }
    if (pc->ops->view) {
      ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);
    }
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x,y,bottom,h;
    PetscInt  n;

    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawGetCurrentPoint(draw,&x,&y);CHKERRQ(ierr);
    if (pc->mat) {
      ierr = MatGetSize(pc->mat,&n,NULL);CHKERRQ(ierr);
      ierr = PetscSNPrintf(str,25,"PC: %s (%D)",((PetscObject)pc)->type_name,n);CHKERRQ(ierr);
    } else {
      ierr = PetscSNPrintf(str,25,"PC: %s",((PetscObject)pc)->type_name);CHKERRQ(ierr);
    }
    ierr = PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);CHKERRQ(ierr);
    bottom = y - h;
    ierr = PetscDrawPushCurrentPoint(draw,x,bottom);CHKERRQ(ierr);
    if (pc->ops->view) {
      ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);
    }
    ierr = PetscDrawPopCurrentPoint(draw);CHKERRQ(ierr);
#if defined(PETSC_HAVE_SAWS)
  } else if (issaws) {
    PetscMPIInt rank;

    ierr = PetscObjectName((PetscObject)pc);CHKERRQ(ierr);
    /* NOTE(review): rank is taken on PETSC_COMM_WORLD rather than the PC's communicator -- confirm intended */
    ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRMPI(ierr);
    if (!((PetscObject)pc)->amsmem && !rank) {
      ierr = PetscObjectViewSAWs((PetscObject)pc,viewer);CHKERRQ(ierr);
    }
    if (pc->mat) {ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);}
    if (pc->pmat && pc->pmat != pc->mat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);}
#endif
  }
  PetscFunctionReturn(0);
}

/*@C
   PCRegister - Adds a method to the preconditioner package.

   Not collective

   Input Parameters:
+  name_solver - name of a new user-defined solver
-  routine_create - routine to create method context

   Notes:
   PCRegister() may be called multiple times to add several user-defined preconditioners.

   Sample usage:
.vb
   PCRegister("my_solver", MySolverCreate);
.ve

   Then, your solver can be chosen with the procedural interface via
$     PCSetType(pc,"my_solver")
   or at runtime via the option
$     -pc_type my_solver

   Level: advanced

.seealso: PCRegisterAll()
@*/
PetscErrorCode PCRegister(const char sname[],PetscErrorCode (*function)(PC))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* ensure PCList and the rest of the PC package exist before registering into the list */
  ierr = PCInitializePackage();CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&PCList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* MatMult callback for the shell matrix built in PCComputeOperator():
   retrieves the PC stored as the shell context and applies it, Y = PCApply(pc,X) */
static PetscErrorCode MatMult_PC(Mat A,Vec X,Vec Y)
{
  PC             pc;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatShellGetContext(A,&pc);CHKERRQ(ierr);
  ierr = PCApply(pc,X,Y);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCComputeOperator - Computes the explicit preconditioned operator.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner object
-  mattype - the matrix type to be used for the operator

   Output Parameter:
.  mat - the explicit preconditioned operator

   Notes:
   This computation is done by applying the operators to columns of the identity matrix.
   This routine is costly in general, and is recommended for use only with relatively small systems.
   Currently, this routine uses a dense matrix format when mattype == NULL

   Level: advanced

.seealso: KSPComputeOperator(), MatType

@*/
PetscErrorCode PCComputeOperator(PC pc,MatType mattype,Mat *mat)
{
  PetscErrorCode ierr;
  PetscInt       N,M,m,n;
  Mat            A,Apc;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(mat,3);
  /* size the shell operator identically to the system matrix */
  ierr = PCGetOperators(pc,&A,NULL);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,&n);CHKERRQ(ierr);
  ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
  /* wrap the PC in a shell matrix whose MatMult applies the preconditioner (see MatMult_PC) */
  ierr = MatCreateShell(PetscObjectComm((PetscObject)pc),m,n,M,N,pc,&Apc);CHKERRQ(ierr);
  ierr = MatShellSetOperation(Apc,MATOP_MULT,(void (*)(void))MatMult_PC);CHKERRQ(ierr);
  /* build the explicit matrix by applying the shell operator to the columns of the identity */
  ierr = MatComputeOperator(Apc,mattype,mat);CHKERRQ(ierr);
  ierr = MatDestroy(&Apc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCSetCoordinates - sets the coordinates of all the nodes on the local process

   Collective on PC

   Input Parameters:
+  pc - the solver context
.  dim - the dimension of the coordinates 1, 2, or 3
.  nloc - the blocked size of the coordinates array
-  coords - the coordinates array

   Level: intermediate

   Notes:
   coords is an array of the dim coordinates for the nodes on
   the local processor, of size dim*nloc.
   If there are 108 equations on a processor
   for a displacement finite element discretization of elasticity (so
   that there are nloc = 36 = 108/3 nodes) then the array must have 108
   double precision values (ie, 3 * 36).  These x y z coordinates
   should be ordered for nodes 0 to N-1 like so:  [ 0.x, 0.y, 0.z, 1.x,
   ... , N-1.z ].

.seealso: MatSetNearNullSpace()
@*/
PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,dim,2);
  /* forwarded to the implementation's PCSetCoordinates_C method if it provides one; a no-op otherwise */
  ierr = PetscTryMethod(pc,"PCSetCoordinates_C",(PC,PetscInt,PetscInt,PetscReal*),(pc,dim,nloc,coords));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCGetInterpolations - Gets interpolation matrices for all levels (except level 0)

   Logically Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  num_levels - the number of levels
-  interpolations - the interpolation matrices (size of num_levels-1)

   Level: advanced

.keywords: MG, GAMG, BoomerAMG, multigrid, interpolation, level

.seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetInterpolation(), PCGetCoarseOperators()
@*/
PetscErrorCode PCGetInterpolations(PC pc,PetscInt *num_levels,Mat *interpolations[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(num_levels,2);
  PetscValidPointer(interpolations,3);
  /* unlike PetscTryMethod(), this errors if the PC type does not provide PCGetInterpolations_C */
  ierr = PetscUseMethod(pc,"PCGetInterpolations_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,interpolations));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level)

   Logically Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  num_levels - the number of levels
-
coarseOperators - the coarse operator matrices (size of num_levels-1)

   Level: advanced

.keywords: MG, GAMG, BoomerAMG, get, multigrid, interpolation, level

.seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetRScale(), PCMGGetInterpolation(), PCGetInterpolations()
@*/
PetscErrorCode PCGetCoarseOperators(PC pc,PetscInt *num_levels,Mat *coarseOperators[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(num_levels,2);
  PetscValidPointer(coarseOperators,3);
  /* unlike PetscTryMethod(), this errors if the PC type does not provide PCGetCoarseOperators_C */
  ierr = PetscUseMethod(pc,"PCGetCoarseOperators_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,coarseOperators));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}