/*
     The PC (preconditioner) interface routines, callable by users.
*/
#include <petsc/private/pcimpl.h>            /*I "petscksp.h" I*/
#include <petscdm.h>

/* Logging support */
PetscClassId  PC_CLASSID;
PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_MatApply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks;
PetscInt      PetscMGLevelId;

/*
   PCGetDefaultType_Private - Picks a default PC type from the preconditioning matrix
   (if one is set), the factorizations it supports, and the communicator size.

   Heuristic: serial + symmetric -> ICC, serial -> ILU, parallel (or a matrix format
   that can supply diagonal blocks) -> block Jacobi, otherwise no preconditioning.
*/
PetscErrorCode PCGetDefaultType_Private(PC pc,const char *type[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size;
  PetscBool      hasop,flg1,flg2,set,flg3;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRMPI(ierr);
  if (pc->pmat) {
    /* a matrix that can return a diagonal block is (likely) a parallel format */
    ierr = MatHasOperation(pc->pmat,MATOP_GET_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr);
    if (size == 1) {
      ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1);CHKERRQ(ierr);
      ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2);CHKERRQ(ierr);
      ierr = MatIsSymmetricKnown(pc->pmat,&set,&flg3);CHKERRQ(ierr);
      /* prefer ICC when it is the only factorization or the matrix is known symmetric */
      if (flg1 && (!flg2 || (set && flg3))) {
        *type = PCICC;
      } else if (flg2) {
        *type = PCILU;
      } else if (hasop) { /* likely is a parallel matrix run on one processor */
        *type = PCBJACOBI;
      } else {
        *type = PCNONE;
      }
    } else {
      if (hasop) {
        *type = PCBJACOBI;
      } else {
        *type = PCNONE;
      }
    }
  } else {
    /* no matrix set yet: fall back on communicator size alone */
    if (size == 1) {
      *type = PCILU;
    } else {
      *type = PCBJACOBI;
    }
  }
  PetscFunctionReturn(0);
}

/*@
   PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats

   Collective on PC

   Input Parameter:
.
  pc - the preconditioner context

   Level: developer

   Notes:
    This allows a PC to be reused for a different sized linear system but using the same options that have been previously set in the PC

.seealso: PCCreate(), PCSetUp()
@*/
PetscErrorCode PCReset(PC pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* let the implementation release its type-specific data first */
  if (pc->ops->reset) {
    ierr = (*pc->ops->reset)(pc);CHKERRQ(ierr);
  }
  /* then release the resources shared by all PC types */
  ierr = VecDestroy(&pc->diagonalscaleright);CHKERRQ(ierr);
  ierr = VecDestroy(&pc->diagonalscaleleft);CHKERRQ(ierr);
  ierr = MatDestroy(&pc->pmat);CHKERRQ(ierr);
  ierr = MatDestroy(&pc->mat);CHKERRQ(ierr);

  pc->setupcalled = 0;
  PetscFunctionReturn(0);
}

/*@C
   PCDestroy - Destroys PC context that was created with PCCreate().

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Level: developer

.seealso: PCCreate(), PCSetUp()
@*/
PetscErrorCode PCDestroy(PC *pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*pc) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*pc),PC_CLASSID,1);
  /* other objects may still hold references; only the last reference tears the object down */
  if (--((PetscObject)(*pc))->refct > 0) {*pc = NULL; PetscFunctionReturn(0);}

  ierr = PCReset(*pc);CHKERRQ(ierr);

  /* if memory was published with SAWs then destroy it */
  ierr = PetscObjectSAWsViewOff((PetscObject)*pc);CHKERRQ(ierr);
  if ((*pc)->ops->destroy) {ierr = (*(*pc)->ops->destroy)((*pc));CHKERRQ(ierr);}
  ierr = DMDestroy(&(*pc)->dm);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right
      scaling as needed by certain time-stepping codes.

   Logically Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.
  flag - PETSC_TRUE if it applies the scaling

   Level: developer

   Notes:
    If this returns PETSC_TRUE then the system solved via the Krylov method is
$           D M A D^{-1} y = D M b  for left preconditioning or
$           D A M D^{-1} z = D b for right preconditioning

.seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCSetDiagonalScale()
@*/
PetscErrorCode PCGetDiagonalScale(PC pc,PetscBool *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidBoolPointer(flag,2);
  /* simple query of the flag set by PCSetDiagonalScale() */
  *flag = pc->diagonalscale;
  PetscFunctionReturn(0);
}

/*@
   PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right
      scaling as needed by certain time-stepping codes.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  s - scaling vector

   Level: intermediate

   Notes:
    The system solved via the Krylov method is
$           D M A D^{-1} y = D M b  for left preconditioning or
$           D A M D^{-1} z = D b for right preconditioning

   PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
166 167 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCGetDiagonalScale() 168 @*/ 169 PetscErrorCode PCSetDiagonalScale(PC pc,Vec s) 170 { 171 PetscErrorCode ierr; 172 173 PetscFunctionBegin; 174 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 175 PetscValidHeaderSpecific(s,VEC_CLASSID,2); 176 pc->diagonalscale = PETSC_TRUE; 177 178 ierr = PetscObjectReference((PetscObject)s);CHKERRQ(ierr); 179 ierr = VecDestroy(&pc->diagonalscaleleft);CHKERRQ(ierr); 180 181 pc->diagonalscaleleft = s; 182 183 ierr = VecDuplicate(s,&pc->diagonalscaleright);CHKERRQ(ierr); 184 ierr = VecCopy(s,pc->diagonalscaleright);CHKERRQ(ierr); 185 ierr = VecReciprocal(pc->diagonalscaleright);CHKERRQ(ierr); 186 PetscFunctionReturn(0); 187 } 188 189 /*@ 190 PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes. 191 192 Logically Collective on PC 193 194 Input Parameters: 195 + pc - the preconditioner context 196 . in - input vector 197 - out - scaled vector (maybe the same as in) 198 199 Level: intermediate 200 201 Notes: 202 The system solved via the Krylov method is 203 $ D M A D^{-1} y = D M b for left preconditioning or 204 $ D A M D^{-1} z = D b for right preconditioning 205 206 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 
   If diagonal scaling is turned off and in is not out then in is copied to out

.seealso: PCCreate(), PCSetUp(), PCDiagonalScaleSet(), PCDiagonalScaleRight(), PCDiagonalScale()
@*/
PetscErrorCode PCDiagonalScaleLeft(PC pc,Vec in,Vec out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_CLASSID,3);
  if (pc->diagonalscale) {
    /* out = D * in */
    ierr = VecPointwiseMult(out,pc->diagonalscaleleft,in);CHKERRQ(ierr);
  } else if (in != out) {
    /* no scaling configured: act as the identity */
    ierr = VecCopy(in,out);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
   PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - input vector
-  out - scaled vector (maybe the same as in)

   Level: intermediate

   Notes:
    The system solved via the Krylov method is
$           D M A D^{-1} y = D M b  for left preconditioning or
$           D A M D^{-1} z = D b for right preconditioning

   PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
   If diagonal scaling is turned off and in is not out then in is copied to out

.seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleSet(), PCDiagonalScale()
@*/
PetscErrorCode PCDiagonalScaleRight(PC pc,Vec in,Vec out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_CLASSID,3);
  if (pc->diagonalscale) {
    /* out = D^{-1} * in */
    ierr = VecPointwiseMult(out,pc->diagonalscaleright,in);CHKERRQ(ierr);
  } else if (in != out) {
    /* no scaling configured: act as the identity */
    ierr = VecCopy(in,out);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
   PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the
   operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
   TSSetIJacobian(), SNESSetJacobian(), KSPSetOperator() or PCSetOperator() not the Pmat.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)

   Options Database Key:
.  -pc_use_amat <true,false>

   Notes:
   For the common case in which the linear system matrix and the matrix used to construct the
   preconditioner are identical, this routine does nothing.

   Level: intermediate

.seealso: PCGetUseAmat(), PCBJACOBI, PCMG, PCFIELDSPLIT, PCCOMPOSITE
@*/
PetscErrorCode PCSetUseAmat(PC pc,PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  pc->useAmat = flg;
  PetscFunctionReturn(0);
}

/*@
   PCSetErrorIfFailure - Causes PC to generate an error if a FPE, for example a zero pivot, is detected.
   Logically Collective on PC

   Input Parameters:
+  pc - iterative context obtained from PCCreate()
-  flg - PETSC_TRUE indicates you want the error generated

   Level: advanced

   Notes:
    Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner, you can call KSPGetConvergedReason() after a KSPSolve()
    to determine if it has converged or failed. Or use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is
    detected.

    This is propagated into KSPs used by this PC, which then propagate it into PCs used by those KSPs

.seealso: PCGetInitialGuessNonzero(), PCSetInitialGuessKnoll(), PCGetInitialGuessKnoll()
@*/
PetscErrorCode PCSetErrorIfFailure(PC pc,PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,flg,2);
  pc->erroriffailure = flg;
  PetscFunctionReturn(0);
}

/*@
   PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the
   operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
   TSSetIJacobian(), SNESSetJacobian(), KSPSetOperator() or PCSetOperator() not the Pmat.

   Logically Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)

   Notes:
   For the common case in which the linear system matrix and the matrix used to construct the
   preconditioner are identical, this routine does nothing.
342 343 Level: intermediate 344 345 .seealso: PCSetUseAmat(), PCBJACOBI, PGMG, PCFIELDSPLIT, PCCOMPOSITE 346 @*/ 347 PetscErrorCode PCGetUseAmat(PC pc,PetscBool *flg) 348 { 349 PetscFunctionBegin; 350 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 351 *flg = pc->useAmat; 352 PetscFunctionReturn(0); 353 } 354 355 /*@ 356 PCCreate - Creates a preconditioner context. 357 358 Collective 359 360 Input Parameter: 361 . comm - MPI communicator 362 363 Output Parameter: 364 . pc - location to put the preconditioner context 365 366 Notes: 367 The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi with PCILU or PCICC 368 in parallel. For dense matrices it is always PCNONE. 369 370 Level: developer 371 372 .seealso: PCSetUp(), PCApply(), PCDestroy() 373 @*/ 374 PetscErrorCode PCCreate(MPI_Comm comm,PC *newpc) 375 { 376 PC pc; 377 PetscErrorCode ierr; 378 379 PetscFunctionBegin; 380 PetscValidPointer(newpc,2); 381 *newpc = NULL; 382 ierr = PCInitializePackage();CHKERRQ(ierr); 383 384 ierr = PetscHeaderCreate(pc,PC_CLASSID,"PC","Preconditioner","PC",comm,PCDestroy,PCView);CHKERRQ(ierr); 385 386 pc->mat = NULL; 387 pc->pmat = NULL; 388 pc->setupcalled = 0; 389 pc->setfromoptionscalled = 0; 390 pc->data = NULL; 391 pc->diagonalscale = PETSC_FALSE; 392 pc->diagonalscaleleft = NULL; 393 pc->diagonalscaleright = NULL; 394 395 pc->modifysubmatrices = NULL; 396 pc->modifysubmatricesP = NULL; 397 398 *newpc = pc; 399 PetscFunctionReturn(0); 400 401 } 402 403 /* -------------------------------------------------------------------------------*/ 404 405 /*@ 406 PCApply - Applies the preconditioner to a vector. 407 408 Collective on PC 409 410 Input Parameters: 411 + pc - the preconditioner context 412 - x - input vector 413 414 Output Parameter: 415 . 
  y - output vector

   Level: developer

.seealso: PCApplyTranspose(), PCApplyBAorAB()
@*/
PetscErrorCode PCApply(PC pc,Vec x,Vec y)
{
  PetscErrorCode ierr;
  PetscInt       m,n,mv,nv;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);}
  /* use pmat to check vector sizes since for KSPLSQR the pmat may be of a different size than mat */
  ierr = MatGetLocalSize(pc->pmat,&m,&n);CHKERRQ(ierr);
  ierr = VecGetLocalSize(x,&mv);CHKERRQ(ierr);
  ierr = VecGetLocalSize(y,&nv);CHKERRQ(ierr);
  /* check pmat * y = x is feasible */
  if (mv != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local rows %D does not equal input vector size %D",m,mv);
  if (nv != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local columns %D does not equal output vector size %D",n,nv);
  /* y is about to be overwritten; fail early if it is locked read-only */
  ierr = VecSetErrorIfLocked(y,3);CHKERRQ(ierr);

  ierr = PCSetUp(pc);CHKERRQ(ierr);
  if (!pc->ops->apply) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply");
  /* lock x read-only so the implementation cannot accidentally modify the input */
  ierr = VecLockReadPush(x);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  ierr = (*pc->ops->apply)(pc,x,y);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);}
  ierr = VecLockReadPop(x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCMatApply - Applies the preconditioner to multiple vectors stored as a MATDENSE. Like PCApply(), Y and X must be different matrices.
   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  X - block of input vectors

   Output Parameter:
.  Y - block of output vectors

   Level: developer

.seealso: PCApply(), KSPMatSolve()
@*/
PetscErrorCode PCMatApply(PC pc,Mat X,Mat Y)
{
  Mat            A;
  Vec            cy, cx;
  PetscInt       m1, M1, m2, M2, n1, N1, n2, N2, m3, M3, n3, N3;
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(X, MAT_CLASSID, 2);
  PetscValidHeaderSpecific(Y, MAT_CLASSID, 3);
  PetscCheckSameComm(pc, 1, X, 2);
  PetscCheckSameComm(pc, 1, Y, 3);
  if (Y == X) SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "Y and X must be different matrices");
  /* validate all layouts against the Pmat before doing any work:
     columns of X/Y are the multiple right-hand sides/solutions */
  ierr = PCGetOperators(pc, NULL, &A);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A, &m3, &n3);CHKERRQ(ierr);
  ierr = MatGetLocalSize(X, &m2, &n2);CHKERRQ(ierr);
  ierr = MatGetLocalSize(Y, &m1, &n1);CHKERRQ(ierr);
  ierr = MatGetSize(A, &M3, &N3);CHKERRQ(ierr);
  ierr = MatGetSize(X, &M2, &N2);CHKERRQ(ierr);
  ierr = MatGetSize(Y, &M1, &N1);CHKERRQ(ierr);
  if (n1 != n2 || N1 != N2) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of input vectors (n,N) = (%D,%D) and block of output vectors (n,N) = (%D,%D)", n2, N2, n1, N1);
  if (m2 != m3 || M2 != M3) SETERRQ6(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of input vectors (m,M) = (%D,%D) and Pmat (m,M)x(n,N) = (%D,%D)x(%D,%D)", m2, M2, m3, M3, n3, N3);
  if (m1 != n3 || M1 != N3) SETERRQ6(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of output vectors (m,M) = (%D,%D) and Pmat (m,M)x(n,N) = (%D,%D)x(%D,%D)", m1, M1, m3, M3, n3, N3);
  ierr = PetscObjectBaseTypeCompareAny((PetscObject)Y, &match, MATSEQDENSE, MATMPIDENSE, "");CHKERRQ(ierr);
  if (!match) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of output vectors not stored in a dense Mat");
  ierr = PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, "");CHKERRQ(ierr);
  if (!match) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of input vectors not stored in a dense Mat");
  ierr = PCSetUp(pc);CHKERRQ(ierr);
  if (pc->ops->matapply) {
    /* the implementation supports applying to a whole block at once */
    ierr = PetscLogEventBegin(PC_MatApply, pc, X, Y, 0);CHKERRQ(ierr);
    ierr = (*pc->ops->matapply)(pc, X, Y);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_MatApply, pc, X, Y, 0);CHKERRQ(ierr);
  } else {
    /* fallback: apply the preconditioner one column at a time */
    ierr = PetscInfo1(pc, "PC type %s applying column by column\n", ((PetscObject)pc)->type_name);CHKERRQ(ierr);
    for (n1 = 0; n1 < N1; ++n1) {
      ierr = MatDenseGetColumnVecRead(X, n1, &cx);CHKERRQ(ierr);
      ierr = MatDenseGetColumnVecWrite(Y, n1, &cy);CHKERRQ(ierr);
      ierr = PCApply(pc, cx, cy);CHKERRQ(ierr);
      ierr = MatDenseRestoreColumnVecWrite(Y, n1, &cy);CHKERRQ(ierr);
      ierr = MatDenseRestoreColumnVecRead(X, n1, &cx);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

/*@
   PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector

   Notes:
   Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.
   Level: developer

.seealso: PCApply(), PCApplySymmetricRight()
@*/
PetscErrorCode PCApplySymmetricLeft(PC pc,Vec x,Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);}
  ierr = PCSetUp(pc);CHKERRQ(ierr);
  if (!pc->ops->applysymmetricleft) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply");
  /* lock x read-only so the implementation cannot accidentally modify the input */
  ierr = VecLockReadPush(x);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr);
  ierr = (*pc->ops->applysymmetricleft)(pc,x,y);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr);
  ierr = VecLockReadPop(x);CHKERRQ(ierr);
  if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/*@
   PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector

   Level: developer

   Notes:
   Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.
571 572 .seealso: PCApply(), PCApplySymmetricLeft() 573 @*/ 574 PetscErrorCode PCApplySymmetricRight(PC pc,Vec x,Vec y) 575 { 576 PetscErrorCode ierr; 577 578 PetscFunctionBegin; 579 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 580 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 581 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 582 if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 583 if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);} 584 ierr = PCSetUp(pc);CHKERRQ(ierr); 585 if (!pc->ops->applysymmetricright) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply"); 586 ierr = VecLockReadPush(x);CHKERRQ(ierr); 587 ierr = PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 588 ierr = (*pc->ops->applysymmetricright)(pc,x,y);CHKERRQ(ierr); 589 ierr = PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 590 ierr = VecLockReadPop(x);CHKERRQ(ierr); 591 if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);} 592 PetscFunctionReturn(0); 593 } 594 595 /*@ 596 PCApplyTranspose - Applies the transpose of preconditioner to a vector. 597 598 Collective on PC 599 600 Input Parameters: 601 + pc - the preconditioner context 602 - x - input vector 603 604 Output Parameter: 605 . y - output vector 606 607 Notes: 608 For complex numbers this applies the non-Hermitian transpose. 
   Developer Notes:
    We need to implement a PCApplyHermitianTranspose()

   Level: developer

.seealso: PCApply(), PCApplyBAorAB(), PCApplyBAorABTranspose(), PCApplyTransposeExists()
@*/
PetscErrorCode PCApplyTranspose(PC pc,Vec x,Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);}
  ierr = PCSetUp(pc);CHKERRQ(ierr);
  if (!pc->ops->applytranspose) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply transpose");
  /* lock x read-only so the implementation cannot accidentally modify the input;
     logged under the same PC_Apply event as the forward apply */
  ierr = VecLockReadPush(x);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  ierr = (*pc->ops->applytranspose)(pc,x,y);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  ierr = VecLockReadPop(x);CHKERRQ(ierr);
  if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/*@
   PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  flg - PETSC_TRUE if a transpose operation is defined

   Level: developer

.seealso: PCApplyTranspose()
@*/
PetscErrorCode PCApplyTransposeExists(PC pc,PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidBoolPointer(flg,2);
  if (pc->ops->applytranspose) *flg = PETSC_TRUE;
  else *flg = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/*@
   PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x.
   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
.  x - input vector
-  work - work vector

   Output Parameter:
.  y - output vector

   Level: developer

   Notes:
    If the PC has had PCSetDiagonalScale() set then D M A D^{-1} for left preconditioning or  D A M D^{-1} is actually applied. Note that the
   specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling.

.seealso: PCApply(), PCApplyTranspose(), PCApplyBAorABTranspose()
@*/
PetscErrorCode PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,side,2);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscValidHeaderSpecific(y,VEC_CLASSID,4);
  PetscValidHeaderSpecific(work,VEC_CLASSID,5);
  PetscCheckSameComm(pc,1,x,3);
  PetscCheckSameComm(pc,1,y,4);
  PetscCheckSameComm(pc,1,work,5);
  if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (side != PC_LEFT && side != PC_SYMMETRIC && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric");
  if (pc->diagonalscale && side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application");
  if (pc->erroriffailure) {ierr = VecValidValues(x,3,PETSC_TRUE);CHKERRQ(ierr);}

  ierr = PCSetUp(pc);CHKERRQ(ierr);
  if (pc->diagonalscale) {
    /* diagonal scaling: wrap the operator as D (B A or A B) D^{-1} */
    if (pc->ops->applyBA) {
      Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
      ierr = VecDuplicate(x,&work2);CHKERRQ(ierr);
      ierr = PCDiagonalScaleRight(pc,x,work2);CHKERRQ(ierr);
      ierr = (*pc->ops->applyBA)(pc,side,work2,y,work);CHKERRQ(ierr);
      ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
      ierr = VecDestroy(&work2);CHKERRQ(ierr);
    } else if (side == PC_RIGHT) {
      /* y = D A B D^{-1} x */
      ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr);
      ierr = PCApply(pc,y,work);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
      ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
    } else if (side == PC_LEFT) {
      /* y = D B A D^{-1} x */
      ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,y,work);CHKERRQ(ierr);
      ierr = PCApply(pc,work,y);CHKERRQ(ierr);
      ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
    } else if (side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner");
  } else {
    if (pc->ops->applyBA) {
      /* the implementation provides a fused B*A / A*B application */
      ierr = (*pc->ops->applyBA)(pc,side,x,y,work);CHKERRQ(ierr);
    } else if (side == PC_RIGHT) {
      /* y = A B x */
      ierr = PCApply(pc,x,work);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
    } else if (side == PC_LEFT) {
      /* y = B A x */
      ierr = MatMult(pc->mat,x,work);CHKERRQ(ierr);
      ierr = PCApply(pc,work,y);CHKERRQ(ierr);
    } else if (side == PC_SYMMETRIC) {
      /* There's an extra copy here; maybe should provide 2 work vectors instead? */
      ierr = PCApplySymmetricRight(pc,x,work);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
      ierr = VecCopy(y,work);CHKERRQ(ierr);
      ierr = PCApplySymmetricLeft(pc,work,y);CHKERRQ(ierr);
    }
  }
  if (pc->erroriffailure) {ierr = VecValidValues(y,4,PETSC_FALSE);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/*@
   PCApplyBAorABTranspose - Applies the transpose of the preconditioner
   and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning,
   NOT tr(B*A) = tr(A)*tr(B).

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.
  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
.  x - input vector
-  work - work vector

   Output Parameter:
.  y - output vector

   Notes:
    this routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner
    defined by B'. This is why this has the funny form that it computes tr(B) * tr(A)

   Level: developer

.seealso: PCApply(), PCApplyTranspose(), PCApplyBAorAB()
@*/
PetscErrorCode PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscValidHeaderSpecific(y,VEC_CLASSID,4);
  PetscValidHeaderSpecific(work,VEC_CLASSID,5);
  if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) {ierr = VecValidValues(x,3,PETSC_TRUE);CHKERRQ(ierr);}
  /* prefer a fused implementation when the PC type provides one */
  if (pc->ops->applyBAtranspose) {
    ierr = (*pc->ops->applyBAtranspose)(pc,side,x,y,work);CHKERRQ(ierr);
    if (pc->erroriffailure) {ierr = VecValidValues(y,4,PETSC_FALSE);CHKERRQ(ierr);}
    PetscFunctionReturn(0);
  }
  if (side != PC_LEFT && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left");

  ierr = PCSetUp(pc);CHKERRQ(ierr);
  if (side == PC_RIGHT) {
    /* y = tr(A) tr(B) x */
    ierr = PCApplyTranspose(pc,x,work);CHKERRQ(ierr);
    ierr = MatMultTranspose(pc->mat,work,y);CHKERRQ(ierr);
  } else if (side == PC_LEFT) {
    /* y = tr(B) tr(A) x */
    ierr = MatMultTranspose(pc->mat,x,work);CHKERRQ(ierr);
    ierr = PCApplyTranspose(pc,work,y);CHKERRQ(ierr);
  }
  /* add support for PC_SYMMETRIC */
  if (pc->erroriffailure) {ierr = VecValidValues(y,4,PETSC_FALSE);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------*/

/*@
   PCApplyRichardsonExists - Determines whether a particular preconditioner has a
   built-in fast application of Richardson's method.

   Not Collective

   Input Parameter:
.  pc - the preconditioner

   Output Parameter:
.  exists - PETSC_TRUE or PETSC_FALSE

   Level: developer

.seealso: PCApplyRichardson()
@*/
PetscErrorCode PCApplyRichardsonExists(PC pc,PetscBool *exists)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(exists,2);
  if (pc->ops->applyrichardson) *exists = PETSC_TRUE;
  else *exists = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/*@
   PCApplyRichardson - Applies several steps of Richardson iteration with
   the particular preconditioner. This routine is usually used by the
   Krylov solvers and not the application code directly.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  b - the right hand side
.  w - one work vector
.  rtol - relative decrease in residual norm convergence criteria
.  abstol - absolute residual norm convergence criteria
.  dtol - divergence residual norm increase criteria
.  its - the number of iterations to apply.
-  guesszero - if the input x contains nonzero initial guess

   Output Parameters:
+  outits - number of iterations actually used (for SOR this always equals its)
.  reason - the reason the apply terminated
-  y - the solution (also contains initial guess if guesszero is PETSC_FALSE)

   Notes:
   Most preconditioners do not support this function. Use the command
   PCApplyRichardsonExists() to determine if one does.
852 853 Except for the multigrid PC this routine ignores the convergence tolerances 854 and always runs for the number of iterations 855 856 Level: developer 857 858 .seealso: PCApplyRichardsonExists() 859 @*/ 860 PetscErrorCode PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason) 861 { 862 PetscErrorCode ierr; 863 864 PetscFunctionBegin; 865 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 866 PetscValidHeaderSpecific(b,VEC_CLASSID,2); 867 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 868 PetscValidHeaderSpecific(w,VEC_CLASSID,4); 869 if (b == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"b and y must be different vectors"); 870 ierr = PCSetUp(pc);CHKERRQ(ierr); 871 if (!pc->ops->applyrichardson) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply richardson"); 872 ierr = (*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason);CHKERRQ(ierr); 873 PetscFunctionReturn(0); 874 } 875 876 /*@ 877 PCSetFailedReason - Sets the reason a PCSetUp() failed or PC_NOERROR if it did not fail 878 879 Logically Collective on PC 880 881 Input Parameters: 882 + pc - the preconditioner context 883 - reason - the reason it failedx 884 885 Level: advanced 886 887 .seealso: PCCreate(), PCApply(), PCDestroy(), PCFailedReason 888 @*/ 889 PetscErrorCode PCSetFailedReason(PC pc,PCFailedReason reason) 890 { 891 PetscFunctionBegin; 892 pc->failedreason = reason; 893 PetscFunctionReturn(0); 894 } 895 896 /*@ 897 PCGetFailedReason - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail 898 899 Logically Collective on PC 900 901 Input Parameter: 902 . pc - the preconditioner context 903 904 Output Parameter: 905 . reason - the reason it failed 906 907 Level: advanced 908 909 Notes: This is the maximum over reason over all ranks in the PC communicator. 
It is only valid after 910 a call KSPCheckDot() or KSPCheckNorm() inside a KSPSolve(). It is not valid immediately after a PCSetUp() 911 or PCApply(), then use PCGetFailedReasonRank() 912 913 .seealso: PCCreate(), PCApply(), PCDestroy(), PCGetFailedReasonRank(), PCSetFailedReason() 914 @*/ 915 PetscErrorCode PCGetFailedReason(PC pc,PCFailedReason *reason) 916 { 917 PetscFunctionBegin; 918 if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled; 919 else *reason = pc->failedreason; 920 PetscFunctionReturn(0); 921 } 922 923 /*@ 924 PCGetFailedReasonRank - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail on this MPI rank 925 926 Not Collective on PC 927 928 Input Parameter: 929 . pc - the preconditioner context 930 931 Output Parameter: 932 . reason - the reason it failed 933 934 Notes: 935 Different ranks may have different reasons or no reason, see PCGetFailedReason() 936 937 Level: advanced 938 939 .seealso: PCCreate(), PCApply(), PCDestroy(), PCGetFailedReason(), PCSetFailedReason() 940 @*/ 941 PetscErrorCode PCGetFailedReasonRank(PC pc,PCFailedReason *reason) 942 { 943 PetscFunctionBegin; 944 if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled; 945 else *reason = pc->failedreason; 946 PetscFunctionReturn(0); 947 } 948 949 /* Next line needed to deactivate KSP_Solve logging */ 950 #include <petsc/private/kspimpl.h> 951 952 /* 953 a setupcall of 0 indicates never setup, 954 1 indicates has been previously setup 955 -1 indicates a PCSetUp() was attempted and failed 956 */ 957 /*@ 958 PCSetUp - Prepares for the use of a preconditioner. 959 960 Collective on PC 961 962 Input Parameter: 963 . 
  pc - the preconditioner context

   Level: developer

.seealso: PCCreate(), PCApply(), PCDestroy()
@*/
PetscErrorCode PCSetUp(PC pc)
{
  PetscErrorCode   ierr;
  const char       *def;
  PetscObjectState matstate, matnonzerostate;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (!pc->mat) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first");

  if (pc->setupcalled && pc->reusepreconditioner) {
    ierr = PetscInfo(pc,"Leaving PC with identical preconditioner since reuse preconditioner is set\n");CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* compare the operator's state counters with those recorded at the last setup to
     decide whether the preconditioner must be rebuilt, and with which nonzero-pattern flag */
  ierr = PetscObjectStateGet((PetscObject)pc->pmat,&matstate);CHKERRQ(ierr);
  ierr = MatGetNonzeroState(pc->pmat,&matnonzerostate);CHKERRQ(ierr);
  if (!pc->setupcalled) {
    ierr = PetscInfo(pc,"Setting up PC for first time\n");CHKERRQ(ierr);
    pc->flag = DIFFERENT_NONZERO_PATTERN;
  } else if (matstate == pc->matstate) {
    ierr = PetscInfo(pc,"Leaving PC with identical preconditioner since operator is unchanged\n");CHKERRQ(ierr);
    PetscFunctionReturn(0);
  } else {
    if (matnonzerostate > pc->matnonzerostate) {
      ierr = PetscInfo(pc,"Setting up PC with different nonzero pattern\n");CHKERRQ(ierr);
      pc->flag = DIFFERENT_NONZERO_PATTERN;
    } else {
      ierr = PetscInfo(pc,"Setting up PC with same nonzero pattern\n");CHKERRQ(ierr);
      pc->flag = SAME_NONZERO_PATTERN;
    }
  }
  pc->matstate        = matstate;
  pc->matnonzerostate = matnonzerostate;

  /* no type was selected by the user; pick a sensible default for this matrix/communicator */
  if (!((PetscObject)pc)->type_name) {
    ierr = PCGetDefaultType_Private(pc,&def);CHKERRQ(ierr);
    ierr = PCSetType(pc,def);CHKERRQ(ierr);
  }

  ierr = MatSetErrorIfFailure(pc->pmat,pc->erroriffailure);CHKERRQ(ierr);
  ierr = MatSetErrorIfFailure(pc->mat,pc->erroriffailure);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PC_SetUp,pc,0,0,0);CHKERRQ(ierr);
  if (pc->ops->setup) {
    /* do not log solves and applications of preconditioners while constructing preconditioners; perhaps they should be logged separately from the regular solves */
    ierr = PetscLogEventDeactivatePush(KSP_Solve);CHKERRQ(ierr);
    ierr = PetscLogEventDeactivatePush(PC_Apply);CHKERRQ(ierr);
    ierr = (*pc->ops->setup)(pc);CHKERRQ(ierr);
    ierr = PetscLogEventDeactivatePop(KSP_Solve);CHKERRQ(ierr);
    ierr = PetscLogEventDeactivatePop(PC_Apply);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(PC_SetUp,pc,0,0,0);CHKERRQ(ierr);
  if (!pc->setupcalled) pc->setupcalled = 1;
  PetscFunctionReturn(0);
}

/*@
   PCSetUpOnBlocks - Sets up the preconditioner for each block in
   the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
   methods.

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Level: developer

.seealso: PCCreate(), PCApply(), PCDestroy(), PCSetUp()
@*/
PetscErrorCode PCSetUpOnBlocks(PC pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* no-op for PC types that have no per-block setup */
  if (!pc->ops->setuponblocks) PetscFunctionReturn(0);
  ierr = PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr);
  ierr = (*pc->ops->setuponblocks)(pc);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCSetModifySubMatrices - Sets a user-defined routine for modifying the
   submatrices that arise within certain subdomain-based preconditioners.
   The basic submatrices are extracted from the preconditioner matrix as
   usual; the user can then alter these (for example, to set different boundary
   conditions for each submatrix) before they are used for the local solves.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.
  func - routine for modifying the submatrices
-  ctx - optional user-defined context (may be null)

   Calling sequence of func:
$     func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx);

+  row - an array of index sets that contain the global row numbers
         that comprise each local submatrix
.  col - an array of index sets that contain the global column numbers
         that comprise each local submatrix
.  submat - array of local submatrices
-  ctx - optional user-defined context for private data for the
         user-defined func routine (may be null)

   Notes:
   PCSetModifySubMatrices() MUST be called before KSPSetUp() and
   KSPSolve().

   A routine set by PCSetModifySubMatrices() is currently called within
   the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM)
   preconditioners.  All other preconditioners ignore this routine.

   Level: advanced

.seealso: PCModifySubMatrices()
@*/
PetscErrorCode PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* stash the callback and its context; invoked later via PCModifySubMatrices() */
  pc->modifysubmatrices  = func;
  pc->modifysubmatricesP = ctx;
  PetscFunctionReturn(0);
}

/*@C
   PCModifySubMatrices - Calls an optional user-defined routine within
   certain preconditioners if one has been set with PCSetModifySubMatrices().

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  nsub - the number of local submatrices
.  row - an array of index sets that contain the global row numbers
         that comprise each local submatrix
.  col - an array of index sets that contain the global column numbers
         that comprise each local submatrix
.  submat - array of local submatrices
-  ctx - optional user-defined context for private data for the
         user-defined routine (may be null)

   Output Parameter:
.  submat - array of local submatrices (the entries of which may
            have been modified)

   Notes:
   The user should NOT generally call this routine, as it will
   automatically be called within certain preconditioners (currently
   block Jacobi, additive Schwarz) if set.

   The basic submatrices are extracted from the preconditioner matrix
   as usual; the user can then alter these (for example, to set different
   boundary conditions for each submatrix) before they are used for the
   local solves.

   Level: developer

.seealso: PCSetModifySubMatrices()
@*/
PetscErrorCode PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* silently succeed when no callback was registered */
  if (!pc->modifysubmatrices) PetscFunctionReturn(0);
  ierr = PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr);
  ierr = (*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCSetOperators - Sets the matrix associated with the linear system and
   a (possibly) different one associated with the preconditioner.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  Amat - the matrix that defines the linear system
-  Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat.

   Notes:
   Passing a NULL for Amat or Pmat removes the matrix that is currently used.
   If you wish to replace either Amat or Pmat but leave the other one untouched then
   first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
   on it and then pass it back in in your call to KSPSetOperators().

   More Notes about Repeated Solution of Linear Systems:
   PETSc does NOT reset the matrix entries of either Amat or Pmat
   to zero after a linear solve; the user is completely responsible for
   matrix assembly.  See the routine MatZeroEntries() if desiring to
   zero all elements of a matrix.

   Level: intermediate

.seealso: PCGetOperators(), MatZeroEntries()
@*/
PetscErrorCode PCSetOperators(PC pc,Mat Amat,Mat Pmat)
{
  PetscErrorCode ierr;
  PetscInt       m1,n1,m2,n2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (Amat) PetscValidHeaderSpecific(Amat,MAT_CLASSID,2);
  if (Pmat) PetscValidHeaderSpecific(Pmat,MAT_CLASSID,3);
  if (Amat) PetscCheckSameComm(pc,1,Amat,2);
  if (Pmat) PetscCheckSameComm(pc,1,Pmat,3);
  /* once the PC has been set up the local sizes of the operators may not change */
  if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) {
    ierr = MatGetLocalSize(Amat,&m1,&n1);CHKERRQ(ierr);
    ierr = MatGetLocalSize(pc->mat,&m2,&n2);CHKERRQ(ierr);
    if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
    ierr = MatGetLocalSize(Pmat,&m1,&n1);CHKERRQ(ierr);
    ierr = MatGetLocalSize(pc->pmat,&m2,&n2);CHKERRQ(ierr);
    if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
  }

  if (Pmat != pc->pmat) {
    /* changing the operator that defines the preconditioner, thus we need to clear current states so a new preconditioner is built */
    pc->matnonzerostate = -1;
    pc->matstate        = -1;
  }

  /* reference first in case the matrices are the same */
  if (Amat) {ierr = PetscObjectReference((PetscObject)Amat);CHKERRQ(ierr);}
  ierr = MatDestroy(&pc->mat);CHKERRQ(ierr);
  if (Pmat) {ierr = PetscObjectReference((PetscObject)Pmat);CHKERRQ(ierr);}
  ierr = MatDestroy(&pc->pmat);CHKERRQ(ierr);
  pc->mat  = Amat;
  pc->pmat = Pmat;
  PetscFunctionReturn(0);
}

/*@
   PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner has changed.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner

   Level: intermediate

.seealso: PCGetOperators(), MatZeroEntries(), PCGetReusePreconditioner(), KSPSetReusePreconditioner()
@*/
PetscErrorCode PCSetReusePreconditioner(PC pc,PetscBool flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,flag,2);
  pc->reusepreconditioner = flag;
  PetscFunctionReturn(0);
}

/*@
   PCGetReusePreconditioner - Determines if the PC reuses the current preconditioner even if the operator in the preconditioner has changed.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.
  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner

   Level: intermediate

.seealso: PCGetOperators(), MatZeroEntries(), PCSetReusePreconditioner()
@*/
PetscErrorCode PCGetReusePreconditioner(PC pc,PetscBool *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(flag,2);
  *flag = pc->reusepreconditioner;
  PetscFunctionReturn(0);
}

/*@
   PCGetOperators - Gets the matrix associated with the linear system and
   possibly a different one associated with the preconditioner.

   Not collective, though parallel Mats are returned if the PC is parallel

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  Amat - the matrix defining the linear system
-  Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat.

   Level: intermediate

   Notes:
   Does not increase the reference count of the matrices, so you should not destroy them

   Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators
   are created in PC and returned to the user. In this case, if both operators
   mat and pmat are requested, two DIFFERENT operators will be returned. If
   only one is requested both operators in the PC will be the same (i.e. as
   if one had called KSP/PCSetOperators() with the same argument for both Mats).
   The user must set the sizes of the returned matrices and their type etc just
   as if the user created them with MatCreate().
   For example,

$    KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to
$      set size, type, etc of Amat

$    MatCreate(comm,&Amat);
$    KSP/PCSetOperators(ksp/pc,Amat,Amat);
$    PetscObjectDereference((PetscObject)Amat);
$      set size, type, etc of Amat

   and

$    KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to
$      set size, type, etc of Amat and Pmat

$    MatCreate(comm,&Amat);
$    MatCreate(comm,&Pmat);
$    KSP/PCSetOperators(ksp/pc,Amat,Pmat);
$    PetscObjectDereference((PetscObject)Amat);
$    PetscObjectDereference((PetscObject)Pmat);
$      set size, type, etc of Amat and Pmat

   The rationale for this support is so that when creating a TS, SNES, or KSP the hierarchy
   of underlying objects (i.e. SNES, KSP, PC, Mat) and their lifespans can be completely
   managed by the top most level object (i.e. the TS, SNES, or KSP). Another way to look
   at this is when you create a SNES you do not NEED to create a KSP and attach it to
   the SNES object (the SNES object manages it for you). Similarly when you create a KSP
   you do not need to attach a PC to it (the KSP object manages the PC object for you).
   Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when
   it can be created for you?

.seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperatorsSet()
@*/
PetscErrorCode PCGetOperators(PC pc,Mat *Amat,Mat *Pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (Amat) {
    if (!pc->mat) {
      if (pc->pmat && !Pmat) {  /* Apmat has been set, but user did not request it, so use for Amat */
        pc->mat = pc->pmat;
        ierr    = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr);
      } else {                  /* both Amat and Pmat are empty */
        ierr = MatCreate(PetscObjectComm((PetscObject)pc),&pc->mat);CHKERRQ(ierr);
        if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */
          pc->pmat = pc->mat;
          ierr     = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr);
        }
      }
    }
    *Amat = pc->mat;
  }
  if (Pmat) {
    if (!pc->pmat) {
      if (pc->mat && !Amat) {  /* Amat has been set but was not requested, so use for pmat */
        pc->pmat = pc->mat;
        ierr     = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr);
      } else {
        ierr = MatCreate(PetscObjectComm((PetscObject)pc),&pc->pmat);CHKERRQ(ierr);
        if (!Amat) { /* user did NOT request Amat, so make same as Pmat */
          pc->mat = pc->pmat;
          ierr    = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr);
        }
      }
    }
    *Pmat = pc->pmat;
  }
  PetscFunctionReturn(0);
}

/*@C
   PCGetOperatorsSet - Determines if the matrix associated with the linear system and
   possibly a different one associated with the preconditioner have been set in the PC.

   Not collective, though the results on all processes should be the same

   Input Parameter:
.
pc - the preconditioner context 1360 1361 Output Parameters: 1362 + mat - the matrix associated with the linear system was set 1363 - pmat - matrix associated with the preconditioner was set, usually the same 1364 1365 Level: intermediate 1366 1367 .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperators() 1368 @*/ 1369 PetscErrorCode PCGetOperatorsSet(PC pc,PetscBool *mat,PetscBool *pmat) 1370 { 1371 PetscFunctionBegin; 1372 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1373 if (mat) *mat = (pc->mat) ? PETSC_TRUE : PETSC_FALSE; 1374 if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE; 1375 PetscFunctionReturn(0); 1376 } 1377 1378 /*@ 1379 PCFactorGetMatrix - Gets the factored matrix from the 1380 preconditioner context. This routine is valid only for the LU, 1381 incomplete LU, Cholesky, and incomplete Cholesky methods. 1382 1383 Not Collective on PC though Mat is parallel if PC is parallel 1384 1385 Input Parameters: 1386 . pc - the preconditioner context 1387 1388 Output parameters: 1389 . mat - the factored matrix 1390 1391 Level: advanced 1392 1393 Notes: 1394 Does not increase the reference count for the matrix so DO NOT destroy it 1395 1396 @*/ 1397 PetscErrorCode PCFactorGetMatrix(PC pc,Mat *mat) 1398 { 1399 PetscErrorCode ierr; 1400 1401 PetscFunctionBegin; 1402 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1403 PetscValidPointer(mat,2); 1404 if (pc->ops->getfactoredmatrix) { 1405 ierr = (*pc->ops->getfactoredmatrix)(pc,mat);CHKERRQ(ierr); 1406 } else SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC type does not support getting factor matrix"); 1407 PetscFunctionReturn(0); 1408 } 1409 1410 /*@C 1411 PCSetOptionsPrefix - Sets the prefix used for searching for all 1412 PC options in the database. 

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  prefix - the prefix string to prepend to all PC option requests

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the
   hyphen.

   Level: advanced

.seealso: PCAppendOptionsPrefix(), PCGetOptionsPrefix()
@*/
PetscErrorCode PCSetOptionsPrefix(PC pc,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* delegate to the generic PetscObject prefix machinery */
  ierr = PetscObjectSetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCAppendOptionsPrefix - Appends to the prefix used for searching for all
   PC options in the database.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  prefix - the prefix string to prepend to all PC option requests

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the
   hyphen.

   Level: advanced

.seealso: PCSetOptionsPrefix(), PCGetOptionsPrefix()
@*/
PetscErrorCode PCAppendOptionsPrefix(PC pc,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCGetOptionsPrefix - Gets the prefix used for searching for all
   PC options in the database.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.
  prefix - pointer to the prefix string used, is returned

   Notes:
   On the fortran side, the user should pass in a string 'prefix' of
   sufficient length to hold the prefix.

   Level: advanced

.seealso: PCSetOptionsPrefix(), PCAppendOptionsPrefix()
@*/
PetscErrorCode PCGetOptionsPrefix(PC pc,const char *prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(prefix,2);
  ierr = PetscObjectGetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   Indicates the right hand side will be changed by KSPSolve(), this occurs for a few
   preconditioners including BDDC and Eisenstat that transform the equations before applying
   the Krylov methods
*/
PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC pc,PetscBool *change)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(change,2);
  /* default to "does not change the RHS"; PC types that do override via composed method */
  *change = PETSC_FALSE;
  ierr = PetscTryMethod(pc,"PCPreSolveChangeRHS_C",(PC,PetscBool*),(pc,change));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCPreSolve - Optional pre-solve phase, intended for any
   preconditioner-specific actions that must be performed before
   the iterative solve itself.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  ksp - the Krylov subspace context

   Level: developer

   Sample of Usage:
.vb
    PCPreSolve(pc,ksp);
    KSPSolve(ksp,b,x);
    PCPostSolve(pc,ksp);
.ve

   Notes:
   The pre-solve phase is distinct from the PCSetUp() phase.

   KSPSolve() calls this directly, so is rarely called by the user.

.seealso: PCPostSolve()
@*/
PetscErrorCode PCPreSolve(PC pc,KSP ksp)
{
  PetscErrorCode ierr;
  Vec            x,rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
  /* track nesting depth; at most two levels of embedded PCPreSolve() are supported */
  pc->presolvedone++;
  if (pc->presolvedone > 2) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot embed PCPreSolve() more than twice");
  ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr);
  ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr);

  /* the type-provided presolve hook takes precedence over the user-set callback */
  if (pc->ops->presolve) {
    ierr = (*pc->ops->presolve)(pc,ksp,rhs,x);CHKERRQ(ierr);
  } else if (pc->presolve) {
    ierr = (pc->presolve)(pc,ksp);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@C
   PCSetPreSolve - Sets function PCPreSolve() which is intended for any
   preconditioner-specific actions that must be performed before
   the iterative solve itself.

   Logically Collective on pc

   Input Parameters:
+   pc - the preconditioner object
-   presolve - the function to call before the solve

   Calling sequence of presolve:
$  func(PC pc,KSP ksp)

+  pc - the PC context
-  ksp - the KSP context

   Level: developer

.seealso: PC, PCSetUp(), PCPreSolve()
@*/
PetscErrorCode PCSetPreSolve(PC pc,PetscErrorCode (*presolve)(PC,KSP))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  pc->presolve = presolve;
  PetscFunctionReturn(0);
}

/*@
   PCPostSolve - Optional post-solve phase, intended for any
   preconditioner-specific actions that must be performed after
   the iterative solve itself.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  ksp - the Krylov subspace context

   Sample of Usage:
.vb
    PCPreSolve(pc,ksp);
    KSPSolve(ksp,b,x);
    PCPostSolve(pc,ksp);
.ve

   Note:
   KSPSolve() calls this routine directly, so it is rarely called by the user.

   Level: developer

.seealso: PCPreSolve(), KSPSolve()
@*/
PetscErrorCode PCPostSolve(PC pc,KSP ksp)
{
  PetscErrorCode ierr;
  Vec            x,rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
  /* unwind the nesting counter incremented by the matching PCPreSolve() */
  pc->presolvedone--;
  ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr);
  ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr);
  if (pc->ops->postsolve) {
    ierr = (*pc->ops->postsolve)(pc,ksp,rhs,x);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@C
  PCLoad - Loads a PC that has been stored in binary  with PCView().

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded PC, this needs to have been created with PCCreate() or
           some related function before a call to PCLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen()

   Level: intermediate

  Notes:
   The type is determined by the data in the file, any type set into the PC before this call is ignored.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since PCLoad() and PCView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), PCView(), MatLoad(), VecLoad()
@*/
PetscErrorCode PCLoad(PC newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,PC_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* the file must begin with the PC classid marker followed by the type name */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,NULL,PETSC_INT);CHKERRQ(ierr);
  if (classid != PC_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not PC next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,NULL,PETSC_CHAR);CHKERRQ(ierr);
  ierr = PCSetType(newdm, type);CHKERRQ(ierr);
  /* let the concrete type load any additional data it wrote */
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#include <petscdraw.h>
#if defined(PETSC_HAVE_SAWS)
#include <petscviewersaws.h>
#endif

/*@C
   PCViewFromOptions - View from Options

   Collective on PC

   Input Parameters:
+  A - the PC context
.  obj - Optional object
-  name - command line option

   Level: intermediate
.seealso:  PC, PCView, PetscObjectViewFromOptions(), PCCreate()
@*/
PetscErrorCode PCViewFromOptions(PC A,PetscObject obj,const char name[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,PC_CLASSID,1);
  ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCView - Prints the PC data structure.

   Collective on PC

   Input Parameters:
+  PC - the PC context
-  viewer - optional visualization context

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
         output where only the first processor opens
         the file.  All other processors send their
         data to the first processor to print.

   The user can open an alternative visualization contexts with
   PetscViewerASCIIOpen() (output to a specified file).

   Level: developer

.seealso: KSPView(), PetscViewerASCIIOpen()
@*/
PetscErrorCode PCView(PC pc,PetscViewer viewer)
{
  PCType         cstr;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring,isbinary,isdraw;
#if defined(PETSC_HAVE_SAWS)
  PetscBool      issaws;
#endif

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* default to stdout on the PC's communicator when no viewer was given */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  PetscCheckSameComm(pc,1,viewer,2);

  /* dispatch on the concrete viewer type */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
#if defined(PETSC_HAVE_SAWS)
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSAWS,&issaws);CHKERRQ(ierr);
#endif

  if (iascii) {
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = PetscViewerASCIIPrintf(viewer,"  PC has not been set up so information may be incomplete\n");CHKERRQ(ierr);
    }
    if (pc->ops->view) {
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
    }
    if (pc->mat) {
      ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);CHKERRQ(ierr);
      if (pc->pmat == pc->mat) {
        ierr = PetscViewerASCIIPrintf(viewer,"  linear system matrix = precond matrix:\n");CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      } else {
        if (pc->pmat) {
          ierr = PetscViewerASCIIPrintf(viewer,"  linear system matrix followed by preconditioner matrix:\n");CHKERRQ(ierr);
        } else {
          ierr = PetscViewerASCIIPrintf(viewer,"  linear system matrix:\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);
        if (pc->pmat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);}
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PCGetType(pc,&cstr);CHKERRQ(ierr);
    ierr = PetscViewerStringSPrintf(viewer," PCType: %-7.7s",cstr);CHKERRQ(ierr);
    if (pc->ops->view) {ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);}
    if (pc->mat) {ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);}
    if (pc->pmat && pc->pmat != pc->mat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);}
  } else if (isbinary) {
    PetscInt    classid = PC_FILE_CLASSID;
    MPI_Comm    comm;
    PetscMPIInt rank;
    char        type[256];

    /* only rank 0 writes the header (classid + type name); see PCLoad() for the reader */
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
    if (rank == 0) {
      ierr = PetscViewerBinaryWrite(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
      ierr = PetscStrncpy(type,((PetscObject)pc)->type_name,256);CHKERRQ(ierr);
      ierr = PetscViewerBinaryWrite(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
    }
    if (pc->ops->view) {
      ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);
    }
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x,y,bottom,h;
    PetscInt  n;

    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawGetCurrentPoint(draw,&x,&y);CHKERRQ(ierr);
    if (pc->mat) {
      ierr = MatGetSize(pc->mat,&n,NULL);CHKERRQ(ierr);
      ierr = PetscSNPrintf(str,25,"PC: %s (%D)",((PetscObject)pc)->type_name,n);CHKERRQ(ierr);
    } else {
      ierr = PetscSNPrintf(str,25,"PC: %s",((PetscObject)pc)->type_name);CHKERRQ(ierr);
    }
    ierr   = PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);CHKERRQ(ierr);
    bottom = y - h;
    ierr   = PetscDrawPushCurrentPoint(draw,x,bottom);CHKERRQ(ierr);
    if (pc->ops->view) {
      ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);
    }
    ierr = PetscDrawPopCurrentPoint(draw);CHKERRQ(ierr);
#if defined(PETSC_HAVE_SAWS)
  } else if (issaws) {
    PetscMPIInt rank;

    ierr = PetscObjectName((PetscObject)pc);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRMPI(ierr);
    if (!((PetscObject)pc)->amsmem && rank == 0) {
      ierr = PetscObjectViewSAWs((PetscObject)pc,viewer);CHKERRQ(ierr);
    }
    if (pc->mat) {ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);}
    if (pc->pmat && pc->pmat != pc->mat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);}
#endif
  }
  PetscFunctionReturn(0);
}

/*@C
  PCRegister -  Adds a method to the preconditioner package.

   Not collective

   Input Parameters:
+  sname - name of a new user-defined solver
-  function - routine to create method context

   Notes:
   PCRegister() may be called multiple times to add several user-defined preconditioners.
1861 1862 Sample usage: 1863 .vb 1864 PCRegister("my_solver", MySolverCreate); 1865 .ve 1866 1867 Then, your solver can be chosen with the procedural interface via 1868 $ PCSetType(pc,"my_solver") 1869 or at runtime via the option 1870 $ -pc_type my_solver 1871 1872 Level: advanced 1873 1874 .seealso: PCRegisterAll() 1875 @*/ 1876 PetscErrorCode PCRegister(const char sname[],PetscErrorCode (*function)(PC)) 1877 { 1878 PetscErrorCode ierr; 1879 1880 PetscFunctionBegin; 1881 ierr = PCInitializePackage();CHKERRQ(ierr); 1882 ierr = PetscFunctionListAdd(&PCList,sname,function);CHKERRQ(ierr); 1883 PetscFunctionReturn(0); 1884 } 1885 1886 static PetscErrorCode MatMult_PC(Mat A,Vec X,Vec Y) 1887 { 1888 PC pc; 1889 PetscErrorCode ierr; 1890 1891 PetscFunctionBegin; 1892 ierr = MatShellGetContext(A,&pc);CHKERRQ(ierr); 1893 ierr = PCApply(pc,X,Y);CHKERRQ(ierr); 1894 PetscFunctionReturn(0); 1895 } 1896 1897 /*@ 1898 PCComputeOperator - Computes the explicit preconditioned operator. 1899 1900 Collective on PC 1901 1902 Input Parameters: 1903 + pc - the preconditioner object 1904 - mattype - the matrix type to be used for the operator 1905 1906 Output Parameter: 1907 . mat - the explicit preconditioned operator 1908 1909 Notes: 1910 This computation is done by applying the operators to columns of the identity matrix. 1911 This routine is costly in general, and is recommended for use only with relatively small systems. 
1912 Currently, this routine uses a dense matrix format when mattype == NULL 1913 1914 Level: advanced 1915 1916 .seealso: KSPComputeOperator(), MatType 1917 1918 @*/ 1919 PetscErrorCode PCComputeOperator(PC pc,MatType mattype,Mat *mat) 1920 { 1921 PetscErrorCode ierr; 1922 PetscInt N,M,m,n; 1923 Mat A,Apc; 1924 1925 PetscFunctionBegin; 1926 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1927 PetscValidPointer(mat,3); 1928 ierr = PCGetOperators(pc,&A,NULL);CHKERRQ(ierr); 1929 ierr = MatGetLocalSize(A,&m,&n);CHKERRQ(ierr); 1930 ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr); 1931 ierr = MatCreateShell(PetscObjectComm((PetscObject)pc),m,n,M,N,pc,&Apc);CHKERRQ(ierr); 1932 ierr = MatShellSetOperation(Apc,MATOP_MULT,(void (*)(void))MatMult_PC);CHKERRQ(ierr); 1933 ierr = MatComputeOperator(Apc,mattype,mat);CHKERRQ(ierr); 1934 ierr = MatDestroy(&Apc);CHKERRQ(ierr); 1935 PetscFunctionReturn(0); 1936 } 1937 1938 /*@ 1939 PCSetCoordinates - sets the coordinates of all the nodes on the local process 1940 1941 Collective on PC 1942 1943 Input Parameters: 1944 + pc - the solver context 1945 . dim - the dimension of the coordinates 1, 2, or 3 1946 . nloc - the blocked size of the coordinates array 1947 - coords - the coordinates array 1948 1949 Level: intermediate 1950 1951 Notes: 1952 coords is an array of the dim coordinates for the nodes on 1953 the local processor, of size dim*nloc. 1954 If there are 108 equation on a processor 1955 for a displacement finite element discretization of elasticity (so 1956 that there are nloc = 36 = 108/3 nodes) then the array must have 108 1957 double precision values (ie, 3 * 36). These x y z coordinates 1958 should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x, 1959 ... , N-1.z ]. 
.seealso: MatSetNearNullSpace()
@*/
PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,dim,2);
  /* forward to the implementation's PCSetCoordinates_C method if it provides one; silently a no-op otherwise */
  ierr = PetscTryMethod(pc,"PCSetCoordinates_C",(PC,PetscInt,PetscInt,PetscReal*),(pc,dim,nloc,coords));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCGetInterpolations - Gets interpolation matrices for all levels (except level 0)

   Logically Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  num_levels - the number of levels
-  interpolations - the interpolation matrices (size of num_levels-1)

   Level: advanced

.keywords: MG, GAMG, BoomerAMG, multigrid, interpolation, level

.seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetInterpolation(), PCGetCoarseOperators()
@*/
PetscErrorCode PCGetInterpolations(PC pc,PetscInt *num_levels,Mat *interpolations[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(num_levels,2);
  PetscValidPointer(interpolations,3);
  /* unlike PetscTryMethod above, PetscUseMethod errors if the PC type does not supply this method */
  ierr = PetscUseMethod(pc,"PCGetInterpolations_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,interpolations));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level)

   Logically Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  num_levels - the number of levels
-  coarseOperators - the coarse operator matrices (size of num_levels-1)

   Level: advanced

.keywords: MG, GAMG, BoomerAMG, get, multigrid, interpolation, level

.seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetRScale(), PCMGGetInterpolation(), PCGetInterpolations()
@*/
PetscErrorCode PCGetCoarseOperators(PC pc,PetscInt *num_levels,Mat *coarseOperators[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(num_levels,2);
  PetscValidPointer(coarseOperators,3);
  /* errors if the PC implementation (e.g. PCMG/PCGAMG) does not supply PCGetCoarseOperators_C */
  ierr = PetscUseMethod(pc,"PCGetCoarseOperators_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,coarseOperators));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}