1 2 /* 3 The PC (preconditioner) interface routines, callable by users. 4 */ 5 #include <petsc/private/pcimpl.h> /*I "petscksp.h" I*/ 6 #include <petscdm.h> 7 8 /* Logging support */ 9 PetscClassId PC_CLASSID; 10 PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_MatApply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft; 11 PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks; 12 PetscInt PetscMGLevelId; 13 14 PetscErrorCode PCGetDefaultType_Private(PC pc,const char *type[]) 15 { 16 PetscErrorCode ierr; 17 PetscMPIInt size; 18 PetscBool hasop,flg1,flg2,set,flg3; 19 20 PetscFunctionBegin; 21 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRMPI(ierr); 22 if (pc->pmat) { 23 ierr = MatHasOperation(pc->pmat,MATOP_GET_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr); 24 if (size == 1) { 25 ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1);CHKERRQ(ierr); 26 ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2);CHKERRQ(ierr); 27 ierr = MatIsSymmetricKnown(pc->pmat,&set,&flg3);CHKERRQ(ierr); 28 if (flg1 && (!flg2 || (set && flg3))) { 29 *type = PCICC; 30 } else if (flg2) { 31 *type = PCILU; 32 } else if (hasop) { /* likely is a parallel matrix run on one processor */ 33 *type = PCBJACOBI; 34 } else { 35 *type = PCNONE; 36 } 37 } else { 38 if (hasop) { 39 *type = PCBJACOBI; 40 } else { 41 *type = PCNONE; 42 } 43 } 44 } else { 45 if (size == 1) { 46 *type = PCILU; 47 } else { 48 *type = PCBJACOBI; 49 } 50 } 51 PetscFunctionReturn(0); 52 } 53 54 /*@ 55 PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats 56 57 Collective on PC 58 59 Input Parameter: 60 . 
pc - the preconditioner context 61 62 Level: developer 63 64 Notes: 65 This allows a PC to be reused for a different sized linear system but using the same options that have been previously set in the PC 66 67 .seealso: PCCreate(), PCSetUp() 68 @*/ 69 PetscErrorCode PCReset(PC pc) 70 { 71 PetscErrorCode ierr; 72 73 PetscFunctionBegin; 74 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 75 if (pc->ops->reset) { 76 ierr = (*pc->ops->reset)(pc);CHKERRQ(ierr); 77 } 78 ierr = VecDestroy(&pc->diagonalscaleright);CHKERRQ(ierr); 79 ierr = VecDestroy(&pc->diagonalscaleleft);CHKERRQ(ierr); 80 ierr = MatDestroy(&pc->pmat);CHKERRQ(ierr); 81 ierr = MatDestroy(&pc->mat);CHKERRQ(ierr); 82 83 pc->setupcalled = 0; 84 PetscFunctionReturn(0); 85 } 86 87 /*@C 88 PCDestroy - Destroys PC context that was created with PCCreate(). 89 90 Collective on PC 91 92 Input Parameter: 93 . pc - the preconditioner context 94 95 Level: developer 96 97 .seealso: PCCreate(), PCSetUp() 98 @*/ 99 PetscErrorCode PCDestroy(PC *pc) 100 { 101 PetscErrorCode ierr; 102 103 PetscFunctionBegin; 104 if (!*pc) PetscFunctionReturn(0); 105 PetscValidHeaderSpecific((*pc),PC_CLASSID,1); 106 if (--((PetscObject)(*pc))->refct > 0) {*pc = NULL; PetscFunctionReturn(0);} 107 108 ierr = PCReset(*pc);CHKERRQ(ierr); 109 110 /* if memory was published with SAWs then destroy it */ 111 ierr = PetscObjectSAWsViewOff((PetscObject)*pc);CHKERRQ(ierr); 112 if ((*pc)->ops->destroy) {ierr = (*(*pc)->ops->destroy)((*pc));CHKERRQ(ierr);} 113 ierr = DMDestroy(&(*pc)->dm);CHKERRQ(ierr); 114 ierr = PetscHeaderDestroy(pc);CHKERRQ(ierr); 115 PetscFunctionReturn(0); 116 } 117 118 /*@C 119 PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right 120 scaling as needed by certain time-stepping codes. 121 122 Logically Collective on PC 123 124 Input Parameter: 125 . pc - the preconditioner context 126 127 Output Parameter: 128 . 
flag - PETSC_TRUE if it applies the scaling 129 130 Level: developer 131 132 Notes: 133 If this returns PETSC_TRUE then the system solved via the Krylov method is 134 $ D M A D^{-1} y = D M b for left preconditioning or 135 $ D A M D^{-1} z = D b for right preconditioning 136 137 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCSetDiagonalScale() 138 @*/ 139 PetscErrorCode PCGetDiagonalScale(PC pc,PetscBool *flag) 140 { 141 PetscFunctionBegin; 142 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 143 PetscValidBoolPointer(flag,2); 144 *flag = pc->diagonalscale; 145 PetscFunctionReturn(0); 146 } 147 148 /*@ 149 PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right 150 scaling as needed by certain time-stepping codes. 151 152 Logically Collective on PC 153 154 Input Parameters: 155 + pc - the preconditioner context 156 - s - scaling vector 157 158 Level: intermediate 159 160 Notes: 161 The system solved via the Krylov method is 162 $ D M A D^{-1} y = D M b for left preconditioning or 163 $ D A M D^{-1} z = D b for right preconditioning 164 165 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 
166 167 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCGetDiagonalScale() 168 @*/ 169 PetscErrorCode PCSetDiagonalScale(PC pc,Vec s) 170 { 171 PetscErrorCode ierr; 172 173 PetscFunctionBegin; 174 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 175 PetscValidHeaderSpecific(s,VEC_CLASSID,2); 176 pc->diagonalscale = PETSC_TRUE; 177 178 ierr = PetscObjectReference((PetscObject)s);CHKERRQ(ierr); 179 ierr = VecDestroy(&pc->diagonalscaleleft);CHKERRQ(ierr); 180 181 pc->diagonalscaleleft = s; 182 183 ierr = VecDuplicate(s,&pc->diagonalscaleright);CHKERRQ(ierr); 184 ierr = VecCopy(s,pc->diagonalscaleright);CHKERRQ(ierr); 185 ierr = VecReciprocal(pc->diagonalscaleright);CHKERRQ(ierr); 186 PetscFunctionReturn(0); 187 } 188 189 /*@ 190 PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes. 191 192 Logically Collective on PC 193 194 Input Parameters: 195 + pc - the preconditioner context 196 . in - input vector 197 - out - scaled vector (maybe the same as in) 198 199 Level: intermediate 200 201 Notes: 202 The system solved via the Krylov method is 203 $ D M A D^{-1} y = D M b for left preconditioning or 204 $ D A M D^{-1} z = D b for right preconditioning 205 206 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 
207 208 If diagonal scaling is turned off and in is not out then in is copied to out 209 210 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleSet(), PCDiagonalScaleRight(), PCDiagonalScale() 211 @*/ 212 PetscErrorCode PCDiagonalScaleLeft(PC pc,Vec in,Vec out) 213 { 214 PetscErrorCode ierr; 215 216 PetscFunctionBegin; 217 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 218 PetscValidHeaderSpecific(in,VEC_CLASSID,2); 219 PetscValidHeaderSpecific(out,VEC_CLASSID,3); 220 if (pc->diagonalscale) { 221 ierr = VecPointwiseMult(out,pc->diagonalscaleleft,in);CHKERRQ(ierr); 222 } else if (in != out) { 223 ierr = VecCopy(in,out);CHKERRQ(ierr); 224 } 225 PetscFunctionReturn(0); 226 } 227 228 /*@ 229 PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes. 230 231 Logically Collective on PC 232 233 Input Parameters: 234 + pc - the preconditioner context 235 . in - input vector 236 - out - scaled vector (maybe the same as in) 237 238 Level: intermediate 239 240 Notes: 241 The system solved via the Krylov method is 242 $ D M A D^{-1} y = D M b for left preconditioning or 243 $ D A M D^{-1} z = D b for right preconditioning 244 245 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 
246 247 If diagonal scaling is turned off and in is not out then in is copied to out 248 249 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleSet(), PCDiagonalScale() 250 @*/ 251 PetscErrorCode PCDiagonalScaleRight(PC pc,Vec in,Vec out) 252 { 253 PetscErrorCode ierr; 254 255 PetscFunctionBegin; 256 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 257 PetscValidHeaderSpecific(in,VEC_CLASSID,2); 258 PetscValidHeaderSpecific(out,VEC_CLASSID,3); 259 if (pc->diagonalscale) { 260 ierr = VecPointwiseMult(out,pc->diagonalscaleright,in);CHKERRQ(ierr); 261 } else if (in != out) { 262 ierr = VecCopy(in,out);CHKERRQ(ierr); 263 } 264 PetscFunctionReturn(0); 265 } 266 267 /*@ 268 PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the 269 operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(), 270 TSSetIJacobian(), SNESSetJacobian(), KSPSetOperator() or PCSetOperator() not the Pmat. 271 272 Logically Collective on PC 273 274 Input Parameters: 275 + pc - the preconditioner context 276 - flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false) 277 278 Options Database Key: 279 . -pc_use_amat <true,false> 280 281 Notes: 282 For the common case in which the linear system matrix and the matrix used to construct the 283 preconditioner are identical, this routine is does nothing. 284 285 Level: intermediate 286 287 .seealso: PCGetUseAmat(), PCBJACOBI, PGMG, PCFIELDSPLIT, PCCOMPOSITE 288 @*/ 289 PetscErrorCode PCSetUseAmat(PC pc,PetscBool flg) 290 { 291 PetscFunctionBegin; 292 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 293 pc->useAmat = flg; 294 PetscFunctionReturn(0); 295 } 296 297 /*@ 298 PCSetErrorIfFailure - Causes PC to generate an error if a FPE, for example a zero pivot, is detected. 

   Logically Collective on PC

   Input Parameters:
+  pc - iterative context obtained from PCCreate()
-  flg - PETSC_TRUE indicates you want the error generated

   Level: advanced

   Notes:
    Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner, you can call KSPGetConvergedReason() after a KSPSolve()
    to determine if it has converged or failed. Or use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is
    detected.

    This is propagated into KSPs used by this PC, which then propagate it into PCs used by those KSPs

.seealso: PCGetInitialGuessNonzero(), PCSetInitialGuessKnoll(), PCGetInitialGuessKnoll()
@*/
PetscErrorCode PCSetErrorIfFailure(PC pc,PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,flg,2);
  pc->erroriffailure = flg;
  PetscFunctionReturn(0);
}

/*@
   PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the
   operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
   TSSetIJacobian(), SNESSetJacobian(), KSPSetOperator() or PCSetOperator() not the Pmat.

   Logically Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)

   Notes:
   For the common case in which the linear system matrix and the matrix used to construct the
   preconditioner are identical, this routine does nothing.
342 343 Level: intermediate 344 345 .seealso: PCSetUseAmat(), PCBJACOBI, PGMG, PCFIELDSPLIT, PCCOMPOSITE 346 @*/ 347 PetscErrorCode PCGetUseAmat(PC pc,PetscBool *flg) 348 { 349 PetscFunctionBegin; 350 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 351 *flg = pc->useAmat; 352 PetscFunctionReturn(0); 353 } 354 355 /*@ 356 PCCreate - Creates a preconditioner context. 357 358 Collective 359 360 Input Parameter: 361 . comm - MPI communicator 362 363 Output Parameter: 364 . pc - location to put the preconditioner context 365 366 Notes: 367 The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi with PCILU or PCICC 368 in parallel. For dense matrices it is always PCNONE. 369 370 Level: developer 371 372 .seealso: PCSetUp(), PCApply(), PCDestroy() 373 @*/ 374 PetscErrorCode PCCreate(MPI_Comm comm,PC *newpc) 375 { 376 PC pc; 377 PetscErrorCode ierr; 378 379 PetscFunctionBegin; 380 PetscValidPointer(newpc,1); 381 *newpc = NULL; 382 ierr = PCInitializePackage();CHKERRQ(ierr); 383 384 ierr = PetscHeaderCreate(pc,PC_CLASSID,"PC","Preconditioner","PC",comm,PCDestroy,PCView);CHKERRQ(ierr); 385 386 pc->mat = NULL; 387 pc->pmat = NULL; 388 pc->setupcalled = 0; 389 pc->setfromoptionscalled = 0; 390 pc->data = NULL; 391 pc->diagonalscale = PETSC_FALSE; 392 pc->diagonalscaleleft = NULL; 393 pc->diagonalscaleright = NULL; 394 395 pc->modifysubmatrices = NULL; 396 pc->modifysubmatricesP = NULL; 397 398 *newpc = pc; 399 PetscFunctionReturn(0); 400 401 } 402 403 /* -------------------------------------------------------------------------------*/ 404 405 /*@ 406 PCApply - Applies the preconditioner to a vector. 407 408 Collective on PC 409 410 Input Parameters: 411 + pc - the preconditioner context 412 - x - input vector 413 414 Output Parameter: 415 . 
y - output vector

   Level: developer

.seealso: PCApplyTranspose(), PCApplyBAorAB()
@*/
PetscErrorCode PCApply(PC pc,Vec x,Vec y)
{
  PetscErrorCode ierr;
  PetscInt       m,n,mv,nv;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);}
  /* use pmat to check vector sizes since for KSPLSQR the pmat may be of a different size than mat */
  ierr = MatGetLocalSize(pc->pmat,&m,&n);CHKERRQ(ierr);
  ierr = VecGetLocalSize(x,&mv);CHKERRQ(ierr);
  ierr = VecGetLocalSize(y,&nv);CHKERRQ(ierr);
  /* check pmat * y = x is feasible */
  if (mv != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local rows %D does not equal input vector size %D",m,mv);
  if (nv != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local columns %D does not equal output vector size %D",n,nv);
  /* y will be written; refuse if the caller holds a read lock on it */
  ierr = VecSetErrorIfLocked(y,3);CHKERRQ(ierr);

  ierr = PCSetUp(pc);CHKERRQ(ierr);
  if (!pc->ops->apply) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply");
  /* read-lock x so the type-specific apply cannot modify the input */
  ierr = VecLockReadPush(x);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  ierr = (*pc->ops->apply)(pc,x,y);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);}
  ierr = VecLockReadPop(x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCMatApply - Applies the preconditioner to multiple vectors stored as a MATDENSE. Like PCApply(), Y and X must be different matrices.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  X - block of input vectors

   Output Parameter:
.  Y - block of output vectors

   Level: developer

.seealso: PCApply(), KSPMatSolve()
@*/
PetscErrorCode PCMatApply(PC pc,Mat X,Mat Y)
{
  Mat            A;
  Vec            cy, cx;
  PetscInt       m1, M1, m2, M2, n1, N1, n2, N2, m3, M3, n3, N3;
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(X, MAT_CLASSID, 2);
  PetscValidHeaderSpecific(Y, MAT_CLASSID, 3);
  PetscCheckSameComm(pc, 1, X, 2);
  PetscCheckSameComm(pc, 1, Y, 3);
  if (Y == X) SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "Y and X must be different matrices");
  /* check all three layouts (Pmat, X, Y) are mutually compatible before applying */
  ierr = PCGetOperators(pc, NULL, &A);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A, &m3, &n3);CHKERRQ(ierr);
  ierr = MatGetLocalSize(X, &m2, &n2);CHKERRQ(ierr);
  ierr = MatGetLocalSize(Y, &m1, &n1);CHKERRQ(ierr);
  ierr = MatGetSize(A, &M3, &N3);CHKERRQ(ierr);
  ierr = MatGetSize(X, &M2, &N2);CHKERRQ(ierr);
  ierr = MatGetSize(Y, &M1, &N1);CHKERRQ(ierr);
  if (n1 != n2 || N1 != N2) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of input vectors (n,N) = (%D,%D) and block of output vectors (n,N) = (%D,%D)", n2, N2, n1, N1);
  if (m2 != m3 || M2 != M3) SETERRQ6(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of input vectors (m,M) = (%D,%D) and Pmat (m,M)x(n,N) = (%D,%D)x(%D,%D)", m2, M2, m3, M3, n3, N3);
  if (m1 != n3 || M1 != N3) SETERRQ6(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of output vectors (m,M) = (%D,%D) and Pmat (m,M)x(n,N) = (%D,%D)x(%D,%D)", m1, M1, m3, M3, n3, N3);
  ierr = PetscObjectBaseTypeCompareAny((PetscObject)Y, &match, MATSEQDENSE, MATMPIDENSE, "");CHKERRQ(ierr);
  if (!match) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of output vectors not stored in a dense Mat");
  ierr = PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, "");CHKERRQ(ierr);
  if (!match) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of input vectors not stored in a dense Mat");
  ierr = PCSetUp(pc);CHKERRQ(ierr);
  if (pc->ops->matapply) {
    ierr = PetscLogEventBegin(PC_MatApply, pc, X, Y, 0);CHKERRQ(ierr);
    ierr = (*pc->ops->matapply)(pc, X, Y);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_MatApply, pc, X, Y, 0);CHKERRQ(ierr);
  } else {
    /* fallback for PC types without a block apply: apply to each column separately */
    ierr = PetscInfo1(pc, "PC type %s applying column by column\n", ((PetscObject)pc)->type_name);CHKERRQ(ierr);
    for (n1 = 0; n1 < N1; ++n1) {
      ierr = MatDenseGetColumnVecRead(X, n1, &cx);CHKERRQ(ierr);
      ierr = MatDenseGetColumnVecWrite(Y, n1, &cy);CHKERRQ(ierr);
      ierr = PCApply(pc, cx, cy);CHKERRQ(ierr);
      ierr = MatDenseRestoreColumnVecWrite(Y, n1, &cy);CHKERRQ(ierr);
      ierr = MatDenseRestoreColumnVecRead(X, n1, &cx);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

/*@
   PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector

   Notes:
   Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.

   Level: developer

.seealso: PCApply(), PCApplySymmetricRight()
@*/
PetscErrorCode PCApplySymmetricLeft(PC pc,Vec x,Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);}
  ierr = PCSetUp(pc);CHKERRQ(ierr);
  if (!pc->ops->applysymmetricleft) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply");
  /* read-lock x so the type-specific implementation cannot modify the input */
  ierr = VecLockReadPush(x);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr);
  ierr = (*pc->ops->applysymmetricleft)(pc,x,y);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr);
  ierr = VecLockReadPop(x);CHKERRQ(ierr);
  if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/*@
   PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector

   Level: developer

   Notes:
   Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.
571 572 .seealso: PCApply(), PCApplySymmetricLeft() 573 @*/ 574 PetscErrorCode PCApplySymmetricRight(PC pc,Vec x,Vec y) 575 { 576 PetscErrorCode ierr; 577 578 PetscFunctionBegin; 579 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 580 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 581 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 582 if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 583 if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);} 584 ierr = PCSetUp(pc);CHKERRQ(ierr); 585 if (!pc->ops->applysymmetricright) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply"); 586 ierr = VecLockReadPush(x);CHKERRQ(ierr); 587 ierr = PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 588 ierr = (*pc->ops->applysymmetricright)(pc,x,y);CHKERRQ(ierr); 589 ierr = PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 590 ierr = VecLockReadPop(x);CHKERRQ(ierr); 591 if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);} 592 PetscFunctionReturn(0); 593 } 594 595 /*@ 596 PCApplyTranspose - Applies the transpose of preconditioner to a vector. 597 598 Collective on PC 599 600 Input Parameters: 601 + pc - the preconditioner context 602 - x - input vector 603 604 Output Parameter: 605 . y - output vector 606 607 Notes: 608 For complex numbers this applies the non-Hermitian transpose. 

   Developer Notes:
    We need to implement a PCApplyHermitianTranspose()

   Level: developer

.seealso: PCApply(), PCApplyBAorAB(), PCApplyBAorABTranspose(), PCApplyTransposeExists()
@*/
PetscErrorCode PCApplyTranspose(PC pc,Vec x,Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);}
  ierr = PCSetUp(pc);CHKERRQ(ierr);
  if (!pc->ops->applytranspose) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply transpose");
  ierr = VecLockReadPush(x);CHKERRQ(ierr);
  /* NOTE(review): logged under the shared PC_Apply event, not a dedicated transpose event */
  ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  ierr = (*pc->ops->applytranspose)(pc,x,y);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  ierr = VecLockReadPop(x);CHKERRQ(ierr);
  if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/*@
   PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  flg - PETSC_TRUE if a transpose operation is defined

   Level: developer

.seealso: PCApplyTranspose()
@*/
PetscErrorCode PCApplyTransposeExists(PC pc,PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidBoolPointer(flg,2);
  if (pc->ops->applytranspose) *flg = PETSC_TRUE;
  else *flg = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/*@
   PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x.
665 666 Collective on PC 667 668 Input Parameters: 669 + pc - the preconditioner context 670 . side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC 671 . x - input vector 672 - work - work vector 673 674 Output Parameter: 675 . y - output vector 676 677 Level: developer 678 679 Notes: 680 If the PC has had PCSetDiagonalScale() set then D M A D^{-1} for left preconditioning or D A M D^{-1} is actually applied. Note that the 681 specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling. 682 683 .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorABTranspose() 684 @*/ 685 PetscErrorCode PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work) 686 { 687 PetscErrorCode ierr; 688 689 PetscFunctionBegin; 690 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 691 PetscValidLogicalCollectiveEnum(pc,side,2); 692 PetscValidHeaderSpecific(x,VEC_CLASSID,3); 693 PetscValidHeaderSpecific(y,VEC_CLASSID,4); 694 PetscValidHeaderSpecific(work,VEC_CLASSID,5); 695 PetscCheckSameComm(pc,1,x,3); 696 PetscCheckSameComm(pc,1,y,4); 697 PetscCheckSameComm(pc,1,work,5); 698 if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 699 if (side != PC_LEFT && side != PC_SYMMETRIC && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric"); 700 if (pc->diagonalscale && side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application"); 701 if (pc->erroriffailure) {ierr = VecValidValues(x,3,PETSC_TRUE);CHKERRQ(ierr);} 702 703 ierr = PCSetUp(pc);CHKERRQ(ierr); 704 if (pc->diagonalscale) { 705 if (pc->ops->applyBA) { 706 Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */ 707 ierr = VecDuplicate(x,&work2);CHKERRQ(ierr); 708 ierr = 
PCDiagonalScaleRight(pc,x,work2);CHKERRQ(ierr); 709 ierr = (*pc->ops->applyBA)(pc,side,work2,y,work);CHKERRQ(ierr); 710 ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr); 711 ierr = VecDestroy(&work2);CHKERRQ(ierr); 712 } else if (side == PC_RIGHT) { 713 ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr); 714 ierr = PCApply(pc,y,work);CHKERRQ(ierr); 715 ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr); 716 ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr); 717 } else if (side == PC_LEFT) { 718 ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr); 719 ierr = MatMult(pc->mat,y,work);CHKERRQ(ierr); 720 ierr = PCApply(pc,work,y);CHKERRQ(ierr); 721 ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr); 722 } else if (side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner"); 723 } else { 724 if (pc->ops->applyBA) { 725 ierr = (*pc->ops->applyBA)(pc,side,x,y,work);CHKERRQ(ierr); 726 } else if (side == PC_RIGHT) { 727 ierr = PCApply(pc,x,work);CHKERRQ(ierr); 728 ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr); 729 } else if (side == PC_LEFT) { 730 ierr = MatMult(pc->mat,x,work);CHKERRQ(ierr); 731 ierr = PCApply(pc,work,y);CHKERRQ(ierr); 732 } else if (side == PC_SYMMETRIC) { 733 /* There's an extra copy here; maybe should provide 2 work vectors instead? */ 734 ierr = PCApplySymmetricRight(pc,x,work);CHKERRQ(ierr); 735 ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr); 736 ierr = VecCopy(y,work);CHKERRQ(ierr); 737 ierr = PCApplySymmetricLeft(pc,work,y);CHKERRQ(ierr); 738 } 739 } 740 if (pc->erroriffailure) {ierr = VecValidValues(y,4,PETSC_FALSE);CHKERRQ(ierr);} 741 PetscFunctionReturn(0); 742 } 743 744 /*@ 745 PCApplyBAorABTranspose - Applies the transpose of the preconditioner 746 and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning, 747 NOT tr(B*A) = tr(A)*tr(B). 748 749 Collective on PC 750 751 Input Parameters: 752 + pc - the preconditioner context 753 . 
side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC 754 . x - input vector 755 - work - work vector 756 757 Output Parameter: 758 . y - output vector 759 760 761 Notes: 762 this routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner 763 defined by B'. This is why this has the funny form that it computes tr(B) * tr(A) 764 765 Level: developer 766 767 .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorAB() 768 @*/ 769 PetscErrorCode PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work) 770 { 771 PetscErrorCode ierr; 772 773 PetscFunctionBegin; 774 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 775 PetscValidHeaderSpecific(x,VEC_CLASSID,3); 776 PetscValidHeaderSpecific(y,VEC_CLASSID,4); 777 PetscValidHeaderSpecific(work,VEC_CLASSID,5); 778 if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 779 if (pc->erroriffailure) {ierr = VecValidValues(x,3,PETSC_TRUE);CHKERRQ(ierr);} 780 if (pc->ops->applyBAtranspose) { 781 ierr = (*pc->ops->applyBAtranspose)(pc,side,x,y,work);CHKERRQ(ierr); 782 if (pc->erroriffailure) {ierr = VecValidValues(y,4,PETSC_FALSE);CHKERRQ(ierr);} 783 PetscFunctionReturn(0); 784 } 785 if (side != PC_LEFT && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left"); 786 787 ierr = PCSetUp(pc);CHKERRQ(ierr); 788 if (side == PC_RIGHT) { 789 ierr = PCApplyTranspose(pc,x,work);CHKERRQ(ierr); 790 ierr = MatMultTranspose(pc->mat,work,y);CHKERRQ(ierr); 791 } else if (side == PC_LEFT) { 792 ierr = MatMultTranspose(pc->mat,x,work);CHKERRQ(ierr); 793 ierr = PCApplyTranspose(pc,work,y);CHKERRQ(ierr); 794 } 795 /* add support for PC_SYMMETRIC */ 796 if (pc->erroriffailure) {ierr = VecValidValues(y,4,PETSC_FALSE);CHKERRQ(ierr);} 797 PetscFunctionReturn(0); 798 } 799 800 /* -------------------------------------------------------------------------------*/ 

/*@
   PCApplyRichardsonExists - Determines whether a particular preconditioner has a
   built-in fast application of Richardson's method.

   Not Collective

   Input Parameter:
.  pc - the preconditioner

   Output Parameter:
.  exists - PETSC_TRUE or PETSC_FALSE

   Level: developer

.seealso: PCApplyRichardson()
@*/
PetscErrorCode PCApplyRichardsonExists(PC pc,PetscBool *exists)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(exists,2);
  if (pc->ops->applyrichardson) *exists = PETSC_TRUE;
  else *exists = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/*@
   PCApplyRichardson - Applies several steps of Richardson iteration with
   the particular preconditioner. This routine is usually used by the
   Krylov solvers and not the application code directly.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  b - the right hand side
.  w - one work vector
.  rtol - relative decrease in residual norm convergence criteria
.  abstol - absolute residual norm convergence criteria
.  dtol - divergence residual norm increase criteria
.  its - the number of iterations to apply.
-  guesszero - if the input x contains nonzero initial guess

   Output Parameters:
+  outits - number of iterations actually used (for SOR this always equals its)
.  reason - the reason the apply terminated
-  y - the solution (also contains initial guess if guesszero is PETSC_FALSE)

   Notes:
   Most preconditioners do not support this function. Use the command
   PCApplyRichardsonExists() to determine if one does.
853 854 Except for the multigrid PC this routine ignores the convergence tolerances 855 and always runs for the number of iterations 856 857 Level: developer 858 859 .seealso: PCApplyRichardsonExists() 860 @*/ 861 PetscErrorCode PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason) 862 { 863 PetscErrorCode ierr; 864 865 PetscFunctionBegin; 866 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 867 PetscValidHeaderSpecific(b,VEC_CLASSID,2); 868 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 869 PetscValidHeaderSpecific(w,VEC_CLASSID,4); 870 if (b == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"b and y must be different vectors"); 871 ierr = PCSetUp(pc);CHKERRQ(ierr); 872 if (!pc->ops->applyrichardson) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply richardson"); 873 ierr = (*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason);CHKERRQ(ierr); 874 PetscFunctionReturn(0); 875 } 876 877 /*@ 878 PCSetFailedReason - Sets the reason a PCSetUp() failed or PC_NOERROR if it did not fail 879 880 Logically Collective on PC 881 882 Input Parameter: 883 + pc - the preconditioner context 884 - reason - the reason it failedx 885 886 Level: advanced 887 888 .seealso: PCCreate(), PCApply(), PCDestroy(), PCFailedReason 889 @*/ 890 PetscErrorCode PCSetFailedReason(PC pc,PCFailedReason reason) 891 { 892 PetscFunctionBegin; 893 pc->failedreason = reason; 894 PetscFunctionReturn(0); 895 } 896 897 /*@ 898 PCGetFailedReason - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail 899 900 Logically Collective on PC 901 902 Input Parameter: 903 . pc - the preconditioner context 904 905 Output Parameter: 906 . reason - the reason it failed 907 908 Level: advanced 909 910 Notes: This is the maximum over reason over all ranks in the PC communicator. 
It is only valid after 911 a call KSPCheckDot() or KSPCheckNorm() inside a KSPSolve(). It is not valid immediately after a PCSetUp() 912 or PCApply(), then use PCGetFailedReasonRank() 913 914 .seealso: PCCreate(), PCApply(), PCDestroy(), PCGetFailedReasonRank(), PCSetFailedReason() 915 @*/ 916 PetscErrorCode PCGetFailedReason(PC pc,PCFailedReason *reason) 917 { 918 PetscFunctionBegin; 919 if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled; 920 else *reason = pc->failedreason; 921 PetscFunctionReturn(0); 922 } 923 924 /*@ 925 PCGetFailedReasonRank - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail on this MPI rank 926 927 Not Collective on PC 928 929 Input Parameter: 930 . pc - the preconditioner context 931 932 Output Parameter: 933 . reason - the reason it failed 934 935 Notes: 936 Different ranks may have different reasons or no reason, see PCGetFailedReason() 937 938 Level: advanced 939 940 .seealso: PCCreate(), PCApply(), PCDestroy(), PCGetFailedReason(), PCSetFailedReason() 941 @*/ 942 PetscErrorCode PCGetFailedReasonRank(PC pc,PCFailedReason *reason) 943 { 944 PetscFunctionBegin; 945 if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled; 946 else *reason = pc->failedreason; 947 PetscFunctionReturn(0); 948 } 949 950 /* Next line needed to deactivate KSP_Solve logging */ 951 #include <petsc/private/kspimpl.h> 952 953 /* 954 a setupcall of 0 indicates never setup, 955 1 indicates has been previously setup 956 -1 indicates a PCSetUp() was attempted and failed 957 */ 958 /*@ 959 PCSetUp - Prepares for the use of a preconditioner. 960 961 Collective on PC 962 963 Input Parameter: 964 . 
pc - the preconditioner context

   Level: developer

.seealso: PCCreate(), PCApply(), PCDestroy()
@*/
PetscErrorCode PCSetUp(PC pc)
{
  PetscErrorCode   ierr;
  const char       *def;
  PetscObjectState matstate, matnonzerostate;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (!pc->mat) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first");

  /* the user asked to keep the existing preconditioner even if the operator changed */
  if (pc->setupcalled && pc->reusepreconditioner) {
    ierr = PetscInfo(pc,"Leaving PC with identical preconditioner since reuse preconditioner is set\n");CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* compare the current state of the preconditioner matrix with the state recorded at the
     previous setup to decide whether a rebuild is needed and whether the nonzero pattern changed */
  ierr = PetscObjectStateGet((PetscObject)pc->pmat,&matstate);CHKERRQ(ierr);
  ierr = MatGetNonzeroState(pc->pmat,&matnonzerostate);CHKERRQ(ierr);
  if (!pc->setupcalled) {
    ierr = PetscInfo(pc,"Setting up PC for first time\n");CHKERRQ(ierr);
    pc->flag = DIFFERENT_NONZERO_PATTERN;
  } else if (matstate == pc->matstate) {
    /* operator values are unchanged since the last setup: nothing to rebuild */
    ierr = PetscInfo(pc,"Leaving PC with identical preconditioner since operator is unchanged\n");CHKERRQ(ierr);
    PetscFunctionReturn(0);
  } else {
    if (matnonzerostate > pc->matnonzerostate) {
      ierr = PetscInfo(pc,"Setting up PC with different nonzero pattern\n");CHKERRQ(ierr);
      pc->flag = DIFFERENT_NONZERO_PATTERN;
    } else {
      ierr = PetscInfo(pc,"Setting up PC with same nonzero pattern\n");CHKERRQ(ierr);
      pc->flag = SAME_NONZERO_PATTERN;
    }
  }
  /* record the states so the next PCSetUp() can detect changes */
  pc->matstate        = matstate;
  pc->matnonzerostate = matnonzerostate;

  /* no type was set explicitly or from options: pick a default based on the matrix and communicator size */
  if (!((PetscObject)pc)->type_name) {
    ierr = PCGetDefaultType_Private(pc,&def);CHKERRQ(ierr);
    ierr = PCSetType(pc,def);CHKERRQ(ierr);
  }

  ierr = MatSetErrorIfFailure(pc->pmat,pc->erroriffailure);CHKERRQ(ierr);
  ierr = MatSetErrorIfFailure(pc->mat,pc->erroriffailure);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PC_SetUp,pc,0,0,0);CHKERRQ(ierr);
  if (pc->ops->setup) {
    /* do not log solves and
applications of preconditioners while constructing preconditioners; perhaps they should be logged separately from the regular solves */ 1015 ierr = PetscLogEventDeactivatePush(KSP_Solve);CHKERRQ(ierr); 1016 ierr = PetscLogEventDeactivatePush(PC_Apply);CHKERRQ(ierr); 1017 ierr = (*pc->ops->setup)(pc);CHKERRQ(ierr); 1018 ierr = PetscLogEventDeactivatePop(KSP_Solve);CHKERRQ(ierr); 1019 ierr = PetscLogEventDeactivatePop(PC_Apply);CHKERRQ(ierr); 1020 } 1021 ierr = PetscLogEventEnd(PC_SetUp,pc,0,0,0);CHKERRQ(ierr); 1022 if (!pc->setupcalled) pc->setupcalled = 1; 1023 PetscFunctionReturn(0); 1024 } 1025 1026 /*@ 1027 PCSetUpOnBlocks - Sets up the preconditioner for each block in 1028 the block Jacobi, block Gauss-Seidel, and overlapping Schwarz 1029 methods. 1030 1031 Collective on PC 1032 1033 Input Parameters: 1034 . pc - the preconditioner context 1035 1036 Level: developer 1037 1038 .seealso: PCCreate(), PCApply(), PCDestroy(), PCSetUp() 1039 @*/ 1040 PetscErrorCode PCSetUpOnBlocks(PC pc) 1041 { 1042 PetscErrorCode ierr; 1043 1044 PetscFunctionBegin; 1045 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1046 if (!pc->ops->setuponblocks) PetscFunctionReturn(0); 1047 ierr = PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr); 1048 ierr = (*pc->ops->setuponblocks)(pc);CHKERRQ(ierr); 1049 ierr = PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr); 1050 PetscFunctionReturn(0); 1051 } 1052 1053 /*@C 1054 PCSetModifySubMatrices - Sets a user-defined routine for modifying the 1055 submatrices that arise within certain subdomain-based preconditioners. 1056 The basic submatrices are extracted from the preconditioner matrix as 1057 usual; the user can then alter these (for example, to set different boundary 1058 conditions for each submatrix) before they are used for the local solves. 1059 1060 Logically Collective on PC 1061 1062 Input Parameters: 1063 + pc - the preconditioner context 1064 . 
func - routine for modifying the submatrices 1065 - ctx - optional user-defined context (may be null) 1066 1067 Calling sequence of func: 1068 $ func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx); 1069 1070 + row - an array of index sets that contain the global row numbers 1071 that comprise each local submatrix 1072 . col - an array of index sets that contain the global column numbers 1073 that comprise each local submatrix 1074 . submat - array of local submatrices 1075 - ctx - optional user-defined context for private data for the 1076 user-defined func routine (may be null) 1077 1078 Notes: 1079 PCSetModifySubMatrices() MUST be called before KSPSetUp() and 1080 KSPSolve(). 1081 1082 A routine set by PCSetModifySubMatrices() is currently called within 1083 the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM) 1084 preconditioners. All other preconditioners ignore this routine. 1085 1086 Level: advanced 1087 1088 .seealso: PCModifySubMatrices() 1089 @*/ 1090 PetscErrorCode PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx) 1091 { 1092 PetscFunctionBegin; 1093 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1094 pc->modifysubmatrices = func; 1095 pc->modifysubmatricesP = ctx; 1096 PetscFunctionReturn(0); 1097 } 1098 1099 /*@C 1100 PCModifySubMatrices - Calls an optional user-defined routine within 1101 certain preconditioners if one has been set with PCSetModifySubMatrices(). 1102 1103 Collective on PC 1104 1105 Input Parameters: 1106 + pc - the preconditioner context 1107 . nsub - the number of local submatrices 1108 . row - an array of index sets that contain the global row numbers 1109 that comprise each local submatrix 1110 . col - an array of index sets that contain the global column numbers 1111 that comprise each local submatrix 1112 . 
submat - array of local submatrices 1113 - ctx - optional user-defined context for private data for the 1114 user-defined routine (may be null) 1115 1116 Output Parameter: 1117 . submat - array of local submatrices (the entries of which may 1118 have been modified) 1119 1120 Notes: 1121 The user should NOT generally call this routine, as it will 1122 automatically be called within certain preconditioners (currently 1123 block Jacobi, additive Schwarz) if set. 1124 1125 The basic submatrices are extracted from the preconditioner matrix 1126 as usual; the user can then alter these (for example, to set different 1127 boundary conditions for each submatrix) before they are used for the 1128 local solves. 1129 1130 Level: developer 1131 1132 .seealso: PCSetModifySubMatrices() 1133 @*/ 1134 PetscErrorCode PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx) 1135 { 1136 PetscErrorCode ierr; 1137 1138 PetscFunctionBegin; 1139 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1140 if (!pc->modifysubmatrices) PetscFunctionReturn(0); 1141 ierr = PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr); 1142 ierr = (*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx);CHKERRQ(ierr); 1143 ierr = PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr); 1144 PetscFunctionReturn(0); 1145 } 1146 1147 /*@ 1148 PCSetOperators - Sets the matrix associated with the linear system and 1149 a (possibly) different one associated with the preconditioner. 1150 1151 Logically Collective on PC 1152 1153 Input Parameters: 1154 + pc - the preconditioner context 1155 . Amat - the matrix that defines the linear system 1156 - Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat. 1157 1158 Notes: 1159 Passing a NULL for Amat or Pmat removes the matrix that is currently used. 
   If you wish to replace either Amat or Pmat but leave the other one untouched then
   first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
   on it and then pass it back in in your call to KSPSetOperators().

   More Notes about Repeated Solution of Linear Systems:
   PETSc does NOT reset the matrix entries of either Amat or Pmat
   to zero after a linear solve; the user is completely responsible for
   matrix assembly. See the routine MatZeroEntries() if desiring to
   zero all elements of a matrix.

   Level: intermediate

.seealso: PCGetOperators(), MatZeroEntries()
 @*/
PetscErrorCode PCSetOperators(PC pc,Mat Amat,Mat Pmat)
{
  PetscErrorCode ierr;
  PetscInt       m1,n1,m2,n2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (Amat) PetscValidHeaderSpecific(Amat,MAT_CLASSID,2);
  if (Pmat) PetscValidHeaderSpecific(Pmat,MAT_CLASSID,3);
  if (Amat) PetscCheckSameComm(pc,1,Amat,2);
  if (Pmat) PetscCheckSameComm(pc,1,Pmat,3);
  if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) {
    /* once the PC has been set up the local sizes of the operators must not change */
    ierr = MatGetLocalSize(Amat,&m1,&n1);CHKERRQ(ierr);
    ierr = MatGetLocalSize(pc->mat,&m2,&n2);CHKERRQ(ierr);
    if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
    ierr = MatGetLocalSize(Pmat,&m1,&n1);CHKERRQ(ierr);
    ierr = MatGetLocalSize(pc->pmat,&m2,&n2);CHKERRQ(ierr);
    if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
  }

  if (Pmat != pc->pmat) {
    /* the operator that defines the preconditioner changed, thus we need to clear the saved
       states so the next PCSetUp() rebuilds the preconditioner */
    pc->matnonzerostate = -1;
    pc->matstate        = -1;
  }

  /* reference first in case the matrices
are the same */ 1202 if (Amat) {ierr = PetscObjectReference((PetscObject)Amat);CHKERRQ(ierr);} 1203 ierr = MatDestroy(&pc->mat);CHKERRQ(ierr); 1204 if (Pmat) {ierr = PetscObjectReference((PetscObject)Pmat);CHKERRQ(ierr);} 1205 ierr = MatDestroy(&pc->pmat);CHKERRQ(ierr); 1206 pc->mat = Amat; 1207 pc->pmat = Pmat; 1208 PetscFunctionReturn(0); 1209 } 1210 1211 /*@ 1212 PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner has changed. 1213 1214 Logically Collective on PC 1215 1216 Input Parameters: 1217 + pc - the preconditioner context 1218 - flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner 1219 1220 Level: intermediate 1221 1222 .seealso: PCGetOperators(), MatZeroEntries(), PCGetReusePreconditioner(), KSPSetReusePreconditioner() 1223 @*/ 1224 PetscErrorCode PCSetReusePreconditioner(PC pc,PetscBool flag) 1225 { 1226 PetscFunctionBegin; 1227 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1228 PetscValidLogicalCollectiveBool(pc,flag,2); 1229 pc->reusepreconditioner = flag; 1230 PetscFunctionReturn(0); 1231 } 1232 1233 /*@ 1234 PCGetReusePreconditioner - Determines if the PC reuses the current preconditioner even if the operator in the preconditioner has changed. 1235 1236 Not Collective 1237 1238 Input Parameter: 1239 . pc - the preconditioner context 1240 1241 Output Parameter: 1242 . 
flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner 1243 1244 Level: intermediate 1245 1246 .seealso: PCGetOperators(), MatZeroEntries(), PCSetReusePreconditioner() 1247 @*/ 1248 PetscErrorCode PCGetReusePreconditioner(PC pc,PetscBool *flag) 1249 { 1250 PetscFunctionBegin; 1251 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1252 PetscValidPointer(flag,2); 1253 *flag = pc->reusepreconditioner; 1254 PetscFunctionReturn(0); 1255 } 1256 1257 /*@ 1258 PCGetOperators - Gets the matrix associated with the linear system and 1259 possibly a different one associated with the preconditioner. 1260 1261 Not collective, though parallel Mats are returned if the PC is parallel 1262 1263 Input Parameter: 1264 . pc - the preconditioner context 1265 1266 Output Parameters: 1267 + Amat - the matrix defining the linear system 1268 - Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat. 1269 1270 Level: intermediate 1271 1272 Notes: 1273 Does not increase the reference count of the matrices, so you should not destroy them 1274 1275 Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators 1276 are created in PC and returned to the user. In this case, if both operators 1277 mat and pmat are requested, two DIFFERENT operators will be returned. If 1278 only one is requested both operators in the PC will be the same (i.e. as 1279 if one had called KSP/PCSetOperators() with the same argument for both Mats). 1280 The user must set the sizes of the returned matrices and their type etc just 1281 as if the user created them with MatCreate(). 
For example, 1282 1283 $ KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to 1284 $ set size, type, etc of Amat 1285 1286 $ MatCreate(comm,&mat); 1287 $ KSP/PCSetOperators(ksp/pc,Amat,Amat); 1288 $ PetscObjectDereference((PetscObject)mat); 1289 $ set size, type, etc of Amat 1290 1291 and 1292 1293 $ KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to 1294 $ set size, type, etc of Amat and Pmat 1295 1296 $ MatCreate(comm,&Amat); 1297 $ MatCreate(comm,&Pmat); 1298 $ KSP/PCSetOperators(ksp/pc,Amat,Pmat); 1299 $ PetscObjectDereference((PetscObject)Amat); 1300 $ PetscObjectDereference((PetscObject)Pmat); 1301 $ set size, type, etc of Amat and Pmat 1302 1303 The rational for this support is so that when creating a TS, SNES, or KSP the hierarchy 1304 of underlying objects (i.e. SNES, KSP, PC, Mat) and their livespans can be completely 1305 managed by the top most level object (i.e. the TS, SNES, or KSP). Another way to look 1306 at this is when you create a SNES you do not NEED to create a KSP and attach it to 1307 the SNES object (the SNES object manages it for you). Similarly when you create a KSP 1308 you do not need to attach a PC to it (the KSP object manages the PC object for you). 1309 Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when 1310 it can be created for you? 
1311 1312 1313 .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperatorsSet() 1314 @*/ 1315 PetscErrorCode PCGetOperators(PC pc,Mat *Amat,Mat *Pmat) 1316 { 1317 PetscErrorCode ierr; 1318 1319 PetscFunctionBegin; 1320 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1321 if (Amat) { 1322 if (!pc->mat) { 1323 if (pc->pmat && !Pmat) { /* Apmat has been set, but user did not request it, so use for Amat */ 1324 pc->mat = pc->pmat; 1325 ierr = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr); 1326 } else { /* both Amat and Pmat are empty */ 1327 ierr = MatCreate(PetscObjectComm((PetscObject)pc),&pc->mat);CHKERRQ(ierr); 1328 if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */ 1329 pc->pmat = pc->mat; 1330 ierr = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr); 1331 } 1332 } 1333 } 1334 *Amat = pc->mat; 1335 } 1336 if (Pmat) { 1337 if (!pc->pmat) { 1338 if (pc->mat && !Amat) { /* Amat has been set but was not requested, so use for pmat */ 1339 pc->pmat = pc->mat; 1340 ierr = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr); 1341 } else { 1342 ierr = MatCreate(PetscObjectComm((PetscObject)pc),&pc->pmat);CHKERRQ(ierr); 1343 if (!Amat) { /* user did NOT request Amat, so make same as Pmat */ 1344 pc->mat = pc->pmat; 1345 ierr = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr); 1346 } 1347 } 1348 } 1349 *Pmat = pc->pmat; 1350 } 1351 PetscFunctionReturn(0); 1352 } 1353 1354 /*@C 1355 PCGetOperatorsSet - Determines if the matrix associated with the linear system and 1356 possibly a different one associated with the preconditioner have been set in the PC. 1357 1358 Not collective, though the results on all processes should be the same 1359 1360 Input Parameter: 1361 . 
pc - the preconditioner context 1362 1363 Output Parameters: 1364 + mat - the matrix associated with the linear system was set 1365 - pmat - matrix associated with the preconditioner was set, usually the same 1366 1367 Level: intermediate 1368 1369 .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperators() 1370 @*/ 1371 PetscErrorCode PCGetOperatorsSet(PC pc,PetscBool *mat,PetscBool *pmat) 1372 { 1373 PetscFunctionBegin; 1374 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1375 if (mat) *mat = (pc->mat) ? PETSC_TRUE : PETSC_FALSE; 1376 if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE; 1377 PetscFunctionReturn(0); 1378 } 1379 1380 /*@ 1381 PCFactorGetMatrix - Gets the factored matrix from the 1382 preconditioner context. This routine is valid only for the LU, 1383 incomplete LU, Cholesky, and incomplete Cholesky methods. 1384 1385 Not Collective on PC though Mat is parallel if PC is parallel 1386 1387 Input Parameters: 1388 . pc - the preconditioner context 1389 1390 Output parameters: 1391 . mat - the factored matrix 1392 1393 Level: advanced 1394 1395 Notes: 1396 Does not increase the reference count for the matrix so DO NOT destroy it 1397 1398 @*/ 1399 PetscErrorCode PCFactorGetMatrix(PC pc,Mat *mat) 1400 { 1401 PetscErrorCode ierr; 1402 1403 PetscFunctionBegin; 1404 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1405 PetscValidPointer(mat,2); 1406 if (pc->ops->getfactoredmatrix) { 1407 ierr = (*pc->ops->getfactoredmatrix)(pc,mat);CHKERRQ(ierr); 1408 } else SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC type does not support getting factor matrix"); 1409 PetscFunctionReturn(0); 1410 } 1411 1412 /*@C 1413 PCSetOptionsPrefix - Sets the prefix used for searching for all 1414 PC options in the database. 
1415 1416 Logically Collective on PC 1417 1418 Input Parameters: 1419 + pc - the preconditioner context 1420 - prefix - the prefix string to prepend to all PC option requests 1421 1422 Notes: 1423 A hyphen (-) must NOT be given at the beginning of the prefix name. 1424 The first character of all runtime options is AUTOMATICALLY the 1425 hyphen. 1426 1427 Level: advanced 1428 1429 .seealso: PCAppendOptionsPrefix(), PCGetOptionsPrefix() 1430 @*/ 1431 PetscErrorCode PCSetOptionsPrefix(PC pc,const char prefix[]) 1432 { 1433 PetscErrorCode ierr; 1434 1435 PetscFunctionBegin; 1436 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1437 ierr = PetscObjectSetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr); 1438 PetscFunctionReturn(0); 1439 } 1440 1441 /*@C 1442 PCAppendOptionsPrefix - Appends to the prefix used for searching for all 1443 PC options in the database. 1444 1445 Logically Collective on PC 1446 1447 Input Parameters: 1448 + pc - the preconditioner context 1449 - prefix - the prefix string to prepend to all PC option requests 1450 1451 Notes: 1452 A hyphen (-) must NOT be given at the beginning of the prefix name. 1453 The first character of all runtime options is AUTOMATICALLY the 1454 hyphen. 1455 1456 Level: advanced 1457 1458 .seealso: PCSetOptionsPrefix(), PCGetOptionsPrefix() 1459 @*/ 1460 PetscErrorCode PCAppendOptionsPrefix(PC pc,const char prefix[]) 1461 { 1462 PetscErrorCode ierr; 1463 1464 PetscFunctionBegin; 1465 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1466 ierr = PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr); 1467 PetscFunctionReturn(0); 1468 } 1469 1470 /*@C 1471 PCGetOptionsPrefix - Gets the prefix used for searching for all 1472 PC options in the database. 1473 1474 Not Collective 1475 1476 Input Parameters: 1477 . pc - the preconditioner context 1478 1479 Output Parameters: 1480 . 
prefix - pointer to the prefix string used, is returned 1481 1482 Notes: 1483 On the fortran side, the user should pass in a string 'prifix' of 1484 sufficient length to hold the prefix. 1485 1486 Level: advanced 1487 1488 .seealso: PCSetOptionsPrefix(), PCAppendOptionsPrefix() 1489 @*/ 1490 PetscErrorCode PCGetOptionsPrefix(PC pc,const char *prefix[]) 1491 { 1492 PetscErrorCode ierr; 1493 1494 PetscFunctionBegin; 1495 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1496 PetscValidPointer(prefix,2); 1497 ierr = PetscObjectGetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr); 1498 PetscFunctionReturn(0); 1499 } 1500 1501 /* 1502 Indicates the right hand side will be changed by KSPSolve(), this occurs for a few 1503 preconditioners including BDDC and Eisentat that transform the equations before applying 1504 the Krylov methods 1505 */ 1506 PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC pc,PetscBool *change) 1507 { 1508 PetscErrorCode ierr; 1509 1510 PetscFunctionBegin; 1511 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1512 PetscValidPointer(change,2); 1513 *change = PETSC_FALSE; 1514 ierr = PetscTryMethod(pc,"PCPreSolveChangeRHS_C",(PC,PetscBool*),(pc,change));CHKERRQ(ierr); 1515 PetscFunctionReturn(0); 1516 } 1517 1518 /*@ 1519 PCPreSolve - Optional pre-solve phase, intended for any 1520 preconditioner-specific actions that must be performed before 1521 the iterative solve itself. 1522 1523 Collective on PC 1524 1525 Input Parameters: 1526 + pc - the preconditioner context 1527 - ksp - the Krylov subspace context 1528 1529 Level: developer 1530 1531 Sample of Usage: 1532 .vb 1533 PCPreSolve(pc,ksp); 1534 KSPSolve(ksp,b,x); 1535 PCPostSolve(pc,ksp); 1536 .ve 1537 1538 Notes: 1539 The pre-solve phase is distinct from the PCSetUp() phase. 1540 1541 KSPSolve() calls this directly, so is rarely called by the user. 
1542 1543 .seealso: PCPostSolve() 1544 @*/ 1545 PetscErrorCode PCPreSolve(PC pc,KSP ksp) 1546 { 1547 PetscErrorCode ierr; 1548 Vec x,rhs; 1549 1550 PetscFunctionBegin; 1551 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1552 PetscValidHeaderSpecific(ksp,KSP_CLASSID,2); 1553 pc->presolvedone++; 1554 if (pc->presolvedone > 2) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot embed PCPreSolve() more than twice"); 1555 ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr); 1556 ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr); 1557 1558 if (pc->ops->presolve) { 1559 ierr = (*pc->ops->presolve)(pc,ksp,rhs,x);CHKERRQ(ierr); 1560 } 1561 PetscFunctionReturn(0); 1562 } 1563 1564 /*@ 1565 PCPostSolve - Optional post-solve phase, intended for any 1566 preconditioner-specific actions that must be performed after 1567 the iterative solve itself. 1568 1569 Collective on PC 1570 1571 Input Parameters: 1572 + pc - the preconditioner context 1573 - ksp - the Krylov subspace context 1574 1575 Sample of Usage: 1576 .vb 1577 PCPreSolve(pc,ksp); 1578 KSPSolve(ksp,b,x); 1579 PCPostSolve(pc,ksp); 1580 .ve 1581 1582 Note: 1583 KSPSolve() calls this routine directly, so it is rarely called by the user. 1584 1585 Level: developer 1586 1587 .seealso: PCPreSolve(), KSPSolve() 1588 @*/ 1589 PetscErrorCode PCPostSolve(PC pc,KSP ksp) 1590 { 1591 PetscErrorCode ierr; 1592 Vec x,rhs; 1593 1594 PetscFunctionBegin; 1595 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1596 PetscValidHeaderSpecific(ksp,KSP_CLASSID,2); 1597 pc->presolvedone--; 1598 ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr); 1599 ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr); 1600 if (pc->ops->postsolve) { 1601 ierr = (*pc->ops->postsolve)(pc,ksp,rhs,x);CHKERRQ(ierr); 1602 } 1603 PetscFunctionReturn(0); 1604 } 1605 1606 /*@C 1607 PCLoad - Loads a PC that has been stored in binary with PCView(). 
1608 1609 Collective on PetscViewer 1610 1611 Input Parameters: 1612 + newdm - the newly loaded PC, this needs to have been created with PCCreate() or 1613 some related function before a call to PCLoad(). 1614 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() 1615 1616 Level: intermediate 1617 1618 Notes: 1619 The type is determined by the data in the file, any type set into the PC before this call is ignored. 1620 1621 Notes for advanced users: 1622 Most users should not need to know the details of the binary storage 1623 format, since PCLoad() and PCView() completely hide these details. 1624 But for anyone who's interested, the standard binary matrix storage 1625 format is 1626 .vb 1627 has not yet been determined 1628 .ve 1629 1630 .seealso: PetscViewerBinaryOpen(), PCView(), MatLoad(), VecLoad() 1631 @*/ 1632 PetscErrorCode PCLoad(PC newdm, PetscViewer viewer) 1633 { 1634 PetscErrorCode ierr; 1635 PetscBool isbinary; 1636 PetscInt classid; 1637 char type[256]; 1638 1639 PetscFunctionBegin; 1640 PetscValidHeaderSpecific(newdm,PC_CLASSID,1); 1641 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 1642 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 1643 if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()"); 1644 1645 ierr = PetscViewerBinaryRead(viewer,&classid,1,NULL,PETSC_INT);CHKERRQ(ierr); 1646 if (classid != PC_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not PC next in file"); 1647 ierr = PetscViewerBinaryRead(viewer,type,256,NULL,PETSC_CHAR);CHKERRQ(ierr); 1648 ierr = PCSetType(newdm, type);CHKERRQ(ierr); 1649 if (newdm->ops->load) { 1650 ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr); 1651 } 1652 PetscFunctionReturn(0); 1653 } 1654 1655 #include <petscdraw.h> 1656 #if defined(PETSC_HAVE_SAWS) 1657 #include <petscviewersaws.h> 1658 #endif 1659 1660 /*@C 1661 PCViewFromOptions - 
View from Options 1662 1663 Collective on PC 1664 1665 Input Parameters: 1666 + A - the PC context 1667 . obj - Optional object 1668 - name - command line option 1669 1670 Level: intermediate 1671 .seealso: PC, PCView, PetscObjectViewFromOptions(), PCCreate() 1672 @*/ 1673 PetscErrorCode PCViewFromOptions(PC A,PetscObject obj,const char name[]) 1674 { 1675 PetscErrorCode ierr; 1676 1677 PetscFunctionBegin; 1678 PetscValidHeaderSpecific(A,PC_CLASSID,1); 1679 ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr); 1680 PetscFunctionReturn(0); 1681 } 1682 1683 /*@C 1684 PCView - Prints the PC data structure. 1685 1686 Collective on PC 1687 1688 Input Parameters: 1689 + PC - the PC context 1690 - viewer - optional visualization context 1691 1692 Note: 1693 The available visualization contexts include 1694 + PETSC_VIEWER_STDOUT_SELF - standard output (default) 1695 - PETSC_VIEWER_STDOUT_WORLD - synchronized standard 1696 output where only the first processor opens 1697 the file. All other processors send their 1698 data to the first processor to print. 1699 1700 The user can open an alternative visualization contexts with 1701 PetscViewerASCIIOpen() (output to a specified file). 
1702 1703 Level: developer 1704 1705 .seealso: KSPView(), PetscViewerASCIIOpen() 1706 @*/ 1707 PetscErrorCode PCView(PC pc,PetscViewer viewer) 1708 { 1709 PCType cstr; 1710 PetscErrorCode ierr; 1711 PetscBool iascii,isstring,isbinary,isdraw; 1712 #if defined(PETSC_HAVE_SAWS) 1713 PetscBool issaws; 1714 #endif 1715 1716 PetscFunctionBegin; 1717 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1718 if (!viewer) { 1719 ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer);CHKERRQ(ierr); 1720 } 1721 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 1722 PetscCheckSameComm(pc,1,viewer,2); 1723 1724 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 1725 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr); 1726 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 1727 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr); 1728 #if defined(PETSC_HAVE_SAWS) 1729 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSAWS,&issaws);CHKERRQ(ierr); 1730 #endif 1731 1732 if (iascii) { 1733 ierr = PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer);CHKERRQ(ierr); 1734 if (!pc->setupcalled) { 1735 ierr = PetscViewerASCIIPrintf(viewer," PC has not been set up so information may be incomplete\n");CHKERRQ(ierr); 1736 } 1737 if (pc->ops->view) { 1738 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 1739 ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr); 1740 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 1741 } 1742 if (pc->mat) { 1743 ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);CHKERRQ(ierr); 1744 if (pc->pmat == pc->mat) { 1745 ierr = PetscViewerASCIIPrintf(viewer," linear system matrix = precond matrix:\n");CHKERRQ(ierr); 1746 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 1747 ierr = MatView(pc->mat,viewer);CHKERRQ(ierr); 1748 ierr = 
PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 1749 } else { 1750 if (pc->pmat) { 1751 ierr = PetscViewerASCIIPrintf(viewer," linear system matrix followed by preconditioner matrix:\n");CHKERRQ(ierr); 1752 } else { 1753 ierr = PetscViewerASCIIPrintf(viewer," linear system matrix:\n");CHKERRQ(ierr); 1754 } 1755 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 1756 ierr = MatView(pc->mat,viewer);CHKERRQ(ierr); 1757 if (pc->pmat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);} 1758 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 1759 } 1760 ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr); 1761 } 1762 } else if (isstring) { 1763 ierr = PCGetType(pc,&cstr);CHKERRQ(ierr); 1764 ierr = PetscViewerStringSPrintf(viewer," PCType: %-7.7s",cstr);CHKERRQ(ierr); 1765 if (pc->ops->view) {ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);} 1766 if (pc->mat) {ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);} 1767 if (pc->pmat && pc->pmat != pc->mat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);} 1768 } else if (isbinary) { 1769 PetscInt classid = PC_FILE_CLASSID; 1770 MPI_Comm comm; 1771 PetscMPIInt rank; 1772 char type[256]; 1773 1774 ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr); 1775 ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr); 1776 if (!rank) { 1777 ierr = PetscViewerBinaryWrite(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr); 1778 ierr = PetscStrncpy(type,((PetscObject)pc)->type_name,256);CHKERRQ(ierr); 1779 ierr = PetscViewerBinaryWrite(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr); 1780 } 1781 if (pc->ops->view) { 1782 ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr); 1783 } 1784 } else if (isdraw) { 1785 PetscDraw draw; 1786 char str[25]; 1787 PetscReal x,y,bottom,h; 1788 PetscInt n; 1789 1790 ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr); 1791 ierr = PetscDrawGetCurrentPoint(draw,&x,&y);CHKERRQ(ierr); 1792 if (pc->mat) { 1793 ierr = MatGetSize(pc->mat,&n,NULL);CHKERRQ(ierr); 1794 ierr = PetscSNPrintf(str,25,"PC: %s 
(%D)",((PetscObject)pc)->type_name,n);CHKERRQ(ierr); 1795 } else { 1796 ierr = PetscSNPrintf(str,25,"PC: %s",((PetscObject)pc)->type_name);CHKERRQ(ierr); 1797 } 1798 ierr = PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);CHKERRQ(ierr); 1799 bottom = y - h; 1800 ierr = PetscDrawPushCurrentPoint(draw,x,bottom);CHKERRQ(ierr); 1801 if (pc->ops->view) { 1802 ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr); 1803 } 1804 ierr = PetscDrawPopCurrentPoint(draw);CHKERRQ(ierr); 1805 #if defined(PETSC_HAVE_SAWS) 1806 } else if (issaws) { 1807 PetscMPIInt rank; 1808 1809 ierr = PetscObjectName((PetscObject)pc);CHKERRQ(ierr); 1810 ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRMPI(ierr); 1811 if (!((PetscObject)pc)->amsmem && !rank) { 1812 ierr = PetscObjectViewSAWs((PetscObject)pc,viewer);CHKERRQ(ierr); 1813 } 1814 if (pc->mat) {ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);} 1815 if (pc->pmat && pc->pmat != pc->mat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);} 1816 #endif 1817 } 1818 PetscFunctionReturn(0); 1819 } 1820 1821 /*@C 1822 PCRegister - Adds a method to the preconditioner package. 1823 1824 Not collective 1825 1826 Input Parameters: 1827 + name_solver - name of a new user-defined solver 1828 - routine_create - routine to create method context 1829 1830 Notes: 1831 PCRegister() may be called multiple times to add several user-defined preconditioners. 
1832 1833 Sample usage: 1834 .vb 1835 PCRegister("my_solver", MySolverCreate); 1836 .ve 1837 1838 Then, your solver can be chosen with the procedural interface via 1839 $ PCSetType(pc,"my_solver") 1840 or at runtime via the option 1841 $ -pc_type my_solver 1842 1843 Level: advanced 1844 1845 .seealso: PCRegisterAll() 1846 @*/ 1847 PetscErrorCode PCRegister(const char sname[],PetscErrorCode (*function)(PC)) 1848 { 1849 PetscErrorCode ierr; 1850 1851 PetscFunctionBegin; 1852 ierr = PCInitializePackage();CHKERRQ(ierr); 1853 ierr = PetscFunctionListAdd(&PCList,sname,function);CHKERRQ(ierr); 1854 PetscFunctionReturn(0); 1855 } 1856 1857 static PetscErrorCode MatMult_PC(Mat A,Vec X,Vec Y) 1858 { 1859 PC pc; 1860 PetscErrorCode ierr; 1861 1862 PetscFunctionBegin; 1863 ierr = MatShellGetContext(A,&pc);CHKERRQ(ierr); 1864 ierr = PCApply(pc,X,Y);CHKERRQ(ierr); 1865 PetscFunctionReturn(0); 1866 } 1867 1868 /*@ 1869 PCComputeOperator - Computes the explicit preconditioned operator. 1870 1871 Collective on PC 1872 1873 Input Parameter: 1874 + pc - the preconditioner object 1875 - mattype - the matrix type to be used for the operator 1876 1877 Output Parameter: 1878 . mat - the explict preconditioned operator 1879 1880 Notes: 1881 This computation is done by applying the operators to columns of the identity matrix. 1882 This routine is costly in general, and is recommended for use only with relatively small systems. 
1883 Currently, this routine uses a dense matrix format when mattype == NULL 1884 1885 Level: advanced 1886 1887 .seealso: KSPComputeOperator(), MatType 1888 1889 @*/ 1890 PetscErrorCode PCComputeOperator(PC pc,MatType mattype,Mat *mat) 1891 { 1892 PetscErrorCode ierr; 1893 PetscInt N,M,m,n; 1894 Mat A,Apc; 1895 1896 PetscFunctionBegin; 1897 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1898 PetscValidPointer(mat,3); 1899 ierr = PCGetOperators(pc,&A,NULL);CHKERRQ(ierr); 1900 ierr = MatGetLocalSize(A,&m,&n);CHKERRQ(ierr); 1901 ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr); 1902 ierr = MatCreateShell(PetscObjectComm((PetscObject)pc),m,n,M,N,pc,&Apc);CHKERRQ(ierr); 1903 ierr = MatShellSetOperation(Apc,MATOP_MULT,(void (*)(void))MatMult_PC);CHKERRQ(ierr); 1904 ierr = MatComputeOperator(Apc,mattype,mat);CHKERRQ(ierr); 1905 ierr = MatDestroy(&Apc);CHKERRQ(ierr); 1906 PetscFunctionReturn(0); 1907 } 1908 1909 /*@ 1910 PCSetCoordinates - sets the coordinates of all the nodes on the local process 1911 1912 Collective on PC 1913 1914 Input Parameters: 1915 + pc - the solver context 1916 . dim - the dimension of the coordinates 1, 2, or 3 1917 . nloc - the blocked size of the coordinates array 1918 - coords - the coordinates array 1919 1920 Level: intermediate 1921 1922 Notes: 1923 coords is an array of the dim coordinates for the nodes on 1924 the local processor, of size dim*nloc. 1925 If there are 108 equation on a processor 1926 for a displacement finite element discretization of elasticity (so 1927 that there are nloc = 36 = 108/3 nodes) then the array must have 108 1928 double precision values (ie, 3 * 36). These x y z coordinates 1929 should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x, 1930 ... , N-1.z ]. 
1931 1932 .seealso: MatSetNearNullSpace() 1933 @*/ 1934 PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[]) 1935 { 1936 PetscErrorCode ierr; 1937 1938 PetscFunctionBegin; 1939 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1940 PetscValidLogicalCollectiveInt(pc,dim,2); 1941 ierr = PetscTryMethod(pc,"PCSetCoordinates_C",(PC,PetscInt,PetscInt,PetscReal*),(pc,dim,nloc,coords));CHKERRQ(ierr); 1942 PetscFunctionReturn(0); 1943 } 1944 1945 /*@ 1946 PCGetInterpolations - Gets interpolation matrices for all levels (except level 0) 1947 1948 Logically Collective on PC 1949 1950 Input Parameters: 1951 + pc - the precondition context 1952 1953 Output Parameter: 1954 - num_levels - the number of levels 1955 . interpolations - the interpolation matrices (size of num_levels-1) 1956 1957 Level: advanced 1958 1959 .keywords: MG, GAMG, BoomerAMG, multigrid, interpolation, level 1960 1961 .seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetInterpolation(), PCGetCoarseOperators() 1962 @*/ 1963 PetscErrorCode PCGetInterpolations(PC pc,PetscInt *num_levels,Mat *interpolations[]) 1964 { 1965 PetscErrorCode ierr; 1966 1967 PetscFunctionBegin; 1968 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1969 PetscValidPointer(num_levels,2); 1970 PetscValidPointer(interpolations,3); 1971 ierr = PetscUseMethod(pc,"PCGetInterpolations_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,interpolations));CHKERRQ(ierr); 1972 PetscFunctionReturn(0); 1973 } 1974 1975 /*@ 1976 PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level) 1977 1978 Logically Collective on PC 1979 1980 Input Parameters: 1981 + pc - the precondition context 1982 1983 Output Parameter: 1984 - num_levels - the number of levels 1985 . 
coarseOperators - the coarse operator matrices (size of num_levels-1) 1986 1987 Level: advanced 1988 1989 .keywords: MG, GAMG, BoomerAMG, get, multigrid, interpolation, level 1990 1991 .seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetRScale(), PCMGGetInterpolation(), PCGetInterpolations() 1992 @*/ 1993 PetscErrorCode PCGetCoarseOperators(PC pc,PetscInt *num_levels,Mat *coarseOperators[]) 1994 { 1995 PetscErrorCode ierr; 1996 1997 PetscFunctionBegin; 1998 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1999 PetscValidPointer(num_levels,2); 2000 PetscValidPointer(coarseOperators,3); 2001 ierr = PetscUseMethod(pc,"PCGetCoarseOperators_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,coarseOperators));CHKERRQ(ierr); 2002 PetscFunctionReturn(0); 2003 } 2004