1 2 /* 3 The PC (preconditioner) interface routines, callable by users. 4 */ 5 #include <private/pcimpl.h> /*I "petscksp.h" I*/ 6 7 /* Logging support */ 8 PetscClassId PC_CLASSID; 9 PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft; 10 PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks; 11 12 #undef __FUNCT__ 13 #define __FUNCT__ "PCGetDefaultType_Private" 14 PetscErrorCode PCGetDefaultType_Private(PC pc,const char* type[]) 15 { 16 PetscErrorCode ierr; 17 PetscMPIInt size; 18 PetscBool flg1,flg2,set,flg3; 19 20 PetscFunctionBegin; 21 ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr); 22 if (pc->pmat) { 23 PetscErrorCode (*f)(Mat,PetscBool *,MatReuse,Mat*); 24 ierr = PetscObjectQueryFunction((PetscObject)pc->pmat,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr); 25 if (size == 1) { 26 ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1);CHKERRQ(ierr); 27 ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2);CHKERRQ(ierr); 28 ierr = MatIsSymmetricKnown(pc->pmat,&set,&flg3);CHKERRQ(ierr); 29 if (flg1 && (!flg2 || (set && flg3))) { 30 *type = PCICC; 31 } else if (flg2) { 32 *type = PCILU; 33 } else if (f) { /* likely is a parallel matrix run on one processor */ 34 *type = PCBJACOBI; 35 } else { 36 *type = PCNONE; 37 } 38 } else { 39 if (f) { 40 *type = PCBJACOBI; 41 } else { 42 *type = PCNONE; 43 } 44 } 45 } else { 46 if (size == 1) { 47 *type = PCILU; 48 } else { 49 *type = PCBJACOBI; 50 } 51 } 52 PetscFunctionReturn(0); 53 } 54 55 #undef __FUNCT__ 56 #define __FUNCT__ "PCReset" 57 /*@ 58 PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats 59 60 Collective on PC 61 62 Input Parameter: 63 . 
pc - the preconditioner context 64 65 Level: developer 66 67 Notes: This allows a PC to be reused for a different sized linear system but using the same options that have been previously set in the PC 68 69 .keywords: PC, destroy 70 71 .seealso: PCCreate(), PCSetUp() 72 @*/ 73 PetscErrorCode PCReset(PC pc) 74 { 75 PetscErrorCode ierr; 76 77 PetscFunctionBegin; 78 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 79 if (pc->ops->reset) { 80 ierr = (*pc->ops->reset)(pc); 81 } 82 if (pc->diagonalscaleright) {ierr = VecDestroy(pc->diagonalscaleright);CHKERRQ(ierr);} 83 if (pc->diagonalscaleleft) {ierr = VecDestroy(pc->diagonalscaleleft);CHKERRQ(ierr);} 84 if (pc->pmat) {ierr = MatDestroy(pc->pmat);CHKERRQ(ierr);} 85 if (pc->mat) {ierr = MatDestroy(pc->mat);CHKERRQ(ierr);} 86 pc->setupcalled = 0; 87 PetscFunctionReturn(0); 88 } 89 90 #undef __FUNCT__ 91 #define __FUNCT__ "PCDestroy" 92 /*@ 93 PCDestroy - Destroys PC context that was created with PCCreate(). 94 95 Collective on PC 96 97 Input Parameter: 98 . pc - the preconditioner context 99 100 Level: developer 101 102 .keywords: PC, destroy 103 104 .seealso: PCCreate(), PCSetUp() 105 @*/ 106 PetscErrorCode PCDestroy(PC pc) 107 { 108 PetscErrorCode ierr; 109 110 PetscFunctionBegin; 111 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 112 if (--((PetscObject)pc)->refct > 0) PetscFunctionReturn(0); 113 114 ierr = PCReset(pc);CHKERRQ(ierr); 115 116 /* if memory was published with AMS then destroy it */ 117 ierr = PetscObjectDepublish(pc);CHKERRQ(ierr); 118 if (pc->ops->destroy) {ierr = (*pc->ops->destroy)(pc);CHKERRQ(ierr);} 119 120 if (pc->dm) {ierr = DMDestroy(pc->dm);CHKERRQ(ierr);} 121 122 ierr = PetscHeaderDestroy(pc);CHKERRQ(ierr); 123 PetscFunctionReturn(0); 124 } 125 126 #undef __FUNCT__ 127 #define __FUNCT__ "PCGetDiagonalScale" 128 /*@C 129 PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right 130 scaling as needed by certain time-stepping codes. 
131 132 Logically Collective on PC 133 134 Input Parameter: 135 . pc - the preconditioner context 136 137 Output Parameter: 138 . flag - PETSC_TRUE if it applies the scaling 139 140 Level: developer 141 142 Notes: If this returns PETSC_TRUE then the system solved via the Krylov method is 143 $ D M A D^{-1} y = D M b for left preconditioning or 144 $ D A M D^{-1} z = D b for right preconditioning 145 146 .keywords: PC 147 148 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCSetDiagonalScale() 149 @*/ 150 PetscErrorCode PCGetDiagonalScale(PC pc,PetscBool *flag) 151 { 152 PetscFunctionBegin; 153 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 154 PetscValidPointer(flag,2); 155 *flag = pc->diagonalscale; 156 PetscFunctionReturn(0); 157 } 158 159 #undef __FUNCT__ 160 #define __FUNCT__ "PCSetDiagonalScale" 161 /*@ 162 PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right 163 scaling as needed by certain time-stepping codes. 164 165 Logically Collective on PC 166 167 Input Parameters: 168 + pc - the preconditioner context 169 - s - scaling vector 170 171 Level: intermediate 172 173 Notes: The system solved via the Krylov method is 174 $ D M A D^{-1} y = D M b for left preconditioning or 175 $ D A M D^{-1} z = D b for right preconditioning 176 177 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 
178 179 .keywords: PC 180 181 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCGetDiagonalScale() 182 @*/ 183 PetscErrorCode PCSetDiagonalScale(PC pc,Vec s) 184 { 185 PetscErrorCode ierr; 186 187 PetscFunctionBegin; 188 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 189 PetscValidHeaderSpecific(s,VEC_CLASSID,2); 190 pc->diagonalscale = PETSC_TRUE; 191 ierr = PetscObjectReference((PetscObject)s);CHKERRQ(ierr); 192 if (pc->diagonalscaleleft) { 193 ierr = VecDestroy(pc->diagonalscaleleft);CHKERRQ(ierr); 194 } 195 pc->diagonalscaleleft = s; 196 if (!pc->diagonalscaleright) { 197 ierr = VecDuplicate(s,&pc->diagonalscaleright);CHKERRQ(ierr); 198 } 199 ierr = VecCopy(s,pc->diagonalscaleright);CHKERRQ(ierr); 200 ierr = VecReciprocal(pc->diagonalscaleright);CHKERRQ(ierr); 201 PetscFunctionReturn(0); 202 } 203 204 #undef __FUNCT__ 205 #define __FUNCT__ "PCDiagonalScaleLeft" 206 /*@ 207 PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes. 208 209 Logically Collective on PC 210 211 Input Parameters: 212 + pc - the preconditioner context 213 . in - input vector 214 + out - scaled vector (maybe the same as in) 215 216 Level: intermediate 217 218 Notes: The system solved via the Krylov method is 219 $ D M A D^{-1} y = D M b for left preconditioning or 220 $ D A M D^{-1} z = D b for right preconditioning 221 222 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 
223 224 If diagonal scaling is turned off and in is not out then in is copied to out 225 226 .keywords: PC 227 228 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleSet(), PCDiagonalScaleRight(), PCDiagonalScale() 229 @*/ 230 PetscErrorCode PCDiagonalScaleLeft(PC pc,Vec in,Vec out) 231 { 232 PetscErrorCode ierr; 233 234 PetscFunctionBegin; 235 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 236 PetscValidHeaderSpecific(in,VEC_CLASSID,2); 237 PetscValidHeaderSpecific(out,VEC_CLASSID,3); 238 if (pc->diagonalscale) { 239 ierr = VecPointwiseMult(out,pc->diagonalscaleleft,in);CHKERRQ(ierr); 240 } else if (in != out) { 241 ierr = VecCopy(in,out);CHKERRQ(ierr); 242 } 243 PetscFunctionReturn(0); 244 } 245 246 #undef __FUNCT__ 247 #define __FUNCT__ "PCDiagonalScaleRight" 248 /*@ 249 PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes. 250 251 Logically Collective on PC 252 253 Input Parameters: 254 + pc - the preconditioner context 255 . in - input vector 256 + out - scaled vector (maybe the same as in) 257 258 Level: intermediate 259 260 Notes: The system solved via the Krylov method is 261 $ D M A D^{-1} y = D M b for left preconditioning or 262 $ D A M D^{-1} z = D b for right preconditioning 263 264 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 
265 266 If diagonal scaling is turned off and in is not out then in is copied to out 267 268 .keywords: PC 269 270 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleSet(), PCDiagonalScale() 271 @*/ 272 PetscErrorCode PCDiagonalScaleRight(PC pc,Vec in,Vec out) 273 { 274 PetscErrorCode ierr; 275 276 PetscFunctionBegin; 277 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 278 PetscValidHeaderSpecific(in,VEC_CLASSID,2); 279 PetscValidHeaderSpecific(out,VEC_CLASSID,3); 280 if (pc->diagonalscale) { 281 ierr = VecPointwiseMult(out,pc->diagonalscaleright,in);CHKERRQ(ierr); 282 } else if (in != out) { 283 ierr = VecCopy(in,out);CHKERRQ(ierr); 284 } 285 PetscFunctionReturn(0); 286 } 287 288 #if 0 289 #undef __FUNCT__ 290 #define __FUNCT__ "PCPublish_Petsc" 291 static PetscErrorCode PCPublish_Petsc(PetscObject obj) 292 { 293 PetscFunctionBegin; 294 PetscFunctionReturn(0); 295 } 296 #endif 297 298 #undef __FUNCT__ 299 #define __FUNCT__ "PCCreate" 300 /*@ 301 PCCreate - Creates a preconditioner context. 302 303 Collective on MPI_Comm 304 305 Input Parameter: 306 . comm - MPI communicator 307 308 Output Parameter: 309 . pc - location to put the preconditioner context 310 311 Notes: 312 The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi with PCILU or ICC 313 in parallel. For dense matrices it is always PCNONE. 
314 315 Level: developer 316 317 .keywords: PC, create, context 318 319 .seealso: PCSetUp(), PCApply(), PCDestroy() 320 @*/ 321 PetscErrorCode PCCreate(MPI_Comm comm,PC *newpc) 322 { 323 PC pc; 324 PetscErrorCode ierr; 325 326 PetscFunctionBegin; 327 PetscValidPointer(newpc,1); 328 *newpc = 0; 329 #ifndef PETSC_USE_DYNAMIC_LIBRARIES 330 ierr = PCInitializePackage(PETSC_NULL);CHKERRQ(ierr); 331 #endif 332 333 ierr = PetscHeaderCreate(pc,_p_PC,struct _PCOps,PC_CLASSID,-1,"PC",comm,PCDestroy,PCView);CHKERRQ(ierr); 334 335 pc->mat = 0; 336 pc->pmat = 0; 337 pc->setupcalled = 0; 338 pc->setfromoptionscalled = 0; 339 pc->data = 0; 340 pc->diagonalscale = PETSC_FALSE; 341 pc->diagonalscaleleft = 0; 342 pc->diagonalscaleright = 0; 343 pc->reuse = 0; 344 345 pc->modifysubmatrices = 0; 346 pc->modifysubmatricesP = 0; 347 *newpc = pc; 348 PetscFunctionReturn(0); 349 350 } 351 352 /* -------------------------------------------------------------------------------*/ 353 354 #undef __FUNCT__ 355 #define __FUNCT__ "PCApply" 356 /*@ 357 PCApply - Applies the preconditioner to a vector. 358 359 Collective on PC and Vec 360 361 Input Parameters: 362 + pc - the preconditioner context 363 - x - input vector 364 365 Output Parameter: 366 . 
y - output vector 367 368 Level: developer 369 370 .keywords: PC, apply 371 372 .seealso: PCApplyTranspose(), PCApplyBAorAB() 373 @*/ 374 PetscErrorCode PCApply(PC pc,Vec x,Vec y) 375 { 376 PetscErrorCode ierr; 377 378 PetscFunctionBegin; 379 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 380 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 381 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 382 if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 383 if (pc->setupcalled < 2) { 384 ierr = PCSetUp(pc);CHKERRQ(ierr); 385 } 386 if (!pc->ops->apply) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have apply"); 387 ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 388 ierr = (*pc->ops->apply)(pc,x,y);CHKERRQ(ierr); 389 ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 390 PetscFunctionReturn(0); 391 } 392 393 #undef __FUNCT__ 394 #define __FUNCT__ "PCApplySymmetricLeft" 395 /*@ 396 PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector. 397 398 Collective on PC and Vec 399 400 Input Parameters: 401 + pc - the preconditioner context 402 - x - input vector 403 404 Output Parameter: 405 . y - output vector 406 407 Notes: 408 Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners. 
409 410 Level: developer 411 412 .keywords: PC, apply, symmetric, left 413 414 .seealso: PCApply(), PCApplySymmetricRight() 415 @*/ 416 PetscErrorCode PCApplySymmetricLeft(PC pc,Vec x,Vec y) 417 { 418 PetscErrorCode ierr; 419 420 PetscFunctionBegin; 421 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 422 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 423 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 424 if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 425 if (pc->setupcalled < 2) { 426 ierr = PCSetUp(pc);CHKERRQ(ierr); 427 } 428 if (!pc->ops->applysymmetricleft) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have left symmetric apply"); 429 ierr = PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr); 430 ierr = (*pc->ops->applysymmetricleft)(pc,x,y);CHKERRQ(ierr); 431 ierr = PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr); 432 PetscFunctionReturn(0); 433 } 434 435 #undef __FUNCT__ 436 #define __FUNCT__ "PCApplySymmetricRight" 437 /*@ 438 PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector. 439 440 Collective on PC and Vec 441 442 Input Parameters: 443 + pc - the preconditioner context 444 - x - input vector 445 446 Output Parameter: 447 . y - output vector 448 449 Level: developer 450 451 Notes: 452 Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners. 
453 454 .keywords: PC, apply, symmetric, right 455 456 .seealso: PCApply(), PCApplySymmetricLeft() 457 @*/ 458 PetscErrorCode PCApplySymmetricRight(PC pc,Vec x,Vec y) 459 { 460 PetscErrorCode ierr; 461 462 PetscFunctionBegin; 463 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 464 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 465 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 466 if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 467 if (pc->setupcalled < 2) { 468 ierr = PCSetUp(pc);CHKERRQ(ierr); 469 } 470 if (!pc->ops->applysymmetricright) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have left symmetric apply"); 471 ierr = PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 472 ierr = (*pc->ops->applysymmetricright)(pc,x,y);CHKERRQ(ierr); 473 ierr = PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 474 PetscFunctionReturn(0); 475 } 476 477 #undef __FUNCT__ 478 #define __FUNCT__ "PCApplyTranspose" 479 /*@ 480 PCApplyTranspose - Applies the transpose of preconditioner to a vector. 481 482 Collective on PC and Vec 483 484 Input Parameters: 485 + pc - the preconditioner context 486 - x - input vector 487 488 Output Parameter: 489 . 
y - output vector 490 491 Level: developer 492 493 .keywords: PC, apply, transpose 494 495 .seealso: PCApply(), PCApplyBAorAB(), PCApplyBAorABTranspose(), PCApplyTransposeExists() 496 @*/ 497 PetscErrorCode PCApplyTranspose(PC pc,Vec x,Vec y) 498 { 499 PetscErrorCode ierr; 500 501 PetscFunctionBegin; 502 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 503 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 504 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 505 if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 506 if (pc->setupcalled < 2) { 507 ierr = PCSetUp(pc);CHKERRQ(ierr); 508 } 509 if (!pc->ops->applytranspose) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have apply transpose"); 510 ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 511 ierr = (*pc->ops->applytranspose)(pc,x,y);CHKERRQ(ierr); 512 ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 513 PetscFunctionReturn(0); 514 } 515 516 #undef __FUNCT__ 517 #define __FUNCT__ "PCApplyTransposeExists" 518 /*@ 519 PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation 520 521 Collective on PC and Vec 522 523 Input Parameters: 524 . pc - the preconditioner context 525 526 Output Parameter: 527 . flg - PETSC_TRUE if a transpose operation is defined 528 529 Level: developer 530 531 .keywords: PC, apply, transpose 532 533 .seealso: PCApplyTranspose() 534 @*/ 535 PetscErrorCode PCApplyTransposeExists(PC pc,PetscBool *flg) 536 { 537 PetscFunctionBegin; 538 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 539 PetscValidPointer(flg,2); 540 if (pc->ops->applytranspose) *flg = PETSC_TRUE; 541 else *flg = PETSC_FALSE; 542 PetscFunctionReturn(0); 543 } 544 545 #undef __FUNCT__ 546 #define __FUNCT__ "PCApplyBAorAB" 547 /*@ 548 PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x. 549 550 Collective on PC and Vec 551 552 Input Parameters: 553 + pc - the preconditioner context 554 . 
side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC 555 . x - input vector 556 - work - work vector 557 558 Output Parameter: 559 . y - output vector 560 561 Level: developer 562 563 Notes: If the PC has had PCSetDiagonalScale() set then D M A D^{-1} for left preconditioning or D A M D^{-1} is actually applied. Note that the 564 specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling. 565 566 .keywords: PC, apply, operator 567 568 .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorABTranspose() 569 @*/ 570 PetscErrorCode PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work) 571 { 572 PetscErrorCode ierr; 573 574 PetscFunctionBegin; 575 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 576 PetscValidHeaderSpecific(x,VEC_CLASSID,3); 577 PetscValidHeaderSpecific(y,VEC_CLASSID,4); 578 PetscValidHeaderSpecific(work,VEC_CLASSID,5); 579 if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 580 if (side != PC_LEFT && side != PC_SYMMETRIC && side != PC_RIGHT) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric"); 581 if (pc->diagonalscale && side == PC_SYMMETRIC) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application"); 582 583 if (pc->setupcalled < 2) { 584 ierr = PCSetUp(pc);CHKERRQ(ierr); 585 } 586 587 if (pc->diagonalscale) { 588 if (pc->ops->applyBA) { 589 Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */ 590 ierr = VecDuplicate(x,&work2);CHKERRQ(ierr); 591 ierr = PCDiagonalScaleRight(pc,x,work2);CHKERRQ(ierr); 592 ierr = (*pc->ops->applyBA)(pc,side,work2,y,work);CHKERRQ(ierr); 593 ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr); 594 ierr = VecDestroy(work2);CHKERRQ(ierr); 595 } else if (side == PC_RIGHT) { 596 ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr); 597 ierr = 
PCApply(pc,y,work);CHKERRQ(ierr); 598 ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr); 599 ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr); 600 } else if (side == PC_LEFT) { 601 ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr); 602 ierr = MatMult(pc->mat,y,work);CHKERRQ(ierr); 603 ierr = PCApply(pc,work,y);CHKERRQ(ierr); 604 ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr); 605 } else if (side == PC_SYMMETRIC) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner"); 606 } else { 607 if (pc->ops->applyBA) { 608 ierr = (*pc->ops->applyBA)(pc,side,x,y,work);CHKERRQ(ierr); 609 } else if (side == PC_RIGHT) { 610 ierr = PCApply(pc,x,work);CHKERRQ(ierr); 611 ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr); 612 } else if (side == PC_LEFT) { 613 ierr = MatMult(pc->mat,x,work);CHKERRQ(ierr); 614 ierr = PCApply(pc,work,y);CHKERRQ(ierr); 615 } else if (side == PC_SYMMETRIC) { 616 /* There's an extra copy here; maybe should provide 2 work vectors instead? */ 617 ierr = PCApplySymmetricRight(pc,x,work);CHKERRQ(ierr); 618 ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr); 619 ierr = VecCopy(y,work);CHKERRQ(ierr); 620 ierr = PCApplySymmetricLeft(pc,work,y);CHKERRQ(ierr); 621 } 622 } 623 PetscFunctionReturn(0); 624 } 625 626 #undef __FUNCT__ 627 #define __FUNCT__ "PCApplyBAorABTranspose" 628 /*@ 629 PCApplyBAorABTranspose - Applies the transpose of the preconditioner 630 and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning, 631 NOT tr(B*A) = tr(A)*tr(B). 632 633 Collective on PC and Vec 634 635 Input Parameters: 636 + pc - the preconditioner context 637 . side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC 638 . x - input vector 639 - work - work vector 640 641 Output Parameter: 642 . 
y - output vector 643 644 645 Notes: this routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner 646 defined by B'. This is why this has the funny form that it computes tr(B) * tr(A) 647 648 Level: developer 649 650 .keywords: PC, apply, operator, transpose 651 652 .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorAB() 653 @*/ 654 PetscErrorCode PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work) 655 { 656 PetscErrorCode ierr; 657 658 PetscFunctionBegin; 659 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 660 PetscValidHeaderSpecific(x,VEC_CLASSID,3); 661 PetscValidHeaderSpecific(y,VEC_CLASSID,4); 662 PetscValidHeaderSpecific(work,VEC_CLASSID,5); 663 if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 664 if (pc->ops->applyBAtranspose) { 665 ierr = (*pc->ops->applyBAtranspose)(pc,side,x,y,work);CHKERRQ(ierr); 666 PetscFunctionReturn(0); 667 } 668 if (side != PC_LEFT && side != PC_RIGHT) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left"); 669 670 if (pc->setupcalled < 2) { 671 ierr = PCSetUp(pc);CHKERRQ(ierr); 672 } 673 674 if (side == PC_RIGHT) { 675 ierr = PCApplyTranspose(pc,x,work);CHKERRQ(ierr); 676 ierr = MatMultTranspose(pc->mat,work,y);CHKERRQ(ierr); 677 } else if (side == PC_LEFT) { 678 ierr = MatMultTranspose(pc->mat,x,work);CHKERRQ(ierr); 679 ierr = PCApplyTranspose(pc,work,y);CHKERRQ(ierr); 680 } 681 /* add support for PC_SYMMETRIC */ 682 PetscFunctionReturn(0); /* actually will never get here */ 683 } 684 685 /* -------------------------------------------------------------------------------*/ 686 687 #undef __FUNCT__ 688 #define __FUNCT__ "PCApplyRichardsonExists" 689 /*@ 690 PCApplyRichardsonExists - Determines whether a particular preconditioner has a 691 built-in fast application of Richardson's method. 692 693 Not Collective 694 695 Input Parameter: 696 . 
pc - the preconditioner 697 698 Output Parameter: 699 . exists - PETSC_TRUE or PETSC_FALSE 700 701 Level: developer 702 703 .keywords: PC, apply, Richardson, exists 704 705 .seealso: PCApplyRichardson() 706 @*/ 707 PetscErrorCode PCApplyRichardsonExists(PC pc,PetscBool *exists) 708 { 709 PetscFunctionBegin; 710 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 711 PetscValidIntPointer(exists,2); 712 if (pc->ops->applyrichardson) *exists = PETSC_TRUE; 713 else *exists = PETSC_FALSE; 714 PetscFunctionReturn(0); 715 } 716 717 #undef __FUNCT__ 718 #define __FUNCT__ "PCApplyRichardson" 719 /*@ 720 PCApplyRichardson - Applies several steps of Richardson iteration with 721 the particular preconditioner. This routine is usually used by the 722 Krylov solvers and not the application code directly. 723 724 Collective on PC 725 726 Input Parameters: 727 + pc - the preconditioner context 728 . b - the right hand side 729 . w - one work vector 730 . rtol - relative decrease in residual norm convergence criteria 731 . abstol - absolute residual norm convergence criteria 732 . dtol - divergence residual norm increase criteria 733 . its - the number of iterations to apply. 734 - guesszero - if the input x contains nonzero initial guess 735 736 Output Parameter: 737 + outits - number of iterations actually used (for SOR this always equals its) 738 . reason - the reason the apply terminated 739 - y - the solution (also contains initial guess if guesszero is PETSC_FALSE 740 741 Notes: 742 Most preconditioners do not support this function. Use the command 743 PCApplyRichardsonExists() to determine if one does. 
744 745 Except for the multigrid PC this routine ignores the convergence tolerances 746 and always runs for the number of iterations 747 748 Level: developer 749 750 .keywords: PC, apply, Richardson 751 752 .seealso: PCApplyRichardsonExists() 753 @*/ 754 PetscErrorCode PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason) 755 { 756 PetscErrorCode ierr; 757 758 PetscFunctionBegin; 759 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 760 PetscValidHeaderSpecific(b,VEC_CLASSID,2); 761 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 762 PetscValidHeaderSpecific(w,VEC_CLASSID,4); 763 if (b == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"b and y must be different vectors"); 764 if (pc->setupcalled < 2) { 765 ierr = PCSetUp(pc);CHKERRQ(ierr); 766 } 767 if (!pc->ops->applyrichardson) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have apply richardson"); 768 ierr = (*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason);CHKERRQ(ierr); 769 PetscFunctionReturn(0); 770 } 771 772 /* 773 a setupcall of 0 indicates never setup, 774 1 needs to be resetup, 775 2 does not need any changes. 776 */ 777 #undef __FUNCT__ 778 #define __FUNCT__ "PCSetUp" 779 /*@ 780 PCSetUp - Prepares for the use of a preconditioner. 781 782 Collective on PC 783 784 Input Parameter: 785 . 
pc - the preconditioner context 786 787 Level: developer 788 789 .keywords: PC, setup 790 791 .seealso: PCCreate(), PCApply(), PCDestroy() 792 @*/ 793 PetscErrorCode PCSetUp(PC pc) 794 { 795 PetscErrorCode ierr; 796 const char *def; 797 798 PetscFunctionBegin; 799 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 800 if (!pc->mat) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first"); 801 802 if (pc->setupcalled > 1) { 803 ierr = PetscInfo(pc,"Setting PC with identical preconditioner\n");CHKERRQ(ierr); 804 PetscFunctionReturn(0); 805 } else if (!pc->setupcalled) { 806 ierr = PetscInfo(pc,"Setting up new PC\n");CHKERRQ(ierr); 807 } else if (pc->flag == SAME_NONZERO_PATTERN) { 808 ierr = PetscInfo(pc,"Setting up PC with same nonzero pattern\n");CHKERRQ(ierr); 809 } else { 810 ierr = PetscInfo(pc,"Setting up PC with different nonzero pattern\n");CHKERRQ(ierr); 811 } 812 813 if (!((PetscObject)pc)->type_name) { 814 ierr = PCGetDefaultType_Private(pc,&def);CHKERRQ(ierr); 815 ierr = PCSetType(pc,def);CHKERRQ(ierr); 816 } 817 818 ierr = PetscLogEventBegin(PC_SetUp,pc,0,0,0);CHKERRQ(ierr); 819 if (pc->ops->setup) { 820 ierr = (*pc->ops->setup)(pc);CHKERRQ(ierr); 821 } 822 pc->setupcalled = 2; 823 ierr = PetscLogEventEnd(PC_SetUp,pc,0,0,0);CHKERRQ(ierr); 824 PetscFunctionReturn(0); 825 } 826 827 #undef __FUNCT__ 828 #define __FUNCT__ "PCSetUpOnBlocks" 829 /*@ 830 PCSetUpOnBlocks - Sets up the preconditioner for each block in 831 the block Jacobi, block Gauss-Seidel, and overlapping Schwarz 832 methods. 833 834 Collective on PC 835 836 Input Parameters: 837 . 
pc - the preconditioner context 838 839 Level: developer 840 841 .keywords: PC, setup, blocks 842 843 .seealso: PCCreate(), PCApply(), PCDestroy(), PCSetUp() 844 @*/ 845 PetscErrorCode PCSetUpOnBlocks(PC pc) 846 { 847 PetscErrorCode ierr; 848 849 PetscFunctionBegin; 850 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 851 if (!pc->ops->setuponblocks) PetscFunctionReturn(0); 852 ierr = PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr); 853 ierr = (*pc->ops->setuponblocks)(pc);CHKERRQ(ierr); 854 ierr = PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr); 855 PetscFunctionReturn(0); 856 } 857 858 #undef __FUNCT__ 859 #define __FUNCT__ "PCSetModifySubMatrices" 860 /*@C 861 PCSetModifySubMatrices - Sets a user-defined routine for modifying the 862 submatrices that arise within certain subdomain-based preconditioners. 863 The basic submatrices are extracted from the preconditioner matrix as 864 usual; the user can then alter these (for example, to set different boundary 865 conditions for each submatrix) before they are used for the local solves. 866 867 Logically Collective on PC 868 869 Input Parameters: 870 + pc - the preconditioner context 871 . func - routine for modifying the submatrices 872 - ctx - optional user-defined context (may be null) 873 874 Calling sequence of func: 875 $ func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx); 876 877 . row - an array of index sets that contain the global row numbers 878 that comprise each local submatrix 879 . col - an array of index sets that contain the global column numbers 880 that comprise each local submatrix 881 . submat - array of local submatrices 882 - ctx - optional user-defined context for private data for the 883 user-defined func routine (may be null) 884 885 Notes: 886 PCSetModifySubMatrices() MUST be called before KSPSetUp() and 887 KSPSolve(). 
888 889 A routine set by PCSetModifySubMatrices() is currently called within 890 the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM) 891 preconditioners. All other preconditioners ignore this routine. 892 893 Level: advanced 894 895 .keywords: PC, set, modify, submatrices 896 897 .seealso: PCModifySubMatrices() 898 @*/ 899 PetscErrorCode PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx) 900 { 901 PetscFunctionBegin; 902 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 903 pc->modifysubmatrices = func; 904 pc->modifysubmatricesP = ctx; 905 PetscFunctionReturn(0); 906 } 907 908 #undef __FUNCT__ 909 #define __FUNCT__ "PCModifySubMatrices" 910 /*@C 911 PCModifySubMatrices - Calls an optional user-defined routine within 912 certain preconditioners if one has been set with PCSetModifySubMarices(). 913 914 Collective on PC 915 916 Input Parameters: 917 + pc - the preconditioner context 918 . nsub - the number of local submatrices 919 . row - an array of index sets that contain the global row numbers 920 that comprise each local submatrix 921 . col - an array of index sets that contain the global column numbers 922 that comprise each local submatrix 923 . submat - array of local submatrices 924 - ctx - optional user-defined context for private data for the 925 user-defined routine (may be null) 926 927 Output Parameter: 928 . submat - array of local submatrices (the entries of which may 929 have been modified) 930 931 Notes: 932 The user should NOT generally call this routine, as it will 933 automatically be called within certain preconditioners (currently 934 block Jacobi, additive Schwarz) if set. 935 936 The basic submatrices are extracted from the preconditioner matrix 937 as usual; the user can then alter these (for example, to set different 938 boundary conditions for each submatrix) before they are used for the 939 local solves. 

   Level: developer

.keywords: PC, modify, submatrices

.seealso: PCSetModifySubMatrices()
@*/
PetscErrorCode  PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* no-op unless a callback was registered with PCSetModifySubMatrices() */
  if (!pc->modifysubmatrices) PetscFunctionReturn(0);
  ierr = PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr);
  ierr = (*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetOperators"
/*@
   PCSetOperators - Sets the matrix associated with the linear system and
   a (possibly) different one associated with the preconditioner.

   Logically Collective on PC and Mat

   Input Parameters:
+  pc - the preconditioner context
.  Amat - the matrix associated with the linear system
.  Pmat - the matrix to be used in constructing the preconditioner, usually
          the same as Amat.
-  flag - flag indicating information about the preconditioner matrix structure
          during successive linear solves.  This flag is ignored the first time a
          linear system is solved, and thus is irrelevant when solving just one linear
          system.

   Notes:
   The flag can be used to eliminate unnecessary work in the preconditioner
   during the repeated solution of linear systems of the same size.  The
   available options are
+    SAME_PRECONDITIONER -
       Pmat is identical during successive linear solves.
       This option is intended for folks who are using
       different Amat and Pmat matrices and wish to reuse the
       same preconditioner matrix.  For example, this option
       saves work by not recomputing incomplete factorization
       for ILU/ICC preconditioners.
.    SAME_NONZERO_PATTERN -
       Pmat has the same nonzero structure during
       successive linear solves.
-    DIFFERENT_NONZERO_PATTERN -
       Pmat does not have the same nonzero structure.

    Passing a PETSC_NULL for Amat or Pmat removes the matrix that is currently used.

    If you wish to replace either Amat or Pmat but leave the other one untouched then
    first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
    on it and then pass it back in your call to KSPSetOperators().

    Caution:
    If you specify SAME_NONZERO_PATTERN, PETSc believes your assertion
    and does not check the structure of the matrix.  If you erroneously
    claim that the structure is the same when it actually is not, the new
    preconditioner will not function correctly.  Thus, use this optimization
    feature carefully!

    If in doubt about whether your preconditioner matrix has changed
    structure or not, use the flag DIFFERENT_NONZERO_PATTERN.

    More Notes about Repeated Solution of Linear Systems:
    PETSc does NOT reset the matrix entries of either Amat or Pmat
    to zero after a linear solve; the user is completely responsible for
    matrix assembly.  See the routine MatZeroEntries() if desiring to
    zero all elements of a matrix.

   Level: intermediate

.keywords: PC, set, operators, matrix, linear system

.seealso: PCGetOperators(), MatZeroEntries()
@*/
PetscErrorCode  PCSetOperators(PC pc,Mat Amat,Mat Pmat,MatStructure flag)
{
  PetscErrorCode ierr;
  PetscInt       m1,n1,m2,n2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (Amat) PetscValidHeaderSpecific(Amat,MAT_CLASSID,2);
  if (Pmat) PetscValidHeaderSpecific(Pmat,MAT_CLASSID,3);
  if (Amat) PetscCheckSameComm(pc,1,Amat,2);
  if (Pmat) PetscCheckSameComm(pc,1,Pmat,3);
  /* once setup has run, the local sizes of the operators may not change */
  if (pc->setupcalled && Amat && Pmat) {
    ierr = MatGetLocalSize(Amat,&m1,&n1);CHKERRQ(ierr);
    ierr = MatGetLocalSize(pc->mat,&m2,&n2);CHKERRQ(ierr);
    if (m1 != m2 || n1 != n2) {
      SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
    }
    ierr = MatGetLocalSize(Pmat,&m1,&n1);CHKERRQ(ierr);
    ierr = MatGetLocalSize(pc->pmat,&m2,&n2);CHKERRQ(ierr);
    if (m1 != m2 || n1 != n2) {
      SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
    }
  }

  /* reference first in case the matrices are the same */
  if (Amat) {ierr = PetscObjectReference((PetscObject)Amat);CHKERRQ(ierr);}
  if (pc->mat) {ierr = MatDestroy(pc->mat);CHKERRQ(ierr);}
  if (Pmat) {ierr = PetscObjectReference((PetscObject)Pmat);CHKERRQ(ierr);}
  if (pc->pmat) {ierr = MatDestroy(pc->pmat);CHKERRQ(ierr);}
  pc->mat  = Amat;
  pc->pmat = Pmat;

  /* a fully set up (==2) PC drops back to needing PCSetUpOnBlocks unless the preconditioner is unchanged */
  if (pc->setupcalled == 2 && flag != SAME_PRECONDITIONER) {
    pc->setupcalled = 1;
  }
  pc->flag = flag;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGetOperators"
/*@C
   PCGetOperators - Gets the matrix associated with the linear system and
   possibly a different one associated with the
   preconditioner.

   Not collective, though parallel Mats are returned if the PC is parallel

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix associated with the linear system
.  pmat - matrix associated with the preconditioner, usually the same
          as mat.
-  flag - flag indicating information about the preconditioner
          matrix structure.  See PCSetOperators() for details.

   Level: intermediate

   Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators
      are created in PC and returned to the user. In this case, if both operators
      mat and pmat are requested, two DIFFERENT operators will be returned. If
      only one is requested both operators in the PC will be the same (i.e. as
      if one had called KSP/PCSetOperators() with the same argument for both Mats).
      The user must set the sizes of the returned matrices and their type etc just
      as if the user created them with MatCreate(). For example,

$         KSP/PCGetOperators(ksp/pc,&mat,PETSC_NULL,PETSC_NULL); is equivalent to
$           set size, type, etc of mat

$         MatCreate(comm,&mat);
$         KSP/PCSetOperators(ksp/pc,mat,mat,SAME_NONZERO_PATTERN);
$         PetscObjectDereference((PetscObject)mat);
$           set size, type, etc of mat

     and

$         KSP/PCGetOperators(ksp/pc,&mat,&pmat,PETSC_NULL); is equivalent to
$           set size, type, etc of mat and pmat

$         MatCreate(comm,&mat);
$         MatCreate(comm,&pmat);
$         KSP/PCSetOperators(ksp/pc,mat,pmat,SAME_NONZERO_PATTERN);
$         PetscObjectDereference((PetscObject)mat);
$         PetscObjectDereference((PetscObject)pmat);
$           set size, type, etc of mat and pmat

    The rationale for this support is so that when creating a TS, SNES, or KSP the hierarchy
    of underlying objects (i.e. SNES, KSP, PC, Mat) and their lifespans can be completely
    managed by the top most level object (i.e. the TS, SNES, or KSP). Another way to look
    at this is when you create a SNES you do not NEED to create a KSP and attach it to
    the SNES object (the SNES object manages it for you). Similarly when you create a KSP
    you do not need to attach a PC to it (the KSP object manages the PC object for you).
    Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when
    it can be created for you?


.keywords: PC, get, operators, matrix, linear system

.seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperatorsSet()
@*/
PetscErrorCode  PCGetOperators(PC pc,Mat *mat,Mat *pmat,MatStructure *flag)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat) {
    if (!pc->mat) {
      if (pc->pmat && !pmat) {  /* pmat has been set, but user did not request it, so use for mat */
        pc->mat = pc->pmat;
        ierr = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr);
      } else {                  /* both mat and pmat are empty */
        ierr = MatCreate(((PetscObject)pc)->comm,&pc->mat);CHKERRQ(ierr);
        if (!pmat) { /* user did NOT request pmat, so make same as mat */
          pc->pmat = pc->mat;
          ierr = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr);
        }
      }
    }
    *mat = pc->mat;
  }
  if (pmat) {
    if (!pc->pmat) {
      if (pc->mat && !mat) { /* mat has been set but was not requested, so use for pmat */
        pc->pmat = pc->mat;
        ierr = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr);
      } else {
        ierr = MatCreate(((PetscObject)pc)->comm,&pc->pmat);CHKERRQ(ierr);
        if (!mat) { /* user did NOT request mat, so make same as pmat */
          pc->mat = pc->pmat;
          ierr = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr);
        }
      }
    }
    *pmat = pc->pmat;
  }
  if (flag) *flag = pc->flag;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGetOperatorsSet"
/*@C
   PCGetOperatorsSet - Determines if the matrix associated with the linear system and
   possibly a different one associated with the preconditioner have been set in the PC.

   Not collective, though the results on all processes should be the same

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix associated with the linear system was set
-  pmat - matrix associated with the preconditioner was set, usually the same

   Level: intermediate

.keywords: PC, get, operators, matrix, linear system

.seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperators()
@*/
PetscErrorCode  PCGetOperatorsSet(PC pc,PetscBool *mat,PetscBool *pmat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  *mat  = (pc->mat)  ? PETSC_TRUE : PETSC_FALSE;
  if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCFactorGetMatrix"
/*@
   PCFactorGetMatrix - Gets the factored matrix from the
   preconditioner context.  This routine is valid only for the LU,
   incomplete LU, Cholesky, and incomplete Cholesky methods.

   Not Collective on PC though Mat is parallel if PC is parallel

   Input Parameters:
.  pc - the preconditioner context

   Output parameters:
.
mat - the factored matrix

   Level: advanced

   Notes: Does not increase the reference count for the matrix so DO NOT destroy it

.keywords: PC, get, factored, matrix
@*/
PetscErrorCode  PCFactorGetMatrix(PC pc,Mat *mat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(mat,2);
  /* delegate to the type-specific implementation; not every PC type factors its matrix */
  if (pc->ops->getfactoredmatrix) {
    ierr = (*pc->ops->getfactoredmatrix)(pc,mat);CHKERRQ(ierr);
  } else {
    SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC type does not support getting factor matrix");
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetOptionsPrefix"
/*@C
   PCSetOptionsPrefix - Sets the prefix used for searching for all
   PC options in the database.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  prefix - the prefix string to prepend to all PC option requests

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the
   hyphen.

   Level: advanced

.keywords: PC, set, options, prefix, database

.seealso: PCAppendOptionsPrefix(), PCGetOptionsPrefix()
@*/
PetscErrorCode  PCSetOptionsPrefix(PC pc,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscObjectSetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCAppendOptionsPrefix"
/*@C
   PCAppendOptionsPrefix - Appends to the prefix used for searching for all
   PC options in the database.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  prefix - the prefix string to prepend to all PC option requests

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the
   hyphen.

   Level: advanced

.keywords: PC, append, options, prefix, database

.seealso: PCSetOptionsPrefix(), PCGetOptionsPrefix()
@*/
PetscErrorCode  PCAppendOptionsPrefix(PC pc,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGetOptionsPrefix"
/*@C
   PCGetOptionsPrefix - Gets the prefix used for searching for all
   PC options in the database.

   Not Collective

   Input Parameters:
.  pc - the preconditioner context

   Output Parameters:
.  prefix - pointer to the prefix string used, is returned

   Notes: On the Fortran side, the user should pass in a string 'prefix' of
          sufficient length to hold the prefix.

   Level: advanced

.keywords: PC, get, options, prefix, database

.seealso: PCSetOptionsPrefix(), PCAppendOptionsPrefix()
@*/
PetscErrorCode  PCGetOptionsPrefix(PC pc,const char *prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(prefix,2);
  ierr = PetscObjectGetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCPreSolve"
/*@
   PCPreSolve - Optional pre-solve phase, intended for any
   preconditioner-specific actions that must be performed before
   the iterative solve itself.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  ksp - the Krylov subspace context

   Level: developer

   Sample of Usage:
.vb
    PCPreSolve(pc,ksp);
    KSPSolve(ksp,b,x);
    PCPostSolve(pc,ksp);
.ve

   Notes:
   The pre-solve phase is distinct from the PCSetUp() phase.

   KSPSolve() calls this directly, so is rarely called by the user.

.keywords: PC, pre-solve

.seealso: PCPostSolve()
@*/
PetscErrorCode  PCPreSolve(PC pc,KSP ksp)
{
  PetscErrorCode ierr;
  Vec            x,rhs;
  Mat            A,B;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
  ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr);
  ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr);
  /*
      Scale the system and have the matrices use the scaled form
    only if the two matrices are actually the same (and hence
    have the same scaling)
  */
  ierr = PCGetOperators(pc,&A,&B,PETSC_NULL);CHKERRQ(ierr);
  if (A == B) {
    ierr = MatScaleSystem(pc->mat,rhs,x);CHKERRQ(ierr);
    ierr = MatUseScaledForm(pc->mat,PETSC_TRUE);CHKERRQ(ierr);
  }

  if (pc->ops->presolve) {
    ierr = (*pc->ops->presolve)(pc,ksp,rhs,x);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCPostSolve"
/*@
   PCPostSolve - Optional post-solve phase, intended for any
   preconditioner-specific actions that must be performed after
   the iterative solve itself.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  ksp - the Krylov subspace context

   Sample of Usage:
.vb
    PCPreSolve(pc,ksp);
    KSPSolve(ksp,b,x);
    PCPostSolve(pc,ksp);
.ve

   Note:
   KSPSolve() calls this routine directly, so it is rarely called by the user.

   Level: developer

.keywords: PC, post-solve

.seealso: PCPreSolve(), KSPSolve()
@*/
PetscErrorCode  PCPostSolve(PC pc,KSP ksp)
{
  PetscErrorCode ierr;
  Vec            x,rhs;
  Mat            A,B;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
  ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr);
  ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr);
  if (pc->ops->postsolve) {
    ierr = (*pc->ops->postsolve)(pc,ksp,rhs,x);CHKERRQ(ierr);
  }
  /*
      Undo the system scaling applied in PCPreSolve(), but only if the
    two matrices are actually the same (and hence have the same scaling)
  */
  ierr = PCGetOperators(pc,&A,&B,PETSC_NULL);CHKERRQ(ierr);
  if (A == B) {
    ierr = MatUnScaleSystem(pc->mat,rhs,x);CHKERRQ(ierr);
    ierr = MatUseScaledForm(pc->mat,PETSC_FALSE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCView"
/*@C
   PCView - Prints the PC data structure.

   Collective on PC

   Input Parameters:
+  PC - the PC context
-  viewer - optional visualization context

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
         output where only the first processor opens
         the file.  All other processors send their
         data to the first processor to print.

   The user can open an alternative visualization contexts with
   PetscViewerASCIIOpen() (output to a specified file).

   Level: developer

.keywords: PC, view

.seealso: KSPView(), PetscViewerASCIIOpen()
@*/
PetscErrorCode  PCView(PC pc,PetscViewer viewer)
{
  const PCType      cstr;
  PetscErrorCode    ierr;
  PetscBool         iascii,isstring;
  PetscViewerFormat format;  /* NOTE(review): fetched below but never read — possibly vestigial; confirm before removing */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* default to synchronized stdout on the PC's communicator when no viewer is given */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)pc)->comm,&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  PetscCheckSameComm(pc,1,viewer,2);

  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer,"PC Object");CHKERRQ(ierr);
    /* type-specific view routine, indented one tab under the header */
    if (pc->ops->view) {
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
    }
    if (pc->mat) {
      ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);CHKERRQ(ierr);
      if (pc->pmat == pc->mat) {
        ierr = PetscViewerASCIIPrintf(viewer," linear system matrix = precond matrix:\n");CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      } else {
        if (pc->pmat) {
          ierr = PetscViewerASCIIPrintf(viewer," linear system matrix followed by preconditioner matrix:\n");CHKERRQ(ierr);
        } else {
          ierr = PetscViewerASCIIPrintf(viewer," linear system matrix:\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);
        if (pc->pmat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);}
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PCGetType(pc,&cstr);CHKERRQ(ierr);
    ierr = PetscViewerStringSPrintf(viewer," %-7.7s",cstr);CHKERRQ(ierr);
    if (pc->ops->view) {ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);}
  } else {
    SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported by PC",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "PCSetInitialGuessNonzero"
/*@
   PCSetInitialGuessNonzero - Tells the iterative solver that the
   initial guess is nonzero; otherwise PC assumes the initial guess
   is to be zero (and thus zeros it out before solving).

   Logically Collective on PC

   Input Parameters:
+  pc - iterative context obtained from PCCreate()
-  flg - PETSC_TRUE indicates the guess is non-zero, PETSC_FALSE indicates the guess is zero

   Level: Developer

   Notes:
    This is a weird function. Since PC's are linear operators on the right hand side they
    CANNOT use an initial guess. This function is for the "pass-through" preconditioners
    PCKSP, PCREDUNDANT and PCOPENMP and causes the inner KSP object to use the nonzero
    initial guess. Not currently working for PCREDUNDANT, that has to be rewritten to use KSP.
1549 1550 1551 .keywords: PC, set, initial guess, nonzero 1552 1553 .seealso: PCGetInitialGuessNonzero(), PCSetInitialGuessKnoll(), PCGetInitialGuessKnoll() 1554 @*/ 1555 PetscErrorCode PCSetInitialGuessNonzero(PC pc,PetscBool flg) 1556 { 1557 PetscFunctionBegin; 1558 PetscValidLogicalCollectiveBool(pc,flg,2); 1559 pc->nonzero_guess = flg; 1560 PetscFunctionReturn(0); 1561 } 1562 1563 #undef __FUNCT__ 1564 #define __FUNCT__ "PCRegister" 1565 /*@C 1566 PCRegister - See PCRegisterDynamic() 1567 1568 Level: advanced 1569 @*/ 1570 PetscErrorCode PCRegister(const char sname[],const char path[],const char name[],PetscErrorCode (*function)(PC)) 1571 { 1572 PetscErrorCode ierr; 1573 char fullname[PETSC_MAX_PATH_LEN]; 1574 1575 PetscFunctionBegin; 1576 1577 ierr = PetscFListConcat(path,name,fullname);CHKERRQ(ierr); 1578 ierr = PetscFListAdd(&PCList,sname,fullname,(void (*)(void))function);CHKERRQ(ierr); 1579 PetscFunctionReturn(0); 1580 } 1581 1582 #undef __FUNCT__ 1583 #define __FUNCT__ "PCComputeExplicitOperator" 1584 /*@ 1585 PCComputeExplicitOperator - Computes the explicit preconditioned operator. 1586 1587 Collective on PC 1588 1589 Input Parameter: 1590 . pc - the preconditioner object 1591 1592 Output Parameter: 1593 . mat - the explict preconditioned operator 1594 1595 Notes: 1596 This computation is done by applying the operators to columns of the 1597 identity matrix. 1598 1599 Currently, this routine uses a dense matrix format when 1 processor 1600 is used and a sparse format otherwise. This routine is costly in general, 1601 and is recommended for use only with relatively small systems. 
1602 1603 Level: advanced 1604 1605 .keywords: PC, compute, explicit, operator 1606 1607 .seealso: KSPComputeExplicitOperator() 1608 1609 @*/ 1610 PetscErrorCode PCComputeExplicitOperator(PC pc,Mat *mat) 1611 { 1612 Vec in,out; 1613 PetscErrorCode ierr; 1614 PetscInt i,M,m,*rows,start,end; 1615 PetscMPIInt size; 1616 MPI_Comm comm; 1617 PetscScalar *array,one = 1.0; 1618 1619 PetscFunctionBegin; 1620 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1621 PetscValidPointer(mat,2); 1622 1623 comm = ((PetscObject)pc)->comm; 1624 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 1625 1626 if (!pc->pmat) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"You must call KSPSetOperators() or PCSetOperators() before this call"); 1627 ierr = MatGetVecs(pc->pmat,&in,0);CHKERRQ(ierr); 1628 ierr = VecDuplicate(in,&out);CHKERRQ(ierr); 1629 ierr = VecGetOwnershipRange(in,&start,&end);CHKERRQ(ierr); 1630 ierr = VecGetSize(in,&M);CHKERRQ(ierr); 1631 ierr = VecGetLocalSize(in,&m);CHKERRQ(ierr); 1632 ierr = PetscMalloc((m+1)*sizeof(PetscInt),&rows);CHKERRQ(ierr); 1633 for (i=0; i<m; i++) {rows[i] = start + i;} 1634 1635 ierr = MatCreate(comm,mat);CHKERRQ(ierr); 1636 ierr = MatSetSizes(*mat,m,m,M,M);CHKERRQ(ierr); 1637 if (size == 1) { 1638 ierr = MatSetType(*mat,MATSEQDENSE);CHKERRQ(ierr); 1639 ierr = MatSeqDenseSetPreallocation(*mat,PETSC_NULL);CHKERRQ(ierr); 1640 } else { 1641 ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr); 1642 ierr = MatMPIAIJSetPreallocation(*mat,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr); 1643 } 1644 1645 for (i=0; i<M; i++) { 1646 1647 ierr = VecSet(in,0.0);CHKERRQ(ierr); 1648 ierr = VecSetValues(in,1,&i,&one,INSERT_VALUES);CHKERRQ(ierr); 1649 ierr = VecAssemblyBegin(in);CHKERRQ(ierr); 1650 ierr = VecAssemblyEnd(in);CHKERRQ(ierr); 1651 1652 /* should fix, allowing user to choose side */ 1653 ierr = PCApply(pc,in,out);CHKERRQ(ierr); 1654 1655 ierr = VecGetArray(out,&array);CHKERRQ(ierr); 1656 ierr = MatSetValues(*mat,m,rows,1,&i,array,INSERT_VALUES);CHKERRQ(ierr); 1657 
ierr = VecRestoreArray(out,&array);CHKERRQ(ierr); 1658 1659 } 1660 ierr = PetscFree(rows);CHKERRQ(ierr); 1661 ierr = VecDestroy(out);CHKERRQ(ierr); 1662 ierr = MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1663 ierr = MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1664 PetscFunctionReturn(0); 1665 } 1666 1667