/*
     The PC (preconditioner) interface routines, callable by users.
*/
#include <private/pcimpl.h>            /*I "petscksp.h" I*/

/* Logging support */
PetscClassId  PC_CLASSID;
PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks;

#undef __FUNCT__
#define __FUNCT__ "PCGetDefaultType_Private"
/*
   Picks a default PC type for the given PC based on the communicator size,
   which factorizations the preconditioner matrix supports, and whether the
   matrix provides a diagonal block (i.e. looks like a parallel matrix format).
   The chosen name is returned in type[]; no PC type is actually set here.
*/
PetscErrorCode PCGetDefaultType_Private(PC pc,const char* type[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size;
  PetscBool      flg1,flg2,set,flg3;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  if (pc->pmat) {
    /* f is non-NULL if the matrix type implements MatGetDiagonalBlock() */
    PetscErrorCode (*f)(Mat,MatReuse,Mat*);
    ierr = PetscObjectQueryFunction((PetscObject)pc->pmat,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
    if (size == 1) {
      ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1);CHKERRQ(ierr);
      ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2);CHKERRQ(ierr);
      ierr = MatIsSymmetricKnown(pc->pmat,&set,&flg3);CHKERRQ(ierr);
      /* prefer ICC when it is the only factorization, or when the matrix is known symmetric */
      if (flg1 && (!flg2 || (set && flg3))) {
        *type = PCICC;
      } else if (flg2) {
        *type = PCILU;
      } else if (f) { /* likely is a parallel matrix run on one processor */
        *type = PCBJACOBI;
      } else {
        *type = PCNONE;
      }
    } else {
      if (f) {
        *type = PCBJACOBI;
      } else {
        *type = PCNONE;
      }
    }
  } else {
    /* no preconditioner matrix yet; guess from the communicator size alone */
    if (size == 1) {
      *type = PCILU;
    } else {
      *type = PCBJACOBI;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCReset"
/*@
   PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats

   Collective on PC

   Input Parameter:
.
pc - the preconditioner context 64 65 Level: developer 66 67 Notes: This allows a PC to be reused for a different sized linear system but using the same options that have been previously set in the PC 68 69 .keywords: PC, destroy 70 71 .seealso: PCCreate(), PCSetUp() 72 @*/ 73 PetscErrorCode PCReset(PC pc) 74 { 75 PetscErrorCode ierr; 76 77 PetscFunctionBegin; 78 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 79 if (pc->ops->reset) { 80 ierr = (*pc->ops->reset)(pc); 81 } 82 ierr = VecDestroy(&pc->diagonalscaleright);CHKERRQ(ierr); 83 ierr = VecDestroy(&pc->diagonalscaleleft);CHKERRQ(ierr); 84 ierr = MatDestroy(&pc->pmat);CHKERRQ(ierr); 85 ierr = MatDestroy(&pc->mat);CHKERRQ(ierr); 86 pc->setupcalled = 0; 87 PetscFunctionReturn(0); 88 } 89 90 #undef __FUNCT__ 91 #define __FUNCT__ "PCDestroy" 92 /*@ 93 PCDestroy - Destroys PC context that was created with PCCreate(). 94 95 Collective on PC 96 97 Input Parameter: 98 . pc - the preconditioner context 99 100 Level: developer 101 102 .keywords: PC, destroy 103 104 .seealso: PCCreate(), PCSetUp() 105 @*/ 106 PetscErrorCode PCDestroy(PC *pc) 107 { 108 PetscErrorCode ierr; 109 110 PetscFunctionBegin; 111 if (!*pc) PetscFunctionReturn(0); 112 PetscValidHeaderSpecific((*pc),PC_CLASSID,1); 113 if (--((PetscObject)(*pc))->refct > 0) {*pc = 0; PetscFunctionReturn(0);} 114 115 ierr = PCReset(*pc);CHKERRQ(ierr); 116 117 /* if memory was published with AMS then destroy it */ 118 ierr = PetscObjectDepublish((*pc));CHKERRQ(ierr); 119 if ((*pc)->ops->destroy) {ierr = (*(*pc)->ops->destroy)((*pc));CHKERRQ(ierr);} 120 ierr = DMDestroy(&(*pc)->dm);CHKERRQ(ierr); 121 ierr = PetscHeaderDestroy(pc);CHKERRQ(ierr); 122 PetscFunctionReturn(0); 123 } 124 125 #undef __FUNCT__ 126 #define __FUNCT__ "PCGetDiagonalScale" 127 /*@C 128 PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right 129 scaling as needed by certain time-stepping codes. 130 131 Logically Collective on PC 132 133 Input Parameter: 134 . 
  pc - the preconditioner context

   Output Parameter:
.  flag - PETSC_TRUE if it applies the scaling

   Level: developer

   Notes: If this returns PETSC_TRUE then the system solved via the Krylov method is
$           D M A D^{-1} y = D M b  for left preconditioning or
$           D A M D^{-1} z = D b for right preconditioning

.keywords: PC

.seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCSetDiagonalScale()
@*/
PetscErrorCode PCGetDiagonalScale(PC pc,PetscBool *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(flag,2);
  /* simple query; the flag is set by PCSetDiagonalScale() */
  *flag = pc->diagonalscale;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetDiagonalScale"
/*@
   PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right
      scaling as needed by certain time-stepping codes.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  s - scaling vector

   Level: intermediate

   Notes: The system solved via the Krylov method is
$           D M A D^{-1} y = D M b  for left preconditioning or
$           D A M D^{-1} z = D b for right preconditioning

   PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.

.keywords: PC

.seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCGetDiagonalScale()
@*/
PetscErrorCode PCSetDiagonalScale(PC pc,Vec s)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(s,VEC_CLASSID,2);
  pc->diagonalscale     = PETSC_TRUE;
  /* take a reference before destroying any previous left-scale vector,
     in case the caller passes in the vector that is already stored */
  ierr = PetscObjectReference((PetscObject)s);CHKERRQ(ierr);
  ierr = VecDestroy(&pc->diagonalscaleleft);CHKERRQ(ierr);
  pc->diagonalscaleleft = s;
  /* the right scaling is the elementwise reciprocal D^{-1} of s */
  ierr = VecDuplicate(s,&pc->diagonalscaleright);CHKERRQ(ierr);
  ierr = VecCopy(s,pc->diagonalscaleright);CHKERRQ(ierr);
  ierr = VecReciprocal(pc->diagonalscaleright);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDiagonalScaleLeft"
/*@
   PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  in - input vector

   Output Parameter:
.  out - scaled vector (may be the same as in)

   Level: intermediate

   Notes: The system solved via the Krylov method is
$           D M A D^{-1} y = D M b  for left preconditioning or
$           D A M D^{-1} z = D b for right preconditioning

   PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.

   If diagonal scaling is turned off and in is not out then in is copied to out

.keywords: PC

.seealso: PCCreate(), PCSetUp(), PCDiagonalScaleSet(), PCDiagonalScaleRight(), PCDiagonalScale()
@*/
PetscErrorCode PCDiagonalScaleLeft(PC pc,Vec in,Vec out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_CLASSID,3);
  if (pc->diagonalscale) {
    /* out = D .* in */
    ierr = VecPointwiseMult(out,pc->diagonalscaleleft,in);CHKERRQ(ierr);
  } else if (in != out) {
    /* no scaling configured: behave as the identity */
    ierr = VecCopy(in,out);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDiagonalScaleRight"
/*@
   PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  in - input vector

   Output Parameter:
.  out - scaled vector (may be the same as in)

   Level: intermediate

   Notes: The system solved via the Krylov method is
$           D M A D^{-1} y = D M b  for left preconditioning or
$           D A M D^{-1} z = D b for right preconditioning

   PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.

   If diagonal scaling is turned off and in is not out then in is copied to out

.keywords: PC

.seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleSet(), PCDiagonalScale()
@*/
PetscErrorCode PCDiagonalScaleRight(PC pc,Vec in,Vec out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_CLASSID,3);
  if (pc->diagonalscale) {
    /* out = D^{-1} .* in (diagonalscaleright holds the reciprocal, see PCSetDiagonalScale()) */
    ierr = VecPointwiseMult(out,pc->diagonalscaleright,in);CHKERRQ(ierr);
  } else if (in != out) {
    /* no scaling configured: behave as the identity */
    ierr = VecCopy(in,out);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#if 0
#undef __FUNCT__
#define __FUNCT__ "PCPublish_Petsc"
/* placeholder AMS publish hook; currently disabled */
static PetscErrorCode PCPublish_Petsc(PetscObject obj)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
#endif

#undef __FUNCT__
#define __FUNCT__ "PCCreate"
/*@
   PCCreate - Creates a preconditioner context.

   Collective on MPI_Comm

   Input Parameter:
.  comm - MPI communicator

   Output Parameter:
.  pc - location to put the preconditioner context

   Notes:
   The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi with PCILU or ICC
   in parallel. For dense matrices it is always PCNONE.

   Level: developer

.keywords: PC, create, context

.seealso: PCSetUp(), PCApply(), PCDestroy()
@*/
PetscErrorCode PCCreate(MPI_Comm comm,PC *newpc)
{
  PC             pc;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(newpc,1);
  *newpc = 0;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  ierr = PCInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(pc,_p_PC,struct _PCOps,PC_CLASSID,-1,"PC",comm,PCDestroy,PCView);CHKERRQ(ierr);

  /* start from a clean slate; type, operators, and options come later */
  pc->mat                  = 0;
  pc->pmat                 = 0;
  pc->setupcalled          = 0;
  pc->setfromoptionscalled = 0;
  pc->data                 = 0;
  pc->diagonalscale        = PETSC_FALSE;
  pc->diagonalscaleleft    = 0;
  pc->diagonalscaleright   = 0;
  pc->reuse                = 0;

  pc->modifysubmatrices    = 0;
  pc->modifysubmatricesP   = 0;
  *newpc = pc;
  PetscFunctionReturn(0);

}

/* -------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "PCApply"
/*@
   PCApply - Applies the preconditioner to a vector.

   Collective on PC and Vec

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.
  y - output vector

   Level: developer

.keywords: PC, apply

.seealso: PCApplyTranspose(), PCApplyBAorAB()
@*/
PetscErrorCode PCApply(PC pc,Vec x,Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  /* setupcalled < 2 means never set up or needs re-setup (see comment above PCSetUp()) */
  if (pc->setupcalled < 2) {
    ierr = PCSetUp(pc);CHKERRQ(ierr);
  }
  if (!pc->ops->apply) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have apply");
  ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  ierr = (*pc->ops->apply)(pc,x,y);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplySymmetricLeft"
/*@
   PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.

   Collective on PC and Vec

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector

   Notes:
   Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.

   Level: developer

.keywords: PC, apply, symmetric, left

.seealso: PCApply(), PCApplySymmetricRight()
@*/
PetscErrorCode PCApplySymmetricLeft(PC pc,Vec x,Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  /* setupcalled < 2 means never set up or needs re-setup (see comment above PCSetUp()) */
  if (pc->setupcalled < 2) {
    ierr = PCSetUp(pc);CHKERRQ(ierr);
  }
  if (!pc->ops->applysymmetricleft) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have left symmetric apply");
  ierr = PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr);
  ierr = (*pc->ops->applysymmetricleft)(pc,x,y);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplySymmetricRight"
/*@
   PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.

   Collective on PC and Vec

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector

   Level: developer

   Notes:
   Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.
448 449 .keywords: PC, apply, symmetric, right 450 451 .seealso: PCApply(), PCApplySymmetricLeft() 452 @*/ 453 PetscErrorCode PCApplySymmetricRight(PC pc,Vec x,Vec y) 454 { 455 PetscErrorCode ierr; 456 457 PetscFunctionBegin; 458 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 459 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 460 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 461 if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 462 if (pc->setupcalled < 2) { 463 ierr = PCSetUp(pc);CHKERRQ(ierr); 464 } 465 if (!pc->ops->applysymmetricright) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have left symmetric apply"); 466 ierr = PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 467 ierr = (*pc->ops->applysymmetricright)(pc,x,y);CHKERRQ(ierr); 468 ierr = PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 469 PetscFunctionReturn(0); 470 } 471 472 #undef __FUNCT__ 473 #define __FUNCT__ "PCApplyTranspose" 474 /*@ 475 PCApplyTranspose - Applies the transpose of preconditioner to a vector. 476 477 Collective on PC and Vec 478 479 Input Parameters: 480 + pc - the preconditioner context 481 - x - input vector 482 483 Output Parameter: 484 . y - output vector 485 486 Notes: For complex numbers this applies the non-Hermitian transpose. 

   Developer Notes: We need to implement a PCApplyHermitianTranspose()

   Level: developer

.keywords: PC, apply, transpose

.seealso: PCApply(), PCApplyBAorAB(), PCApplyBAorABTranspose(), PCApplyTransposeExists()
@*/
PetscErrorCode PCApplyTranspose(PC pc,Vec x,Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  /* setupcalled < 2 means never set up or needs re-setup (see comment above PCSetUp()) */
  if (pc->setupcalled < 2) {
    ierr = PCSetUp(pc);CHKERRQ(ierr);
  }
  if (!pc->ops->applytranspose) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have apply transpose");
  /* logged under the same PC_Apply event as the forward apply */
  ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  ierr = (*pc->ops->applytranspose)(pc,x,y);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyTransposeExists"
/*@
   PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation

   Collective on PC and Vec

   Input Parameters:
.  pc - the preconditioner context

   Output Parameter:
.  flg - PETSC_TRUE if a transpose operation is defined

   Level: developer

.keywords: PC, apply, transpose

.seealso: PCApplyTranspose()
@*/
PetscErrorCode PCApplyTransposeExists(PC pc,PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(flg,2);
  if (pc->ops->applytranspose) *flg = PETSC_TRUE;
  else                         *flg = PETSC_FALSE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyBAorAB"
/*@
   PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x.

   Collective on PC and Vec

   Input Parameters:
+  pc - the preconditioner context
.  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
.  x - input vector
-  work - work vector

   Output Parameter:
.  y - output vector

   Level: developer

   Notes: If the PC has had PCSetDiagonalScale() set then D M A D^{-1} for left preconditioning or  D A M D^{-1} is actually applied. Note that the
   specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling.

.keywords: PC, apply, operator

.seealso: PCApply(), PCApplyTranspose(), PCApplyBAorABTranspose()
@*/
PetscErrorCode PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscValidHeaderSpecific(y,VEC_CLASSID,4);
  PetscValidHeaderSpecific(work,VEC_CLASSID,5);
  if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (side != PC_LEFT && side != PC_SYMMETRIC && side != PC_RIGHT) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric");
  if (pc->diagonalscale && side == PC_SYMMETRIC) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application");

  /* setupcalled < 2 means never set up or needs re-setup (see comment above PCSetUp()) */
  if (pc->setupcalled < 2) {
    ierr = PCSetUp(pc);CHKERRQ(ierr);
  }

  if (pc->diagonalscale) {
    if (pc->ops->applyBA) {
      Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
      ierr = VecDuplicate(x,&work2);CHKERRQ(ierr);
      ierr = PCDiagonalScaleRight(pc,x,work2);CHKERRQ(ierr);
      ierr = (*pc->ops->applyBA)(pc,side,work2,y,work);CHKERRQ(ierr);
      ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
      ierr = VecDestroy(&work2);CHKERRQ(ierr);
    } else if (side == PC_RIGHT) {
      /* y = D (A (B (D^{-1} x))) */
      ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr);
      ierr = PCApply(pc,y,work);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
      ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
    } else if (side == PC_LEFT) {
      /* y = D (B (A (D^{-1} x))) */
      ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,y,work);CHKERRQ(ierr);
      ierr = PCApply(pc,work,y);CHKERRQ(ierr);
      ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
    } else if (side == PC_SYMMETRIC) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner");
  } else {
    if (pc->ops->applyBA) {
      /* the PC type provides a fused B*A (or A*B) application */
      ierr = (*pc->ops->applyBA)(pc,side,x,y,work);CHKERRQ(ierr);
    } else if (side == PC_RIGHT) {
      ierr = PCApply(pc,x,work);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
    } else if (side == PC_LEFT) {
      ierr = MatMult(pc->mat,x,work);CHKERRQ(ierr);
      ierr = PCApply(pc,work,y);CHKERRQ(ierr);
    } else if (side == PC_SYMMETRIC) {
      /* There's an extra copy here; maybe should provide 2 work vectors instead? */
      ierr = PCApplySymmetricRight(pc,x,work);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
      ierr = VecCopy(y,work);CHKERRQ(ierr);
      ierr = PCApplySymmetricLeft(pc,work,y);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyBAorABTranspose"
/*@
   PCApplyBAorABTranspose - Applies the transpose of the preconditioner
   and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning,
   NOT tr(B*A) = tr(A)*tr(B).

   Collective on PC and Vec

   Input Parameters:
+  pc - the preconditioner context
.  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
.  x - input vector
-  work - work vector

   Output Parameter:
.
  y - output vector


   Notes: this routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner
      defined by B'. This is why this has the funny form that it computes tr(B) * tr(A)

   Level: developer

.keywords: PC, apply, operator, transpose

.seealso: PCApply(), PCApplyTranspose(), PCApplyBAorAB()
@*/
PetscErrorCode PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscValidHeaderSpecific(y,VEC_CLASSID,4);
  PetscValidHeaderSpecific(work,VEC_CLASSID,5);
  if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->ops->applyBAtranspose) {
    /* the PC type provides a fused transpose application; use it and return early */
    ierr = (*pc->ops->applyBAtranspose)(pc,side,x,y,work);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  if (side != PC_LEFT && side != PC_RIGHT) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left");

  /* setupcalled < 2 means never set up or needs re-setup (see comment above PCSetUp()) */
  if (pc->setupcalled < 2) {
    ierr = PCSetUp(pc);CHKERRQ(ierr);
  }

  if (side == PC_RIGHT) {
    ierr = PCApplyTranspose(pc,x,work);CHKERRQ(ierr);
    ierr = MatMultTranspose(pc->mat,work,y);CHKERRQ(ierr);
  } else if (side == PC_LEFT) {
    ierr = MatMultTranspose(pc->mat,x,work);CHKERRQ(ierr);
    ierr = PCApplyTranspose(pc,work,y);CHKERRQ(ierr);
  }
  /* add support for PC_SYMMETRIC */
  PetscFunctionReturn(0); /* actually will never get here */
}

/* -------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "PCApplyRichardsonExists"
/*@
   PCApplyRichardsonExists - Determines whether a particular preconditioner has a
   built-in fast application of Richardson's method.

   Not Collective

   Input Parameter:
.
  pc - the preconditioner

   Output Parameter:
.  exists - PETSC_TRUE or PETSC_FALSE

   Level: developer

.keywords: PC, apply, Richardson, exists

.seealso: PCApplyRichardson()
@*/
PetscErrorCode PCApplyRichardsonExists(PC pc,PetscBool *exists)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidIntPointer(exists,2);
  if (pc->ops->applyrichardson) *exists = PETSC_TRUE;
  else                          *exists = PETSC_FALSE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyRichardson"
/*@
   PCApplyRichardson - Applies several steps of Richardson iteration with
   the particular preconditioner. This routine is usually used by the
   Krylov solvers and not the application code directly.

   Collective on PC

   Input Parameters:
+  pc  - the preconditioner context
.  b   - the right hand side
.  w   - one work vector
.  rtol - relative decrease in residual norm convergence criteria
.  abstol - absolute residual norm convergence criteria
.  dtol - divergence residual norm increase criteria
.  its - the number of iterations to apply.
-  guesszero - if the input x contains nonzero initial guess

   Output Parameter:
+  outits - number of iterations actually used (for SOR this always equals its)
.  reason - the reason the apply terminated
-  y - the solution (also contains initial guess if guesszero is PETSC_FALSE

   Notes:
   Most preconditioners do not support this function. Use the command
   PCApplyRichardsonExists() to determine if one does.

   Except for the multigrid PC this routine ignores the convergence tolerances
   and always runs for the number of iterations

   Level: developer

.keywords: PC, apply, Richardson

.seealso: PCApplyRichardsonExists()
@*/
PetscErrorCode PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(b,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscValidHeaderSpecific(w,VEC_CLASSID,4);
  if (b == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"b and y must be different vectors");
  /* setupcalled < 2 means never set up or needs re-setup (see comment below) */
  if (pc->setupcalled < 2) {
    ierr = PCSetUp(pc);CHKERRQ(ierr);
  }
  if (!pc->ops->applyrichardson) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have apply richardson");
  ierr = (*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
      a setupcall of 0 indicates never setup,
                      1 needs to be resetup,
                      2 does not need any changes.
*/
#undef __FUNCT__
#define __FUNCT__ "PCSetUp"
/*@
   PCSetUp - Prepares for the use of a preconditioner.

   Collective on PC

   Input Parameter:
.
  pc - the preconditioner context

   Level: developer

.keywords: PC, setup

.seealso: PCCreate(), PCApply(), PCDestroy()
@*/
PetscErrorCode PCSetUp(PC pc)
{
  PetscErrorCode ierr;
  const char     *def;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (!pc->mat) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first");

  /* setupcalled states: 0 never setup, 1 needs re-setup, 2 fully up to date
     (see the comment above this function) */
  if (pc->setupcalled > 1) {
    ierr = PetscInfo(pc,"Setting PC with identical preconditioner\n");CHKERRQ(ierr);
    PetscFunctionReturn(0);
  } else if (!pc->setupcalled) {
    ierr = PetscInfo(pc,"Setting up new PC\n");CHKERRQ(ierr);
  } else if (pc->flag == SAME_NONZERO_PATTERN) {
    ierr = PetscInfo(pc,"Setting up PC with same nonzero pattern\n");CHKERRQ(ierr);
  } else {
    ierr = PetscInfo(pc,"Setting up PC with different nonzero pattern\n");CHKERRQ(ierr);
  }

  /* no type chosen yet: pick the default based on the matrix and comm size */
  if (!((PetscObject)pc)->type_name) {
    ierr = PCGetDefaultType_Private(pc,&def);CHKERRQ(ierr);
    ierr = PCSetType(pc,def);CHKERRQ(ierr);
  }

  ierr = PetscLogEventBegin(PC_SetUp,pc,0,0,0);CHKERRQ(ierr);
  if (pc->ops->setup) {
    ierr = (*pc->ops->setup)(pc);CHKERRQ(ierr);
  }
  pc->setupcalled = 2;
  ierr = PetscLogEventEnd(PC_SetUp,pc,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUpOnBlocks"
/*@
   PCSetUpOnBlocks - Sets up the preconditioner for each block in
   the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
   methods.

   Collective on PC

   Input Parameters:
.
  pc - the preconditioner context

   Level: developer

.keywords: PC, setup, blocks

.seealso: PCCreate(), PCApply(), PCDestroy(), PCSetUp()
@*/
PetscErrorCode PCSetUpOnBlocks(PC pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* a no-op for PC types that have no per-block setup */
  if (!pc->ops->setuponblocks) PetscFunctionReturn(0);
  ierr = PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr);
  ierr = (*pc->ops->setuponblocks)(pc);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetModifySubMatrices"
/*@C
   PCSetModifySubMatrices - Sets a user-defined routine for modifying the
   submatrices that arise within certain subdomain-based preconditioners.
   The basic submatrices are extracted from the preconditioner matrix as
   usual; the user can then alter these (for example, to set different boundary
   conditions for each submatrix) before they are used for the local solves.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  func - routine for modifying the submatrices
-  ctx - optional user-defined context (may be null)

   Calling sequence of func:
$     func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx);

.  row - an array of index sets that contain the global row numbers
         that comprise each local submatrix
.  col - an array of index sets that contain the global column numbers
         that comprise each local submatrix
.  submat - array of local submatrices
-  ctx - optional user-defined context for private data for the
         user-defined func routine (may be null)

   Notes:
   PCSetModifySubMatrices() MUST be called before KSPSetUp() and
   KSPSolve().

   A routine set by PCSetModifySubMatrices() is currently called within
   the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM)
   preconditioners.  All other preconditioners ignore this routine.

   Level: advanced

.keywords: PC, set, modify, submatrices

.seealso: PCModifySubMatrices()
@*/
PetscErrorCode PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* stash the callback and its context; invoked later by PCModifySubMatrices() */
  pc->modifysubmatrices  = func;
  pc->modifysubmatricesP = ctx;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCModifySubMatrices"
/*@C
   PCModifySubMatrices - Calls an optional user-defined routine within
   certain preconditioners if one has been set with PCSetModifySubMarices().

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  nsub - the number of local submatrices
.  row - an array of index sets that contain the global row numbers
         that comprise each local submatrix
.  col - an array of index sets that contain the global column numbers
         that comprise each local submatrix
.  submat - array of local submatrices
-  ctx - optional user-defined context for private data for the
         user-defined routine (may be null)

   Output Parameter:
.  submat - array of local submatrices (the entries of which may
            have been modified)

   Notes:
   The user should NOT generally call this routine, as it will
   automatically be called within certain preconditioners (currently
   block Jacobi, additive Schwarz) if set.

   The basic submatrices are extracted from the preconditioner matrix
   as usual; the user can then alter these (for example, to set different
   boundary conditions for each submatrix) before they are used for the
   local solves.
939 940 Level: developer 941 942 .keywords: PC, modify, submatrices 943 944 .seealso: PCSetModifySubMatrices() 945 @*/ 946 PetscErrorCode PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx) 947 { 948 PetscErrorCode ierr; 949 950 PetscFunctionBegin; 951 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 952 if (!pc->modifysubmatrices) PetscFunctionReturn(0); 953 ierr = PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr); 954 ierr = (*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx);CHKERRQ(ierr); 955 ierr = PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr); 956 PetscFunctionReturn(0); 957 } 958 959 #undef __FUNCT__ 960 #define __FUNCT__ "PCSetOperators" 961 /*@ 962 PCSetOperators - Sets the matrix associated with the linear system and 963 a (possibly) different one associated with the preconditioner. 964 965 Logically Collective on PC and Mat 966 967 Input Parameters: 968 + pc - the preconditioner context 969 . Amat - the matrix associated with the linear system 970 . Pmat - the matrix to be used in constructing the preconditioner, usually 971 the same as Amat. 972 - flag - flag indicating information about the preconditioner matrix structure 973 during successive linear solves. This flag is ignored the first time a 974 linear system is solved, and thus is irrelevant when solving just one linear 975 system. 976 977 Notes: 978 The flag can be used to eliminate unnecessary work in the preconditioner 979 during the repeated solution of linear systems of the same size. The 980 available options are 981 + SAME_PRECONDITIONER - 982 Pmat is identical during successive linear solves. 983 This option is intended for folks who are using 984 different Amat and Pmat matrices and wish to reuse the 985 same preconditioner matrix. For example, this option 986 saves work by not recomputing incomplete factorization 987 for ILU/ICC preconditioners. 988 . 
  SAME_NONZERO_PATTERN -
       Pmat has the same nonzero structure during
       successive linear solves.
-    DIFFERENT_NONZERO_PATTERN -
       Pmat does not have the same nonzero structure.

   Passing a PETSC_NULL for Amat or Pmat removes the matrix that is currently used.

   If you wish to replace either Amat or Pmat but leave the other one untouched then
   first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
   on it and then pass it back in in your call to KSPSetOperators().

   Caution:
   If you specify SAME_NONZERO_PATTERN, PETSc believes your assertion
   and does not check the structure of the matrix.  If you erroneously
   claim that the structure is the same when it actually is not, the new
   preconditioner will not function correctly.  Thus, use this optimization
   feature carefully!

   If in doubt about whether your preconditioner matrix has changed
   structure or not, use the flag DIFFERENT_NONZERO_PATTERN.

   More Notes about Repeated Solution of Linear Systems:
   PETSc does NOT reset the matrix entries of either Amat or Pmat
   to zero after a linear solve; the user is completely responsible for
   matrix assembly.  See the routine MatZeroEntries() if desiring to
   zero all elements of a matrix.

   Level: intermediate

.keywords: PC, set, operators, matrix, linear system

.seealso: PCGetOperators(), MatZeroEntries()
@*/
PetscErrorCode PCSetOperators(PC pc,Mat Amat,Mat Pmat,MatStructure flag)
{
  PetscErrorCode ierr;
  PetscInt       m1,n1,m2,n2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (Amat) PetscValidHeaderSpecific(Amat,MAT_CLASSID,2);
  if (Pmat) PetscValidHeaderSpecific(Pmat,MAT_CLASSID,3);
  if (Amat) PetscCheckSameComm(pc,1,Amat,2);
  if (Pmat) PetscCheckSameComm(pc,1,Pmat,3);
  /* once the PC has been set up, the local sizes of the operators may not change */
  if (pc->setupcalled && Amat && Pmat) {
    ierr = MatGetLocalSize(Amat,&m1,&n1);CHKERRQ(ierr);
    ierr = MatGetLocalSize(pc->mat,&m2,&n2);CHKERRQ(ierr);
    if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
    ierr = MatGetLocalSize(Pmat,&m1,&n1);CHKERRQ(ierr);
    ierr = MatGetLocalSize(pc->pmat,&m2,&n2);CHKERRQ(ierr);
    if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
  }

  /* reference first in case the matrices are the same: destroying the old
     reference before taking the new one could free the very matrix passed in */
  if (Amat) {ierr = PetscObjectReference((PetscObject)Amat);CHKERRQ(ierr);}
  ierr = MatDestroy(&pc->mat);CHKERRQ(ierr);
  if (Pmat) {ierr = PetscObjectReference((PetscObject)Pmat);CHKERRQ(ierr);}
  ierr = MatDestroy(&pc->pmat);CHKERRQ(ierr);
  pc->mat  = Amat;
  pc->pmat = Pmat;

  /* a fully set up PC (setupcalled == 2) is demoted to 1 so that setup work is
     redone, unless the caller asserts the preconditioner matrix is unchanged */
  if (pc->setupcalled == 2 && flag != SAME_PRECONDITIONER) {
    pc->setupcalled = 1;
  }
  pc->flag = flag;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGetOperators"
/*@C
   PCGetOperators - Gets the matrix associated with the linear system and
   possibly a different one associated with the preconditioner.

   Not collective, though parallel Mats are returned if the PC is parallel

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix associated with the linear system
.  pmat - matrix associated with the preconditioner, usually the same
          as mat.
-  flag - flag indicating information about the preconditioner
          matrix structure.  See PCSetOperators() for details.

   Level: intermediate

   Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators
      are created in PC and returned to the user. In this case, if both operators
      mat and pmat are requested, two DIFFERENT operators will be returned. If
      only one is requested both operators in the PC will be the same (i.e. as
      if one had called KSP/PCSetOperators() with the same argument for both Mats).
      The user must set the sizes of the returned matrices and their type etc just
      as if the user created them with MatCreate(). For example,

$         KSP/PCGetOperators(ksp/pc,&mat,PETSC_NULL,PETSC_NULL); is equivalent to
$           set size, type, etc of mat

$         MatCreate(comm,&mat);
$         KSP/PCSetOperators(ksp/pc,mat,mat,SAME_NONZERO_PATTERN);
$         PetscObjectDereference((PetscObject)mat);
$           set size, type, etc of mat

     and

$         KSP/PCGetOperators(ksp/pc,&mat,&pmat,PETSC_NULL); is equivalent to
$           set size, type, etc of mat and pmat

$         MatCreate(comm,&mat);
$         MatCreate(comm,&pmat);
$         KSP/PCSetOperators(ksp/pc,mat,pmat,SAME_NONZERO_PATTERN);
$         PetscObjectDereference((PetscObject)mat);
$         PetscObjectDereference((PetscObject)pmat);
$           set size, type, etc of mat and pmat

    The rationale for this support is so that when creating a TS, SNES, or KSP the hierarchy
    of underlying objects (i.e. SNES, KSP, PC, Mat) and their lifespans can be completely
    managed by the top most level object (i.e. the TS, SNES, or KSP). Another way to look
    at this is when you create a SNES you do not NEED to create a KSP and attach it to
    the SNES object (the SNES object manages it for you). Similarly when you create a KSP
    you do not need to attach a PC to it (the KSP object manages the PC object for you).
    Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when
    it can be created for you?


.keywords: PC, get, operators, matrix, linear system

.seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperatorsSet()
@*/
PetscErrorCode PCGetOperators(PC pc,Mat *mat,Mat *pmat,MatStructure *flag)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat) {
    if (!pc->mat) {
      if (pc->pmat && !pmat) {  /* pmat has been set, but user did not request it, so use for mat */
        pc->mat = pc->pmat;
        ierr = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr);
      } else {                  /* both mat and pmat are empty */
        ierr = MatCreate(((PetscObject)pc)->comm,&pc->mat);CHKERRQ(ierr);
        if (!pmat) { /* user did NOT request pmat, so make same as mat */
          pc->pmat = pc->mat;
          ierr = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr);
        }
      }
    }
    *mat = pc->mat;
  }
  if (pmat) {
    if (!pc->pmat) {
      if (pc->mat && !mat) {    /* mat has been set but was not requested, so use for pmat */
        pc->pmat = pc->mat;
        ierr = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr);
      } else {
        ierr = MatCreate(((PetscObject)pc)->comm,&pc->pmat);CHKERRQ(ierr);
        if (!mat) { /* user did NOT request mat, so make same as pmat */
          pc->mat = pc->pmat;
          ierr = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr);
        }
      }
    }
    *pmat = pc->pmat;
  }
  if (flag) *flag = pc->flag;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGetOperatorsSet"
/*@C
   PCGetOperatorsSet - Determines if the matrix associated with the linear system and
   possibly a different one associated with the preconditioner have been set in the PC.

   Not collective, though the results on all processes should be the same

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix associated with the linear system was set
-  pmat - matrix associated with the preconditioner was set, usually the same

   Level: intermediate

.keywords: PC, get, operators, matrix, linear system

.seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperators()
@*/
PetscErrorCode PCGetOperatorsSet(PC pc,PetscBool *mat,PetscBool *pmat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  *mat  = (pc->mat)  ? PETSC_TRUE : PETSC_FALSE;
  if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCFactorGetMatrix"
/*@
   PCFactorGetMatrix - Gets the factored matrix from the
   preconditioner context.  This routine is valid only for the LU,
   incomplete LU, Cholesky, and incomplete Cholesky methods.

   Not Collective on PC though Mat is parallel if PC is parallel

   Input Parameters:
.  pc - the preconditioner context

   Output parameters:
.
 mat - the factored matrix

   Level: advanced

   Notes: Does not increase the reference count for the matrix so DO NOT destroy it

.keywords: PC, get, factored, matrix
@*/
PetscErrorCode PCFactorGetMatrix(PC pc,Mat *mat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(mat,2);
  /* only factorization-based PC implementations provide this operation */
  if (pc->ops->getfactoredmatrix) {
    ierr = (*pc->ops->getfactoredmatrix)(pc,mat);CHKERRQ(ierr);
  } else {
    SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC type does not support getting factor matrix");
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetOptionsPrefix"
/*@C
   PCSetOptionsPrefix - Sets the prefix used for searching for all
   PC options in the database.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  prefix - the prefix string to prepend to all PC option requests

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the
   hyphen.

   Level: advanced

.keywords: PC, set, options, prefix, database

.seealso: PCAppendOptionsPrefix(), PCGetOptionsPrefix()
@*/
PetscErrorCode PCSetOptionsPrefix(PC pc,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* thin wrapper: the prefix is stored on the base PetscObject */
  ierr = PetscObjectSetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCAppendOptionsPrefix"
/*@C
   PCAppendOptionsPrefix - Appends to the prefix used for searching for all
   PC options in the database.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  prefix - the prefix string to prepend to all PC option requests

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the
   hyphen.

   Level: advanced

.keywords: PC, append, options, prefix, database

.seealso: PCSetOptionsPrefix(), PCGetOptionsPrefix()
@*/
PetscErrorCode PCAppendOptionsPrefix(PC pc,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* thin wrapper: the prefix is stored on the base PetscObject */
  ierr = PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGetOptionsPrefix"
/*@C
   PCGetOptionsPrefix - Gets the prefix used for searching for all
   PC options in the database.

   Not Collective

   Input Parameters:
.  pc - the preconditioner context

   Output Parameters:
.  prefix - pointer to the prefix string used, is returned

   Notes: On the Fortran side, the user should pass in a string 'prefix' of
          sufficient length to hold the prefix.

   Level: advanced

.keywords: PC, get, options, prefix, database

.seealso: PCSetOptionsPrefix(), PCAppendOptionsPrefix()
@*/
PetscErrorCode PCGetOptionsPrefix(PC pc,const char *prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(prefix,2);
  /* thin wrapper: the prefix lives on the base PetscObject */
  ierr = PetscObjectGetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCPreSolve"
/*@
   PCPreSolve - Optional pre-solve phase, intended for any
   preconditioner-specific actions that must be performed before
   the iterative solve itself.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  ksp - the Krylov subspace context

   Level: developer

   Sample of Usage:
.vb
    PCPreSolve(pc,ksp);
    KSPSolve(ksp,b,x);
    PCPostSolve(pc,ksp);
.ve

   Notes:
   The pre-solve phase is distinct from the PCSetUp() phase.

   KSPSolve() calls this directly, so is rarely called by the user.

.keywords: PC, pre-solve

.seealso: PCPostSolve()
@*/
PetscErrorCode PCPreSolve(PC pc,KSP ksp)
{
  PetscErrorCode ierr;
  Vec            x,rhs;
  Mat            A,B;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
  ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr);
  ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr);
  /*
      Scale the system and have the matrices use the scaled form
    only if the two matrices are actually the same (and hence
    have the same scaling)
  */
  ierr = PCGetOperators(pc,&A,&B,PETSC_NULL);CHKERRQ(ierr);
  if (A == B) {
    ierr = MatScaleSystem(pc->mat,rhs,x);CHKERRQ(ierr);
    ierr = MatUseScaledForm(pc->mat,PETSC_TRUE);CHKERRQ(ierr);
  }

  /* dispatch to the implementation-specific pre-solve, if any */
  if (pc->ops->presolve) {
    ierr = (*pc->ops->presolve)(pc,ksp,rhs,x);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCPostSolve"
/*@
   PCPostSolve - Optional post-solve phase, intended for any
   preconditioner-specific actions that must be performed after
   the iterative solve itself.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  ksp - the Krylov subspace context

   Sample of Usage:
.vb
    PCPreSolve(pc,ksp);
    KSPSolve(ksp,b,x);
    PCPostSolve(pc,ksp);
.ve

   Note:
   KSPSolve() calls this routine directly, so it is rarely called by the user.
1406 1407 Level: developer 1408 1409 .keywords: PC, post-solve 1410 1411 .seealso: PCPreSolve(), KSPSolve() 1412 @*/ 1413 PetscErrorCode PCPostSolve(PC pc,KSP ksp) 1414 { 1415 PetscErrorCode ierr; 1416 Vec x,rhs; 1417 Mat A,B; 1418 1419 PetscFunctionBegin; 1420 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1421 PetscValidHeaderSpecific(ksp,KSP_CLASSID,2); 1422 ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr); 1423 ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr); 1424 if (pc->ops->postsolve) { 1425 ierr = (*pc->ops->postsolve)(pc,ksp,rhs,x);CHKERRQ(ierr); 1426 } 1427 /* 1428 Scale the system and have the matrices use the scaled form 1429 only if the two matrices are actually the same (and hence 1430 have the same scaling 1431 */ 1432 ierr = PCGetOperators(pc,&A,&B,PETSC_NULL);CHKERRQ(ierr); 1433 if (A == B) { 1434 ierr = MatUnScaleSystem(pc->mat,rhs,x);CHKERRQ(ierr); 1435 ierr = MatUseScaledForm(pc->mat,PETSC_FALSE);CHKERRQ(ierr); 1436 } 1437 PetscFunctionReturn(0); 1438 } 1439 1440 #undef __FUNCT__ 1441 #define __FUNCT__ "PCView" 1442 /*@C 1443 PCView - Prints the PC data structure. 1444 1445 Collective on PC 1446 1447 Input Parameters: 1448 + PC - the PC context 1449 - viewer - optional visualization context 1450 1451 Note: 1452 The available visualization contexts include 1453 + PETSC_VIEWER_STDOUT_SELF - standard output (default) 1454 - PETSC_VIEWER_STDOUT_WORLD - synchronized standard 1455 output where only the first processor opens 1456 the file. All other processors send their 1457 data to the first processor to print. 1458 1459 The user can open an alternative visualization contexts with 1460 PetscViewerASCIIOpen() (output to a specified file). 

   Level: developer

.keywords: PC, view

.seealso: KSPView(), PetscViewerASCIIOpen()
@*/
PetscErrorCode PCView(PC pc,PetscViewer viewer)
{
  const PCType      cstr;
  PetscErrorCode    ierr;
  PetscBool         iascii,isstring;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* default to stdout on the PC's communicator when no viewer is supplied */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)pc)->comm,&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  PetscCheckSameComm(pc,1,viewer,2);

  /* only ASCII and string viewers are handled here; anything else errors below */
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer,"PC Object");CHKERRQ(ierr);
    if (pc->ops->view) {
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
    }
    if (pc->mat) {
      ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);CHKERRQ(ierr);
      if (pc->pmat == pc->mat) {
        ierr = PetscViewerASCIIPrintf(viewer," linear system matrix = precond matrix:\n");CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      } else {
        if (pc->pmat) {
          ierr = PetscViewerASCIIPrintf(viewer," linear system matrix followed by preconditioner matrix:\n");CHKERRQ(ierr);
        } else {
          ierr = PetscViewerASCIIPrintf(viewer," linear system matrix:\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);
        if (pc->pmat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);}
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PCGetType(pc,&cstr);CHKERRQ(ierr);
    ierr = PetscViewerStringSPrintf(viewer," %-7.7s",cstr);CHKERRQ(ierr);
    if (pc->ops->view) {ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);}
  } else {
    SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported by PC",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "PCSetInitialGuessNonzero"
/*@
   PCSetInitialGuessNonzero - Tells the iterative solver that the
   initial guess is nonzero; otherwise PC assumes the initial guess
   is to be zero (and thus zeros it out before solving).

   Logically Collective on PC

   Input Parameters:
+  pc - iterative context obtained from PCCreate()
-  flg - PETSC_TRUE indicates the guess is non-zero, PETSC_FALSE indicates the guess is zero

   Level: Developer

   Notes:
    This is a weird function. Since PC's are linear operators on the right hand side they
    CANNOT use an initial guess. This function is for the "pass-through" preconditioners
    PCKSP, PCREDUNDANT and PCOPENMP and causes the inner KSP object to use the nonzero
    initial guess. Not currently working for PCREDUNDANT, that has to be rewritten to use KSP.
1544 1545 1546 .keywords: PC, set, initial guess, nonzero 1547 1548 .seealso: PCGetInitialGuessNonzero(), PCSetInitialGuessKnoll(), PCGetInitialGuessKnoll() 1549 @*/ 1550 PetscErrorCode PCSetInitialGuessNonzero(PC pc,PetscBool flg) 1551 { 1552 PetscFunctionBegin; 1553 PetscValidLogicalCollectiveBool(pc,flg,2); 1554 pc->nonzero_guess = flg; 1555 PetscFunctionReturn(0); 1556 } 1557 1558 #undef __FUNCT__ 1559 #define __FUNCT__ "PCRegister" 1560 /*@C 1561 PCRegister - See PCRegisterDynamic() 1562 1563 Level: advanced 1564 @*/ 1565 PetscErrorCode PCRegister(const char sname[],const char path[],const char name[],PetscErrorCode (*function)(PC)) 1566 { 1567 PetscErrorCode ierr; 1568 char fullname[PETSC_MAX_PATH_LEN]; 1569 1570 PetscFunctionBegin; 1571 1572 ierr = PetscFListConcat(path,name,fullname);CHKERRQ(ierr); 1573 ierr = PetscFListAdd(&PCList,sname,fullname,(void (*)(void))function);CHKERRQ(ierr); 1574 PetscFunctionReturn(0); 1575 } 1576 1577 #undef __FUNCT__ 1578 #define __FUNCT__ "PCComputeExplicitOperator" 1579 /*@ 1580 PCComputeExplicitOperator - Computes the explicit preconditioned operator. 1581 1582 Collective on PC 1583 1584 Input Parameter: 1585 . pc - the preconditioner object 1586 1587 Output Parameter: 1588 . mat - the explict preconditioned operator 1589 1590 Notes: 1591 This computation is done by applying the operators to columns of the 1592 identity matrix. 1593 1594 Currently, this routine uses a dense matrix format when 1 processor 1595 is used and a sparse format otherwise. This routine is costly in general, 1596 and is recommended for use only with relatively small systems. 
1597 1598 Level: advanced 1599 1600 .keywords: PC, compute, explicit, operator 1601 1602 .seealso: KSPComputeExplicitOperator() 1603 1604 @*/ 1605 PetscErrorCode PCComputeExplicitOperator(PC pc,Mat *mat) 1606 { 1607 Vec in,out; 1608 PetscErrorCode ierr; 1609 PetscInt i,M,m,*rows,start,end; 1610 PetscMPIInt size; 1611 MPI_Comm comm; 1612 PetscScalar *array,one = 1.0; 1613 1614 PetscFunctionBegin; 1615 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1616 PetscValidPointer(mat,2); 1617 1618 comm = ((PetscObject)pc)->comm; 1619 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 1620 1621 if (!pc->pmat) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"You must call KSPSetOperators() or PCSetOperators() before this call"); 1622 ierr = MatGetVecs(pc->pmat,&in,0);CHKERRQ(ierr); 1623 ierr = VecDuplicate(in,&out);CHKERRQ(ierr); 1624 ierr = VecGetOwnershipRange(in,&start,&end);CHKERRQ(ierr); 1625 ierr = VecGetSize(in,&M);CHKERRQ(ierr); 1626 ierr = VecGetLocalSize(in,&m);CHKERRQ(ierr); 1627 ierr = PetscMalloc((m+1)*sizeof(PetscInt),&rows);CHKERRQ(ierr); 1628 for (i=0; i<m; i++) {rows[i] = start + i;} 1629 1630 ierr = MatCreate(comm,mat);CHKERRQ(ierr); 1631 ierr = MatSetSizes(*mat,m,m,M,M);CHKERRQ(ierr); 1632 if (size == 1) { 1633 ierr = MatSetType(*mat,MATSEQDENSE);CHKERRQ(ierr); 1634 ierr = MatSeqDenseSetPreallocation(*mat,PETSC_NULL);CHKERRQ(ierr); 1635 } else { 1636 ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr); 1637 ierr = MatMPIAIJSetPreallocation(*mat,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr); 1638 } 1639 1640 for (i=0; i<M; i++) { 1641 1642 ierr = VecSet(in,0.0);CHKERRQ(ierr); 1643 ierr = VecSetValues(in,1,&i,&one,INSERT_VALUES);CHKERRQ(ierr); 1644 ierr = VecAssemblyBegin(in);CHKERRQ(ierr); 1645 ierr = VecAssemblyEnd(in);CHKERRQ(ierr); 1646 1647 /* should fix, allowing user to choose side */ 1648 ierr = PCApply(pc,in,out);CHKERRQ(ierr); 1649 1650 ierr = VecGetArray(out,&array);CHKERRQ(ierr); 1651 ierr = MatSetValues(*mat,m,rows,1,&i,array,INSERT_VALUES);CHKERRQ(ierr); 1652 
ierr = VecRestoreArray(out,&array);CHKERRQ(ierr); 1653 1654 } 1655 ierr = PetscFree(rows);CHKERRQ(ierr); 1656 ierr = VecDestroy(&out);CHKERRQ(ierr); 1657 ierr = MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1658 ierr = MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1659 PetscFunctionReturn(0); 1660 } 1661 1662