1 2 /* 3 The PC (preconditioner) interface routines, callable by users. 4 */ 5 #include <private/pcimpl.h> /*I "petscksp.h" I*/ 6 7 /* Logging support */ 8 PetscClassId PC_CLASSID; 9 PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft; 10 PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks; 11 12 #undef __FUNCT__ 13 #define __FUNCT__ "PCGetDefaultType_Private" 14 PetscErrorCode PCGetDefaultType_Private(PC pc,const char* type[]) 15 { 16 PetscErrorCode ierr; 17 PetscMPIInt size; 18 PetscBool flg1,flg2,set,flg3; 19 20 PetscFunctionBegin; 21 ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr); 22 if (pc->pmat) { 23 PetscErrorCode (*f)(Mat,MatReuse,Mat*); 24 ierr = PetscObjectQueryFunction((PetscObject)pc->pmat,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr); 25 if (size == 1) { 26 ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1);CHKERRQ(ierr); 27 ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2);CHKERRQ(ierr); 28 ierr = MatIsSymmetricKnown(pc->pmat,&set,&flg3);CHKERRQ(ierr); 29 if (flg1 && (!flg2 || (set && flg3))) { 30 *type = PCICC; 31 } else if (flg2) { 32 *type = PCILU; 33 } else if (f) { /* likely is a parallel matrix run on one processor */ 34 *type = PCBJACOBI; 35 } else { 36 *type = PCNONE; 37 } 38 } else { 39 if (f) { 40 *type = PCBJACOBI; 41 } else { 42 *type = PCNONE; 43 } 44 } 45 } else { 46 if (size == 1) { 47 *type = PCILU; 48 } else { 49 *type = PCBJACOBI; 50 } 51 } 52 PetscFunctionReturn(0); 53 } 54 55 #undef __FUNCT__ 56 #define __FUNCT__ "PCReset" 57 /*@ 58 PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats 59 60 Collective on PC 61 62 Input Parameter: 63 . 
pc - the preconditioner context 64 65 Level: developer 66 67 Notes: This allows a PC to be reused for a different sized linear system but using the same options that have been previously set in the PC 68 69 .keywords: PC, destroy 70 71 .seealso: PCCreate(), PCSetUp() 72 @*/ 73 PetscErrorCode PCReset(PC pc) 74 { 75 PetscErrorCode ierr; 76 77 PetscFunctionBegin; 78 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 79 if (pc->ops->reset) { 80 ierr = (*pc->ops->reset)(pc); 81 } 82 ierr = VecDestroy(&pc->diagonalscaleright);CHKERRQ(ierr); 83 ierr = VecDestroy(&pc->diagonalscaleleft);CHKERRQ(ierr); 84 ierr = MatDestroy(&pc->pmat);CHKERRQ(ierr); 85 ierr = MatDestroy(&pc->mat);CHKERRQ(ierr); 86 pc->setupcalled = 0; 87 PetscFunctionReturn(0); 88 } 89 90 #undef __FUNCT__ 91 #define __FUNCT__ "PCDestroy" 92 /*@ 93 PCDestroy - Destroys PC context that was created with PCCreate(). 94 95 Collective on PC 96 97 Input Parameter: 98 . pc - the preconditioner context 99 100 Level: developer 101 102 .keywords: PC, destroy 103 104 .seealso: PCCreate(), PCSetUp() 105 @*/ 106 PetscErrorCode PCDestroy(PC *pc) 107 { 108 PetscErrorCode ierr; 109 110 PetscFunctionBegin; 111 if (!*pc) PetscFunctionReturn(0); 112 PetscValidHeaderSpecific((*pc),PC_CLASSID,1); 113 if (--((PetscObject)(*pc))->refct > 0) {*pc = 0; PetscFunctionReturn(0);} 114 115 ierr = PCReset(*pc);CHKERRQ(ierr); 116 117 /* if memory was published with AMS then destroy it */ 118 ierr = PetscObjectDepublish((*pc));CHKERRQ(ierr); 119 if ((*pc)->ops->destroy) {ierr = (*(*pc)->ops->destroy)((*pc));CHKERRQ(ierr);} 120 ierr = DMDestroy(&(*pc)->dm);CHKERRQ(ierr); 121 ierr = PetscHeaderDestroy(pc);CHKERRQ(ierr); 122 PetscFunctionReturn(0); 123 } 124 125 #undef __FUNCT__ 126 #define __FUNCT__ "PCGetDiagonalScale" 127 /*@C 128 PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right 129 scaling as needed by certain time-stepping codes. 130 131 Logically Collective on PC 132 133 Input Parameter: 134 . 
pc - the preconditioner context 135 136 Output Parameter: 137 . flag - PETSC_TRUE if it applies the scaling 138 139 Level: developer 140 141 Notes: If this returns PETSC_TRUE then the system solved via the Krylov method is 142 $ D M A D^{-1} y = D M b for left preconditioning or 143 $ D A M D^{-1} z = D b for right preconditioning 144 145 .keywords: PC 146 147 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCSetDiagonalScale() 148 @*/ 149 PetscErrorCode PCGetDiagonalScale(PC pc,PetscBool *flag) 150 { 151 PetscFunctionBegin; 152 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 153 PetscValidPointer(flag,2); 154 *flag = pc->diagonalscale; 155 PetscFunctionReturn(0); 156 } 157 158 #undef __FUNCT__ 159 #define __FUNCT__ "PCSetDiagonalScale" 160 /*@ 161 PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right 162 scaling as needed by certain time-stepping codes. 163 164 Logically Collective on PC 165 166 Input Parameters: 167 + pc - the preconditioner context 168 - s - scaling vector 169 170 Level: intermediate 171 172 Notes: The system solved via the Krylov method is 173 $ D M A D^{-1} y = D M b for left preconditioning or 174 $ D A M D^{-1} z = D b for right preconditioning 175 176 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 
177 178 .keywords: PC 179 180 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCGetDiagonalScale() 181 @*/ 182 PetscErrorCode PCSetDiagonalScale(PC pc,Vec s) 183 { 184 PetscErrorCode ierr; 185 186 PetscFunctionBegin; 187 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 188 PetscValidHeaderSpecific(s,VEC_CLASSID,2); 189 pc->diagonalscale = PETSC_TRUE; 190 ierr = PetscObjectReference((PetscObject)s);CHKERRQ(ierr); 191 ierr = VecDestroy(&pc->diagonalscaleleft);CHKERRQ(ierr); 192 pc->diagonalscaleleft = s; 193 ierr = VecDuplicate(s,&pc->diagonalscaleright);CHKERRQ(ierr); 194 ierr = VecCopy(s,pc->diagonalscaleright);CHKERRQ(ierr); 195 ierr = VecReciprocal(pc->diagonalscaleright);CHKERRQ(ierr); 196 PetscFunctionReturn(0); 197 } 198 199 #undef __FUNCT__ 200 #define __FUNCT__ "PCDiagonalScaleLeft" 201 /*@ 202 PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes. 203 204 Logically Collective on PC 205 206 Input Parameters: 207 + pc - the preconditioner context 208 . in - input vector 209 + out - scaled vector (maybe the same as in) 210 211 Level: intermediate 212 213 Notes: The system solved via the Krylov method is 214 $ D M A D^{-1} y = D M b for left preconditioning or 215 $ D A M D^{-1} z = D b for right preconditioning 216 217 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 
218 219 If diagonal scaling is turned off and in is not out then in is copied to out 220 221 .keywords: PC 222 223 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleSet(), PCDiagonalScaleRight(), PCDiagonalScale() 224 @*/ 225 PetscErrorCode PCDiagonalScaleLeft(PC pc,Vec in,Vec out) 226 { 227 PetscErrorCode ierr; 228 229 PetscFunctionBegin; 230 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 231 PetscValidHeaderSpecific(in,VEC_CLASSID,2); 232 PetscValidHeaderSpecific(out,VEC_CLASSID,3); 233 if (pc->diagonalscale) { 234 ierr = VecPointwiseMult(out,pc->diagonalscaleleft,in);CHKERRQ(ierr); 235 } else if (in != out) { 236 ierr = VecCopy(in,out);CHKERRQ(ierr); 237 } 238 PetscFunctionReturn(0); 239 } 240 241 #undef __FUNCT__ 242 #define __FUNCT__ "PCDiagonalScaleRight" 243 /*@ 244 PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes. 245 246 Logically Collective on PC 247 248 Input Parameters: 249 + pc - the preconditioner context 250 . in - input vector 251 + out - scaled vector (maybe the same as in) 252 253 Level: intermediate 254 255 Notes: The system solved via the Krylov method is 256 $ D M A D^{-1} y = D M b for left preconditioning or 257 $ D A M D^{-1} z = D b for right preconditioning 258 259 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 
260 261 If diagonal scaling is turned off and in is not out then in is copied to out 262 263 .keywords: PC 264 265 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleSet(), PCDiagonalScale() 266 @*/ 267 PetscErrorCode PCDiagonalScaleRight(PC pc,Vec in,Vec out) 268 { 269 PetscErrorCode ierr; 270 271 PetscFunctionBegin; 272 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 273 PetscValidHeaderSpecific(in,VEC_CLASSID,2); 274 PetscValidHeaderSpecific(out,VEC_CLASSID,3); 275 if (pc->diagonalscale) { 276 ierr = VecPointwiseMult(out,pc->diagonalscaleright,in);CHKERRQ(ierr); 277 } else if (in != out) { 278 ierr = VecCopy(in,out);CHKERRQ(ierr); 279 } 280 PetscFunctionReturn(0); 281 } 282 283 #if 0 284 #undef __FUNCT__ 285 #define __FUNCT__ "PCPublish_Petsc" 286 static PetscErrorCode PCPublish_Petsc(PetscObject obj) 287 { 288 PetscFunctionBegin; 289 PetscFunctionReturn(0); 290 } 291 #endif 292 293 #undef __FUNCT__ 294 #define __FUNCT__ "PCCreate" 295 /*@ 296 PCCreate - Creates a preconditioner context. 297 298 Collective on MPI_Comm 299 300 Input Parameter: 301 . comm - MPI communicator 302 303 Output Parameter: 304 . pc - location to put the preconditioner context 305 306 Notes: 307 The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi with PCILU or ICC 308 in parallel. For dense matrices it is always PCNONE. 
309 310 Level: developer 311 312 .keywords: PC, create, context 313 314 .seealso: PCSetUp(), PCApply(), PCDestroy() 315 @*/ 316 PetscErrorCode PCCreate(MPI_Comm comm,PC *newpc) 317 { 318 PC pc; 319 PetscErrorCode ierr; 320 321 PetscFunctionBegin; 322 PetscValidPointer(newpc,1); 323 *newpc = 0; 324 #ifndef PETSC_USE_DYNAMIC_LIBRARIES 325 ierr = PCInitializePackage(PETSC_NULL);CHKERRQ(ierr); 326 #endif 327 328 ierr = PetscHeaderCreate(pc,_p_PC,struct _PCOps,PC_CLASSID,-1,"PC","Preconditioner","PC",comm,PCDestroy,PCView);CHKERRQ(ierr); 329 330 pc->mat = 0; 331 pc->pmat = 0; 332 pc->setupcalled = 0; 333 pc->setfromoptionscalled = 0; 334 pc->data = 0; 335 pc->diagonalscale = PETSC_FALSE; 336 pc->diagonalscaleleft = 0; 337 pc->diagonalscaleright = 0; 338 pc->reuse = 0; 339 340 pc->modifysubmatrices = 0; 341 pc->modifysubmatricesP = 0; 342 *newpc = pc; 343 PetscFunctionReturn(0); 344 345 } 346 347 /* -------------------------------------------------------------------------------*/ 348 349 #undef __FUNCT__ 350 #define __FUNCT__ "PCApply" 351 /*@ 352 PCApply - Applies the preconditioner to a vector. 353 354 Collective on PC and Vec 355 356 Input Parameters: 357 + pc - the preconditioner context 358 - x - input vector 359 360 Output Parameter: 361 . 
y - output vector 362 363 Level: developer 364 365 .keywords: PC, apply 366 367 .seealso: PCApplyTranspose(), PCApplyBAorAB() 368 @*/ 369 PetscErrorCode PCApply(PC pc,Vec x,Vec y) 370 { 371 PetscErrorCode ierr; 372 373 PetscFunctionBegin; 374 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 375 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 376 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 377 if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 378 if (pc->setupcalled < 2) { 379 ierr = PCSetUp(pc);CHKERRQ(ierr); 380 } 381 if (!pc->ops->apply) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have apply"); 382 ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 383 ierr = (*pc->ops->apply)(pc,x,y);CHKERRQ(ierr); 384 ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 385 PetscFunctionReturn(0); 386 } 387 388 #undef __FUNCT__ 389 #define __FUNCT__ "PCApplySymmetricLeft" 390 /*@ 391 PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector. 392 393 Collective on PC and Vec 394 395 Input Parameters: 396 + pc - the preconditioner context 397 - x - input vector 398 399 Output Parameter: 400 . y - output vector 401 402 Notes: 403 Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners. 
404 405 Level: developer 406 407 .keywords: PC, apply, symmetric, left 408 409 .seealso: PCApply(), PCApplySymmetricRight() 410 @*/ 411 PetscErrorCode PCApplySymmetricLeft(PC pc,Vec x,Vec y) 412 { 413 PetscErrorCode ierr; 414 415 PetscFunctionBegin; 416 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 417 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 418 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 419 if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 420 if (pc->setupcalled < 2) { 421 ierr = PCSetUp(pc);CHKERRQ(ierr); 422 } 423 if (!pc->ops->applysymmetricleft) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have left symmetric apply"); 424 ierr = PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr); 425 ierr = (*pc->ops->applysymmetricleft)(pc,x,y);CHKERRQ(ierr); 426 ierr = PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr); 427 PetscFunctionReturn(0); 428 } 429 430 #undef __FUNCT__ 431 #define __FUNCT__ "PCApplySymmetricRight" 432 /*@ 433 PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector. 434 435 Collective on PC and Vec 436 437 Input Parameters: 438 + pc - the preconditioner context 439 - x - input vector 440 441 Output Parameter: 442 . y - output vector 443 444 Level: developer 445 446 Notes: 447 Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners. 
448 449 .keywords: PC, apply, symmetric, right 450 451 .seealso: PCApply(), PCApplySymmetricLeft() 452 @*/ 453 PetscErrorCode PCApplySymmetricRight(PC pc,Vec x,Vec y) 454 { 455 PetscErrorCode ierr; 456 457 PetscFunctionBegin; 458 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 459 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 460 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 461 if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 462 if (pc->setupcalled < 2) { 463 ierr = PCSetUp(pc);CHKERRQ(ierr); 464 } 465 if (!pc->ops->applysymmetricright) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have left symmetric apply"); 466 ierr = PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 467 ierr = (*pc->ops->applysymmetricright)(pc,x,y);CHKERRQ(ierr); 468 ierr = PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 469 PetscFunctionReturn(0); 470 } 471 472 #undef __FUNCT__ 473 #define __FUNCT__ "PCApplyTranspose" 474 /*@ 475 PCApplyTranspose - Applies the transpose of preconditioner to a vector. 476 477 Collective on PC and Vec 478 479 Input Parameters: 480 + pc - the preconditioner context 481 - x - input vector 482 483 Output Parameter: 484 . y - output vector 485 486 Notes: For complex numbers this applies the non-Hermitian transpose. 
487 488 Developer Notes: We need to implement a PCApplyHermitianTranspose() 489 490 Level: developer 491 492 .keywords: PC, apply, transpose 493 494 .seealso: PCApply(), PCApplyBAorAB(), PCApplyBAorABTranspose(), PCApplyTransposeExists() 495 @*/ 496 PetscErrorCode PCApplyTranspose(PC pc,Vec x,Vec y) 497 { 498 PetscErrorCode ierr; 499 500 PetscFunctionBegin; 501 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 502 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 503 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 504 if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 505 if (pc->setupcalled < 2) { 506 ierr = PCSetUp(pc);CHKERRQ(ierr); 507 } 508 if (!pc->ops->applytranspose) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have apply transpose"); 509 ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 510 ierr = (*pc->ops->applytranspose)(pc,x,y);CHKERRQ(ierr); 511 ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 512 PetscFunctionReturn(0); 513 } 514 515 #undef __FUNCT__ 516 #define __FUNCT__ "PCApplyTransposeExists" 517 /*@ 518 PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation 519 520 Collective on PC and Vec 521 522 Input Parameters: 523 . pc - the preconditioner context 524 525 Output Parameter: 526 . flg - PETSC_TRUE if a transpose operation is defined 527 528 Level: developer 529 530 .keywords: PC, apply, transpose 531 532 .seealso: PCApplyTranspose() 533 @*/ 534 PetscErrorCode PCApplyTransposeExists(PC pc,PetscBool *flg) 535 { 536 PetscFunctionBegin; 537 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 538 PetscValidPointer(flg,2); 539 if (pc->ops->applytranspose) *flg = PETSC_TRUE; 540 else *flg = PETSC_FALSE; 541 PetscFunctionReturn(0); 542 } 543 544 #undef __FUNCT__ 545 #define __FUNCT__ "PCApplyBAorAB" 546 /*@ 547 PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x. 
548 549 Collective on PC and Vec 550 551 Input Parameters: 552 + pc - the preconditioner context 553 . side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC 554 . x - input vector 555 - work - work vector 556 557 Output Parameter: 558 . y - output vector 559 560 Level: developer 561 562 Notes: If the PC has had PCSetDiagonalScale() set then D M A D^{-1} for left preconditioning or D A M D^{-1} is actually applied. Note that the 563 specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling. 564 565 .keywords: PC, apply, operator 566 567 .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorABTranspose() 568 @*/ 569 PetscErrorCode PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work) 570 { 571 PetscErrorCode ierr; 572 573 PetscFunctionBegin; 574 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 575 PetscValidHeaderSpecific(x,VEC_CLASSID,3); 576 PetscValidHeaderSpecific(y,VEC_CLASSID,4); 577 PetscValidHeaderSpecific(work,VEC_CLASSID,5); 578 if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 579 if (side != PC_LEFT && side != PC_SYMMETRIC && side != PC_RIGHT) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric"); 580 if (pc->diagonalscale && side == PC_SYMMETRIC) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application"); 581 582 if (pc->setupcalled < 2) { 583 ierr = PCSetUp(pc);CHKERRQ(ierr); 584 } 585 586 if (pc->diagonalscale) { 587 if (pc->ops->applyBA) { 588 Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */ 589 ierr = VecDuplicate(x,&work2);CHKERRQ(ierr); 590 ierr = PCDiagonalScaleRight(pc,x,work2);CHKERRQ(ierr); 591 ierr = (*pc->ops->applyBA)(pc,side,work2,y,work);CHKERRQ(ierr); 592 ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr); 593 ierr = VecDestroy(&work2);CHKERRQ(ierr); 594 } 
else if (side == PC_RIGHT) { 595 ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr); 596 ierr = PCApply(pc,y,work);CHKERRQ(ierr); 597 ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr); 598 ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr); 599 } else if (side == PC_LEFT) { 600 ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr); 601 ierr = MatMult(pc->mat,y,work);CHKERRQ(ierr); 602 ierr = PCApply(pc,work,y);CHKERRQ(ierr); 603 ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr); 604 } else if (side == PC_SYMMETRIC) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner"); 605 } else { 606 if (pc->ops->applyBA) { 607 ierr = (*pc->ops->applyBA)(pc,side,x,y,work);CHKERRQ(ierr); 608 } else if (side == PC_RIGHT) { 609 ierr = PCApply(pc,x,work);CHKERRQ(ierr); 610 ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr); 611 } else if (side == PC_LEFT) { 612 ierr = MatMult(pc->mat,x,work);CHKERRQ(ierr); 613 ierr = PCApply(pc,work,y);CHKERRQ(ierr); 614 } else if (side == PC_SYMMETRIC) { 615 /* There's an extra copy here; maybe should provide 2 work vectors instead? */ 616 ierr = PCApplySymmetricRight(pc,x,work);CHKERRQ(ierr); 617 ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr); 618 ierr = VecCopy(y,work);CHKERRQ(ierr); 619 ierr = PCApplySymmetricLeft(pc,work,y);CHKERRQ(ierr); 620 } 621 } 622 PetscFunctionReturn(0); 623 } 624 625 #undef __FUNCT__ 626 #define __FUNCT__ "PCApplyBAorABTranspose" 627 /*@ 628 PCApplyBAorABTranspose - Applies the transpose of the preconditioner 629 and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning, 630 NOT tr(B*A) = tr(A)*tr(B). 631 632 Collective on PC and Vec 633 634 Input Parameters: 635 + pc - the preconditioner context 636 . side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC 637 . x - input vector 638 - work - work vector 639 640 Output Parameter: 641 . 
y - output vector 642 643 644 Notes: this routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner 645 defined by B'. This is why this has the funny form that it computes tr(B) * tr(A) 646 647 Level: developer 648 649 .keywords: PC, apply, operator, transpose 650 651 .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorAB() 652 @*/ 653 PetscErrorCode PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work) 654 { 655 PetscErrorCode ierr; 656 657 PetscFunctionBegin; 658 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 659 PetscValidHeaderSpecific(x,VEC_CLASSID,3); 660 PetscValidHeaderSpecific(y,VEC_CLASSID,4); 661 PetscValidHeaderSpecific(work,VEC_CLASSID,5); 662 if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 663 if (pc->ops->applyBAtranspose) { 664 ierr = (*pc->ops->applyBAtranspose)(pc,side,x,y,work);CHKERRQ(ierr); 665 PetscFunctionReturn(0); 666 } 667 if (side != PC_LEFT && side != PC_RIGHT) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left"); 668 669 if (pc->setupcalled < 2) { 670 ierr = PCSetUp(pc);CHKERRQ(ierr); 671 } 672 673 if (side == PC_RIGHT) { 674 ierr = PCApplyTranspose(pc,x,work);CHKERRQ(ierr); 675 ierr = MatMultTranspose(pc->mat,work,y);CHKERRQ(ierr); 676 } else if (side == PC_LEFT) { 677 ierr = MatMultTranspose(pc->mat,x,work);CHKERRQ(ierr); 678 ierr = PCApplyTranspose(pc,work,y);CHKERRQ(ierr); 679 } 680 /* add support for PC_SYMMETRIC */ 681 PetscFunctionReturn(0); /* actually will never get here */ 682 } 683 684 /* -------------------------------------------------------------------------------*/ 685 686 #undef __FUNCT__ 687 #define __FUNCT__ "PCApplyRichardsonExists" 688 /*@ 689 PCApplyRichardsonExists - Determines whether a particular preconditioner has a 690 built-in fast application of Richardson's method. 691 692 Not Collective 693 694 Input Parameter: 695 . 
pc - the preconditioner 696 697 Output Parameter: 698 . exists - PETSC_TRUE or PETSC_FALSE 699 700 Level: developer 701 702 .keywords: PC, apply, Richardson, exists 703 704 .seealso: PCApplyRichardson() 705 @*/ 706 PetscErrorCode PCApplyRichardsonExists(PC pc,PetscBool *exists) 707 { 708 PetscFunctionBegin; 709 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 710 PetscValidIntPointer(exists,2); 711 if (pc->ops->applyrichardson) *exists = PETSC_TRUE; 712 else *exists = PETSC_FALSE; 713 PetscFunctionReturn(0); 714 } 715 716 #undef __FUNCT__ 717 #define __FUNCT__ "PCApplyRichardson" 718 /*@ 719 PCApplyRichardson - Applies several steps of Richardson iteration with 720 the particular preconditioner. This routine is usually used by the 721 Krylov solvers and not the application code directly. 722 723 Collective on PC 724 725 Input Parameters: 726 + pc - the preconditioner context 727 . b - the right hand side 728 . w - one work vector 729 . rtol - relative decrease in residual norm convergence criteria 730 . abstol - absolute residual norm convergence criteria 731 . dtol - divergence residual norm increase criteria 732 . its - the number of iterations to apply. 733 - guesszero - if the input x contains nonzero initial guess 734 735 Output Parameter: 736 + outits - number of iterations actually used (for SOR this always equals its) 737 . reason - the reason the apply terminated 738 - y - the solution (also contains initial guess if guesszero is PETSC_FALSE 739 740 Notes: 741 Most preconditioners do not support this function. Use the command 742 PCApplyRichardsonExists() to determine if one does. 
743 744 Except for the multigrid PC this routine ignores the convergence tolerances 745 and always runs for the number of iterations 746 747 Level: developer 748 749 .keywords: PC, apply, Richardson 750 751 .seealso: PCApplyRichardsonExists() 752 @*/ 753 PetscErrorCode PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason) 754 { 755 PetscErrorCode ierr; 756 757 PetscFunctionBegin; 758 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 759 PetscValidHeaderSpecific(b,VEC_CLASSID,2); 760 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 761 PetscValidHeaderSpecific(w,VEC_CLASSID,4); 762 if (b == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"b and y must be different vectors"); 763 if (pc->setupcalled < 2) { 764 ierr = PCSetUp(pc);CHKERRQ(ierr); 765 } 766 if (!pc->ops->applyrichardson) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have apply richardson"); 767 ierr = (*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason);CHKERRQ(ierr); 768 PetscFunctionReturn(0); 769 } 770 771 /* 772 a setupcall of 0 indicates never setup, 773 1 needs to be resetup, 774 2 does not need any changes. 775 */ 776 #undef __FUNCT__ 777 #define __FUNCT__ "PCSetUp" 778 /*@ 779 PCSetUp - Prepares for the use of a preconditioner. 780 781 Collective on PC 782 783 Input Parameter: 784 . 
pc - the preconditioner context 785 786 Level: developer 787 788 .keywords: PC, setup 789 790 .seealso: PCCreate(), PCApply(), PCDestroy() 791 @*/ 792 PetscErrorCode PCSetUp(PC pc) 793 { 794 PetscErrorCode ierr; 795 const char *def; 796 797 PetscFunctionBegin; 798 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 799 if (!pc->mat) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first"); 800 801 if (pc->setupcalled > 1) { 802 ierr = PetscInfo(pc,"Setting PC with identical preconditioner\n");CHKERRQ(ierr); 803 PetscFunctionReturn(0); 804 } else if (!pc->setupcalled) { 805 ierr = PetscInfo(pc,"Setting up new PC\n");CHKERRQ(ierr); 806 } else if (pc->flag == SAME_NONZERO_PATTERN) { 807 ierr = PetscInfo(pc,"Setting up PC with same nonzero pattern\n");CHKERRQ(ierr); 808 } else { 809 ierr = PetscInfo(pc,"Setting up PC with different nonzero pattern\n");CHKERRQ(ierr); 810 } 811 812 if (!((PetscObject)pc)->type_name) { 813 ierr = PCGetDefaultType_Private(pc,&def);CHKERRQ(ierr); 814 ierr = PCSetType(pc,def);CHKERRQ(ierr); 815 } 816 817 ierr = PetscLogEventBegin(PC_SetUp,pc,0,0,0);CHKERRQ(ierr); 818 if (pc->ops->setup) { 819 ierr = (*pc->ops->setup)(pc);CHKERRQ(ierr); 820 } 821 pc->setupcalled = 2; 822 ierr = PetscLogEventEnd(PC_SetUp,pc,0,0,0);CHKERRQ(ierr); 823 PetscFunctionReturn(0); 824 } 825 826 #undef __FUNCT__ 827 #define __FUNCT__ "PCSetUpOnBlocks" 828 /*@ 829 PCSetUpOnBlocks - Sets up the preconditioner for each block in 830 the block Jacobi, block Gauss-Seidel, and overlapping Schwarz 831 methods. 832 833 Collective on PC 834 835 Input Parameters: 836 . 
pc - the preconditioner context 837 838 Level: developer 839 840 .keywords: PC, setup, blocks 841 842 .seealso: PCCreate(), PCApply(), PCDestroy(), PCSetUp() 843 @*/ 844 PetscErrorCode PCSetUpOnBlocks(PC pc) 845 { 846 PetscErrorCode ierr; 847 848 PetscFunctionBegin; 849 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 850 if (!pc->ops->setuponblocks) PetscFunctionReturn(0); 851 ierr = PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr); 852 ierr = (*pc->ops->setuponblocks)(pc);CHKERRQ(ierr); 853 ierr = PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr); 854 PetscFunctionReturn(0); 855 } 856 857 #undef __FUNCT__ 858 #define __FUNCT__ "PCSetModifySubMatrices" 859 /*@C 860 PCSetModifySubMatrices - Sets a user-defined routine for modifying the 861 submatrices that arise within certain subdomain-based preconditioners. 862 The basic submatrices are extracted from the preconditioner matrix as 863 usual; the user can then alter these (for example, to set different boundary 864 conditions for each submatrix) before they are used for the local solves. 865 866 Logically Collective on PC 867 868 Input Parameters: 869 + pc - the preconditioner context 870 . func - routine for modifying the submatrices 871 - ctx - optional user-defined context (may be null) 872 873 Calling sequence of func: 874 $ func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx); 875 876 . row - an array of index sets that contain the global row numbers 877 that comprise each local submatrix 878 . col - an array of index sets that contain the global column numbers 879 that comprise each local submatrix 880 . submat - array of local submatrices 881 - ctx - optional user-defined context for private data for the 882 user-defined func routine (may be null) 883 884 Notes: 885 PCSetModifySubMatrices() MUST be called before KSPSetUp() and 886 KSPSolve(). 
887 888 A routine set by PCSetModifySubMatrices() is currently called within 889 the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM) 890 preconditioners. All other preconditioners ignore this routine. 891 892 Level: advanced 893 894 .keywords: PC, set, modify, submatrices 895 896 .seealso: PCModifySubMatrices(), PCASMGetSubMatrices() 897 @*/ 898 PetscErrorCode PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx) 899 { 900 PetscFunctionBegin; 901 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 902 pc->modifysubmatrices = func; 903 pc->modifysubmatricesP = ctx; 904 PetscFunctionReturn(0); 905 } 906 907 #undef __FUNCT__ 908 #define __FUNCT__ "PCModifySubMatrices" 909 /*@C 910 PCModifySubMatrices - Calls an optional user-defined routine within 911 certain preconditioners if one has been set with PCSetModifySubMarices(). 912 913 Collective on PC 914 915 Input Parameters: 916 + pc - the preconditioner context 917 . nsub - the number of local submatrices 918 . row - an array of index sets that contain the global row numbers 919 that comprise each local submatrix 920 . col - an array of index sets that contain the global column numbers 921 that comprise each local submatrix 922 . submat - array of local submatrices 923 - ctx - optional user-defined context for private data for the 924 user-defined routine (may be null) 925 926 Output Parameter: 927 . submat - array of local submatrices (the entries of which may 928 have been modified) 929 930 Notes: 931 The user should NOT generally call this routine, as it will 932 automatically be called within certain preconditioners (currently 933 block Jacobi, additive Schwarz) if set. 934 935 The basic submatrices are extracted from the preconditioner matrix 936 as usual; the user can then alter these (for example, to set different 937 boundary conditions for each submatrix) before they are used for the 938 local solves. 
939 940 Level: developer 941 942 .keywords: PC, modify, submatrices 943 944 .seealso: PCSetModifySubMatrices() 945 @*/ 946 PetscErrorCode PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx) 947 { 948 PetscErrorCode ierr; 949 950 PetscFunctionBegin; 951 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 952 if (!pc->modifysubmatrices) PetscFunctionReturn(0); 953 ierr = PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr); 954 ierr = (*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx);CHKERRQ(ierr); 955 ierr = PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr); 956 PetscFunctionReturn(0); 957 } 958 959 #undef __FUNCT__ 960 #define __FUNCT__ "PCSetOperators" 961 /*@ 962 PCSetOperators - Sets the matrix associated with the linear system and 963 a (possibly) different one associated with the preconditioner. 964 965 Logically Collective on PC and Mat 966 967 Input Parameters: 968 + pc - the preconditioner context 969 . Amat - the matrix associated with the linear system 970 . Pmat - the matrix to be used in constructing the preconditioner, usually 971 the same as Amat. 972 - flag - flag indicating information about the preconditioner matrix structure 973 during successive linear solves. This flag is ignored the first time a 974 linear system is solved, and thus is irrelevant when solving just one linear 975 system. 976 977 Notes: 978 The flag can be used to eliminate unnecessary work in the preconditioner 979 during the repeated solution of linear systems of the same size. The 980 available options are 981 + SAME_PRECONDITIONER - 982 Pmat is identical during successive linear solves. 983 This option is intended for folks who are using 984 different Amat and Pmat matrices and wish to reuse the 985 same preconditioner matrix. For example, this option 986 saves work by not recomputing incomplete factorization 987 for ILU/ICC preconditioners. 988 . 
     SAME_NONZERO_PATTERN -
       Pmat has the same nonzero structure during
       successive linear solves.
-    DIFFERENT_NONZERO_PATTERN -
       Pmat does not have the same nonzero structure.

   Passing a PETSC_NULL for Amat or Pmat removes the matrix that is currently used.

   If you wish to replace either Amat or Pmat but leave the other one untouched then
   first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
   on it and then pass it back in your call to KSPSetOperators().

   Caution:
   If you specify SAME_NONZERO_PATTERN, PETSc believes your assertion
   and does not check the structure of the matrix.  If you erroneously
   claim that the structure is the same when it actually is not, the new
   preconditioner will not function correctly.  Thus, use this optimization
   feature carefully!

   If in doubt about whether your preconditioner matrix has changed
   structure or not, use the flag DIFFERENT_NONZERO_PATTERN.

   More Notes about Repeated Solution of Linear Systems:
   PETSc does NOT reset the matrix entries of either Amat or Pmat
   to zero after a linear solve; the user is completely responsible for
   matrix assembly.  See the routine MatZeroEntries() if desiring to
   zero all elements of a matrix.
1015 1016 Level: intermediate 1017 1018 .keywords: PC, set, operators, matrix, linear system 1019 1020 .seealso: PCGetOperators(), MatZeroEntries() 1021 @*/ 1022 PetscErrorCode PCSetOperators(PC pc,Mat Amat,Mat Pmat,MatStructure flag) 1023 { 1024 PetscErrorCode ierr; 1025 PetscInt m1,n1,m2,n2; 1026 1027 PetscFunctionBegin; 1028 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1029 if (Amat) PetscValidHeaderSpecific(Amat,MAT_CLASSID,2); 1030 if (Pmat) PetscValidHeaderSpecific(Pmat,MAT_CLASSID,3); 1031 if (Amat) PetscCheckSameComm(pc,1,Amat,2); 1032 if (Pmat) PetscCheckSameComm(pc,1,Pmat,3); 1033 if (pc->setupcalled && Amat && Pmat) { 1034 ierr = MatGetLocalSize(Amat,&m1,&n1);CHKERRQ(ierr); 1035 ierr = MatGetLocalSize(pc->mat,&m2,&n2);CHKERRQ(ierr); 1036 if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1); 1037 ierr = MatGetLocalSize(Pmat,&m1,&n1);CHKERRQ(ierr); 1038 ierr = MatGetLocalSize(pc->pmat,&m2,&n2);CHKERRQ(ierr); 1039 if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1); 1040 } 1041 1042 /* reference first in case the matrices are the same */ 1043 if (Amat) {ierr = PetscObjectReference((PetscObject)Amat);CHKERRQ(ierr);} 1044 ierr = MatDestroy(&pc->mat);CHKERRQ(ierr); 1045 if (Pmat) {ierr = PetscObjectReference((PetscObject)Pmat);CHKERRQ(ierr);} 1046 ierr = MatDestroy(&pc->pmat);CHKERRQ(ierr); 1047 pc->mat = Amat; 1048 pc->pmat = Pmat; 1049 1050 if (pc->setupcalled == 2 && flag != SAME_PRECONDITIONER) { 1051 pc->setupcalled = 1; 1052 } 1053 pc->flag = flag; 1054 PetscFunctionReturn(0); 1055 } 1056 1057 #undef __FUNCT__ 1058 #define __FUNCT__ "PCGetOperators" 1059 /*@C 1060 PCGetOperators - Gets the matrix associated with the linear system and 1061 possibly a different one associated with the preconditioner. 
   Not collective, though parallel Mats are returned if the PC is parallel

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix associated with the linear system
.  pmat - matrix associated with the preconditioner, usually the same
          as mat.
-  flag - flag indicating information about the preconditioner
          matrix structure.  See PCSetOperators() for details.

   Level: intermediate

   Notes: Does not increase the reference count of the matrices, so you should not destroy them

   Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators
      are created in PC and returned to the user.  In this case, if both operators
      mat and pmat are requested, two DIFFERENT operators will be returned.  If
      only one is requested both operators in the PC will be the same (i.e. as
      if one had called KSP/PCSetOperators() with the same argument for both Mats).
      The user must set the sizes of the returned matrices and their type etc just
      as if the user created them with MatCreate().  For example,

$    KSP/PCGetOperators(ksp/pc,&mat,PETSC_NULL,PETSC_NULL); is equivalent to
$      set size, type, etc of mat

$    MatCreate(comm,&mat);
$    KSP/PCSetOperators(ksp/pc,mat,mat,SAME_NONZERO_PATTERN);
$    PetscObjectDereference((PetscObject)mat);
$    set size, type, etc of mat

   and

$    KSP/PCGetOperators(ksp/pc,&mat,&pmat,PETSC_NULL); is equivalent to
$      set size, type, etc of mat and pmat

$    MatCreate(comm,&mat);
$    MatCreate(comm,&pmat);
$    KSP/PCSetOperators(ksp/pc,mat,pmat,SAME_NONZERO_PATTERN);
$    PetscObjectDereference((PetscObject)mat);
$    PetscObjectDereference((PetscObject)pmat);
$    set size, type, etc of mat and pmat

   The rationale for this support is so that when creating a TS, SNES, or KSP the hierarchy
   of underlying objects (i.e.
SNES, KSP, PC, Mat) and their livespans can be completely 1109 managed by the top most level object (i.e. the TS, SNES, or KSP). Another way to look 1110 at this is when you create a SNES you do not NEED to create a KSP and attach it to 1111 the SNES object (the SNES object manages it for you). Similarly when you create a KSP 1112 you do not need to attach a PC to it (the KSP object manages the PC object for you). 1113 Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when 1114 it can be created for you? 1115 1116 1117 .keywords: PC, get, operators, matrix, linear system 1118 1119 .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperatorsSet() 1120 @*/ 1121 PetscErrorCode PCGetOperators(PC pc,Mat *mat,Mat *pmat,MatStructure *flag) 1122 { 1123 PetscErrorCode ierr; 1124 1125 PetscFunctionBegin; 1126 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1127 if (mat) { 1128 if (!pc->mat) { 1129 if (pc->pmat && !pmat) { /* pmat has been set, but user did not request it, so use for mat */ 1130 pc->mat = pc->pmat; 1131 ierr = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr); 1132 } else { /* both mat and pmat are empty */ 1133 ierr = MatCreate(((PetscObject)pc)->comm,&pc->mat);CHKERRQ(ierr); 1134 if (!pmat) { /* user did NOT request pmat, so make same as mat */ 1135 pc->pmat = pc->mat; 1136 ierr = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr); 1137 } 1138 } 1139 } 1140 *mat = pc->mat; 1141 } 1142 if (pmat) { 1143 if (!pc->pmat) { 1144 if (pc->mat && !mat) { /* mat has been set but was not requested, so use for pmat */ 1145 pc->pmat = pc->mat; 1146 ierr = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr); 1147 } else { 1148 ierr = MatCreate(((PetscObject)pc)->comm,&pc->pmat);CHKERRQ(ierr); 1149 if (!mat) { /* user did NOT request mat, so make same as pmat */ 1150 pc->mat = pc->pmat; 1151 ierr = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr); 1152 } 1153 } 1154 } 1155 *pmat = pc->pmat; 1156 } 1157 
if (flag) *flag = pc->flag; 1158 PetscFunctionReturn(0); 1159 } 1160 1161 #undef __FUNCT__ 1162 #define __FUNCT__ "PCGetOperatorsSet" 1163 /*@C 1164 PCGetOperatorsSet - Determines if the matrix associated with the linear system and 1165 possibly a different one associated with the preconditioner have been set in the PC. 1166 1167 Not collective, though the results on all processes should be the same 1168 1169 Input Parameter: 1170 . pc - the preconditioner context 1171 1172 Output Parameters: 1173 + mat - the matrix associated with the linear system was set 1174 - pmat - matrix associated with the preconditioner was set, usually the same 1175 1176 Level: intermediate 1177 1178 .keywords: PC, get, operators, matrix, linear system 1179 1180 .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperators() 1181 @*/ 1182 PetscErrorCode PCGetOperatorsSet(PC pc,PetscBool *mat,PetscBool *pmat) 1183 { 1184 PetscFunctionBegin; 1185 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1186 if (mat) *mat = (pc->mat) ? PETSC_TRUE : PETSC_FALSE; 1187 if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE; 1188 PetscFunctionReturn(0); 1189 } 1190 1191 #undef __FUNCT__ 1192 #define __FUNCT__ "PCFactorGetMatrix" 1193 /*@ 1194 PCFactorGetMatrix - Gets the factored matrix from the 1195 preconditioner context. This routine is valid only for the LU, 1196 incomplete LU, Cholesky, and incomplete Cholesky methods. 1197 1198 Not Collective on PC though Mat is parallel if PC is parallel 1199 1200 Input Parameters: 1201 . pc - the preconditioner context 1202 1203 Output parameters: 1204 . 
mat - the factored matrix 1205 1206 Level: advanced 1207 1208 Notes: Does not increase the reference count for the matrix so DO NOT destroy it 1209 1210 .keywords: PC, get, factored, matrix 1211 @*/ 1212 PetscErrorCode PCFactorGetMatrix(PC pc,Mat *mat) 1213 { 1214 PetscErrorCode ierr; 1215 1216 PetscFunctionBegin; 1217 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1218 PetscValidPointer(mat,2); 1219 if (pc->ops->getfactoredmatrix) { 1220 ierr = (*pc->ops->getfactoredmatrix)(pc,mat);CHKERRQ(ierr); 1221 } else { 1222 SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC type does not support getting factor matrix"); 1223 } 1224 PetscFunctionReturn(0); 1225 } 1226 1227 #undef __FUNCT__ 1228 #define __FUNCT__ "PCSetOptionsPrefix" 1229 /*@C 1230 PCSetOptionsPrefix - Sets the prefix used for searching for all 1231 PC options in the database. 1232 1233 Logically Collective on PC 1234 1235 Input Parameters: 1236 + pc - the preconditioner context 1237 - prefix - the prefix string to prepend to all PC option requests 1238 1239 Notes: 1240 A hyphen (-) must NOT be given at the beginning of the prefix name. 1241 The first character of all runtime options is AUTOMATICALLY the 1242 hyphen. 1243 1244 Level: advanced 1245 1246 .keywords: PC, set, options, prefix, database 1247 1248 .seealso: PCAppendOptionsPrefix(), PCGetOptionsPrefix() 1249 @*/ 1250 PetscErrorCode PCSetOptionsPrefix(PC pc,const char prefix[]) 1251 { 1252 PetscErrorCode ierr; 1253 1254 PetscFunctionBegin; 1255 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1256 ierr = PetscObjectSetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr); 1257 PetscFunctionReturn(0); 1258 } 1259 1260 #undef __FUNCT__ 1261 #define __FUNCT__ "PCAppendOptionsPrefix" 1262 /*@C 1263 PCAppendOptionsPrefix - Appends to the prefix used for searching for all 1264 PC options in the database. 
1265 1266 Logically Collective on PC 1267 1268 Input Parameters: 1269 + pc - the preconditioner context 1270 - prefix - the prefix string to prepend to all PC option requests 1271 1272 Notes: 1273 A hyphen (-) must NOT be given at the beginning of the prefix name. 1274 The first character of all runtime options is AUTOMATICALLY the 1275 hyphen. 1276 1277 Level: advanced 1278 1279 .keywords: PC, append, options, prefix, database 1280 1281 .seealso: PCSetOptionsPrefix(), PCGetOptionsPrefix() 1282 @*/ 1283 PetscErrorCode PCAppendOptionsPrefix(PC pc,const char prefix[]) 1284 { 1285 PetscErrorCode ierr; 1286 1287 PetscFunctionBegin; 1288 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1289 ierr = PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr); 1290 PetscFunctionReturn(0); 1291 } 1292 1293 #undef __FUNCT__ 1294 #define __FUNCT__ "PCGetOptionsPrefix" 1295 /*@C 1296 PCGetOptionsPrefix - Gets the prefix used for searching for all 1297 PC options in the database. 1298 1299 Not Collective 1300 1301 Input Parameters: 1302 . pc - the preconditioner context 1303 1304 Output Parameters: 1305 . prefix - pointer to the prefix string used, is returned 1306 1307 Notes: On the fortran side, the user should pass in a string 'prifix' of 1308 sufficient length to hold the prefix. 
1309 1310 Level: advanced 1311 1312 .keywords: PC, get, options, prefix, database 1313 1314 .seealso: PCSetOptionsPrefix(), PCAppendOptionsPrefix() 1315 @*/ 1316 PetscErrorCode PCGetOptionsPrefix(PC pc,const char *prefix[]) 1317 { 1318 PetscErrorCode ierr; 1319 1320 PetscFunctionBegin; 1321 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1322 PetscValidPointer(prefix,2); 1323 ierr = PetscObjectGetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr); 1324 PetscFunctionReturn(0); 1325 } 1326 1327 #undef __FUNCT__ 1328 #define __FUNCT__ "PCPreSolve" 1329 /*@ 1330 PCPreSolve - Optional pre-solve phase, intended for any 1331 preconditioner-specific actions that must be performed before 1332 the iterative solve itself. 1333 1334 Collective on PC 1335 1336 Input Parameters: 1337 + pc - the preconditioner context 1338 - ksp - the Krylov subspace context 1339 1340 Level: developer 1341 1342 Sample of Usage: 1343 .vb 1344 PCPreSolve(pc,ksp); 1345 KSPSolve(ksp,b,x); 1346 PCPostSolve(pc,ksp); 1347 .ve 1348 1349 Notes: 1350 The pre-solve phase is distinct from the PCSetUp() phase. 1351 1352 KSPSolve() calls this directly, so is rarely called by the user. 
1353 1354 .keywords: PC, pre-solve 1355 1356 .seealso: PCPostSolve() 1357 @*/ 1358 PetscErrorCode PCPreSolve(PC pc,KSP ksp) 1359 { 1360 PetscErrorCode ierr; 1361 Vec x,rhs; 1362 Mat A,B; 1363 1364 PetscFunctionBegin; 1365 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1366 PetscValidHeaderSpecific(ksp,KSP_CLASSID,2); 1367 ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr); 1368 ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr); 1369 /* 1370 Scale the system and have the matrices use the scaled form 1371 only if the two matrices are actually the same (and hence 1372 have the same scaling 1373 */ 1374 ierr = PCGetOperators(pc,&A,&B,PETSC_NULL);CHKERRQ(ierr); 1375 if (A == B) { 1376 ierr = MatScaleSystem(pc->mat,rhs,x);CHKERRQ(ierr); 1377 ierr = MatUseScaledForm(pc->mat,PETSC_TRUE);CHKERRQ(ierr); 1378 } 1379 1380 if (pc->ops->presolve) { 1381 ierr = (*pc->ops->presolve)(pc,ksp,rhs,x);CHKERRQ(ierr); 1382 } 1383 PetscFunctionReturn(0); 1384 } 1385 1386 #undef __FUNCT__ 1387 #define __FUNCT__ "PCPostSolve" 1388 /*@ 1389 PCPostSolve - Optional post-solve phase, intended for any 1390 preconditioner-specific actions that must be performed after 1391 the iterative solve itself. 1392 1393 Collective on PC 1394 1395 Input Parameters: 1396 + pc - the preconditioner context 1397 - ksp - the Krylov subspace context 1398 1399 Sample of Usage: 1400 .vb 1401 PCPreSolve(pc,ksp); 1402 KSPSolve(ksp,b,x); 1403 PCPostSolve(pc,ksp); 1404 .ve 1405 1406 Note: 1407 KSPSolve() calls this routine directly, so it is rarely called by the user. 
1408 1409 Level: developer 1410 1411 .keywords: PC, post-solve 1412 1413 .seealso: PCPreSolve(), KSPSolve() 1414 @*/ 1415 PetscErrorCode PCPostSolve(PC pc,KSP ksp) 1416 { 1417 PetscErrorCode ierr; 1418 Vec x,rhs; 1419 Mat A,B; 1420 1421 PetscFunctionBegin; 1422 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1423 PetscValidHeaderSpecific(ksp,KSP_CLASSID,2); 1424 ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr); 1425 ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr); 1426 if (pc->ops->postsolve) { 1427 ierr = (*pc->ops->postsolve)(pc,ksp,rhs,x);CHKERRQ(ierr); 1428 } 1429 /* 1430 Scale the system and have the matrices use the scaled form 1431 only if the two matrices are actually the same (and hence 1432 have the same scaling 1433 */ 1434 ierr = PCGetOperators(pc,&A,&B,PETSC_NULL);CHKERRQ(ierr); 1435 if (A == B) { 1436 ierr = MatUnScaleSystem(pc->mat,rhs,x);CHKERRQ(ierr); 1437 ierr = MatUseScaledForm(pc->mat,PETSC_FALSE);CHKERRQ(ierr); 1438 } 1439 PetscFunctionReturn(0); 1440 } 1441 1442 #undef __FUNCT__ 1443 #define __FUNCT__ "PCView" 1444 /*@C 1445 PCView - Prints the PC data structure. 1446 1447 Collective on PC 1448 1449 Input Parameters: 1450 + PC - the PC context 1451 - viewer - optional visualization context 1452 1453 Note: 1454 The available visualization contexts include 1455 + PETSC_VIEWER_STDOUT_SELF - standard output (default) 1456 - PETSC_VIEWER_STDOUT_WORLD - synchronized standard 1457 output where only the first processor opens 1458 the file. All other processors send their 1459 data to the first processor to print. 1460 1461 The user can open an alternative visualization contexts with 1462 PetscViewerASCIIOpen() (output to a specified file). 
1463 1464 Level: developer 1465 1466 .keywords: PC, view 1467 1468 .seealso: KSPView(), PetscViewerASCIIOpen() 1469 @*/ 1470 PetscErrorCode PCView(PC pc,PetscViewer viewer) 1471 { 1472 const PCType cstr; 1473 PetscErrorCode ierr; 1474 PetscBool iascii,isstring; 1475 PetscViewerFormat format; 1476 1477 PetscFunctionBegin; 1478 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1479 if (!viewer) { 1480 ierr = PetscViewerASCIIGetStdout(((PetscObject)pc)->comm,&viewer);CHKERRQ(ierr); 1481 } 1482 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 1483 PetscCheckSameComm(pc,1,viewer,2); 1484 1485 ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 1486 ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr); 1487 if (iascii) { 1488 ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 1489 ierr = PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer,"PC Object");CHKERRQ(ierr); 1490 if (pc->ops->view) { 1491 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 1492 ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr); 1493 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 1494 } 1495 if (pc->mat) { 1496 ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);CHKERRQ(ierr); 1497 if (pc->pmat == pc->mat) { 1498 ierr = PetscViewerASCIIPrintf(viewer," linear system matrix = precond matrix:\n");CHKERRQ(ierr); 1499 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 1500 ierr = MatView(pc->mat,viewer);CHKERRQ(ierr); 1501 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 1502 } else { 1503 if (pc->pmat) { 1504 ierr = PetscViewerASCIIPrintf(viewer," linear system matrix followed by preconditioner matrix:\n");CHKERRQ(ierr); 1505 } else { 1506 ierr = PetscViewerASCIIPrintf(viewer," linear system matrix:\n");CHKERRQ(ierr); 1507 } 1508 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 1509 ierr = MatView(pc->mat,viewer);CHKERRQ(ierr); 1510 if (pc->pmat) {ierr = 
MatView(pc->pmat,viewer);CHKERRQ(ierr);} 1511 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 1512 } 1513 ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr); 1514 } 1515 } else if (isstring) { 1516 ierr = PCGetType(pc,&cstr);CHKERRQ(ierr); 1517 ierr = PetscViewerStringSPrintf(viewer," %-7.7s",cstr);CHKERRQ(ierr); 1518 if (pc->ops->view) {ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);} 1519 } else { 1520 SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported by PC",((PetscObject)viewer)->type_name); 1521 } 1522 PetscFunctionReturn(0); 1523 } 1524 1525 1526 #undef __FUNCT__ 1527 #define __FUNCT__ "PCSetInitialGuessNonzero" 1528 /*@ 1529 PCSetInitialGuessNonzero - Tells the iterative solver that the 1530 initial guess is nonzero; otherwise PC assumes the initial guess 1531 is to be zero (and thus zeros it out before solving). 1532 1533 Logically Collective on PC 1534 1535 Input Parameters: 1536 + pc - iterative context obtained from PCCreate() 1537 - flg - PETSC_TRUE indicates the guess is non-zero, PETSC_FALSE indicates the guess is zero 1538 1539 Level: Developer 1540 1541 Notes: 1542 This is a weird function. Since PC's are linear operators on the right hand side they 1543 CANNOT use an initial guess. This function is for the "pass-through" preconditioners 1544 PCKSP, PCREDUNDANT and PCHMPI and causes the inner KSP object to use the nonzero 1545 initial guess. Not currently working for PCREDUNDANT, that has to be rewritten to use KSP. 
1546 1547 1548 .keywords: PC, set, initial guess, nonzero 1549 1550 .seealso: PCGetInitialGuessNonzero(), PCSetInitialGuessKnoll(), PCGetInitialGuessKnoll() 1551 @*/ 1552 PetscErrorCode PCSetInitialGuessNonzero(PC pc,PetscBool flg) 1553 { 1554 PetscFunctionBegin; 1555 PetscValidLogicalCollectiveBool(pc,flg,2); 1556 pc->nonzero_guess = flg; 1557 PetscFunctionReturn(0); 1558 } 1559 1560 #undef __FUNCT__ 1561 #define __FUNCT__ "PCRegister" 1562 /*@C 1563 PCRegister - See PCRegisterDynamic() 1564 1565 Level: advanced 1566 @*/ 1567 PetscErrorCode PCRegister(const char sname[],const char path[],const char name[],PetscErrorCode (*function)(PC)) 1568 { 1569 PetscErrorCode ierr; 1570 char fullname[PETSC_MAX_PATH_LEN]; 1571 1572 PetscFunctionBegin; 1573 1574 ierr = PetscFListConcat(path,name,fullname);CHKERRQ(ierr); 1575 ierr = PetscFListAdd(&PCList,sname,fullname,(void (*)(void))function);CHKERRQ(ierr); 1576 PetscFunctionReturn(0); 1577 } 1578 1579 #undef __FUNCT__ 1580 #define __FUNCT__ "PCComputeExplicitOperator" 1581 /*@ 1582 PCComputeExplicitOperator - Computes the explicit preconditioned operator. 1583 1584 Collective on PC 1585 1586 Input Parameter: 1587 . pc - the preconditioner object 1588 1589 Output Parameter: 1590 . mat - the explict preconditioned operator 1591 1592 Notes: 1593 This computation is done by applying the operators to columns of the 1594 identity matrix. 1595 1596 Currently, this routine uses a dense matrix format when 1 processor 1597 is used and a sparse format otherwise. This routine is costly in general, 1598 and is recommended for use only with relatively small systems. 
1599 1600 Level: advanced 1601 1602 .keywords: PC, compute, explicit, operator 1603 1604 .seealso: KSPComputeExplicitOperator() 1605 1606 @*/ 1607 PetscErrorCode PCComputeExplicitOperator(PC pc,Mat *mat) 1608 { 1609 Vec in,out; 1610 PetscErrorCode ierr; 1611 PetscInt i,M,m,*rows,start,end; 1612 PetscMPIInt size; 1613 MPI_Comm comm; 1614 PetscScalar *array,one = 1.0; 1615 1616 PetscFunctionBegin; 1617 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1618 PetscValidPointer(mat,2); 1619 1620 comm = ((PetscObject)pc)->comm; 1621 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 1622 1623 if (!pc->pmat) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"You must call KSPSetOperators() or PCSetOperators() before this call"); 1624 ierr = MatGetVecs(pc->pmat,&in,0);CHKERRQ(ierr); 1625 ierr = VecDuplicate(in,&out);CHKERRQ(ierr); 1626 ierr = VecGetOwnershipRange(in,&start,&end);CHKERRQ(ierr); 1627 ierr = VecGetSize(in,&M);CHKERRQ(ierr); 1628 ierr = VecGetLocalSize(in,&m);CHKERRQ(ierr); 1629 ierr = PetscMalloc((m+1)*sizeof(PetscInt),&rows);CHKERRQ(ierr); 1630 for (i=0; i<m; i++) {rows[i] = start + i;} 1631 1632 ierr = MatCreate(comm,mat);CHKERRQ(ierr); 1633 ierr = MatSetSizes(*mat,m,m,M,M);CHKERRQ(ierr); 1634 if (size == 1) { 1635 ierr = MatSetType(*mat,MATSEQDENSE);CHKERRQ(ierr); 1636 ierr = MatSeqDenseSetPreallocation(*mat,PETSC_NULL);CHKERRQ(ierr); 1637 } else { 1638 ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr); 1639 ierr = MatMPIAIJSetPreallocation(*mat,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr); 1640 } 1641 1642 for (i=0; i<M; i++) { 1643 1644 ierr = VecSet(in,0.0);CHKERRQ(ierr); 1645 ierr = VecSetValues(in,1,&i,&one,INSERT_VALUES);CHKERRQ(ierr); 1646 ierr = VecAssemblyBegin(in);CHKERRQ(ierr); 1647 ierr = VecAssemblyEnd(in);CHKERRQ(ierr); 1648 1649 /* should fix, allowing user to choose side */ 1650 ierr = PCApply(pc,in,out);CHKERRQ(ierr); 1651 1652 ierr = VecGetArray(out,&array);CHKERRQ(ierr); 1653 ierr = MatSetValues(*mat,m,rows,1,&i,array,INSERT_VALUES);CHKERRQ(ierr); 1654 
ierr = VecRestoreArray(out,&array);CHKERRQ(ierr); 1655 1656 } 1657 ierr = PetscFree(rows);CHKERRQ(ierr); 1658 ierr = VecDestroy(&out);CHKERRQ(ierr); 1659 ierr = MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1660 ierr = MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1661 PetscFunctionReturn(0); 1662 } 1663 1664 #undef __FUNCT__ 1665 #define __FUNCT__ "PCSetCoordinates" 1666 /*@ 1667 PCSetCoordinates - sets the coordinates of all the nodes on the local process 1668 1669 Collective on PC 1670 1671 Input Parameters: 1672 + pc - the solver context 1673 . dim - the dimension of the coordinates 1, 2, or 3 1674 - coords - the coordinates 1675 1676 Level: intermediate 1677 1678 Notes: coords is an array of the 3D coordinates for the nodes on 1679 the local processor. So if there are 108 equation on a processor 1680 for a displacement finite element discretization of elasticity (so 1681 that there are 36 = 108/3 nodes) then the array must have 108 1682 double precision values (ie, 3 * 36). These x y z coordinates 1683 should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x, 1684 ... , N-1.z ]. 1685 1686 .seealso: PCPROMETHEUS 1687 @*/ 1688 PetscErrorCode PCSetCoordinates(PC pc,PetscInt dim,PetscReal *coords) 1689 { 1690 PetscErrorCode ierr; 1691 1692 PetscFunctionBegin; 1693 ierr = PetscTryMethod(pc,"PCSetCoordinates_C",(PC,PetscInt,PetscReal*),(pc,dim,coords));CHKERRQ(ierr); 1694 PetscFunctionReturn(0); 1695 } 1696