#define PETSCKSP_DLL

/*
    The PC (preconditioner) interface routines, callable by users.
*/
#include "private/pcimpl.h"            /*I "petscksp.h" I*/

/* Logging support */
PetscClassId  PC_CLASSID;
PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks;

#undef __FUNCT__
#define __FUNCT__ "PCGetDefaultType_Private"
/*
   Selects a default PC type based on the preconditioner matrix:
   ICC/ILU when a PETSc factorization is available on one process,
   block Jacobi for parallel matrix formats, PCNONE otherwise.
*/
PetscErrorCode PCGetDefaultType_Private(PC pc,const char* type[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size;
  PetscBool      flg1,flg2,set,flg3;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  if (pc->pmat) {
    PetscErrorCode (*f)(Mat,PetscBool *,MatReuse,Mat*);
    /* presence of MatGetDiagonalBlock_C indicates a parallel matrix format */
    ierr = PetscObjectQueryFunction((PetscObject)pc->pmat,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
    if (size == 1) {
      ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1);CHKERRQ(ierr);
      ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2);CHKERRQ(ierr);
      ierr = MatIsSymmetricKnown(pc->pmat,&set,&flg3);CHKERRQ(ierr);
      /* prefer ICC when it is the only factorization, or when the matrix is known symmetric */
      if (flg1 && (!flg2 || (set && flg3))) {
        *type = PCICC;
      } else if (flg2) {
        *type = PCILU;
      } else if (f) { /* likely is a parallel matrix run on one processor */
        *type = PCBJACOBI;
      } else {
        *type = PCNONE;
      }
    } else {
      if (f) {
        *type = PCBJACOBI;
      } else {
        *type = PCNONE;
      }
    }
  } else {
    /* no preconditioner matrix set yet; guess from the communicator size alone */
    if (size == 1) {
      *type = PCILU;
    } else {
      *type = PCBJACOBI;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy"
/*@
   PCDestroy - Destroys PC context that was created with PCCreate().

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Level: developer

.keywords: PC, destroy

.seealso: PCCreate(), PCSetUp()
@*/
PetscErrorCode PCDestroy(PC pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* only tear down when the last reference goes away */
  if (--((PetscObject)pc)->refct > 0) PetscFunctionReturn(0);

  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectDepublish(pc);CHKERRQ(ierr);
  if (pc->dm) {ierr = DMDestroy(pc->dm);CHKERRQ(ierr);}

  /* type-specific destroy runs first so implementations can still access pc members */
  if (pc->ops->destroy) {ierr = (*pc->ops->destroy)(pc);CHKERRQ(ierr);}
  if (pc->diagonalscaleright) {ierr = VecDestroy(pc->diagonalscaleright);CHKERRQ(ierr);}
  if (pc->diagonalscaleleft) {ierr = VecDestroy(pc->diagonalscaleleft);CHKERRQ(ierr);}

  if (pc->pmat) {ierr = MatDestroy(pc->pmat);CHKERRQ(ierr);}
  if (pc->mat) {ierr = MatDestroy(pc->mat);CHKERRQ(ierr);}

  ierr = PetscHeaderDestroy(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGetDiagonalScale"
/*@C
   PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right
      scaling as needed by certain time-stepping codes.

   Logically Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.
   flag - PETSC_TRUE if it applies the scaling

   Level: developer

   Notes: If this returns PETSC_TRUE then the system solved via the Krylov method is
$          D M A D^{-1} y = D M b  for left preconditioning or
$          D A M D^{-1} z = D b for right preconditioning

.keywords: PC

.seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCSetDiagonalScale()
@*/
PetscErrorCode PCGetDiagonalScale(PC pc,PetscBool *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(flag,2);
  *flag = pc->diagonalscale;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetDiagonalScale"
/*@
   PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right
      scaling as needed by certain time-stepping codes.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  s - scaling vector

   Level: intermediate

   Notes: The system solved via the Krylov method is
$          D M A D^{-1} y = D M b  for left preconditioning or
$          D A M D^{-1} z = D b for right preconditioning

   PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.

.keywords: PC

.seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCGetDiagonalScale()
@*/
PetscErrorCode PCSetDiagonalScale(PC pc,Vec s)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(s,VEC_CLASSID,2);
  pc->diagonalscale     = PETSC_TRUE;
  /* reference s before destroying the old left scaling in case they are the same vector */
  ierr = PetscObjectReference((PetscObject)s);CHKERRQ(ierr);
  if (pc->diagonalscaleleft) {
    ierr = VecDestroy(pc->diagonalscaleleft);CHKERRQ(ierr);
  }
  pc->diagonalscaleleft = s;
  if (!pc->diagonalscaleright) {
    ierr = VecDuplicate(s,&pc->diagonalscaleright);CHKERRQ(ierr);
  }
  /* the right scaling is the elementwise inverse D^{-1} of the left scaling D */
  ierr = VecCopy(s,pc->diagonalscaleright);CHKERRQ(ierr);
  ierr = VecReciprocal(pc->diagonalscaleright);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDiagonalScaleLeft"
/*@
   PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - input vector
-  out - scaled vector (may be the same as in)

   Level: intermediate

   Notes: The system solved via the Krylov method is
$          D M A D^{-1} y = D M b  for left preconditioning or
$          D A M D^{-1} z = D b for right preconditioning

   PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.

   If diagonal scaling is turned off and in is not out then in is copied to out

.keywords: PC

.seealso: PCCreate(), PCSetUp(), PCSetDiagonalScale(), PCDiagonalScaleRight(), PCGetDiagonalScale()
@*/
PetscErrorCode PCDiagonalScaleLeft(PC pc,Vec in,Vec out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_CLASSID,3);
  if (pc->diagonalscale) {
    ierr = VecPointwiseMult(out,pc->diagonalscaleleft,in);CHKERRQ(ierr);
  } else if (in != out) {
    /* scaling disabled: just pass the vector through */
    ierr = VecCopy(in,out);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDiagonalScaleRight"
/*@
   PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - input vector
-  out - scaled vector (may be the same as in)

   Level: intermediate

   Notes: The system solved via the Krylov method is
$          D M A D^{-1} y = D M b  for left preconditioning or
$          D A M D^{-1} z = D b for right preconditioning

   PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.

   If diagonal scaling is turned off and in is not out then in is copied to out

.keywords: PC

.seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCSetDiagonalScale(), PCGetDiagonalScale()
@*/
PetscErrorCode PCDiagonalScaleRight(PC pc,Vec in,Vec out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_CLASSID,3);
  if (pc->diagonalscale) {
    ierr = VecPointwiseMult(out,pc->diagonalscaleright,in);CHKERRQ(ierr);
  } else if (in != out) {
    /* scaling disabled: just pass the vector through */
    ierr = VecCopy(in,out);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#if 0
#undef __FUNCT__
#define __FUNCT__ "PCPublish_Petsc"
/* placeholder AMS publishing hook; currently disabled */
static PetscErrorCode PCPublish_Petsc(PetscObject obj)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
#endif

#undef __FUNCT__
#define __FUNCT__ "PCCreate"
/*@
   PCCreate - Creates a preconditioner context.

   Collective on MPI_Comm

   Input Parameter:
.  comm - MPI communicator

   Output Parameter:
.  pc - location to put the preconditioner context

   Notes:
   The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi with PCILU or ICC
   in parallel. For dense matrices it is always PCNONE.
283 284 Level: developer 285 286 .keywords: PC, create, context 287 288 .seealso: PCSetUp(), PCApply(), PCDestroy() 289 @*/ 290 PetscErrorCode PCCreate(MPI_Comm comm,PC *newpc) 291 { 292 PC pc; 293 PetscErrorCode ierr; 294 295 PetscFunctionBegin; 296 PetscValidPointer(newpc,1); 297 *newpc = 0; 298 #ifndef PETSC_USE_DYNAMIC_LIBRARIES 299 ierr = PCInitializePackage(PETSC_NULL);CHKERRQ(ierr); 300 #endif 301 302 ierr = PetscHeaderCreate(pc,_p_PC,struct _PCOps,PC_CLASSID,-1,"PC",comm,PCDestroy,PCView);CHKERRQ(ierr); 303 304 pc->mat = 0; 305 pc->pmat = 0; 306 pc->setupcalled = 0; 307 pc->setfromoptionscalled = 0; 308 pc->data = 0; 309 pc->diagonalscale = PETSC_FALSE; 310 pc->diagonalscaleleft = 0; 311 pc->diagonalscaleright = 0; 312 313 pc->modifysubmatrices = 0; 314 pc->modifysubmatricesP = 0; 315 *newpc = pc; 316 PetscFunctionReturn(0); 317 318 } 319 320 /* -------------------------------------------------------------------------------*/ 321 322 #undef __FUNCT__ 323 #define __FUNCT__ "PCApply" 324 /*@ 325 PCApply - Applies the preconditioner to a vector. 326 327 Collective on PC and Vec 328 329 Input Parameters: 330 + pc - the preconditioner context 331 - x - input vector 332 333 Output Parameter: 334 . 
y - output vector 335 336 Level: developer 337 338 .keywords: PC, apply 339 340 .seealso: PCApplyTranspose(), PCApplyBAorAB() 341 @*/ 342 PetscErrorCode PCApply(PC pc,Vec x,Vec y) 343 { 344 PetscErrorCode ierr; 345 346 PetscFunctionBegin; 347 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 348 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 349 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 350 if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 351 if (pc->setupcalled < 2) { 352 ierr = PCSetUp(pc);CHKERRQ(ierr); 353 } 354 if (!pc->ops->apply) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have apply"); 355 ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 356 ierr = (*pc->ops->apply)(pc,x,y);CHKERRQ(ierr); 357 ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 358 PetscFunctionReturn(0); 359 } 360 361 #undef __FUNCT__ 362 #define __FUNCT__ "PCApplySymmetricLeft" 363 /*@ 364 PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector. 365 366 Collective on PC and Vec 367 368 Input Parameters: 369 + pc - the preconditioner context 370 - x - input vector 371 372 Output Parameter: 373 . y - output vector 374 375 Notes: 376 Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners. 
377 378 Level: developer 379 380 .keywords: PC, apply, symmetric, left 381 382 .seealso: PCApply(), PCApplySymmetricRight() 383 @*/ 384 PetscErrorCode PCApplySymmetricLeft(PC pc,Vec x,Vec y) 385 { 386 PetscErrorCode ierr; 387 388 PetscFunctionBegin; 389 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 390 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 391 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 392 if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 393 if (pc->setupcalled < 2) { 394 ierr = PCSetUp(pc);CHKERRQ(ierr); 395 } 396 if (!pc->ops->applysymmetricleft) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have left symmetric apply"); 397 ierr = PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr); 398 ierr = (*pc->ops->applysymmetricleft)(pc,x,y);CHKERRQ(ierr); 399 ierr = PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr); 400 PetscFunctionReturn(0); 401 } 402 403 #undef __FUNCT__ 404 #define __FUNCT__ "PCApplySymmetricRight" 405 /*@ 406 PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector. 407 408 Collective on PC and Vec 409 410 Input Parameters: 411 + pc - the preconditioner context 412 - x - input vector 413 414 Output Parameter: 415 . y - output vector 416 417 Level: developer 418 419 Notes: 420 Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners. 
421 422 .keywords: PC, apply, symmetric, right 423 424 .seealso: PCApply(), PCApplySymmetricLeft() 425 @*/ 426 PetscErrorCode PCApplySymmetricRight(PC pc,Vec x,Vec y) 427 { 428 PetscErrorCode ierr; 429 430 PetscFunctionBegin; 431 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 432 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 433 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 434 if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 435 if (pc->setupcalled < 2) { 436 ierr = PCSetUp(pc);CHKERRQ(ierr); 437 } 438 if (!pc->ops->applysymmetricright) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have left symmetric apply"); 439 ierr = PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 440 ierr = (*pc->ops->applysymmetricright)(pc,x,y);CHKERRQ(ierr); 441 ierr = PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 442 PetscFunctionReturn(0); 443 } 444 445 #undef __FUNCT__ 446 #define __FUNCT__ "PCApplyTranspose" 447 /*@ 448 PCApplyTranspose - Applies the transpose of preconditioner to a vector. 449 450 Collective on PC and Vec 451 452 Input Parameters: 453 + pc - the preconditioner context 454 - x - input vector 455 456 Output Parameter: 457 . 
   y - output vector

   Level: developer

.keywords: PC, apply, transpose

.seealso: PCApply(), PCApplyBAorAB(), PCApplyBAorABTranspose(), PCApplyTransposeExists()
@*/
PetscErrorCode PCApplyTranspose(PC pc,Vec x,Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  /* setupcalled < 2 means the PC was never set up or needs to be set up again */
  if (pc->setupcalled < 2) {
    ierr = PCSetUp(pc);CHKERRQ(ierr);
  }
  if (!pc->ops->applytranspose) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have apply transpose");
  ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  ierr = (*pc->ops->applytranspose)(pc,x,y);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyTransposeExists"
/*@
   PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation

   Collective on PC and Vec

   Input Parameters:
.  pc - the preconditioner context

   Output Parameter:
.  flg - PETSC_TRUE if a transpose operation is defined

   Level: developer

.keywords: PC, apply, transpose

.seealso: PCApplyTranspose()
@*/
PetscErrorCode PCApplyTransposeExists(PC pc,PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(flg,2);
  if (pc->ops->applytranspose) *flg = PETSC_TRUE;
  else *flg = PETSC_FALSE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyBAorAB"
/*@
   PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x.

   Collective on PC and Vec

   Input Parameters:
+  pc - the preconditioner context
.  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
.  x - input vector
-  work - work vector

   Output Parameter:
.  y - output vector

   Level: developer

.keywords: PC, apply, operator

.seealso: PCApply(), PCApplyTranspose(), PCApplyBAorABTranspose()
@*/
PetscErrorCode PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscValidHeaderSpecific(y,VEC_CLASSID,4);
  PetscValidHeaderSpecific(work,VEC_CLASSID,5);
  if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (side != PC_LEFT && side != PC_SYMMETRIC && side != PC_RIGHT) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric");
  if (pc->diagonalscale && side == PC_SYMMETRIC) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application");

  if (pc->setupcalled < 2) {
    ierr = PCSetUp(pc);CHKERRQ(ierr);
  }

  if (pc->diagonalscale) {
    /* apply y = D (B A or A B) D^{-1} x; see PCSetDiagonalScale() */
    if (pc->ops->applyBA) {
      Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
      ierr = VecDuplicate(x,&work2);CHKERRQ(ierr);
      ierr = PCDiagonalScaleRight(pc,x,work2);CHKERRQ(ierr);
      ierr = (*pc->ops->applyBA)(pc,side,work2,y,work);CHKERRQ(ierr);
      ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
      ierr = VecDestroy(work2);CHKERRQ(ierr);
    } else if (side == PC_RIGHT) {
      ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr);
      ierr = PCApply(pc,y,work);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
      ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
    } else if (side == PC_LEFT) {
      ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,y,work);CHKERRQ(ierr);
      ierr = PCApply(pc,work,y);CHKERRQ(ierr);
      ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
    } else if (side == PC_SYMMETRIC) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner");
  } else {
    if (pc->ops->applyBA) {
      ierr = (*pc->ops->applyBA)(pc,side,x,y,work);CHKERRQ(ierr);
    } else if (side == PC_RIGHT) {
      ierr = PCApply(pc,x,work);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
    } else if (side == PC_LEFT) {
      ierr = MatMult(pc->mat,x,work);CHKERRQ(ierr);
      ierr = PCApply(pc,work,y);CHKERRQ(ierr);
    } else if (side == PC_SYMMETRIC) {
      /* There's an extra copy here; maybe should provide 2 work vectors instead? */
      ierr = PCApplySymmetricRight(pc,x,work);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
      ierr = VecCopy(y,work);CHKERRQ(ierr);
      ierr = PCApplySymmetricLeft(pc,work,y);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyBAorABTranspose"
/*@
   PCApplyBAorABTranspose - Applies the transpose of the preconditioner
   and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning,
   NOT tr(B*A) = tr(A)*tr(B).

   Collective on PC and Vec

   Input Parameters:
+  pc - the preconditioner context
.  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
.  x - input vector
-  work - work vector

   Output Parameter:
.  y - output vector


   Notes: this routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner
      defined by B'. This is why this has the funny form that it computes tr(B) * tr(A)

   Level: developer

.keywords: PC, apply, operator, transpose

.seealso: PCApply(), PCApplyTranspose(), PCApplyBAorAB()
@*/
PetscErrorCode PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscValidHeaderSpecific(y,VEC_CLASSID,4);
  PetscValidHeaderSpecific(work,VEC_CLASSID,5);
  if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  /* a type-specific fused implementation takes precedence */
  if (pc->ops->applyBAtranspose) {
    ierr = (*pc->ops->applyBAtranspose)(pc,side,x,y,work);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  if (side != PC_LEFT && side != PC_RIGHT) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left");

  if (pc->setupcalled < 2) {
    ierr = PCSetUp(pc);CHKERRQ(ierr);
  }

  if (side == PC_RIGHT) {
    ierr = PCApplyTranspose(pc,x,work);CHKERRQ(ierr);
    ierr = MatMultTranspose(pc->mat,work,y);CHKERRQ(ierr);
  } else if (side == PC_LEFT) {
    ierr = MatMultTranspose(pc->mat,x,work);CHKERRQ(ierr);
    ierr = PCApplyTranspose(pc,work,y);CHKERRQ(ierr);
  }
  /* add support for PC_SYMMETRIC */
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "PCApplyRichardsonExists"
/*@
   PCApplyRichardsonExists - Determines whether a particular preconditioner has a
   built-in fast application of Richardson's method.

   Not Collective

   Input Parameter:
.  pc - the preconditioner

   Output Parameter:
.
exists - PETSC_TRUE or PETSC_FALSE 665 666 Level: developer 667 668 .keywords: PC, apply, Richardson, exists 669 670 .seealso: PCApplyRichardson() 671 @*/ 672 PetscErrorCode PCApplyRichardsonExists(PC pc,PetscBool *exists) 673 { 674 PetscFunctionBegin; 675 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 676 PetscValidIntPointer(exists,2); 677 if (pc->ops->applyrichardson) *exists = PETSC_TRUE; 678 else *exists = PETSC_FALSE; 679 PetscFunctionReturn(0); 680 } 681 682 #undef __FUNCT__ 683 #define __FUNCT__ "PCApplyRichardson" 684 /*@ 685 PCApplyRichardson - Applies several steps of Richardson iteration with 686 the particular preconditioner. This routine is usually used by the 687 Krylov solvers and not the application code directly. 688 689 Collective on PC 690 691 Input Parameters: 692 + pc - the preconditioner context 693 . b - the right hand side 694 . w - one work vector 695 . rtol - relative decrease in residual norm convergence criteria 696 . abstol - absolute residual norm convergence criteria 697 . dtol - divergence residual norm increase criteria 698 . its - the number of iterations to apply. 699 - guesszero - if the input x contains nonzero initial guess 700 701 Output Parameter: 702 + outits - number of iterations actually used (for SOR this always equals its) 703 . reason - the reason the apply terminated 704 - y - the solution (also contains initial guess if guesszero is PETSC_FALSE 705 706 Notes: 707 Most preconditioners do not support this function. Use the command 708 PCApplyRichardsonExists() to determine if one does. 
709 710 Except for the multigrid PC this routine ignores the convergence tolerances 711 and always runs for the number of iterations 712 713 Level: developer 714 715 .keywords: PC, apply, Richardson 716 717 .seealso: PCApplyRichardsonExists() 718 @*/ 719 PetscErrorCode PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason) 720 { 721 PetscErrorCode ierr; 722 723 PetscFunctionBegin; 724 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 725 PetscValidHeaderSpecific(b,VEC_CLASSID,2); 726 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 727 PetscValidHeaderSpecific(w,VEC_CLASSID,4); 728 if (b == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"b and y must be different vectors"); 729 if (pc->setupcalled < 2) { 730 ierr = PCSetUp(pc);CHKERRQ(ierr); 731 } 732 if (!pc->ops->applyrichardson) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have apply richardson"); 733 ierr = (*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason);CHKERRQ(ierr); 734 PetscFunctionReturn(0); 735 } 736 737 /* 738 a setupcall of 0 indicates never setup, 739 1 needs to be resetup, 740 2 does not need any changes. 741 */ 742 #undef __FUNCT__ 743 #define __FUNCT__ "PCSetUp" 744 /*@ 745 PCSetUp - Prepares for the use of a preconditioner. 746 747 Collective on PC 748 749 Input Parameter: 750 . 
pc - the preconditioner context 751 752 Level: developer 753 754 .keywords: PC, setup 755 756 .seealso: PCCreate(), PCApply(), PCDestroy() 757 @*/ 758 PetscErrorCode PCSetUp(PC pc) 759 { 760 PetscErrorCode ierr; 761 const char *def; 762 763 PetscFunctionBegin; 764 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 765 if (!pc->mat) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first"); 766 767 if (pc->setupcalled > 1) { 768 ierr = PetscInfo(pc,"Setting PC with identical preconditioner\n");CHKERRQ(ierr); 769 PetscFunctionReturn(0); 770 } else if (!pc->setupcalled) { 771 ierr = PetscInfo(pc,"Setting up new PC\n");CHKERRQ(ierr); 772 } else if (pc->flag == SAME_NONZERO_PATTERN) { 773 ierr = PetscInfo(pc,"Setting up PC with same nonzero pattern\n");CHKERRQ(ierr); 774 } else { 775 ierr = PetscInfo(pc,"Setting up PC with different nonzero pattern\n");CHKERRQ(ierr); 776 } 777 778 if (!((PetscObject)pc)->type_name) { 779 ierr = PCGetDefaultType_Private(pc,&def);CHKERRQ(ierr); 780 ierr = PCSetType(pc,def);CHKERRQ(ierr); 781 } 782 783 ierr = PetscLogEventBegin(PC_SetUp,pc,0,0,0);CHKERRQ(ierr); 784 if (pc->ops->setup) { 785 ierr = (*pc->ops->setup)(pc);CHKERRQ(ierr); 786 } 787 pc->setupcalled = 2; 788 ierr = PetscLogEventEnd(PC_SetUp,pc,0,0,0);CHKERRQ(ierr); 789 PetscFunctionReturn(0); 790 } 791 792 #undef __FUNCT__ 793 #define __FUNCT__ "PCSetUpOnBlocks" 794 /*@ 795 PCSetUpOnBlocks - Sets up the preconditioner for each block in 796 the block Jacobi, block Gauss-Seidel, and overlapping Schwarz 797 methods. 798 799 Collective on PC 800 801 Input Parameters: 802 . 
pc - the preconditioner context 803 804 Level: developer 805 806 .keywords: PC, setup, blocks 807 808 .seealso: PCCreate(), PCApply(), PCDestroy(), PCSetUp() 809 @*/ 810 PetscErrorCode PCSetUpOnBlocks(PC pc) 811 { 812 PetscErrorCode ierr; 813 814 PetscFunctionBegin; 815 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 816 if (!pc->ops->setuponblocks) PetscFunctionReturn(0); 817 ierr = PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr); 818 ierr = (*pc->ops->setuponblocks)(pc);CHKERRQ(ierr); 819 ierr = PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr); 820 PetscFunctionReturn(0); 821 } 822 823 #undef __FUNCT__ 824 #define __FUNCT__ "PCSetModifySubMatrices" 825 /*@C 826 PCSetModifySubMatrices - Sets a user-defined routine for modifying the 827 submatrices that arise within certain subdomain-based preconditioners. 828 The basic submatrices are extracted from the preconditioner matrix as 829 usual; the user can then alter these (for example, to set different boundary 830 conditions for each submatrix) before they are used for the local solves. 831 832 Logically Collective on PC 833 834 Input Parameters: 835 + pc - the preconditioner context 836 . func - routine for modifying the submatrices 837 - ctx - optional user-defined context (may be null) 838 839 Calling sequence of func: 840 $ func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx); 841 842 . row - an array of index sets that contain the global row numbers 843 that comprise each local submatrix 844 . col - an array of index sets that contain the global column numbers 845 that comprise each local submatrix 846 . submat - array of local submatrices 847 - ctx - optional user-defined context for private data for the 848 user-defined func routine (may be null) 849 850 Notes: 851 PCSetModifySubMatrices() MUST be called before KSPSetUp() and 852 KSPSolve(). 
   A routine set by PCSetModifySubMatrices() is currently called within
   the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM)
   preconditioners.  All other preconditioners ignore this routine.

   Level: advanced

.keywords: PC, set, modify, submatrices

.seealso: PCModifySubMatrices()
@*/
PetscErrorCode PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  pc->modifysubmatrices  = func;
  pc->modifysubmatricesP = ctx;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCModifySubMatrices"
/*@C
   PCModifySubMatrices - Calls an optional user-defined routine within
   certain preconditioners if one has been set with PCSetModifySubMatrices().

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  nsub - the number of local submatrices
.  row - an array of index sets that contain the global row numbers
         that comprise each local submatrix
.  col - an array of index sets that contain the global column numbers
         that comprise each local submatrix
.  submat - array of local submatrices
-  ctx - optional user-defined context for private data for the
         user-defined routine (may be null)

   Output Parameter:
.  submat - array of local submatrices (the entries of which may
            have been modified)

   Notes:
   The user should NOT generally call this routine, as it will
   automatically be called within certain preconditioners (currently
   block Jacobi, additive Schwarz) if set.

   The basic submatrices are extracted from the preconditioner matrix
   as usual; the user can then alter these (for example, to set different
   boundary conditions for each submatrix) before they are used for the
   local solves.

   Level: developer

.keywords: PC, modify, submatrices

.seealso: PCSetModifySubMatrices()
@*/
PetscErrorCode PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* silently do nothing when no user callback was set */
  if (!pc->modifysubmatrices) PetscFunctionReturn(0);
  ierr = PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr);
  ierr = (*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetOperators"
/*@
   PCSetOperators - Sets the matrix associated with the linear system and
   a (possibly) different one associated with the preconditioner.

   Logically Collective on PC and Mat

   Input Parameters:
+  pc - the preconditioner context
.  Amat - the matrix associated with the linear system
.  Pmat - the matrix to be used in constructing the preconditioner, usually
          the same as Amat.
-  flag - flag indicating information about the preconditioner matrix structure
          during successive linear solves.  This flag is ignored the first time a
          linear system is solved, and thus is irrelevant when solving just one linear
          system.

   Notes:
   The flag can be used to eliminate unnecessary work in the preconditioner
   during the repeated solution of linear systems of the same size.  The
   available options are
+    SAME_PRECONDITIONER -
       Pmat is identical during successive linear solves.
       This option is intended for folks who are using
       different Amat and Pmat matrices and wish to reuse the
       same preconditioner matrix.  For example, this option
       saves work by not recomputing incomplete factorization
       for ILU/ICC preconditioners.
.    SAME_NONZERO_PATTERN -
       Pmat has the same nonzero structure during
       successive linear solves.
-    DIFFERENT_NONZERO_PATTERN -
       Pmat does not have the same nonzero structure.

   Passing a PETSC_NULL for Amat or Pmat removes the matrix that is currently used.

   If you wish to replace either Amat or Pmat but leave the other one untouched then
   first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
   on it and then pass it back in in your call to KSPSetOperators().

   Caution:
   If you specify SAME_NONZERO_PATTERN, PETSc believes your assertion
   and does not check the structure of the matrix.  If you erroneously
   claim that the structure is the same when it actually is not, the new
   preconditioner will not function correctly.  Thus, use this optimization
   feature carefully!

   If in doubt about whether your preconditioner matrix has changed
   structure or not, use the flag DIFFERENT_NONZERO_PATTERN.

   More Notes about Repeated Solution of Linear Systems:
   PETSc does NOT reset the matrix entries of either Amat or Pmat
   to zero after a linear solve; the user is completely responsible for
   matrix assembly.  See the routine MatZeroEntries() if desiring to
   zero all elements of a matrix.

   Level: intermediate

.keywords: PC, set, operators, matrix, linear system

.seealso: PCGetOperators(), MatZeroEntries()
@*/
PetscErrorCode PCSetOperators(PC pc,Mat Amat,Mat Pmat,MatStructure flag)
{
  PetscErrorCode ierr;
  PetscInt       m1,n1,m2,n2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (Amat) PetscValidHeaderSpecific(Amat,MAT_CLASSID,2);
  if (Pmat) PetscValidHeaderSpecific(Pmat,MAT_CLASSID,3);
  if (Amat) PetscCheckSameComm(pc,1,Amat,2);
  if (Pmat) PetscCheckSameComm(pc,1,Pmat,3);
  /* once set up, the local sizes of the operators may not change */
  if (pc->setupcalled && Amat && Pmat) {
    ierr = MatGetLocalSize(Amat,&m1,&n1);CHKERRQ(ierr);
    ierr = MatGetLocalSize(pc->mat,&m2,&n2);CHKERRQ(ierr);
    if (m1 != m2 || n1 != n2) {
      SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
    }
    ierr = MatGetLocalSize(Pmat,&m1,&n1);CHKERRQ(ierr);
    ierr = MatGetLocalSize(pc->pmat,&m2,&n2);CHKERRQ(ierr);
    if (m1 != m2 || n1 != n2) {
      SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
    }
  }

  /* reference first in case the matrices are the same */
  if (Amat) {ierr = PetscObjectReference((PetscObject)Amat);CHKERRQ(ierr);}
  if (pc->mat) {ierr = MatDestroy(pc->mat);CHKERRQ(ierr);}
  if (Pmat) {ierr = PetscObjectReference((PetscObject)Pmat);CHKERRQ(ierr);}
  if (pc->pmat) {ierr = MatDestroy(pc->pmat);CHKERRQ(ierr);}
  pc->mat  = Amat;
  pc->pmat = Pmat;

  /* a fully set-up PC must be resetup unless the preconditioner matrix is unchanged */
  if (pc->setupcalled == 2 && flag != SAME_PRECONDITIONER) {
    pc->setupcalled = 1;
  }
  pc->flag = flag;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGetOperators"
/*@C
   PCGetOperators - Gets the matrix associated with the linear system and
   possibly a different one associated with the preconditioner.
1032 1033 Not collective, though parallel Mats are returned if the PC is parallel 1034 1035 Input Parameter: 1036 . pc - the preconditioner context 1037 1038 Output Parameters: 1039 + mat - the matrix associated with the linear system 1040 . pmat - matrix associated with the preconditioner, usually the same 1041 as mat. 1042 - flag - flag indicating information about the preconditioner 1043 matrix structure. See PCSetOperators() for details. 1044 1045 Level: intermediate 1046 1047 Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators 1048 are created in PC and returned to the user. In this case, if both operators 1049 mat and pmat are requested, two DIFFERENT operators will be returned. If 1050 only one is requested both operators in the PC will be the same (i.e. as 1051 if one had called KSP/PCSetOperators() with the same argument for both Mats). 1052 The user must set the sizes of the returned matrices and their type etc just 1053 as if the user created them with MatCreate(). For example, 1054 1055 $ KSP/PCGetOperators(ksp/pc,&mat,PETSC_NULL,PETSC_NULL); is equivalent to 1056 $ set size, type, etc of mat 1057 1058 $ MatCreate(comm,&mat); 1059 $ KSP/PCSetOperators(ksp/pc,mat,mat,SAME_NONZERO_PATTERN); 1060 $ PetscObjectDereference((PetscObject)mat); 1061 $ set size, type, etc of mat 1062 1063 and 1064 1065 $ KSP/PCGetOperators(ksp/pc,&mat,&pmat,PETSC_NULL); is equivalent to 1066 $ set size, type, etc of mat and pmat 1067 1068 $ MatCreate(comm,&mat); 1069 $ MatCreate(comm,&pmat); 1070 $ KSP/PCSetOperators(ksp/pc,mat,pmat,SAME_NONZERO_PATTERN); 1071 $ PetscObjectDereference((PetscObject)mat); 1072 $ PetscObjectDereference((PetscObject)pmat); 1073 $ set size, type, etc of mat and pmat 1074 1075 The rational for this support is so that when creating a TS, SNES, or KSP the hierarchy 1076 of underlying objects (i.e. SNES, KSP, PC, Mat) and their livespans can be completely 1077 managed by the top most level object (i.e. 
the TS, SNES, or KSP). Another way to look 1078 at this is when you create a SNES you do not NEED to create a KSP and attach it to 1079 the SNES object (the SNES object manages it for you). Similarly when you create a KSP 1080 you do not need to attach a PC to it (the KSP object manages the PC object for you). 1081 Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when 1082 it can be created for you? 1083 1084 1085 .keywords: PC, get, operators, matrix, linear system 1086 1087 .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperatorsSet() 1088 @*/ 1089 PetscErrorCode PCGetOperators(PC pc,Mat *mat,Mat *pmat,MatStructure *flag) 1090 { 1091 PetscErrorCode ierr; 1092 1093 PetscFunctionBegin; 1094 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1095 if (mat) { 1096 if (!pc->mat) { 1097 ierr = MatCreate(((PetscObject)pc)->comm,&pc->mat);CHKERRQ(ierr); 1098 if (!pc->pmat && !pmat) { /* user did NOT request pmat, so make same as mat */ 1099 pc->pmat = pc->mat; 1100 ierr = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr); 1101 } 1102 } 1103 *mat = pc->mat; 1104 } 1105 if (pmat) { 1106 if (!pc->pmat) { 1107 ierr = MatCreate(((PetscObject)pc)->comm,&pc->mat);CHKERRQ(ierr); 1108 if (!pc->mat && !mat) { /* user did NOT request mat, so make same as pmat */ 1109 pc->mat = pc->pmat; 1110 ierr = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr); 1111 } 1112 } 1113 *pmat = pc->pmat; 1114 } 1115 if (flag) *flag = pc->flag; 1116 PetscFunctionReturn(0); 1117 } 1118 1119 #undef __FUNCT__ 1120 #define __FUNCT__ "PCGetOperatorsSet" 1121 /*@C 1122 PCGetOperatorsSet - Determines if the matrix associated with the linear system and 1123 possibly a different one associated with the preconditioner have been set in the PC. 1124 1125 Not collective, though the results on all processes should be the same 1126 1127 Input Parameter: 1128 . 
pc - the preconditioner context 1129 1130 Output Parameters: 1131 + mat - the matrix associated with the linear system was set 1132 - pmat - matrix associated with the preconditioner was set, usually the same 1133 1134 Level: intermediate 1135 1136 .keywords: PC, get, operators, matrix, linear system 1137 1138 .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperators() 1139 @*/ 1140 PetscErrorCode PCGetOperatorsSet(PC pc,PetscBool *mat,PetscBool *pmat) 1141 { 1142 PetscFunctionBegin; 1143 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1144 if (mat) *mat = (pc->mat) ? PETSC_TRUE : PETSC_FALSE; 1145 if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE; 1146 PetscFunctionReturn(0); 1147 } 1148 1149 #undef __FUNCT__ 1150 #define __FUNCT__ "PCFactorGetMatrix" 1151 /*@ 1152 PCFactorGetMatrix - Gets the factored matrix from the 1153 preconditioner context. This routine is valid only for the LU, 1154 incomplete LU, Cholesky, and incomplete Cholesky methods. 1155 1156 Not Collective on PC though Mat is parallel if PC is parallel 1157 1158 Input Parameters: 1159 . pc - the preconditioner context 1160 1161 Output parameters: 1162 . 
mat - the factored matrix 1163 1164 Level: advanced 1165 1166 Notes: Does not increase the reference count for the matrix so DO NOT destroy it 1167 1168 .keywords: PC, get, factored, matrix 1169 @*/ 1170 PetscErrorCode PCFactorGetMatrix(PC pc,Mat *mat) 1171 { 1172 PetscErrorCode ierr; 1173 1174 PetscFunctionBegin; 1175 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1176 PetscValidPointer(mat,2); 1177 if (pc->ops->getfactoredmatrix) { 1178 ierr = (*pc->ops->getfactoredmatrix)(pc,mat);CHKERRQ(ierr); 1179 } else { 1180 SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC type does not support getting factor matrix"); 1181 } 1182 PetscFunctionReturn(0); 1183 } 1184 1185 #undef __FUNCT__ 1186 #define __FUNCT__ "PCSetOptionsPrefix" 1187 /*@C 1188 PCSetOptionsPrefix - Sets the prefix used for searching for all 1189 PC options in the database. 1190 1191 Logically Collective on PC 1192 1193 Input Parameters: 1194 + pc - the preconditioner context 1195 - prefix - the prefix string to prepend to all PC option requests 1196 1197 Notes: 1198 A hyphen (-) must NOT be given at the beginning of the prefix name. 1199 The first character of all runtime options is AUTOMATICALLY the 1200 hyphen. 1201 1202 Level: advanced 1203 1204 .keywords: PC, set, options, prefix, database 1205 1206 .seealso: PCAppendOptionsPrefix(), PCGetOptionsPrefix() 1207 @*/ 1208 PetscErrorCode PCSetOptionsPrefix(PC pc,const char prefix[]) 1209 { 1210 PetscErrorCode ierr; 1211 1212 PetscFunctionBegin; 1213 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1214 ierr = PetscObjectSetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr); 1215 PetscFunctionReturn(0); 1216 } 1217 1218 #undef __FUNCT__ 1219 #define __FUNCT__ "PCAppendOptionsPrefix" 1220 /*@C 1221 PCAppendOptionsPrefix - Appends to the prefix used for searching for all 1222 PC options in the database. 

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  prefix - the prefix string to prepend to all PC option requests

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the
   hyphen.

   Level: advanced

.keywords: PC, append, options, prefix, database

.seealso: PCSetOptionsPrefix(), PCGetOptionsPrefix()
@*/
PetscErrorCode PCAppendOptionsPrefix(PC pc,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* appends via the generic PetscObject options-prefix mechanism */
  ierr = PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGetOptionsPrefix"
/*@C
   PCGetOptionsPrefix - Gets the prefix used for searching for all
   PC options in the database.

   Not Collective

   Input Parameters:
.  pc - the preconditioner context

   Output Parameters:
.  prefix - pointer to the prefix string used, is returned

   Notes: On the fortran side, the user should pass in a string 'prefix' of
          sufficient length to hold the prefix.

   Level: advanced

.keywords: PC, get, options, prefix, database

.seealso: PCSetOptionsPrefix(), PCAppendOptionsPrefix()
@*/
PetscErrorCode PCGetOptionsPrefix(PC pc,const char *prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(prefix,2);
  /* returns a pointer into the object's own storage; the caller must NOT free it */
  ierr = PetscObjectGetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCPreSolve"
/*@
   PCPreSolve - Optional pre-solve phase, intended for any
   preconditioner-specific actions that must be performed before
   the iterative solve itself.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  ksp - the Krylov subspace context

   Level: developer

   Sample of Usage:
.vb
    PCPreSolve(pc,ksp);
    KSPSolve(ksp,b,x);
    PCPostSolve(pc,ksp);
.ve

   Notes:
   The pre-solve phase is distinct from the PCSetUp() phase.

   KSPSolve() calls this directly, so is rarely called by the user.

.keywords: PC, pre-solve

.seealso: PCPostSolve()
@*/
PetscErrorCode PCPreSolve(PC pc,KSP ksp)
{
  PetscErrorCode ierr;
  Vec            x,rhs;
  Mat            A,B;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
  ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr);
  ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr);
  /*
      Scale the system and have the matrices use the scaled form
    only if the two matrices are actually the same (and hence
    have the same scaling)
  */
  ierr = PCGetOperators(pc,&A,&B,PETSC_NULL);CHKERRQ(ierr);
  if (A == B) {
    ierr = MatScaleSystem(pc->mat,rhs,x);CHKERRQ(ierr);
    ierr = MatUseScaledForm(pc->mat,PETSC_TRUE);CHKERRQ(ierr);
  }

  /* give the PC implementation its chance to act before the Krylov iteration starts;
     note this runs AFTER scaling so the callback sees the scaled rhs/x */
  if (pc->ops->presolve) {
    ierr = (*pc->ops->presolve)(pc,ksp,rhs,x);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCPostSolve"
/*@
   PCPostSolve - Optional post-solve phase, intended for any
   preconditioner-specific actions that must be performed after
   the iterative solve itself.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  ksp - the Krylov subspace context

   Sample of Usage:
.vb
    PCPreSolve(pc,ksp);
    KSPSolve(ksp,b,x);
    PCPostSolve(pc,ksp);
.ve

   Note:
   KSPSolve() calls this routine directly, so it is rarely called by the user.

   Level: developer

.keywords: PC, post-solve

.seealso: PCPreSolve(), KSPSolve()
@*/
PetscErrorCode PCPostSolve(PC pc,KSP ksp)
{
  PetscErrorCode ierr;
  Vec            x,rhs;
  Mat            A,B;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
  ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr);
  ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr);
  /* the implementation callback runs BEFORE unscaling, mirroring PCPreSolve()
     where it runs after scaling — i.e. it always sees the scaled system */
  if (pc->ops->postsolve) {
    ierr = (*pc->ops->postsolve)(pc,ksp,rhs,x);CHKERRQ(ierr);
  }
  /*
      Unscale the system (undoing PCPreSolve()) and turn off the scaled form,
    only if the two matrices are actually the same (and hence
    have the same scaling)
  */
  ierr = PCGetOperators(pc,&A,&B,PETSC_NULL);CHKERRQ(ierr);
  if (A == B) {
    ierr = MatUnScaleSystem(pc->mat,rhs,x);CHKERRQ(ierr);
    ierr = MatUseScaledForm(pc->mat,PETSC_FALSE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCView"
/*@C
   PCView - Prints the PC data structure.

   Collective on PC

   Input Parameters:
+  PC - the PC context
-  viewer - optional visualization context

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
         output where only the first processor opens
         the file.  All other processors send their
         data to the first processor to print.

   The user can open an alternative visualization contexts with
   PetscViewerASCIIOpen() (output to a specified file).

   Level: developer

.keywords: PC, view

.seealso: KSPView(), PetscViewerASCIIOpen()
@*/
PetscErrorCode PCView(PC pc,PetscViewer viewer)
{
  const PCType      cstr;
  PetscErrorCode    ierr;
  PetscBool         iascii,isstring;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* default to the communicator's shared stdout viewer when none is supplied */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)pc)->comm,&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  PetscCheckSameComm(pc,1,viewer,2);

  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    /* NOTE(review): format is fetched here but never used in this routine — confirm intent */
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer,"PC Object");CHKERRQ(ierr);
    /* type-specific state, indented one tab level under the header */
    if (pc->ops->view) {
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
    }
    if (pc->mat) {
      /* matrices are shown in INFO format (summary only, not entries) */
      ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);CHKERRQ(ierr);
      if (pc->pmat == pc->mat) {
        ierr = PetscViewerASCIIPrintf(viewer,"  linear system matrix = precond matrix:\n");CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      } else {
        if (pc->pmat) {
          ierr = PetscViewerASCIIPrintf(viewer,"  linear system matrix followed by preconditioner matrix:\n");CHKERRQ(ierr);
        } else {
          ierr = PetscViewerASCIIPrintf(viewer,"  linear system matrix:\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);
        if (pc->pmat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);}
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    /* compact single-token form for string viewers */
    ierr = PCGetType(pc,&cstr);CHKERRQ(ierr);
    ierr = PetscViewerStringSPrintf(viewer," %-7.7s",cstr);CHKERRQ(ierr);
    if (pc->ops->view) {ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);}
  } else {
    SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported by PC",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "PCSetInitialGuessNonzero"
/*@
   PCSetInitialGuessNonzero - Tells the iterative solver that the
   initial guess is nonzero; otherwise PC assumes the initial guess
   is to be zero (and thus zeros it out before solving).

   Logically Collective on PC

   Input Parameters:
+  pc - iterative context obtained from PCCreate()
-  flg - PETSC_TRUE indicates the guess is non-zero, PETSC_FALSE indicates the guess is zero

   Level: Developer

   Notes:
    This is a weird function. Since PC's are linear operators on the right hand side they
    CANNOT use an initial guess. This function is for the "pass-through" preconditioners
    PCKSP, PCREDUNDANT and PCOPENMP and causes the inner KSP object to use the nonzero
    initial guess. Not currently working for PCREDUNDANT, that has to be rewritten to use KSP.
1504 1505 1506 .keywords: PC, set, initial guess, nonzero 1507 1508 .seealso: PCGetInitialGuessNonzero(), PCSetInitialGuessKnoll(), PCGetInitialGuessKnoll() 1509 @*/ 1510 PetscErrorCode PCSetInitialGuessNonzero(PC pc,PetscBool flg) 1511 { 1512 PetscFunctionBegin; 1513 PetscValidLogicalCollectiveBool(pc,flg,2); 1514 pc->nonzero_guess = flg; 1515 PetscFunctionReturn(0); 1516 } 1517 1518 #undef __FUNCT__ 1519 #define __FUNCT__ "PCRegister" 1520 /*@C 1521 PCRegister - See PCRegisterDynamic() 1522 1523 Level: advanced 1524 @*/ 1525 PetscErrorCode PCRegister(const char sname[],const char path[],const char name[],PetscErrorCode (*function)(PC)) 1526 { 1527 PetscErrorCode ierr; 1528 char fullname[PETSC_MAX_PATH_LEN]; 1529 1530 PetscFunctionBegin; 1531 1532 ierr = PetscFListConcat(path,name,fullname);CHKERRQ(ierr); 1533 ierr = PetscFListAdd(&PCList,sname,fullname,(void (*)(void))function);CHKERRQ(ierr); 1534 PetscFunctionReturn(0); 1535 } 1536 1537 #undef __FUNCT__ 1538 #define __FUNCT__ "PCComputeExplicitOperator" 1539 /*@ 1540 PCComputeExplicitOperator - Computes the explicit preconditioned operator. 1541 1542 Collective on PC 1543 1544 Input Parameter: 1545 . pc - the preconditioner object 1546 1547 Output Parameter: 1548 . mat - the explict preconditioned operator 1549 1550 Notes: 1551 This computation is done by applying the operators to columns of the 1552 identity matrix. 1553 1554 Currently, this routine uses a dense matrix format when 1 processor 1555 is used and a sparse format otherwise. This routine is costly in general, 1556 and is recommended for use only with relatively small systems. 
1557 1558 Level: advanced 1559 1560 .keywords: PC, compute, explicit, operator 1561 1562 .seealso: KSPComputeExplicitOperator() 1563 1564 @*/ 1565 PetscErrorCode PCComputeExplicitOperator(PC pc,Mat *mat) 1566 { 1567 Vec in,out; 1568 PetscErrorCode ierr; 1569 PetscInt i,M,m,*rows,start,end; 1570 PetscMPIInt size; 1571 MPI_Comm comm; 1572 PetscScalar *array,one = 1.0; 1573 1574 PetscFunctionBegin; 1575 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1576 PetscValidPointer(mat,2); 1577 1578 comm = ((PetscObject)pc)->comm; 1579 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 1580 1581 if (!pc->pmat) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"You must call KSPSetOperators() or PCSetOperators() before this call"); 1582 ierr = MatGetVecs(pc->pmat,&in,0);CHKERRQ(ierr); 1583 ierr = VecDuplicate(in,&out);CHKERRQ(ierr); 1584 ierr = VecGetOwnershipRange(in,&start,&end);CHKERRQ(ierr); 1585 ierr = VecGetSize(in,&M);CHKERRQ(ierr); 1586 ierr = VecGetLocalSize(in,&m);CHKERRQ(ierr); 1587 ierr = PetscMalloc((m+1)*sizeof(PetscInt),&rows);CHKERRQ(ierr); 1588 for (i=0; i<m; i++) {rows[i] = start + i;} 1589 1590 ierr = MatCreate(comm,mat);CHKERRQ(ierr); 1591 ierr = MatSetSizes(*mat,m,m,M,M);CHKERRQ(ierr); 1592 if (size == 1) { 1593 ierr = MatSetType(*mat,MATSEQDENSE);CHKERRQ(ierr); 1594 ierr = MatSeqDenseSetPreallocation(*mat,PETSC_NULL);CHKERRQ(ierr); 1595 } else { 1596 ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr); 1597 ierr = MatMPIAIJSetPreallocation(*mat,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr); 1598 } 1599 1600 for (i=0; i<M; i++) { 1601 1602 ierr = VecSet(in,0.0);CHKERRQ(ierr); 1603 ierr = VecSetValues(in,1,&i,&one,INSERT_VALUES);CHKERRQ(ierr); 1604 ierr = VecAssemblyBegin(in);CHKERRQ(ierr); 1605 ierr = VecAssemblyEnd(in);CHKERRQ(ierr); 1606 1607 /* should fix, allowing user to choose side */ 1608 ierr = PCApply(pc,in,out);CHKERRQ(ierr); 1609 1610 ierr = VecGetArray(out,&array);CHKERRQ(ierr); 1611 ierr = MatSetValues(*mat,m,rows,1,&i,array,INSERT_VALUES);CHKERRQ(ierr); 1612 
ierr = VecRestoreArray(out,&array);CHKERRQ(ierr); 1613 1614 } 1615 ierr = PetscFree(rows);CHKERRQ(ierr); 1616 ierr = VecDestroy(out);CHKERRQ(ierr); 1617 ierr = MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1618 ierr = MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1619 PetscFunctionReturn(0); 1620 } 1621 1622