/*
    The PC (preconditioner) interface routines, callable by users.
*/
#include <petsc-private/pcimpl.h>            /*I "petscksp.h" I*/

/* Logging support */
PetscClassId  PC_CLASSID;
PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks, PC_ApplyOnMproc;

#undef __FUNCT__
#define __FUNCT__ "PCGetDefaultType_Private"
/*
   PCGetDefaultType_Private - Picks a default PC type for the given PC based on what
   the preconditioner matrix supports: ICC (if symmetric) or ILU when a sequential
   "petsc" factorization is available, block Jacobi when the matrix can provide a
   diagonal block (MatGetDiagonalBlock_C), and PCNONE otherwise.  With no matrix set
   yet, falls back to ILU in serial and block Jacobi in parallel.
*/
PetscErrorCode PCGetDefaultType_Private(PC pc,const char* type[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size;
  PetscBool      flg1,flg2,set,flg3;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  if (pc->pmat) {
    PetscErrorCode (*f)(Mat,MatReuse,Mat*);
    ierr = PetscObjectQueryFunction((PetscObject)pc->pmat,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
    if (size == 1) {
      ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1);CHKERRQ(ierr);
      ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2);CHKERRQ(ierr);
      ierr = MatIsSymmetricKnown(pc->pmat,&set,&flg3);CHKERRQ(ierr);
      /* prefer ICC only when ILU is unavailable or the matrix is known symmetric */
      if (flg1 && (!flg2 || (set && flg3))) {
        *type = PCICC;
      } else if (flg2) {
        *type = PCILU;
      } else if (f) { /* likely is a parallel matrix run on one processor */
        *type = PCBJACOBI;
      } else {
        *type = PCNONE;
      }
    } else {
      if (f) {
        *type = PCBJACOBI;
      } else {
        *type = PCNONE;
      }
    }
  } else {
    if (size == 1) {
      *type = PCILU;
    } else {
      *type = PCBJACOBI;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCReset"
/*@
   PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Level: developer

   Notes: This allows a PC to be reused for a different sized linear system but using the same options that have been previously set in the PC

.keywords: PC, destroy

.seealso: PCCreate(), PCSetUp()
@*/
PetscErrorCode PCReset(PC pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* give the implementation a chance to release its own data first */
  if (pc->ops->reset) {
    ierr = (*pc->ops->reset)(pc);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&pc->diagonalscaleright);CHKERRQ(ierr);
  ierr = VecDestroy(&pc->diagonalscaleleft);CHKERRQ(ierr);
  ierr = MatDestroy(&pc->pmat);CHKERRQ(ierr);
  ierr = MatDestroy(&pc->mat);CHKERRQ(ierr);
  pc->setupcalled = 0;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy"
/*@
   PCDestroy - Destroys PC context that was created with PCCreate().

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Level: developer

.keywords: PC, destroy

.seealso: PCCreate(), PCSetUp()
@*/
PetscErrorCode PCDestroy(PC *pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*pc) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*pc),PC_CLASSID,1);
  /* only tear the object down when the last reference is released */
  if (--((PetscObject)(*pc))->refct > 0) {*pc = 0; PetscFunctionReturn(0);}

  ierr = PCReset(*pc);CHKERRQ(ierr);

  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectDepublish((*pc));CHKERRQ(ierr);
  if ((*pc)->ops->destroy) {ierr = (*(*pc)->ops->destroy)((*pc));CHKERRQ(ierr);}
  ierr = DMDestroy(&(*pc)->dm);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGetDiagonalScale"
/*@C
   PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right
      scaling as needed by certain time-stepping codes.

   Logically Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  flag - PETSC_TRUE if it applies the scaling

   Level: developer

   Notes: If this returns PETSC_TRUE then the system solved via the Krylov method is
$           D M A D^{-1} y = D M b  for left preconditioning or
$           D A M D^{-1} z = D b for right preconditioning

.keywords: PC

.seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCSetDiagonalScale()
@*/
PetscErrorCode PCGetDiagonalScale(PC pc,PetscBool *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(flag,2);
  *flag = pc->diagonalscale;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetDiagonalScale"
/*@
   PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right
      scaling as needed by certain time-stepping codes.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  s - scaling vector

   Level: intermediate

   Notes: The system solved via the Krylov method is
$           D M A D^{-1} y = D M b  for left preconditioning or
$           D A M D^{-1} z = D b for right preconditioning

   PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.

.keywords: PC

.seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCGetDiagonalScale()
@*/
PetscErrorCode PCSetDiagonalScale(PC pc,Vec s)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(s,VEC_CLASSID,2);
  pc->diagonalscale = PETSC_TRUE;
  /* take a reference to s; the left scale IS s, the right scale is its elementwise reciprocal */
  ierr = PetscObjectReference((PetscObject)s);CHKERRQ(ierr);
  ierr = VecDestroy(&pc->diagonalscaleleft);CHKERRQ(ierr);
  pc->diagonalscaleleft = s;
  ierr = VecDuplicate(s,&pc->diagonalscaleright);CHKERRQ(ierr);
  ierr = VecCopy(s,pc->diagonalscaleright);CHKERRQ(ierr);
  ierr = VecReciprocal(pc->diagonalscaleright);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDiagonalScaleLeft"
/*@
   PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - input vector
-  out - scaled vector (may be the same as in)

   Level: intermediate

   Notes: The system solved via the Krylov method is
$           D M A D^{-1} y = D M b  for left preconditioning or
$           D A M D^{-1} z = D b for right preconditioning

   PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.

   If diagonal scaling is turned off and in is not out then in is copied to out

.keywords: PC

.seealso: PCCreate(), PCSetUp(), PCDiagonalScaleSet(), PCDiagonalScaleRight(), PCDiagonalScale()
@*/
PetscErrorCode PCDiagonalScaleLeft(PC pc,Vec in,Vec out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_CLASSID,3);
  if (pc->diagonalscale) {
    /* out = D .* in, where D is the vector given to PCSetDiagonalScale() */
    ierr = VecPointwiseMult(out,pc->diagonalscaleleft,in);CHKERRQ(ierr);
  } else if (in != out) {
    ierr = VecCopy(in,out);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDiagonalScaleRight"
/*@
   PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - input vector
-  out - scaled vector (may be the same as in)

   Level: intermediate

   Notes: The system solved via the Krylov method is
$           D M A D^{-1} y = D M b  for left preconditioning or
$           D A M D^{-1} z = D b for right preconditioning

   PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.

   If diagonal scaling is turned off and in is not out then in is copied to out

.keywords: PC

.seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleSet(), PCDiagonalScale()
@*/
PetscErrorCode PCDiagonalScaleRight(PC pc,Vec in,Vec out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_CLASSID,3);
  if (pc->diagonalscale) {
    /* out = D^{-1} .* in; the reciprocal vector was precomputed in PCSetDiagonalScale() */
    ierr = VecPointwiseMult(out,pc->diagonalscaleright,in);CHKERRQ(ierr);
  } else if (in != out) {
    ierr = VecCopy(in,out);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#if 0
#undef __FUNCT__
#define __FUNCT__ "PCPublish_Petsc"
static PetscErrorCode PCPublish_Petsc(PetscObject obj)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
#endif

#undef __FUNCT__
#define __FUNCT__ "PCCreate"
/*@
   PCCreate - Creates a preconditioner context.

   Collective on MPI_Comm

   Input Parameter:
.  comm - MPI communicator

   Output Parameter:
.  pc - location to put the preconditioner context

   Notes:
   The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi with PCILU or ICC
   in parallel. For dense matrices it is always PCNONE.

   Level: developer

.keywords: PC, create, context

.seealso: PCSetUp(), PCApply(), PCDestroy()
@*/
PetscErrorCode PCCreate(MPI_Comm comm,PC *newpc)
{
  PC             pc;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(newpc,1);
  *newpc = 0;
#if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
  ierr = PCInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(pc,_p_PC,struct _PCOps,PC_CLASSID,-1,"PC","Preconditioner","PC",comm,PCDestroy,PCView);CHKERRQ(ierr);

  /* no matrices attached and no type selected yet; PCSetUp() chooses a default type lazily */
  pc->mat                = 0;
  pc->pmat               = 0;
  pc->setupcalled        = 0;
  pc->setfromoptionscalled = 0;
  pc->data               = 0;
  pc->diagonalscale      = PETSC_FALSE;
  pc->diagonalscaleleft  = 0;
  pc->diagonalscaleright = 0;
  pc->reuse              = 0;

  pc->modifysubmatrices  = 0;
  pc->modifysubmatricesP = 0;
  *newpc = pc;
  PetscFunctionReturn(0);

}

/* -------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "PCApply"
/*@
   PCApply - Applies the preconditioner to a vector.

   Collective on PC and Vec

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector

   Level: developer

.keywords: PC, apply

.seealso: PCApplyTranspose(), PCApplyBAorAB()
@*/
PetscErrorCode PCApply(PC pc,Vec x,Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  VecValidValues(x,2,PETSC_TRUE);
  /* setupcalled < 2 means never set up or needing re-setup; make sure setup is complete */
  if (pc->setupcalled < 2) {
    ierr = PCSetUp(pc);CHKERRQ(ierr);
  }
  if (!pc->ops->apply) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have apply");
  ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  ierr = (*pc->ops->apply)(pc,x,y);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  VecValidValues(y,3,PETSC_FALSE);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplySymmetricLeft"
/*@
   PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.

   Collective on PC and Vec

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector

   Notes:
   Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.

   Level: developer

.keywords: PC, apply, symmetric, left

.seealso: PCApply(), PCApplySymmetricRight()
@*/
PetscErrorCode PCApplySymmetricLeft(PC pc,Vec x,Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  VecValidValues(x,2,PETSC_TRUE);
  if (pc->setupcalled < 2) {
    ierr = PCSetUp(pc);CHKERRQ(ierr);
  }
  if (!pc->ops->applysymmetricleft) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have left symmetric apply");
  ierr = PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr);
  ierr = (*pc->ops->applysymmetricleft)(pc,x,y);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr);
  VecValidValues(y,3,PETSC_FALSE);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplySymmetricRight"
/*@
   PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.

   Collective on PC and Vec

   Input Parameters:
+  pc - the preconditioner context
-  x - input vector

   Output Parameter:
.  y - output vector

   Level: developer

   Notes:
   Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.
452 453 .keywords: PC, apply, symmetric, right 454 455 .seealso: PCApply(), PCApplySymmetricLeft() 456 @*/ 457 PetscErrorCode PCApplySymmetricRight(PC pc,Vec x,Vec y) 458 { 459 PetscErrorCode ierr; 460 461 PetscFunctionBegin; 462 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 463 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 464 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 465 if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 466 VecValidValues(x,2,PETSC_TRUE); 467 if (pc->setupcalled < 2) { 468 ierr = PCSetUp(pc);CHKERRQ(ierr); 469 } 470 if (!pc->ops->applysymmetricright) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have left symmetric apply"); 471 ierr = PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 472 ierr = (*pc->ops->applysymmetricright)(pc,x,y);CHKERRQ(ierr); 473 ierr = PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 474 VecValidValues(y,3,PETSC_FALSE); 475 PetscFunctionReturn(0); 476 } 477 478 #undef __FUNCT__ 479 #define __FUNCT__ "PCApplyTranspose" 480 /*@ 481 PCApplyTranspose - Applies the transpose of preconditioner to a vector. 482 483 Collective on PC and Vec 484 485 Input Parameters: 486 + pc - the preconditioner context 487 - x - input vector 488 489 Output Parameter: 490 . y - output vector 491 492 Notes: For complex numbers this applies the non-Hermitian transpose. 

   Developer Notes: We need to implement a PCApplyHermitianTranspose()

   Level: developer

.keywords: PC, apply, transpose

.seealso: PCApply(), PCApplyBAorAB(), PCApplyBAorABTranspose(), PCApplyTransposeExists()
@*/
PetscErrorCode PCApplyTranspose(PC pc,Vec x,Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  VecValidValues(x,2,PETSC_TRUE);
  if (pc->setupcalled < 2) {
    ierr = PCSetUp(pc);CHKERRQ(ierr);
  }
  if (!pc->ops->applytranspose) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have apply transpose");
  /* note: the transpose application is logged under the same PC_Apply event as PCApply() */
  ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  ierr = (*pc->ops->applytranspose)(pc,x,y);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr);
  VecValidValues(y,3,PETSC_FALSE);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyTransposeExists"
/*@
   PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation

   Collective on PC and Vec

   Input Parameters:
.  pc - the preconditioner context

   Output Parameter:
.  flg - PETSC_TRUE if a transpose operation is defined

   Level: developer

.keywords: PC, apply, transpose

.seealso: PCApplyTranspose()
@*/
PetscErrorCode PCApplyTransposeExists(PC pc,PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(flg,2);
  if (pc->ops->applytranspose) *flg = PETSC_TRUE;
  else *flg = PETSC_FALSE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyBAorAB"
/*@
   PCApplyBAorAB - Applies the preconditioner and operator to a vector.
   y = B*A*x or y = A*B*x.

   Collective on PC and Vec

   Input Parameters:
+  pc - the preconditioner context
.  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
.  x - input vector
-  work - work vector

   Output Parameter:
.  y - output vector

   Level: developer

   Notes: If the PC has had PCSetDiagonalScale() set then D M A D^{-1} for left preconditioning or D A M D^{-1} is actually applied. Note that the
   specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling.

.keywords: PC, apply, operator

.seealso: PCApply(), PCApplyTranspose(), PCApplyBAorABTranspose()
@*/
PetscErrorCode PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscValidHeaderSpecific(y,VEC_CLASSID,4);
  PetscValidHeaderSpecific(work,VEC_CLASSID,5);
  if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  VecValidValues(x,3,PETSC_TRUE);
  if (side != PC_LEFT && side != PC_SYMMETRIC && side != PC_RIGHT) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric");
  if (pc->diagonalscale && side == PC_SYMMETRIC) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application");

  if (pc->setupcalled < 2) {
    ierr = PCSetUp(pc);CHKERRQ(ierr);
  }

  if (pc->diagonalscale) {
    if (pc->ops->applyBA) {
      Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
      ierr = VecDuplicate(x,&work2);CHKERRQ(ierr);
      ierr = PCDiagonalScaleRight(pc,x,work2);CHKERRQ(ierr);
      ierr = (*pc->ops->applyBA)(pc,side,work2,y,work);CHKERRQ(ierr);
      ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
      ierr = VecDestroy(&work2);CHKERRQ(ierr);
    } else if (side == PC_RIGHT) {
      /* y = D (A (M (D^{-1} x))) */
      ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr);
      ierr = PCApply(pc,y,work);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
      ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
    } else if (side == PC_LEFT) {
      /* y = D (M (A (D^{-1} x))) */
      ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,y,work);CHKERRQ(ierr);
      ierr = PCApply(pc,work,y);CHKERRQ(ierr);
      ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr);
    } else if (side == PC_SYMMETRIC) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner");
  } else {
    if (pc->ops->applyBA) {
      ierr = (*pc->ops->applyBA)(pc,side,x,y,work);CHKERRQ(ierr);
    } else if (side == PC_RIGHT) {
      ierr = PCApply(pc,x,work);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
    } else if (side == PC_LEFT) {
      ierr = MatMult(pc->mat,x,work);CHKERRQ(ierr);
      ierr = PCApply(pc,work,y);CHKERRQ(ierr);
    } else if (side == PC_SYMMETRIC) {
      /* There's an extra copy here; maybe should provide 2 work vectors instead? */
      ierr = PCApplySymmetricRight(pc,x,work);CHKERRQ(ierr);
      ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr);
      ierr = VecCopy(y,work);CHKERRQ(ierr);
      ierr = PCApplySymmetricLeft(pc,work,y);CHKERRQ(ierr);
    }
  }
  VecValidValues(y,4,PETSC_FALSE);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyBAorABTranspose"
/*@
   PCApplyBAorABTranspose - Applies the transpose of the preconditioner
   and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning,
   NOT tr(B*A) = tr(A)*tr(B).

   Collective on PC and Vec

   Input Parameters:
+  pc - the preconditioner context
.  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
.  x - input vector
-  work - work vector

   Output Parameter:
.  y - output vector


   Notes: this routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner
      defined by B'. This is why this has the funny form that it computes tr(B) * tr(A)

   Level: developer

.keywords: PC, apply, operator, transpose

.seealso: PCApply(), PCApplyTranspose(), PCApplyBAorAB()
@*/
PetscErrorCode PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscValidHeaderSpecific(y,VEC_CLASSID,4);
  PetscValidHeaderSpecific(work,VEC_CLASSID,5);
  if (x == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  VecValidValues(x,3,PETSC_TRUE);
  /* a PC implementation may provide a fused transpose-apply; use it when available */
  if (pc->ops->applyBAtranspose) {
    ierr = (*pc->ops->applyBAtranspose)(pc,side,x,y,work);CHKERRQ(ierr);
    VecValidValues(y,4,PETSC_FALSE);
    PetscFunctionReturn(0);
  }
  if (side != PC_LEFT && side != PC_RIGHT) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left");

  if (pc->setupcalled < 2) {
    ierr = PCSetUp(pc);CHKERRQ(ierr);
  }

  if (side == PC_RIGHT) {
    ierr = PCApplyTranspose(pc,x,work);CHKERRQ(ierr);
    ierr = MatMultTranspose(pc->mat,work,y);CHKERRQ(ierr);
  } else if (side == PC_LEFT) {
    ierr = MatMultTranspose(pc->mat,x,work);CHKERRQ(ierr);
    ierr = PCApplyTranspose(pc,work,y);CHKERRQ(ierr);
  }
  /* add support for PC_SYMMETRIC */
  VecValidValues(y,4,PETSC_FALSE);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "PCApplyRichardsonExists"
/*@
   PCApplyRichardsonExists - Determines whether a particular preconditioner has
a 703 built-in fast application of Richardson's method. 704 705 Not Collective 706 707 Input Parameter: 708 . pc - the preconditioner 709 710 Output Parameter: 711 . exists - PETSC_TRUE or PETSC_FALSE 712 713 Level: developer 714 715 .keywords: PC, apply, Richardson, exists 716 717 .seealso: PCApplyRichardson() 718 @*/ 719 PetscErrorCode PCApplyRichardsonExists(PC pc,PetscBool *exists) 720 { 721 PetscFunctionBegin; 722 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 723 PetscValidIntPointer(exists,2); 724 if (pc->ops->applyrichardson) *exists = PETSC_TRUE; 725 else *exists = PETSC_FALSE; 726 PetscFunctionReturn(0); 727 } 728 729 #undef __FUNCT__ 730 #define __FUNCT__ "PCApplyRichardson" 731 /*@ 732 PCApplyRichardson - Applies several steps of Richardson iteration with 733 the particular preconditioner. This routine is usually used by the 734 Krylov solvers and not the application code directly. 735 736 Collective on PC 737 738 Input Parameters: 739 + pc - the preconditioner context 740 . b - the right hand side 741 . w - one work vector 742 . rtol - relative decrease in residual norm convergence criteria 743 . abstol - absolute residual norm convergence criteria 744 . dtol - divergence residual norm increase criteria 745 . its - the number of iterations to apply. 746 - guesszero - if the input x contains nonzero initial guess 747 748 Output Parameter: 749 + outits - number of iterations actually used (for SOR this always equals its) 750 . reason - the reason the apply terminated 751 - y - the solution (also contains initial guess if guesszero is PETSC_FALSE 752 753 Notes: 754 Most preconditioners do not support this function. Use the command 755 PCApplyRichardsonExists() to determine if one does. 

   Except for the multigrid PC this routine ignores the convergence tolerances
   and always runs for the number of iterations

   Level: developer

.keywords: PC, apply, Richardson

.seealso: PCApplyRichardsonExists()
@*/
PetscErrorCode PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(b,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscValidHeaderSpecific(w,VEC_CLASSID,4);
  if (b == y) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_IDN,"b and y must be different vectors");
  if (pc->setupcalled < 2) {
    ierr = PCSetUp(pc);CHKERRQ(ierr);
  }
  if (!pc->ops->applyrichardson) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC does not have apply richardson");
  ierr = (*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
      a setupcall of 0 indicates never setup,
                     1 needs to be resetup,
                     2 does not need any changes.
*/
#undef __FUNCT__
#define __FUNCT__ "PCSetUp"
/*@
   PCSetUp - Prepares for the use of a preconditioner.

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Level: developer

.keywords: PC, setup

.seealso: PCCreate(), PCApply(), PCDestroy()
@*/
PetscErrorCode PCSetUp(PC pc)
{
  PetscErrorCode ierr;
  const char     *def;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (!pc->mat) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first");

  /* see the setupcall comment above: 2 means fully set up, so nothing to do */
  if (pc->setupcalled > 1) {
    ierr = PetscInfo(pc,"Setting PC with identical preconditioner\n");CHKERRQ(ierr);
    PetscFunctionReturn(0);
  } else if (!pc->setupcalled) {
    ierr = PetscInfo(pc,"Setting up new PC\n");CHKERRQ(ierr);
  } else if (pc->flag == SAME_NONZERO_PATTERN) {
    ierr = PetscInfo(pc,"Setting up PC with same nonzero pattern\n");CHKERRQ(ierr);
  } else {
    ierr = PetscInfo(pc,"Setting up PC with different nonzero pattern\n");CHKERRQ(ierr);
  }

  /* no type chosen yet: pick a matrix-dependent default (see PCGetDefaultType_Private()) */
  if (!((PetscObject)pc)->type_name) {
    ierr = PCGetDefaultType_Private(pc,&def);CHKERRQ(ierr);
    ierr = PCSetType(pc,def);CHKERRQ(ierr);
  }

  ierr = PetscLogEventBegin(PC_SetUp,pc,0,0,0);CHKERRQ(ierr);
  if (pc->ops->setup) {
    ierr = (*pc->ops->setup)(pc);CHKERRQ(ierr);
  }
  pc->setupcalled = 2;
  ierr = PetscLogEventEnd(PC_SetUp,pc,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUpOnBlocks"
/*@
   PCSetUpOnBlocks - Sets up the preconditioner for each block in
   the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
   methods.

   Collective on PC

   Input Parameters:
.  pc - the preconditioner context

   Level: developer

.keywords: PC, setup, blocks

.seealso: PCCreate(), PCApply(), PCDestroy(), PCSetUp()
@*/
PetscErrorCode PCSetUpOnBlocks(PC pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* silently a no-op for preconditioners that have no block structure */
  if (!pc->ops->setuponblocks) PetscFunctionReturn(0);
  ierr = PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr);
  ierr = (*pc->ops->setuponblocks)(pc);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetModifySubMatrices"
/*@C
   PCSetModifySubMatrices - Sets a user-defined routine for modifying the
   submatrices that arise within certain subdomain-based preconditioners.
   The basic submatrices are extracted from the preconditioner matrix as
   usual; the user can then alter these (for example, to set different boundary
   conditions for each submatrix) before they are used for the local solves.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  func - routine for modifying the submatrices
-  ctx - optional user-defined context (may be null)

   Calling sequence of func:
$     func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx);

.  row - an array of index sets that contain the global row numbers
         that comprise each local submatrix
.  col - an array of index sets that contain the global column numbers
         that comprise each local submatrix
.  submat - array of local submatrices
-  ctx - optional user-defined context for private data for the
         user-defined func routine (may be null)

   Notes:
   PCSetModifySubMatrices() MUST be called before KSPSetUp() and
   KSPSolve().

   A routine set by PCSetModifySubMatrices() is currently called within
   the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM)
   preconditioners.  All other preconditioners ignore this routine.

   Level: advanced

.keywords: PC, set, modify, submatrices

.seealso: PCModifySubMatrices(), PCASMGetSubMatrices()
@*/
PetscErrorCode PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  pc->modifysubmatrices  = func;
  pc->modifysubmatricesP = ctx;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCModifySubMatrices"
/*@C
   PCModifySubMatrices - Calls an optional user-defined routine within
   certain preconditioners if one has been set with PCSetModifySubMatrices().

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  nsub - the number of local submatrices
.  row - an array of index sets that contain the global row numbers
         that comprise each local submatrix
.  col - an array of index sets that contain the global column numbers
         that comprise each local submatrix
.  submat - array of local submatrices
-  ctx - optional user-defined context for private data for the
         user-defined routine (may be null)

   Output Parameter:
.  submat - array of local submatrices (the entries of which may
            have been modified)

   Notes:
   The user should NOT generally call this routine, as it will
   automatically be called within certain preconditioners (currently
   block Jacobi, additive Schwarz) if set.

   The basic submatrices are extracted from the preconditioner matrix
   as usual; the user can then alter these (for example, to set different
   boundary conditions for each submatrix) before they are used for the
   local solves.

   Level: developer

.keywords: PC, modify, submatrices

.seealso: PCSetModifySubMatrices()
@*/
PetscErrorCode PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* no-op unless the user installed a callback with PCSetModifySubMatrices() */
  if (!pc->modifysubmatrices) PetscFunctionReturn(0);
  ierr = PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr);
  ierr = (*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetOperators"
/*@
   PCSetOperators - Sets the matrix associated with the linear system and
   a (possibly) different one associated with the preconditioner.

   Logically Collective on PC and Mat

   Input Parameters:
+  pc - the preconditioner context
.  Amat - the matrix associated with the linear system
.  Pmat - the matrix to be used in constructing the preconditioner, usually
          the same as Amat.
-  flag - flag indicating information about the preconditioner matrix structure
   during successive linear solves.  This flag is ignored the first time a
   linear system is solved, and thus is irrelevant when solving just one linear
   system.

   Notes:
   The flag can be used to eliminate unnecessary work in the preconditioner
   during the repeated solution of linear systems of the same size.  The
   available options are
+    SAME_PRECONDITIONER -
       Pmat is identical during successive linear solves.
       This option is intended for folks who are using
       different Amat and Pmat matrices and wish to reuse the
       same preconditioner matrix.  For example, this option
       saves work by not recomputing incomplete factorization
       for ILU/ICC preconditioners.
.     SAME_NONZERO_PATTERN -
       Pmat has the same nonzero structure during
       successive linear solves.
-     DIFFERENT_NONZERO_PATTERN -
       Pmat does not have the same nonzero structure.

   Passing a PETSC_NULL for Amat or Pmat removes the matrix that is currently used.

   If you wish to replace either Amat or Pmat but leave the other one untouched then
   first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
   on it and then pass it back in in your call to KSPSetOperators().

   Caution:
   If you specify SAME_NONZERO_PATTERN, PETSc believes your assertion
   and does not check the structure of the matrix.  If you erroneously
   claim that the structure is the same when it actually is not, the new
   preconditioner will not function correctly.  Thus, use this optimization
   feature carefully!

   If in doubt about whether your preconditioner matrix has changed
   structure or not, use the flag DIFFERENT_NONZERO_PATTERN.

   More Notes about Repeated Solution of Linear Systems:
   PETSc does NOT reset the matrix entries of either Amat or Pmat
   to zero after a linear solve; the user is completely responsible for
   matrix assembly.  See the routine MatZeroEntries() if desiring to
   zero all elements of a matrix.
1028 1029 Level: intermediate 1030 1031 .keywords: PC, set, operators, matrix, linear system 1032 1033 .seealso: PCGetOperators(), MatZeroEntries() 1034 @*/ 1035 PetscErrorCode PCSetOperators(PC pc,Mat Amat,Mat Pmat,MatStructure flag) 1036 { 1037 PetscErrorCode ierr; 1038 PetscInt m1,n1,m2,n2; 1039 1040 PetscFunctionBegin; 1041 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1042 if (Amat) PetscValidHeaderSpecific(Amat,MAT_CLASSID,2); 1043 if (Pmat) PetscValidHeaderSpecific(Pmat,MAT_CLASSID,3); 1044 if (Amat) PetscCheckSameComm(pc,1,Amat,2); 1045 if (Pmat) PetscCheckSameComm(pc,1,Pmat,3); 1046 if (pc->setupcalled && Amat && Pmat) { 1047 ierr = MatGetLocalSize(Amat,&m1,&n1);CHKERRQ(ierr); 1048 ierr = MatGetLocalSize(pc->mat,&m2,&n2);CHKERRQ(ierr); 1049 if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1); 1050 ierr = MatGetLocalSize(Pmat,&m1,&n1);CHKERRQ(ierr); 1051 ierr = MatGetLocalSize(pc->pmat,&m2,&n2);CHKERRQ(ierr); 1052 if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1); 1053 } 1054 1055 /* reference first in case the matrices are the same */ 1056 if (Amat) {ierr = PetscObjectReference((PetscObject)Amat);CHKERRQ(ierr);} 1057 ierr = MatDestroy(&pc->mat);CHKERRQ(ierr); 1058 if (Pmat) {ierr = PetscObjectReference((PetscObject)Pmat);CHKERRQ(ierr);} 1059 ierr = MatDestroy(&pc->pmat);CHKERRQ(ierr); 1060 pc->mat = Amat; 1061 pc->pmat = Pmat; 1062 1063 if (pc->setupcalled == 2 && flag != SAME_PRECONDITIONER) { 1064 pc->setupcalled = 1; 1065 } 1066 pc->flag = flag; 1067 PetscFunctionReturn(0); 1068 } 1069 1070 #undef __FUNCT__ 1071 #define __FUNCT__ "PCGetOperators" 1072 /*@C 1073 PCGetOperators - Gets the matrix associated with the linear system and 1074 possibly a different one associated with the preconditioner. 
1075 1076 Not collective, though parallel Mats are returned if the PC is parallel 1077 1078 Input Parameter: 1079 . pc - the preconditioner context 1080 1081 Output Parameters: 1082 + mat - the matrix associated with the linear system 1083 . pmat - matrix associated with the preconditioner, usually the same 1084 as mat. 1085 - flag - flag indicating information about the preconditioner 1086 matrix structure. See PCSetOperators() for details. 1087 1088 Level: intermediate 1089 1090 Notes: Does not increase the reference count of the matrices, so you should not destroy them 1091 1092 Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators 1093 are created in PC and returned to the user. In this case, if both operators 1094 mat and pmat are requested, two DIFFERENT operators will be returned. If 1095 only one is requested both operators in the PC will be the same (i.e. as 1096 if one had called KSP/PCSetOperators() with the same argument for both Mats). 1097 The user must set the sizes of the returned matrices and their type etc just 1098 as if the user created them with MatCreate(). For example, 1099 1100 $ KSP/PCGetOperators(ksp/pc,&mat,PETSC_NULL,PETSC_NULL); is equivalent to 1101 $ set size, type, etc of mat 1102 1103 $ MatCreate(comm,&mat); 1104 $ KSP/PCSetOperators(ksp/pc,mat,mat,SAME_NONZERO_PATTERN); 1105 $ PetscObjectDereference((PetscObject)mat); 1106 $ set size, type, etc of mat 1107 1108 and 1109 1110 $ KSP/PCGetOperators(ksp/pc,&mat,&pmat,PETSC_NULL); is equivalent to 1111 $ set size, type, etc of mat and pmat 1112 1113 $ MatCreate(comm,&mat); 1114 $ MatCreate(comm,&pmat); 1115 $ KSP/PCSetOperators(ksp/pc,mat,pmat,SAME_NONZERO_PATTERN); 1116 $ PetscObjectDereference((PetscObject)mat); 1117 $ PetscObjectDereference((PetscObject)pmat); 1118 $ set size, type, etc of mat and pmat 1119 1120 The rational for this support is so that when creating a TS, SNES, or KSP the hierarchy 1121 of underlying objects (i.e. 
SNES, KSP, PC, Mat) and their livespans can be completely 1122 managed by the top most level object (i.e. the TS, SNES, or KSP). Another way to look 1123 at this is when you create a SNES you do not NEED to create a KSP and attach it to 1124 the SNES object (the SNES object manages it for you). Similarly when you create a KSP 1125 you do not need to attach a PC to it (the KSP object manages the PC object for you). 1126 Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when 1127 it can be created for you? 1128 1129 1130 .keywords: PC, get, operators, matrix, linear system 1131 1132 .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperatorsSet() 1133 @*/ 1134 PetscErrorCode PCGetOperators(PC pc,Mat *mat,Mat *pmat,MatStructure *flag) 1135 { 1136 PetscErrorCode ierr; 1137 1138 PetscFunctionBegin; 1139 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1140 if (mat) { 1141 if (!pc->mat) { 1142 if (pc->pmat && !pmat) { /* pmat has been set, but user did not request it, so use for mat */ 1143 pc->mat = pc->pmat; 1144 ierr = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr); 1145 } else { /* both mat and pmat are empty */ 1146 ierr = MatCreate(((PetscObject)pc)->comm,&pc->mat);CHKERRQ(ierr); 1147 if (!pmat) { /* user did NOT request pmat, so make same as mat */ 1148 pc->pmat = pc->mat; 1149 ierr = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr); 1150 } 1151 } 1152 } 1153 *mat = pc->mat; 1154 } 1155 if (pmat) { 1156 if (!pc->pmat) { 1157 if (pc->mat && !mat) { /* mat has been set but was not requested, so use for pmat */ 1158 pc->pmat = pc->mat; 1159 ierr = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr); 1160 } else { 1161 ierr = MatCreate(((PetscObject)pc)->comm,&pc->pmat);CHKERRQ(ierr); 1162 if (!mat) { /* user did NOT request mat, so make same as pmat */ 1163 pc->mat = pc->pmat; 1164 ierr = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr); 1165 } 1166 } 1167 } 1168 *pmat = pc->pmat; 1169 } 1170 
if (flag) *flag = pc->flag; 1171 PetscFunctionReturn(0); 1172 } 1173 1174 #undef __FUNCT__ 1175 #define __FUNCT__ "PCGetOperatorsSet" 1176 /*@C 1177 PCGetOperatorsSet - Determines if the matrix associated with the linear system and 1178 possibly a different one associated with the preconditioner have been set in the PC. 1179 1180 Not collective, though the results on all processes should be the same 1181 1182 Input Parameter: 1183 . pc - the preconditioner context 1184 1185 Output Parameters: 1186 + mat - the matrix associated with the linear system was set 1187 - pmat - matrix associated with the preconditioner was set, usually the same 1188 1189 Level: intermediate 1190 1191 .keywords: PC, get, operators, matrix, linear system 1192 1193 .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperators() 1194 @*/ 1195 PetscErrorCode PCGetOperatorsSet(PC pc,PetscBool *mat,PetscBool *pmat) 1196 { 1197 PetscFunctionBegin; 1198 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1199 if (mat) *mat = (pc->mat) ? PETSC_TRUE : PETSC_FALSE; 1200 if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE; 1201 PetscFunctionReturn(0); 1202 } 1203 1204 #undef __FUNCT__ 1205 #define __FUNCT__ "PCFactorGetMatrix" 1206 /*@ 1207 PCFactorGetMatrix - Gets the factored matrix from the 1208 preconditioner context. This routine is valid only for the LU, 1209 incomplete LU, Cholesky, and incomplete Cholesky methods. 1210 1211 Not Collective on PC though Mat is parallel if PC is parallel 1212 1213 Input Parameters: 1214 . pc - the preconditioner context 1215 1216 Output parameters: 1217 . 
mat - the factored matrix 1218 1219 Level: advanced 1220 1221 Notes: Does not increase the reference count for the matrix so DO NOT destroy it 1222 1223 .keywords: PC, get, factored, matrix 1224 @*/ 1225 PetscErrorCode PCFactorGetMatrix(PC pc,Mat *mat) 1226 { 1227 PetscErrorCode ierr; 1228 1229 PetscFunctionBegin; 1230 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1231 PetscValidPointer(mat,2); 1232 if (pc->ops->getfactoredmatrix) { 1233 ierr = (*pc->ops->getfactoredmatrix)(pc,mat);CHKERRQ(ierr); 1234 } else { 1235 SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"PC type does not support getting factor matrix"); 1236 } 1237 PetscFunctionReturn(0); 1238 } 1239 1240 #undef __FUNCT__ 1241 #define __FUNCT__ "PCSetOptionsPrefix" 1242 /*@C 1243 PCSetOptionsPrefix - Sets the prefix used for searching for all 1244 PC options in the database. 1245 1246 Logically Collective on PC 1247 1248 Input Parameters: 1249 + pc - the preconditioner context 1250 - prefix - the prefix string to prepend to all PC option requests 1251 1252 Notes: 1253 A hyphen (-) must NOT be given at the beginning of the prefix name. 1254 The first character of all runtime options is AUTOMATICALLY the 1255 hyphen. 1256 1257 Level: advanced 1258 1259 .keywords: PC, set, options, prefix, database 1260 1261 .seealso: PCAppendOptionsPrefix(), PCGetOptionsPrefix() 1262 @*/ 1263 PetscErrorCode PCSetOptionsPrefix(PC pc,const char prefix[]) 1264 { 1265 PetscErrorCode ierr; 1266 1267 PetscFunctionBegin; 1268 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1269 ierr = PetscObjectSetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr); 1270 PetscFunctionReturn(0); 1271 } 1272 1273 #undef __FUNCT__ 1274 #define __FUNCT__ "PCAppendOptionsPrefix" 1275 /*@C 1276 PCAppendOptionsPrefix - Appends to the prefix used for searching for all 1277 PC options in the database. 
1278 1279 Logically Collective on PC 1280 1281 Input Parameters: 1282 + pc - the preconditioner context 1283 - prefix - the prefix string to prepend to all PC option requests 1284 1285 Notes: 1286 A hyphen (-) must NOT be given at the beginning of the prefix name. 1287 The first character of all runtime options is AUTOMATICALLY the 1288 hyphen. 1289 1290 Level: advanced 1291 1292 .keywords: PC, append, options, prefix, database 1293 1294 .seealso: PCSetOptionsPrefix(), PCGetOptionsPrefix() 1295 @*/ 1296 PetscErrorCode PCAppendOptionsPrefix(PC pc,const char prefix[]) 1297 { 1298 PetscErrorCode ierr; 1299 1300 PetscFunctionBegin; 1301 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1302 ierr = PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr); 1303 PetscFunctionReturn(0); 1304 } 1305 1306 #undef __FUNCT__ 1307 #define __FUNCT__ "PCGetOptionsPrefix" 1308 /*@C 1309 PCGetOptionsPrefix - Gets the prefix used for searching for all 1310 PC options in the database. 1311 1312 Not Collective 1313 1314 Input Parameters: 1315 . pc - the preconditioner context 1316 1317 Output Parameters: 1318 . prefix - pointer to the prefix string used, is returned 1319 1320 Notes: On the fortran side, the user should pass in a string 'prifix' of 1321 sufficient length to hold the prefix. 
1322 1323 Level: advanced 1324 1325 .keywords: PC, get, options, prefix, database 1326 1327 .seealso: PCSetOptionsPrefix(), PCAppendOptionsPrefix() 1328 @*/ 1329 PetscErrorCode PCGetOptionsPrefix(PC pc,const char *prefix[]) 1330 { 1331 PetscErrorCode ierr; 1332 1333 PetscFunctionBegin; 1334 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1335 PetscValidPointer(prefix,2); 1336 ierr = PetscObjectGetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr); 1337 PetscFunctionReturn(0); 1338 } 1339 1340 #undef __FUNCT__ 1341 #define __FUNCT__ "PCPreSolve" 1342 /*@ 1343 PCPreSolve - Optional pre-solve phase, intended for any 1344 preconditioner-specific actions that must be performed before 1345 the iterative solve itself. 1346 1347 Collective on PC 1348 1349 Input Parameters: 1350 + pc - the preconditioner context 1351 - ksp - the Krylov subspace context 1352 1353 Level: developer 1354 1355 Sample of Usage: 1356 .vb 1357 PCPreSolve(pc,ksp); 1358 KSPSolve(ksp,b,x); 1359 PCPostSolve(pc,ksp); 1360 .ve 1361 1362 Notes: 1363 The pre-solve phase is distinct from the PCSetUp() phase. 1364 1365 KSPSolve() calls this directly, so is rarely called by the user. 
1366 1367 .keywords: PC, pre-solve 1368 1369 .seealso: PCPostSolve() 1370 @*/ 1371 PetscErrorCode PCPreSolve(PC pc,KSP ksp) 1372 { 1373 PetscErrorCode ierr; 1374 Vec x,rhs; 1375 1376 PetscFunctionBegin; 1377 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1378 PetscValidHeaderSpecific(ksp,KSP_CLASSID,2); 1379 pc->presolvedone++; 1380 if (pc->presolvedone > 2) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Cannot embed PCPreSolve() more than twice"); 1381 ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr); 1382 ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr); 1383 1384 if (pc->ops->presolve) { 1385 ierr = (*pc->ops->presolve)(pc,ksp,rhs,x);CHKERRQ(ierr); 1386 } 1387 PetscFunctionReturn(0); 1388 } 1389 1390 #undef __FUNCT__ 1391 #define __FUNCT__ "PCPostSolve" 1392 /*@ 1393 PCPostSolve - Optional post-solve phase, intended for any 1394 preconditioner-specific actions that must be performed after 1395 the iterative solve itself. 1396 1397 Collective on PC 1398 1399 Input Parameters: 1400 + pc - the preconditioner context 1401 - ksp - the Krylov subspace context 1402 1403 Sample of Usage: 1404 .vb 1405 PCPreSolve(pc,ksp); 1406 KSPSolve(ksp,b,x); 1407 PCPostSolve(pc,ksp); 1408 .ve 1409 1410 Note: 1411 KSPSolve() calls this routine directly, so it is rarely called by the user. 
1412 1413 Level: developer 1414 1415 .keywords: PC, post-solve 1416 1417 .seealso: PCPreSolve(), KSPSolve() 1418 @*/ 1419 PetscErrorCode PCPostSolve(PC pc,KSP ksp) 1420 { 1421 PetscErrorCode ierr; 1422 Vec x,rhs; 1423 1424 PetscFunctionBegin; 1425 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1426 PetscValidHeaderSpecific(ksp,KSP_CLASSID,2); 1427 pc->presolvedone--; 1428 ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr); 1429 ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr); 1430 if (pc->ops->postsolve) { 1431 ierr = (*pc->ops->postsolve)(pc,ksp,rhs,x);CHKERRQ(ierr); 1432 } 1433 PetscFunctionReturn(0); 1434 } 1435 1436 #undef __FUNCT__ 1437 #define __FUNCT__ "PCLoad" 1438 /*@C 1439 PCLoad - Loads a PC that has been stored in binary with PCView(). 1440 1441 Collective on PetscViewer 1442 1443 Input Parameters: 1444 + newdm - the newly loaded PC, this needs to have been created with PCCreate() or 1445 some related function before a call to PCLoad(). 1446 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() 1447 1448 Level: intermediate 1449 1450 Notes: 1451 The type is determined by the data in the file, any type set into the PC before this call is ignored. 1452 1453 Notes for advanced users: 1454 Most users should not need to know the details of the binary storage 1455 format, since PCLoad() and PCView() completely hide these details. 
1456 But for anyone who's interested, the standard binary matrix storage 1457 format is 1458 .vb 1459 has not yet been determined 1460 .ve 1461 1462 .seealso: PetscViewerBinaryOpen(), PCView(), MatLoad(), VecLoad() 1463 @*/ 1464 PetscErrorCode PCLoad(PC newdm, PetscViewer viewer) 1465 { 1466 PetscErrorCode ierr; 1467 PetscBool isbinary; 1468 PetscInt classid; 1469 char type[256]; 1470 1471 PetscFunctionBegin; 1472 PetscValidHeaderSpecific(newdm,PC_CLASSID,1); 1473 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 1474 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 1475 if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()"); 1476 1477 ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr); 1478 if (classid != PC_FILE_CLASSID) SETERRQ(((PetscObject)newdm)->comm,PETSC_ERR_ARG_WRONG,"Not PC next in file"); 1479 ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr); 1480 ierr = PCSetType(newdm, type);CHKERRQ(ierr); 1481 if (newdm->ops->load) { 1482 ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr); 1483 } 1484 PetscFunctionReturn(0); 1485 } 1486 1487 #undef __FUNCT__ 1488 #define __FUNCT__ "PCView" 1489 /*@C 1490 PCView - Prints the PC data structure. 1491 1492 Collective on PC 1493 1494 Input Parameters: 1495 + PC - the PC context 1496 - viewer - optional visualization context 1497 1498 Note: 1499 The available visualization contexts include 1500 + PETSC_VIEWER_STDOUT_SELF - standard output (default) 1501 - PETSC_VIEWER_STDOUT_WORLD - synchronized standard 1502 output where only the first processor opens 1503 the file. All other processors send their 1504 data to the first processor to print. 1505 1506 The user can open an alternative visualization contexts with 1507 PetscViewerASCIIOpen() (output to a specified file). 
1508 1509 Level: developer 1510 1511 .keywords: PC, view 1512 1513 .seealso: KSPView(), PetscViewerASCIIOpen() 1514 @*/ 1515 PetscErrorCode PCView(PC pc,PetscViewer viewer) 1516 { 1517 PCType cstr; 1518 PetscErrorCode ierr; 1519 PetscBool iascii,isstring,isbinary,isdraw; 1520 PetscViewerFormat format; 1521 1522 PetscFunctionBegin; 1523 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1524 if (!viewer) { 1525 ierr = PetscViewerASCIIGetStdout(((PetscObject)pc)->comm,&viewer);CHKERRQ(ierr); 1526 } 1527 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 1528 PetscCheckSameComm(pc,1,viewer,2); 1529 1530 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 1531 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr); 1532 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 1533 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr); 1534 1535 if (iascii) { 1536 ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 1537 ierr = PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer,"PC Object");CHKERRQ(ierr); 1538 if (pc->ops->view) { 1539 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 1540 ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr); 1541 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 1542 } 1543 if (pc->mat) { 1544 ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);CHKERRQ(ierr); 1545 if (pc->pmat == pc->mat) { 1546 ierr = PetscViewerASCIIPrintf(viewer," linear system matrix = precond matrix:\n");CHKERRQ(ierr); 1547 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 1548 ierr = MatView(pc->mat,viewer);CHKERRQ(ierr); 1549 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 1550 } else { 1551 if (pc->pmat) { 1552 ierr = PetscViewerASCIIPrintf(viewer," linear system matrix followed by preconditioner matrix:\n");CHKERRQ(ierr); 1553 } else { 1554 ierr = PetscViewerASCIIPrintf(viewer," 
linear system matrix:\n");CHKERRQ(ierr); 1555 } 1556 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 1557 ierr = MatView(pc->mat,viewer);CHKERRQ(ierr); 1558 if (pc->pmat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);} 1559 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 1560 } 1561 ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr); 1562 } 1563 } else if (isstring) { 1564 ierr = PCGetType(pc,&cstr);CHKERRQ(ierr); 1565 ierr = PetscViewerStringSPrintf(viewer," %-7.7s",cstr);CHKERRQ(ierr); 1566 if (pc->ops->view) {ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);} 1567 } else if (isbinary) { 1568 PetscInt classid = PC_FILE_CLASSID; 1569 MPI_Comm comm; 1570 PetscMPIInt rank; 1571 char type[256]; 1572 1573 ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr); 1574 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 1575 if (!rank) { 1576 ierr = PetscViewerBinaryWrite(viewer,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr); 1577 ierr = PetscStrncpy(type,((PetscObject)pc)->type_name,256);CHKERRQ(ierr); 1578 ierr = PetscViewerBinaryWrite(viewer,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr); 1579 } 1580 if (pc->ops->view) { 1581 ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr); 1582 } 1583 } else if (isdraw) { 1584 PetscDraw draw; 1585 char str[25]; 1586 PetscReal x,y,bottom,h; 1587 PetscInt n; 1588 1589 ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr); 1590 ierr = PetscDrawGetCurrentPoint(draw,&x,&y);CHKERRQ(ierr); 1591 if (pc->mat) { 1592 ierr = MatGetSize(pc->mat,&n,PETSC_NULL);CHKERRQ(ierr); 1593 ierr = PetscSNPrintf(str,25,"PC: %s (%D)",((PetscObject)pc)->type_name,n);CHKERRQ(ierr); 1594 } else { 1595 ierr = PetscSNPrintf(str,25,"PC: %s",((PetscObject)pc)->type_name);CHKERRQ(ierr); 1596 } 1597 ierr = PetscDrawBoxedString(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,PETSC_NULL,&h);CHKERRQ(ierr); 1598 bottom = y - h; 1599 ierr = PetscDrawPushCurrentPoint(draw,x,bottom);CHKERRQ(ierr); 1600 if (pc->ops->view) { 1601 ierr = 
(*pc->ops->view)(pc,viewer);CHKERRQ(ierr); 1602 } 1603 ierr = PetscDrawPopCurrentPoint(draw);CHKERRQ(ierr); 1604 } 1605 PetscFunctionReturn(0); 1606 } 1607 1608 1609 #undef __FUNCT__ 1610 #define __FUNCT__ "PCSetInitialGuessNonzero" 1611 /*@ 1612 PCSetInitialGuessNonzero - Tells the iterative solver that the 1613 initial guess is nonzero; otherwise PC assumes the initial guess 1614 is to be zero (and thus zeros it out before solving). 1615 1616 Logically Collective on PC 1617 1618 Input Parameters: 1619 + pc - iterative context obtained from PCCreate() 1620 - flg - PETSC_TRUE indicates the guess is non-zero, PETSC_FALSE indicates the guess is zero 1621 1622 Level: Developer 1623 1624 Notes: 1625 This is a weird function. Since PC's are linear operators on the right hand side they 1626 CANNOT use an initial guess. This function is for the "pass-through" preconditioners 1627 PCKSP, PCREDUNDANT and PCHMPI and causes the inner KSP object to use the nonzero 1628 initial guess. Not currently working for PCREDUNDANT, that has to be rewritten to use KSP. 
1629 1630 1631 .keywords: PC, set, initial guess, nonzero 1632 1633 .seealso: PCGetInitialGuessNonzero(), PCSetInitialGuessKnoll(), PCGetInitialGuessKnoll() 1634 @*/ 1635 PetscErrorCode PCSetInitialGuessNonzero(PC pc,PetscBool flg) 1636 { 1637 PetscFunctionBegin; 1638 PetscValidLogicalCollectiveBool(pc,flg,2); 1639 pc->nonzero_guess = flg; 1640 PetscFunctionReturn(0); 1641 } 1642 1643 #undef __FUNCT__ 1644 #define __FUNCT__ "PCRegister" 1645 /*@C 1646 PCRegister - See PCRegisterDynamic() 1647 1648 Level: advanced 1649 @*/ 1650 PetscErrorCode PCRegister(const char sname[],const char path[],const char name[],PetscErrorCode (*function)(PC)) 1651 { 1652 PetscErrorCode ierr; 1653 char fullname[PETSC_MAX_PATH_LEN]; 1654 1655 PetscFunctionBegin; 1656 1657 ierr = PetscFunctionListConcat(path,name,fullname);CHKERRQ(ierr); 1658 ierr = PetscFunctionListAdd(PETSC_COMM_WORLD,&PCList,sname,fullname,(void (*)(void))function);CHKERRQ(ierr); 1659 PetscFunctionReturn(0); 1660 } 1661 1662 #undef __FUNCT__ 1663 #define __FUNCT__ "PCComputeExplicitOperator" 1664 /*@ 1665 PCComputeExplicitOperator - Computes the explicit preconditioned operator. 1666 1667 Collective on PC 1668 1669 Input Parameter: 1670 . pc - the preconditioner object 1671 1672 Output Parameter: 1673 . mat - the explict preconditioned operator 1674 1675 Notes: 1676 This computation is done by applying the operators to columns of the 1677 identity matrix. 1678 1679 Currently, this routine uses a dense matrix format when 1 processor 1680 is used and a sparse format otherwise. This routine is costly in general, 1681 and is recommended for use only with relatively small systems. 
1682 1683 Level: advanced 1684 1685 .keywords: PC, compute, explicit, operator 1686 1687 .seealso: KSPComputeExplicitOperator() 1688 1689 @*/ 1690 PetscErrorCode PCComputeExplicitOperator(PC pc,Mat *mat) 1691 { 1692 Vec in,out; 1693 PetscErrorCode ierr; 1694 PetscInt i,M,m,*rows,start,end; 1695 PetscMPIInt size; 1696 MPI_Comm comm; 1697 PetscScalar *array,one = 1.0; 1698 1699 PetscFunctionBegin; 1700 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1701 PetscValidPointer(mat,2); 1702 1703 comm = ((PetscObject)pc)->comm; 1704 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 1705 1706 if (!pc->pmat) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"You must call KSPSetOperators() or PCSetOperators() before this call"); 1707 ierr = MatGetVecs(pc->pmat,&in,0);CHKERRQ(ierr); 1708 ierr = VecDuplicate(in,&out);CHKERRQ(ierr); 1709 ierr = VecGetOwnershipRange(in,&start,&end);CHKERRQ(ierr); 1710 ierr = VecGetSize(in,&M);CHKERRQ(ierr); 1711 ierr = VecGetLocalSize(in,&m);CHKERRQ(ierr); 1712 ierr = PetscMalloc((m+1)*sizeof(PetscInt),&rows);CHKERRQ(ierr); 1713 for (i=0; i<m; i++) {rows[i] = start + i;} 1714 1715 ierr = MatCreate(comm,mat);CHKERRQ(ierr); 1716 ierr = MatSetSizes(*mat,m,m,M,M);CHKERRQ(ierr); 1717 if (size == 1) { 1718 ierr = MatSetType(*mat,MATSEQDENSE);CHKERRQ(ierr); 1719 ierr = MatSeqDenseSetPreallocation(*mat,PETSC_NULL);CHKERRQ(ierr); 1720 } else { 1721 ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr); 1722 ierr = MatMPIAIJSetPreallocation(*mat,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr); 1723 } 1724 1725 for (i=0; i<M; i++) { 1726 1727 ierr = VecSet(in,0.0);CHKERRQ(ierr); 1728 ierr = VecSetValues(in,1,&i,&one,INSERT_VALUES);CHKERRQ(ierr); 1729 ierr = VecAssemblyBegin(in);CHKERRQ(ierr); 1730 ierr = VecAssemblyEnd(in);CHKERRQ(ierr); 1731 1732 /* should fix, allowing user to choose side */ 1733 ierr = PCApply(pc,in,out);CHKERRQ(ierr); 1734 1735 ierr = VecGetArray(out,&array);CHKERRQ(ierr); 1736 ierr = MatSetValues(*mat,m,rows,1,&i,array,INSERT_VALUES);CHKERRQ(ierr); 1737 
ierr = VecRestoreArray(out,&array);CHKERRQ(ierr); 1738 1739 } 1740 ierr = PetscFree(rows);CHKERRQ(ierr); 1741 ierr = VecDestroy(&out);CHKERRQ(ierr); 1742 ierr = MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1743 ierr = MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1744 PetscFunctionReturn(0); 1745 } 1746 1747 #undef __FUNCT__ 1748 #define __FUNCT__ "PCSetCoordinates" 1749 /*@ 1750 PCSetCoordinates - sets the coordinates of all the nodes on the local process 1751 1752 Collective on PC 1753 1754 Input Parameters: 1755 + pc - the solver context 1756 . dim - the dimension of the coordinates 1, 2, or 3 1757 - coords - the coordinates 1758 1759 Level: intermediate 1760 1761 Notes: coords is an array of the 3D coordinates for the nodes on 1762 the local processor. So if there are 108 equation on a processor 1763 for a displacement finite element discretization of elasticity (so 1764 that there are 36 = 108/3 nodes) then the array must have 108 1765 double precision values (ie, 3 * 36). These x y z coordinates 1766 should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x, 1767 ... , N-1.z ]. 1768 1769 .seealso: MatSetNearNullSpace 1770 @*/ 1771 PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal *coords) 1772 { 1773 PetscErrorCode ierr; 1774 1775 PetscFunctionBegin; 1776 ierr = PetscTryMethod(pc,"PCSetCoordinates_C",(PC,PetscInt,PetscInt,PetscReal*),(pc,dim,nloc,coords));CHKERRQ(ierr); 1777 PetscFunctionReturn(0); 1778 } 1779