/*
   Defines a block Jacobi preconditioner.
*/

#include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /*I "petscpc.h" I*/

static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC,Mat,Mat);
static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC,Mat,Mat);
static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC);

static PetscErrorCode PCSetUp_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  Mat            mat = pc->mat,pmat = pc->pmat;
  PetscErrorCode ierr;
  PetscBool      hasop;
  PetscInt       N,M,start,i,sum,end;
  PetscInt       bs,i_start=-1,i_end=-1;
  PetscMPIInt    rank,size;
  const char     *pprefix,*mprefix;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
  ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);

  if (jac->n > 0 && jac->n < size) {
    ierr = PCSetUp_BJacobi_Multiproc(pc);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* --------------------------------------------------------------------------
      Determines the number of blocks assigned to each processor
  -----------------------------------------------------------------------------*/

  /* local block count given */
  if (jac->n_local > 0 && jac->n < 0) {
    ierr = MPIU_Allreduce(&jac->n_local,&jac->n,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    if (jac->l_lens) { /* check that user set these correctly */
      sum = 0;
      for (i=0; i<jac->n_local; i++) {
        if (jac->l_lens[i]/bs*bs != jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
        sum += jac->l_lens[i];
      }
      if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local lens set incorrectly");
    } else {
      ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
    }
  } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */
    /* global blocks given: determine which ones are local */
    if (jac->g_lens) {
      /* check that g_lens has valid entries */
      for (i=0; i<jac->n; i++) {
        if (!jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Zero block not allowed");
        if (jac->g_lens[i]/bs*bs != jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
      }
      if (size == 1) {
        jac->n_local = jac->n;
        ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
        ierr = PetscArraycpy(jac->l_lens,jac->g_lens,jac->n_local);CHKERRQ(ierr);
        /* check that user set these correctly */
        sum = 0;
        for (i=0; i<jac->n_local; i++) sum += jac->l_lens[i];
        if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Global lens set incorrectly");
      } else {
        ierr = MatGetOwnershipRange(pc->pmat,&start,&end);CHKERRQ(ierr);
        /* loop over blocks determining the first one owned by me */
        sum = 0;
        for (i=0; i<jac->n+1; i++) {
          if (sum == start) { i_start = i; goto start_1;}
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
start_1:
        for (i=i_start; i<jac->n+1; i++) {
          if (sum == end) { i_end = i; goto end_1; }
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
end_1:
        jac->n_local = i_end - i_start;
        ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
        ierr = PetscArraycpy(jac->l_lens,jac->g_lens+i_start,jac->n_local);CHKERRQ(ierr);
      }
    } else { /* no global block sizes given, determine them using the default layout */
      jac->n_local = jac->n/size + ((jac->n % size) > rank);
      ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        jac->l_lens[i] = ((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i))*bs;
        if (!jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Too many blocks given");
      }
    }
  } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */
    jac->n       = size;
    jac->n_local = 1;
    ierr = PetscMalloc1(1,&jac->l_lens);CHKERRQ(ierr);
    jac->l_lens[0] = M;
  } else { /* jac->n > 0 && jac->n_local > 0 */
    if (!jac->l_lens) {
      ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
    }
  }
  if (jac->n_local < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of blocks is less than number of processors");

  /* -------------------------
     Determines mat and pmat
  ---------------------------*/
  ierr = MatHasOperation(pc->mat,MATOP_GET_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr);
  if (!hasop && size == 1) {
    mat  = pc->mat;
    pmat = pc->pmat;
  } else {
    if (pc->useAmat) {
      /* use block from Amat matrix, not Pmat for local MatMult() */
      ierr = MatGetDiagonalBlock(pc->mat,&mat);CHKERRQ(ierr);
      /* make submatrix have same prefix as entire matrix */
      ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->mat,&mprefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)mat,mprefix);CHKERRQ(ierr);
    }
    if (pc->pmat != pc->mat || !pc->useAmat) {
      ierr = MatGetDiagonalBlock(pc->pmat,&pmat);CHKERRQ(ierr);
      /* make submatrix have same prefix as entire matrix */
      ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)pmat,pprefix);CHKERRQ(ierr);
    } else pmat = mat;
  }

  /* ------
     Setup code depends on the number of blocks
  */
  if (jac->n_local == 1) {
    ierr = PCSetUp_BJacobi_Singleblock(pc,mat,pmat);CHKERRQ(ierr);
  } else {
    ierr = PCSetUp_BJacobi_Multiblock(pc,mat,pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* Default destroy, if it has never been setup */
static PetscErrorCode PCDestroy_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_BJacobi(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks,i;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"Block Jacobi options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_bjacobi_blocks","Total number of blocks","PCBJacobiSetTotalBlocks",jac->n,&blocks,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCBJacobiSetTotalBlocks(pc,blocks,NULL);CHKERRQ(ierr);}
  ierr =
PetscOptionsInt("-pc_bjacobi_local_blocks","Local number of blocks","PCBJacobiSetLocalBlocks",jac->n_local,&blocks,&flg);CHKERRQ(ierr); 169 if (flg) {ierr = PCBJacobiSetLocalBlocks(pc,blocks,NULL);CHKERRQ(ierr);} 170 if (jac->ksp) { 171 /* The sub-KSP has already been set up (e.g., PCSetUp_BJacobi_Singleblock), but KSPSetFromOptions was not called 172 * unless we had already been called. */ 173 for (i=0; i<jac->n_local; i++) { 174 ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr); 175 } 176 } 177 ierr = PetscOptionsTail();CHKERRQ(ierr); 178 PetscFunctionReturn(0); 179 } 180 181 #include <petscdraw.h> 182 static PetscErrorCode PCView_BJacobi(PC pc,PetscViewer viewer) 183 { 184 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 185 PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data; 186 PetscErrorCode ierr; 187 PetscMPIInt rank; 188 PetscInt i; 189 PetscBool iascii,isstring,isdraw; 190 PetscViewer sviewer; 191 192 PetscFunctionBegin; 193 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 194 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr); 195 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr); 196 if (iascii) { 197 if (pc->useAmat) { 198 ierr = PetscViewerASCIIPrintf(viewer," using Amat local matrix, number of blocks = %D\n",jac->n);CHKERRQ(ierr); 199 } 200 ierr = PetscViewerASCIIPrintf(viewer," number of blocks = %D\n",jac->n);CHKERRQ(ierr); 201 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr); 202 if (jac->same_local_solves) { 203 ierr = PetscViewerASCIIPrintf(viewer," Local solver is the same for all blocks, as in the following KSP and PC objects on rank 0:\n");CHKERRQ(ierr); 204 if (jac->ksp && !jac->psubcomm) { 205 ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 206 if (!rank) { 207 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 208 ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr); 209 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 210 } 211 ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 212 } else if (mpjac && jac->ksp && mpjac->psubcomm) { 213 ierr = PetscViewerGetSubViewer(viewer,mpjac->psubcomm->child,&sviewer);CHKERRQ(ierr); 214 if (!mpjac->psubcomm->color) { 215 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 216 ierr = KSPView(*(jac->ksp),sviewer);CHKERRQ(ierr); 217 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 218 } 219 ierr = PetscViewerRestoreSubViewer(viewer,mpjac->psubcomm->child,&sviewer);CHKERRQ(ierr); 220 } 221 } else { 222 PetscInt n_global; 223 ierr = MPIU_Allreduce(&jac->n_local,&n_global,1,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr); 224 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 225 ierr = PetscViewerASCIIPrintf(viewer," Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr); 226 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D, first local block number = %D\n", 227 rank,jac->n_local,jac->first_local);CHKERRQ(ierr); 228 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 229 ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 230 for (i=0; i<jac->n_local; i++) { 231 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr); 232 ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr); 233 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - 
- - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blks=%D",jac->n);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x,y,bottom,h;

    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawGetCurrentPoint(draw,&x,&y);CHKERRQ(ierr);
    ierr = PetscSNPrintf(str,25,"Number blocks %D",jac->n);CHKERRQ(ierr);
    ierr = PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);CHKERRQ(ierr);
    bottom = y - h;
    ierr = PetscDrawPushCurrentPoint(draw,x,bottom);CHKERRQ(ierr);
    /* warning: in parallel the communicator on the viewer is different than the one on the KSP */
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],viewer);CHKERRQ(ierr);}
    ierr = PetscDrawPopCurrentPoint(draw);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/

static PetscErrorCode PCBJacobiGetSubKSP_BJacobi(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_BJacobi *jac = (PC_BJacobi*)pc->data;

  PetscFunctionBegin;
  if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call KSPSetUp() or PCSetUp() first");

  if (n_local) *n_local = jac->n_local;
  if (first_local) *first_local = jac->first_local;
  *ksp = jac->ksp;
  jac->same_local_solves = PETSC_FALSE;  /* Assume that local solves are now different;
                                            not necessarily true though!
This flag is 277 used only for PCView_BJacobi() */ 278 PetscFunctionReturn(0); 279 } 280 281 static PetscErrorCode PCBJacobiSetTotalBlocks_BJacobi(PC pc,PetscInt blocks,PetscInt *lens) 282 { 283 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 284 PetscErrorCode ierr; 285 286 PetscFunctionBegin; 287 if (pc->setupcalled > 0 && jac->n!=blocks) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Cannot alter number of blocks after PCSetUp()/KSPSetUp() has been called"); 288 jac->n = blocks; 289 if (!lens) jac->g_lens = NULL; 290 else { 291 ierr = PetscMalloc1(blocks,&jac->g_lens);CHKERRQ(ierr); 292 ierr = PetscLogObjectMemory((PetscObject)pc,blocks*sizeof(PetscInt));CHKERRQ(ierr); 293 ierr = PetscArraycpy(jac->g_lens,lens,blocks);CHKERRQ(ierr); 294 } 295 PetscFunctionReturn(0); 296 } 297 298 static PetscErrorCode PCBJacobiGetTotalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[]) 299 { 300 PC_BJacobi *jac = (PC_BJacobi*) pc->data; 301 302 PetscFunctionBegin; 303 *blocks = jac->n; 304 if (lens) *lens = jac->g_lens; 305 PetscFunctionReturn(0); 306 } 307 308 static PetscErrorCode PCBJacobiSetLocalBlocks_BJacobi(PC pc,PetscInt blocks,const PetscInt lens[]) 309 { 310 PC_BJacobi *jac; 311 PetscErrorCode ierr; 312 313 PetscFunctionBegin; 314 jac = (PC_BJacobi*)pc->data; 315 316 jac->n_local = blocks; 317 if (!lens) jac->l_lens = NULL; 318 else { 319 ierr = PetscMalloc1(blocks,&jac->l_lens);CHKERRQ(ierr); 320 ierr = PetscLogObjectMemory((PetscObject)pc,blocks*sizeof(PetscInt));CHKERRQ(ierr); 321 ierr = PetscArraycpy(jac->l_lens,lens,blocks);CHKERRQ(ierr); 322 } 323 PetscFunctionReturn(0); 324 } 325 326 static PetscErrorCode PCBJacobiGetLocalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[]) 327 { 328 PC_BJacobi *jac = (PC_BJacobi*) pc->data; 329 330 PetscFunctionBegin; 331 *blocks = jac->n_local; 332 if (lens) *lens = jac->l_lens; 333 PetscFunctionReturn(0); 334 } 335 336 /* -------------------------------------------------------------------------------------*/ 337 338 /*@C 339 PCBJacobiGetSubKSP - Gets the local KSP contexts for all blocks on 340 this processor. 341 342 Not Collective 343 344 Input Parameter: 345 . pc - the preconditioner context 346 347 Output Parameters: 348 + n_local - the number of blocks on this processor, or NULL 349 . first_local - the global number of the first block on this processor, or NULL 350 - ksp - the array of KSP contexts 351 352 Notes: 353 After PCBJacobiGetSubKSP() the array of KSP contexts is not to be freed. 354 355 Currently for some matrix implementations only 1 block per processor 356 is supported. 357 358 You must call KSPSetUp() or PCSetUp() before calling PCBJacobiGetSubKSP(). 359 360 Fortran Usage: You must pass in a KSP array that is large enough to contain all the local KSPs. 361 You can call PCBJacobiGetSubKSP(pc,nlocal,firstlocal,PETSC_NULL_KSP,ierr) to determine how large the 362 KSP array must be. 363 364 Level: advanced 365 366 .seealso: PCBJacobiGetSubKSP() 367 @*/ 368 PetscErrorCode PCBJacobiGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[]) 369 { 370 PetscErrorCode ierr; 371 372 PetscFunctionBegin; 373 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 374 ierr = PetscUseMethod(pc,"PCBJacobiGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr); 375 PetscFunctionReturn(0); 376 } 377 378 /*@ 379 PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block 380 Jacobi preconditioner. 
381 382 Collective on PC 383 384 Input Parameters: 385 + pc - the preconditioner context 386 . blocks - the number of blocks 387 - lens - [optional] integer array containing the size of each block 388 389 Options Database Key: 390 . -pc_bjacobi_blocks <blocks> - Sets the number of global blocks 391 392 Notes: 393 Currently only a limited number of blocking configurations are supported. 394 All processors sharing the PC must call this routine with the same data. 395 396 Level: intermediate 397 398 .seealso: PCSetUseAmat(), PCBJacobiSetLocalBlocks() 399 @*/ 400 PetscErrorCode PCBJacobiSetTotalBlocks(PC pc,PetscInt blocks,const PetscInt lens[]) 401 { 402 PetscErrorCode ierr; 403 404 PetscFunctionBegin; 405 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 406 if (blocks <= 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Must have positive blocks"); 407 ierr = PetscTryMethod(pc,"PCBJacobiSetTotalBlocks_C",(PC,PetscInt,const PetscInt[]),(pc,blocks,lens));CHKERRQ(ierr); 408 PetscFunctionReturn(0); 409 } 410 411 /*@C 412 PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block 413 Jacobi preconditioner. 414 415 Not Collective 416 417 Input Parameter: 418 . pc - the preconditioner context 419 420 Output parameters: 421 + blocks - the number of blocks 422 - lens - integer array containing the size of each block 423 424 Level: intermediate 425 426 .seealso: PCSetUseAmat(), PCBJacobiGetLocalBlocks() 427 @*/ 428 PetscErrorCode PCBJacobiGetTotalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[]) 429 { 430 PetscErrorCode ierr; 431 432 PetscFunctionBegin; 433 PetscValidHeaderSpecific(pc, PC_CLASSID,1); 434 PetscValidIntPointer(blocks,2); 435 ierr = PetscUseMethod(pc,"PCBJacobiGetTotalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr); 436 PetscFunctionReturn(0); 437 } 438 439 /*@ 440 PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block 441 Jacobi preconditioner. 442 443 Not Collective 444 445 Input Parameters: 446 + pc - the preconditioner context 447 . blocks - the number of blocks 448 - lens - [optional] integer array containing size of each block 449 450 Options Database Key: 451 . -pc_bjacobi_local_blocks <blocks> - Sets the number of local blocks 452 453 Note: 454 Currently only a limited number of blocking configurations are supported. 455 456 Level: intermediate 457 458 .seealso: PCSetUseAmat(), PCBJacobiSetTotalBlocks() 459 @*/ 460 PetscErrorCode PCBJacobiSetLocalBlocks(PC pc,PetscInt blocks,const PetscInt lens[]) 461 { 462 PetscErrorCode ierr; 463 464 PetscFunctionBegin; 465 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 466 if (blocks < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have nonegative blocks"); 467 ierr = PetscTryMethod(pc,"PCBJacobiSetLocalBlocks_C",(PC,PetscInt,const PetscInt []),(pc,blocks,lens));CHKERRQ(ierr); 468 PetscFunctionReturn(0); 469 } 470 471 /*@C 472 PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block 473 Jacobi preconditioner. 474 475 Not Collective 476 477 Input Parameters: 478 + pc - the preconditioner context 479 . blocks - the number of blocks 480 - lens - [optional] integer array containing size of each block 481 482 Note: 483 Currently only a limited number of blocking configurations are supported. 

   Level: intermediate

.seealso: PCSetUseAmat(), PCBJacobiGetTotalBlocks()
@*/
PetscErrorCode PCBJacobiGetLocalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID,1);
  PetscValidIntPointer(blocks,2);
  ierr = PetscUseMethod(pc,"PCBJacobiGetLocalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -----------------------------------------------------------------------------------*/

/*MC
   PCBJACOBI - Use block Jacobi preconditioning, each block is (approximately) solved with
           its own KSP object.

   Options Database Keys:
+  -pc_use_amat - use Amat to apply block of operator in inner Krylov method
-  -pc_bjacobi_blocks <n> - use n total blocks

   Notes:
     Each processor can have one or more blocks, or a single block can be shared by several processes. Defaults to one block per processor.

     To set options on the solvers for each block append -sub_ to all the KSP and PC
        options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

     To set the options on the solvers separately for each block call PCBJacobiGetSubKSP()
         and set the options directly on the resulting KSP object (you can access its PC
         with KSPGetPC())

     For GPU-based vectors (CUDA, ViennaCL) it is recommended to use exactly one block per MPI process for best
         performance.  Different block partitioning may lead to additional data transfers
         between host and GPU that degrade performance.

     The options prefix for each block is sub_, for example -sub_pc_type lu.

     When multiple processes share a single block, each block encompasses exactly all the unknowns owned by its set of processes.
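
     For example, after KSPSetUp() (or PCSetUp()) the block solvers can be retrieved and configured
     directly. The sketch below is illustrative only: the outer objects ksp and pc, the loop index i,
     and the choice of KSPPREONLY/PCILU are assumptions, not requirements.
.vb
     KSP      *subksp;
     PC        subpc;
     PetscInt  i,nlocal,first;

     ierr = KSPSetUp(ksp);CHKERRQ(ierr);
     ierr = PCBJacobiGetSubKSP(pc,&nlocal,&first,&subksp);CHKERRQ(ierr);
     for (i=0; i<nlocal; i++) {
       ierr = KSPGetPC(subksp[i],&subpc);CHKERRQ(ierr);
       ierr = PCSetType(subpc,PCILU);CHKERRQ(ierr);
       ierr = KSPSetType(subksp[i],KSPPREONLY);CHKERRQ(ierr);
     }
.ve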
527 528 Level: beginner 529 530 .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC, 531 PCASM, PCSetUseAmat(), PCGetUseAmat(), PCBJacobiGetSubKSP(), PCBJacobiSetTotalBlocks(), 532 PCBJacobiSetLocalBlocks(), PCSetModifySubMatrices() 533 M*/ 534 535 PETSC_EXTERN PetscErrorCode PCCreate_BJacobi(PC pc) 536 { 537 PetscErrorCode ierr; 538 PetscMPIInt rank; 539 PC_BJacobi *jac; 540 541 PetscFunctionBegin; 542 ierr = PetscNewLog(pc,&jac);CHKERRQ(ierr); 543 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr); 544 545 pc->ops->apply = NULL; 546 pc->ops->applytranspose = NULL; 547 pc->ops->setup = PCSetUp_BJacobi; 548 pc->ops->destroy = PCDestroy_BJacobi; 549 pc->ops->setfromoptions = PCSetFromOptions_BJacobi; 550 pc->ops->view = PCView_BJacobi; 551 pc->ops->applyrichardson = NULL; 552 553 pc->data = (void*)jac; 554 jac->n = -1; 555 jac->n_local = -1; 556 jac->first_local = rank; 557 jac->ksp = NULL; 558 jac->same_local_solves = PETSC_TRUE; 559 jac->g_lens = NULL; 560 jac->l_lens = NULL; 561 jac->psubcomm = NULL; 562 563 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetSubKSP_C",PCBJacobiGetSubKSP_BJacobi);CHKERRQ(ierr); 564 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetTotalBlocks_C",PCBJacobiSetTotalBlocks_BJacobi);CHKERRQ(ierr); 565 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetTotalBlocks_C",PCBJacobiGetTotalBlocks_BJacobi);CHKERRQ(ierr); 566 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetLocalBlocks_C",PCBJacobiSetLocalBlocks_BJacobi);CHKERRQ(ierr); 567 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetLocalBlocks_C",PCBJacobiGetLocalBlocks_BJacobi);CHKERRQ(ierr); 568 PetscFunctionReturn(0); 569 } 570 571 /* --------------------------------------------------------------------------------------------*/ 572 /* 573 These are for a single block per processor; works for AIJ, BAIJ; Seq and MPI 574 */ 575 static PetscErrorCode PCReset_BJacobi_Singleblock(PC pc) 576 { 577 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 578 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 579 PetscErrorCode ierr; 580 581 PetscFunctionBegin; 582 ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr); 583 ierr = VecDestroy(&bjac->x);CHKERRQ(ierr); 584 ierr = VecDestroy(&bjac->y);CHKERRQ(ierr); 585 PetscFunctionReturn(0); 586 } 587 588 static PetscErrorCode PCDestroy_BJacobi_Singleblock(PC pc) 589 { 590 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 591 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 592 PetscErrorCode ierr; 593 594 PetscFunctionBegin; 595 ierr = PCReset_BJacobi_Singleblock(pc);CHKERRQ(ierr); 596 ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr); 597 ierr = PetscFree(jac->ksp);CHKERRQ(ierr); 598 ierr = PetscFree(jac->l_lens);CHKERRQ(ierr); 599 ierr = PetscFree(jac->g_lens);CHKERRQ(ierr); 600 ierr = PetscFree(bjac);CHKERRQ(ierr); 601 ierr = PetscFree(pc->data);CHKERRQ(ierr); 602 PetscFunctionReturn(0); 603 } 604 605 static PetscErrorCode PCSetUpOnBlocks_BJacobi_Singleblock(PC pc) 606 { 607 PetscErrorCode ierr; 608 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 609 KSP subksp = jac->ksp[0]; 610 KSPConvergedReason reason; 611 612 PetscFunctionBegin; 613 ierr = KSPSetUp(subksp);CHKERRQ(ierr); 614 ierr = KSPGetConvergedReason(subksp,&reason);CHKERRQ(ierr); 615 if (reason == KSP_DIVERGED_PC_FAILED) { 616 pc->failedreason = PC_SUBPC_ERROR; 617 } 618 PetscFunctionReturn(0); 619 } 620 621 static PetscErrorCode PCApply_BJacobi_Singleblock(PC pc,Vec x,Vec y) 622 { 623 PetscErrorCode ierr; 624 
PC_BJacobi *jac = (PC_BJacobi*)pc->data; 625 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 626 627 PetscFunctionBegin; 628 ierr = VecGetLocalVectorRead(x, bjac->x);CHKERRQ(ierr); 629 ierr = VecGetLocalVector(y, bjac->y);CHKERRQ(ierr); 630 /* Since the inner KSP matrix may point directly to the diagonal block of an MPI matrix the inner 631 matrix may change even if the outer KSP/PC has not updated the preconditioner, this will trigger a rebuild 632 of the inner preconditioner automatically unless we pass down the outer preconditioners reuse flag.*/ 633 ierr = KSPSetReusePreconditioner(jac->ksp[0],pc->reusepreconditioner);CHKERRQ(ierr); 634 ierr = KSPSolve(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr); 635 ierr = KSPCheckSolve(jac->ksp[0],pc,bjac->y);CHKERRQ(ierr); 636 ierr = VecRestoreLocalVectorRead(x, bjac->x);CHKERRQ(ierr); 637 ierr = VecRestoreLocalVector(y, bjac->y);CHKERRQ(ierr); 638 PetscFunctionReturn(0); 639 } 640 641 static PetscErrorCode PCApplySymmetricLeft_BJacobi_Singleblock(PC pc,Vec x,Vec y) 642 { 643 PetscErrorCode ierr; 644 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 645 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 646 PetscScalar *y_array; 647 const PetscScalar *x_array; 648 PC subpc; 649 650 PetscFunctionBegin; 651 /* 652 The VecPlaceArray() is to avoid having to copy the 653 y vector into the bjac->x vector. The reason for 654 the bjac->x vector is that we need a sequential vector 655 for the sequential solve. 656 */ 657 ierr = VecGetArrayRead(x,&x_array);CHKERRQ(ierr); 658 ierr = VecGetArray(y,&y_array);CHKERRQ(ierr); 659 ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr); 660 ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr); 661 /* apply the symmetric left portion of the inner PC operator */ 662 /* note this by-passes the inner KSP and its options completely */ 663 ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr); 664 ierr = PCApplySymmetricLeft(subpc,bjac->x,bjac->y);CHKERRQ(ierr); 665 ierr = VecResetArray(bjac->x);CHKERRQ(ierr); 666 ierr = VecResetArray(bjac->y);CHKERRQ(ierr); 667 ierr = VecRestoreArrayRead(x,&x_array);CHKERRQ(ierr); 668 ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr); 669 PetscFunctionReturn(0); 670 } 671 672 static PetscErrorCode PCApplySymmetricRight_BJacobi_Singleblock(PC pc,Vec x,Vec y) 673 { 674 PetscErrorCode ierr; 675 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 676 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 677 PetscScalar *y_array; 678 const PetscScalar *x_array; 679 PC subpc; 680 681 PetscFunctionBegin; 682 /* 683 The VecPlaceArray() is to avoid having to copy the 684 y vector into the bjac->x vector. The reason for 685 the bjac->x vector is that we need a sequential vector 686 for the sequential solve. 
687 */ 688 ierr = VecGetArrayRead(x,&x_array);CHKERRQ(ierr); 689 ierr = VecGetArray(y,&y_array);CHKERRQ(ierr); 690 ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr); 691 ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr); 692 693 /* apply the symmetric right portion of the inner PC operator */ 694 /* note this by-passes the inner KSP and its options completely */ 695 696 ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr); 697 ierr = PCApplySymmetricRight(subpc,bjac->x,bjac->y);CHKERRQ(ierr); 698 699 ierr = VecRestoreArrayRead(x,&x_array);CHKERRQ(ierr); 700 ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr); 701 PetscFunctionReturn(0); 702 } 703 704 static PetscErrorCode PCApplyTranspose_BJacobi_Singleblock(PC pc,Vec x,Vec y) 705 { 706 PetscErrorCode ierr; 707 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 708 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 709 PetscScalar *y_array; 710 const PetscScalar *x_array; 711 712 PetscFunctionBegin; 713 /* 714 The VecPlaceArray() is to avoid having to copy the 715 y vector into the bjac->x vector. The reason for 716 the bjac->x vector is that we need a sequential vector 717 for the sequential solve. 718 */ 719 ierr = VecGetArrayRead(x,&x_array);CHKERRQ(ierr); 720 ierr = VecGetArray(y,&y_array);CHKERRQ(ierr); 721 ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr); 722 ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr); 723 ierr = KSPSolveTranspose(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr); 724 ierr = KSPCheckSolve(jac->ksp[0],pc,bjac->y);CHKERRQ(ierr); 725 ierr = VecResetArray(bjac->x);CHKERRQ(ierr); 726 ierr = VecResetArray(bjac->y);CHKERRQ(ierr); 727 ierr = VecRestoreArrayRead(x,&x_array);CHKERRQ(ierr); 728 ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr); 729 PetscFunctionReturn(0); 730 } 731 732 static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC pc,Mat mat,Mat pmat) 733 { 734 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 735 PetscErrorCode ierr; 736 PetscInt m; 737 KSP ksp; 738 PC_BJacobi_Singleblock *bjac; 739 PetscBool wasSetup = PETSC_TRUE; 740 #if defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_VIENNACL) 741 PetscBool is_gpumatrix = PETSC_FALSE; 742 #endif 743 744 PetscFunctionBegin; 745 if (!pc->setupcalled) { 746 const char *prefix; 747 748 if (!jac->ksp) { 749 wasSetup = PETSC_FALSE; 750 751 ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr); 752 ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr); 753 ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr); 754 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr); 755 ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr); 756 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 757 ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr); 758 ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr); 759 760 pc->ops->reset = PCReset_BJacobi_Singleblock; 761 pc->ops->destroy = PCDestroy_BJacobi_Singleblock; 762 pc->ops->apply = PCApply_BJacobi_Singleblock; 763 pc->ops->applysymmetricleft = PCApplySymmetricLeft_BJacobi_Singleblock; 764 pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock; 765 pc->ops->applytranspose = PCApplyTranspose_BJacobi_Singleblock; 766 pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Singleblock; 767 768 ierr = PetscMalloc1(1,&jac->ksp);CHKERRQ(ierr); 769 jac->ksp[0] = ksp; 770 771 ierr = PetscNewLog(pc,&bjac);CHKERRQ(ierr); 772 jac->data = (void*)bjac; 773 } else { 774 ksp = jac->ksp[0]; 775 bjac = (PC_BJacobi_Singleblock*)jac->data; 776 } 777 778 /* 779 The reason we need 
to generate these vectors is to serve 780 as the right-hand side and solution vector for the solve on the 781 block. We do not need to allocate space for the vectors since 782 that is provided via VecPlaceArray() just before the call to 783 KSPSolve() on the block. 784 */ 785 ierr = MatGetSize(pmat,&m,&m);CHKERRQ(ierr); 786 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->x);CHKERRQ(ierr); 787 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->y);CHKERRQ(ierr); 788 #if defined(PETSC_HAVE_CUDA) 789 ierr = PetscObjectTypeCompareAny((PetscObject)pmat,&is_gpumatrix,MATAIJCUSPARSE,MATSEQAIJCUSPARSE,MATMPIAIJCUSPARSE,"");CHKERRQ(ierr); 790 if (is_gpumatrix) { 791 ierr = VecSetType(bjac->x,VECCUDA);CHKERRQ(ierr); 792 ierr = VecSetType(bjac->y,VECCUDA);CHKERRQ(ierr); 793 } 794 #endif 795 #if defined(PETSC_HAVE_VIENNACL) 796 ierr = PetscObjectTypeCompareAny((PetscObject)pmat,&is_gpumatrix,MATAIJVIENNACL,MATSEQAIJVIENNACL,MATMPIAIJVIENNACL,"");CHKERRQ(ierr); 797 if (is_gpumatrix) { 798 ierr = VecSetType(bjac->x,VECVIENNACL);CHKERRQ(ierr); 799 ierr = VecSetType(bjac->y,VECVIENNACL);CHKERRQ(ierr); 800 } 801 #endif 802 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->x);CHKERRQ(ierr); 803 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->y);CHKERRQ(ierr); 804 } else { 805 ksp = jac->ksp[0]; 806 bjac = (PC_BJacobi_Singleblock*)jac->data; 807 } 808 if (pc->useAmat) { 809 ierr = KSPSetOperators(ksp,mat,pmat);CHKERRQ(ierr); 810 } else { 811 ierr = KSPSetOperators(ksp,pmat,pmat);CHKERRQ(ierr); 812 } 813 if (!wasSetup && pc->setfromoptionscalled) { 814 /* If PCSetFromOptions_BJacobi is called later, KSPSetFromOptions will be called at that time. */ 815 ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr); 816 } 817 PetscFunctionReturn(0); 818 } 819 820 /* ---------------------------------------------------------------------------------------------*/ 821 static PetscErrorCode PCReset_BJacobi_Multiblock(PC pc) 822 { 823 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 824 PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data; 825 PetscErrorCode ierr; 826 PetscInt i; 827 828 PetscFunctionBegin; 829 if (bjac && bjac->pmat) { 830 ierr = MatDestroyMatrices(jac->n_local,&bjac->pmat);CHKERRQ(ierr); 831 if (pc->useAmat) { 832 ierr = MatDestroyMatrices(jac->n_local,&bjac->mat);CHKERRQ(ierr); 833 } 834 } 835 836 for (i=0; i<jac->n_local; i++) { 837 ierr = KSPReset(jac->ksp[i]);CHKERRQ(ierr); 838 if (bjac && bjac->x) { 839 ierr = VecDestroy(&bjac->x[i]);CHKERRQ(ierr); 840 ierr = VecDestroy(&bjac->y[i]);CHKERRQ(ierr); 841 ierr = ISDestroy(&bjac->is[i]);CHKERRQ(ierr); 842 } 843 } 844 ierr = PetscFree(jac->l_lens);CHKERRQ(ierr); 845 ierr = PetscFree(jac->g_lens);CHKERRQ(ierr); 846 PetscFunctionReturn(0); 847 } 848 849 static PetscErrorCode PCDestroy_BJacobi_Multiblock(PC pc) 850 { 851 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 852 PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data; 853 PetscErrorCode ierr; 854 PetscInt i; 855 856 PetscFunctionBegin; 857 ierr = PCReset_BJacobi_Multiblock(pc);CHKERRQ(ierr); 858 if (bjac) { 859 ierr = PetscFree2(bjac->x,bjac->y);CHKERRQ(ierr); 860 ierr = PetscFree(bjac->starts);CHKERRQ(ierr); 861 ierr = PetscFree(bjac->is);CHKERRQ(ierr); 862 } 863 ierr = PetscFree(jac->data);CHKERRQ(ierr); 864 for (i=0; i<jac->n_local; i++) { 865 ierr = KSPDestroy(&jac->ksp[i]);CHKERRQ(ierr); 866 } 867 ierr = PetscFree(jac->ksp);CHKERRQ(ierr); 868 ierr = PetscFree(pc->data);CHKERRQ(ierr); 869 PetscFunctionReturn(0); 870 } 871 872 static PetscErrorCode 
PCSetUpOnBlocks_BJacobi_Multiblock(PC pc) 873 { 874 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 875 PetscErrorCode ierr; 876 PetscInt i,n_local = jac->n_local; 877 KSPConvergedReason reason; 878 879 PetscFunctionBegin; 880 for (i=0; i<n_local; i++) { 881 ierr = KSPSetUp(jac->ksp[i]);CHKERRQ(ierr); 882 ierr = KSPGetConvergedReason(jac->ksp[i],&reason);CHKERRQ(ierr); 883 if (reason == KSP_DIVERGED_PC_FAILED) { 884 pc->failedreason = PC_SUBPC_ERROR; 885 } 886 } 887 PetscFunctionReturn(0); 888 } 889 890 /* 891 Preconditioner for block Jacobi 892 */ 893 static PetscErrorCode PCApply_BJacobi_Multiblock(PC pc,Vec x,Vec y) 894 { 895 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 896 PetscErrorCode ierr; 897 PetscInt i,n_local = jac->n_local; 898 PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data; 899 PetscScalar *yin; 900 const PetscScalar *xin; 901 902 PetscFunctionBegin; 903 ierr = VecGetArrayRead(x,&xin);CHKERRQ(ierr); 904 ierr = VecGetArray(y,&yin);CHKERRQ(ierr); 905 for (i=0; i<n_local; i++) { 906 /* 907 To avoid copying the subvector from x into a workspace we instead 908 make the workspace vector array point to the subpart of the array of 909 the global vector. 910 */ 911 ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr); 912 ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr); 913 914 ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr); 915 ierr = KSPSolve(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr); 916 ierr = KSPCheckSolve(jac->ksp[i],pc,bjac->y[i]);CHKERRQ(ierr); 917 ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr); 918 919 ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr); 920 ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr); 921 } 922 ierr = VecRestoreArrayRead(x,&xin);CHKERRQ(ierr); 923 ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr); 924 PetscFunctionReturn(0); 925 } 926 927 /* 928 Preconditioner for block Jacobi 929 */ 930 static PetscErrorCode PCApplyTranspose_BJacobi_Multiblock(PC pc,Vec x,Vec y) 931 { 932 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 933 PetscErrorCode ierr; 934 PetscInt i,n_local = jac->n_local; 935 PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data; 936 PetscScalar *yin; 937 const PetscScalar *xin; 938 939 PetscFunctionBegin; 940 ierr = VecGetArrayRead(x,&xin);CHKERRQ(ierr); 941 ierr = VecGetArray(y,&yin);CHKERRQ(ierr); 942 for (i=0; i<n_local; i++) { 943 /* 944 To avoid copying the subvector from x into a workspace we instead 945 make the workspace vector array point to the subpart of the array of 946 the global vector. 
947 */ 948 ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr); 949 ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr); 950 951 ierr = PetscLogEventBegin(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr); 952 ierr = KSPSolveTranspose(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr); 953 ierr = KSPCheckSolve(jac->ksp[i],pc,bjac->y[i]);CHKERRQ(ierr); 954 ierr = PetscLogEventEnd(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr); 955 956 ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr); 957 ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr); 958 } 959 ierr = VecRestoreArrayRead(x,&xin);CHKERRQ(ierr); 960 ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr); 961 PetscFunctionReturn(0); 962 } 963 964 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC pc,Mat mat,Mat pmat) 965 { 966 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 967 PetscErrorCode ierr; 968 PetscInt m,n_local,N,M,start,i; 969 const char *prefix,*pprefix,*mprefix; 970 KSP ksp; 971 Vec x,y; 972 PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data; 973 PC subpc; 974 IS is; 975 MatReuse scall; 976 #if defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_VIENNACL) 977 PetscBool is_gpumatrix = PETSC_FALSE; 978 #endif 979 980 PetscFunctionBegin; 981 ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr); 982 983 n_local = jac->n_local; 984 985 if (pc->useAmat) { 986 PetscBool same; 987 ierr = PetscObjectTypeCompare((PetscObject)mat,((PetscObject)pmat)->type_name,&same);CHKERRQ(ierr); 988 if (!same) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_INCOMP,"Matrices not of same type"); 989 } 990 991 if (!pc->setupcalled) { 992 scall = MAT_INITIAL_MATRIX; 993 994 if (!jac->ksp) { 995 pc->ops->reset = PCReset_BJacobi_Multiblock; 996 pc->ops->destroy = PCDestroy_BJacobi_Multiblock; 997 pc->ops->apply = PCApply_BJacobi_Multiblock; 998 pc->ops->applytranspose= PCApplyTranspose_BJacobi_Multiblock; 999 pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Multiblock; 1000 1001 ierr = PetscNewLog(pc,&bjac);CHKERRQ(ierr); 1002 ierr = PetscMalloc1(n_local,&jac->ksp);CHKERRQ(ierr); 1003 ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(n_local*sizeof(KSP)));CHKERRQ(ierr); 1004 ierr = PetscMalloc2(n_local,&bjac->x,n_local,&bjac->y);CHKERRQ(ierr); 1005 ierr = PetscMalloc1(n_local,&bjac->starts);CHKERRQ(ierr); 1006 ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(n_local*sizeof(PetscScalar)));CHKERRQ(ierr); 1007 1008 jac->data = (void*)bjac; 1009 ierr = PetscMalloc1(n_local,&bjac->is);CHKERRQ(ierr); 1010 ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(n_local*sizeof(IS)));CHKERRQ(ierr); 1011 1012 for (i=0; i<n_local; i++) { 1013 ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr); 1014 ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr); 1015 ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr); 1016 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr); 1017 ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr); 1018 ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr); 1019 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 1020 ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr); 1021 ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr); 1022 1023 jac->ksp[i] = ksp; 1024 } 1025 } else { 1026 bjac = (PC_BJacobi_Multiblock*)jac->data; 1027 } 1028 1029 start = 0; 1030 for (i=0; i<n_local; i++) { 1031 m = jac->l_lens[i]; 1032 /* 1033 The reason we need to generate these vectors is to serve 1034 as the right-hand side 
and solution vector for the solve on the 1035 block. We do not need to allocate space for the vectors since 1036 that is provided via VecPlaceArray() just before the call to 1037 KSPSolve() on the block. 1038 1039 */ 1040 ierr = VecCreateSeq(PETSC_COMM_SELF,m,&x);CHKERRQ(ierr); 1041 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&y);CHKERRQ(ierr); 1042 #if defined(PETSC_HAVE_CUDA) 1043 ierr = PetscObjectTypeCompareAny((PetscObject)pmat,&is_gpumatrix,MATAIJCUSPARSE,MATSEQAIJCUSPARSE,MATMPIAIJCUSPARSE,"");CHKERRQ(ierr); 1044 if (is_gpumatrix) { 1045 ierr = VecSetType(x,VECCUDA);CHKERRQ(ierr); 1046 ierr = VecSetType(y,VECCUDA);CHKERRQ(ierr); 1047 } 1048 #endif 1049 #if defined(PETSC_HAVE_VIENNACL) 1050 ierr = PetscObjectTypeCompareAny((PetscObject)pmat,&is_gpumatrix,MATAIJVIENNACL,MATSEQAIJVIENNACL,MATMPIAIJVIENNACL,"");CHKERRQ(ierr); 1051 if (is_gpumatrix) { 1052 ierr = VecSetType(x,VECVIENNACL);CHKERRQ(ierr); 1053 ierr = VecSetType(y,VECVIENNACL);CHKERRQ(ierr); 1054 } 1055 #endif 1056 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)x);CHKERRQ(ierr); 1057 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)y);CHKERRQ(ierr); 1058 1059 bjac->x[i] = x; 1060 bjac->y[i] = y; 1061 bjac->starts[i] = start; 1062 1063 ierr = ISCreateStride(PETSC_COMM_SELF,m,start,1,&is);CHKERRQ(ierr); 1064 bjac->is[i] = is; 1065 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)is);CHKERRQ(ierr); 1066 1067 start += m; 1068 } 1069 } else { 1070 bjac = (PC_BJacobi_Multiblock*)jac->data; 1071 /* 1072 Destroy the blocks from the previous iteration 1073 */ 1074 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 1075 ierr = MatDestroyMatrices(n_local,&bjac->pmat);CHKERRQ(ierr); 1076 if (pc->useAmat) { 1077 ierr = MatDestroyMatrices(n_local,&bjac->mat);CHKERRQ(ierr); 1078 } 1079 scall = MAT_INITIAL_MATRIX; 1080 } else scall = MAT_REUSE_MATRIX; 1081 } 1082 1083 ierr = MatCreateSubMatrices(pmat,n_local,bjac->is,bjac->is,scall,&bjac->pmat);CHKERRQ(ierr); 1084 if (pc->useAmat) { 1085 ierr = MatCreateSubMatrices(mat,n_local,bjac->is,bjac->is,scall,&bjac->mat);CHKERRQ(ierr); 1086 } 1087 /* Return control to the user so that the submatrices can be modified (e.g., to apply 1088 different boundary conditions for the submatrices than for the global problem) */ 1089 ierr = PCModifySubMatrices(pc,n_local,bjac->is,bjac->is,bjac->pmat,pc->modifysubmatricesP);CHKERRQ(ierr); 1090 1091 ierr = PetscObjectGetOptionsPrefix((PetscObject)pmat,&pprefix);CHKERRQ(ierr); 1092 for (i=0; i<n_local; i++) { 1093 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->pmat[i]);CHKERRQ(ierr); 1094 ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->pmat[i],pprefix);CHKERRQ(ierr); 1095 if (pc->useAmat) { 1096 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->mat[i]);CHKERRQ(ierr); 1097 ierr = PetscObjectGetOptionsPrefix((PetscObject)mat,&mprefix);CHKERRQ(ierr); 1098 ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->mat[i],mprefix);CHKERRQ(ierr); 1099 ierr = KSPSetOperators(jac->ksp[i],bjac->mat[i],bjac->pmat[i]);CHKERRQ(ierr); 1100 } else { 1101 ierr = KSPSetOperators(jac->ksp[i],bjac->pmat[i],bjac->pmat[i]);CHKERRQ(ierr); 1102 } 1103 if (pc->setfromoptionscalled) { 1104 ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr); 1105 } 1106 } 1107 PetscFunctionReturn(0); 1108 } 1109 1110 /* ---------------------------------------------------------------------------------------------*/ 1111 /* 1112 These are for a single block with multiple processes; 1113 */ 1114 static PetscErrorCode PCReset_BJacobi_Multiproc(PC 
pc) 1115 { 1116 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 1117 PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data; 1118 PetscErrorCode ierr; 1119 1120 PetscFunctionBegin; 1121 ierr = VecDestroy(&mpjac->ysub);CHKERRQ(ierr); 1122 ierr = VecDestroy(&mpjac->xsub);CHKERRQ(ierr); 1123 ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr); 1124 if (jac->ksp) {ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);} 1125 PetscFunctionReturn(0); 1126 } 1127 1128 static PetscErrorCode PCDestroy_BJacobi_Multiproc(PC pc) 1129 { 1130 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 1131 PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data; 1132 PetscErrorCode ierr; 1133 1134 PetscFunctionBegin; 1135 ierr = PCReset_BJacobi_Multiproc(pc);CHKERRQ(ierr); 1136 ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr); 1137 ierr = PetscFree(jac->ksp);CHKERRQ(ierr); 1138 ierr = PetscSubcommDestroy(&mpjac->psubcomm);CHKERRQ(ierr); 1139 1140 ierr = PetscFree(mpjac);CHKERRQ(ierr); 1141 ierr = PetscFree(pc->data);CHKERRQ(ierr); 1142 PetscFunctionReturn(0); 1143 } 1144 1145 static PetscErrorCode PCApply_BJacobi_Multiproc(PC pc,Vec x,Vec y) 1146 { 1147 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 1148 PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data; 1149 PetscErrorCode ierr; 1150 PetscScalar *yarray; 1151 const PetscScalar *xarray; 1152 KSPConvergedReason reason; 1153 1154 PetscFunctionBegin; 1155 /* place x's and y's local arrays into xsub and ysub */ 1156 ierr = VecGetArrayRead(x,&xarray);CHKERRQ(ierr); 1157 ierr = VecGetArray(y,&yarray);CHKERRQ(ierr); 1158 ierr = VecPlaceArray(mpjac->xsub,xarray);CHKERRQ(ierr); 1159 ierr = VecPlaceArray(mpjac->ysub,yarray);CHKERRQ(ierr); 1160 1161 /* apply preconditioner on each matrix block */ 1162 ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr); 1163 ierr = KSPSolve(jac->ksp[0],mpjac->xsub,mpjac->ysub);CHKERRQ(ierr); 1164 ierr = KSPCheckSolve(jac->ksp[0],pc,mpjac->ysub);CHKERRQ(ierr); 1165 ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr); 1166 ierr = KSPGetConvergedReason(jac->ksp[0],&reason);CHKERRQ(ierr); 1167 if (reason == KSP_DIVERGED_PC_FAILED) { 1168 pc->failedreason = PC_SUBPC_ERROR; 1169 } 1170 1171 ierr = VecResetArray(mpjac->xsub);CHKERRQ(ierr); 1172 ierr = VecResetArray(mpjac->ysub);CHKERRQ(ierr); 1173 ierr = VecRestoreArrayRead(x,&xarray);CHKERRQ(ierr); 1174 ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr); 1175 PetscFunctionReturn(0); 1176 } 1177 1178 static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC pc) 1179 { 1180 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 1181 PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data; 1182 PetscErrorCode ierr; 1183 PetscInt m,n; 1184 MPI_Comm comm,subcomm=0; 1185 const char *prefix; 1186 PetscBool wasSetup = PETSC_TRUE; 1187 #if defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_VIENNACL) 1188 PetscBool is_gpumatrix = PETSC_FALSE; 1189 #endif 1190 1191 1192 PetscFunctionBegin; 1193 ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr); 1194 if (jac->n_local > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only a single block in a subcommunicator is supported"); 1195 jac->n_local = 1; /* currently only a single block is supported for a subcommunicator */ 1196 if (!pc->setupcalled) { 1197 wasSetup = PETSC_FALSE; 1198 ierr = PetscNewLog(pc,&mpjac);CHKERRQ(ierr); 1199 jac->data = (void*)mpjac; 1200 1201 /* initialize datastructure mpjac */ 1202 if (!jac->psubcomm) { 1203 /* Create default contiguous subcommunicatiors if user does 
not provide them */ 1204 ierr = PetscSubcommCreate(comm,&jac->psubcomm);CHKERRQ(ierr); 1205 ierr = PetscSubcommSetNumber(jac->psubcomm,jac->n);CHKERRQ(ierr); 1206 ierr = PetscSubcommSetType(jac->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr); 1207 ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr); 1208 } 1209 mpjac->psubcomm = jac->psubcomm; 1210 subcomm = PetscSubcommChild(mpjac->psubcomm); 1211 1212 /* Get matrix blocks of pmat */ 1213 ierr = MatGetMultiProcBlock(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr); 1214 1215 /* create a new PC that processors in each subcomm have copy of */ 1216 ierr = PetscMalloc1(1,&jac->ksp);CHKERRQ(ierr); 1217 ierr = KSPCreate(subcomm,&jac->ksp[0]);CHKERRQ(ierr); 1218 ierr = KSPSetErrorIfNotConverged(jac->ksp[0],pc->erroriffailure);CHKERRQ(ierr); 1219 ierr = PetscObjectIncrementTabLevel((PetscObject)jac->ksp[0],(PetscObject)pc,1);CHKERRQ(ierr); 1220 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)jac->ksp[0]);CHKERRQ(ierr); 1221 ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats);CHKERRQ(ierr); 1222 ierr = KSPGetPC(jac->ksp[0],&mpjac->pc);CHKERRQ(ierr); 1223 1224 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 1225 ierr = KSPSetOptionsPrefix(jac->ksp[0],prefix);CHKERRQ(ierr); 1226 ierr = KSPAppendOptionsPrefix(jac->ksp[0],"sub_");CHKERRQ(ierr); 1227 /* 1228 PetscMPIInt rank,subsize,subrank; 1229 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 1230 ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr); 1231 ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr); 1232 1233 ierr = MatGetLocalSize(mpjac->submats,&m,NULL);CHKERRQ(ierr); 1234 ierr = MatGetSize(mpjac->submats,&n,NULL);CHKERRQ(ierr); 1235 ierr = PetscSynchronizedPrintf(comm,"[%d], sub-size %d,sub-rank %d\n",rank,subsize,subrank); 1236 ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr); 1237 */ 1238 1239 /* create dummy vectors xsub and ysub */ 1240 ierr = MatGetLocalSize(mpjac->submats,&m,&n);CHKERRQ(ierr); 1241 ierr = VecCreateMPIWithArray(subcomm,1,n,PETSC_DECIDE,NULL,&mpjac->xsub);CHKERRQ(ierr); 1242 ierr = VecCreateMPIWithArray(subcomm,1,m,PETSC_DECIDE,NULL,&mpjac->ysub);CHKERRQ(ierr); 1243 #if defined(PETSC_HAVE_CUDA) 1244 ierr = PetscObjectTypeCompareAny((PetscObject)mpjac->submats,&is_gpumatrix,MATAIJCUSPARSE,MATMPIAIJCUSPARSE,"");CHKERRQ(ierr); 1245 if (is_gpumatrix) { 1246 ierr = VecSetType(mpjac->xsub,VECMPICUDA);CHKERRQ(ierr); 1247 ierr = VecSetType(mpjac->ysub,VECMPICUDA);CHKERRQ(ierr); 1248 } 1249 #endif 1250 #if defined(PETSC_HAVE_VIENNACL) 1251 ierr = PetscObjectTypeCompareAny((PetscObject)mpjac->submats,&is_gpumatrix,MATAIJVIENNACL,MATMPIAIJVIENNACL,"");CHKERRQ(ierr); 1252 if (is_gpumatrix) { 1253 ierr = VecSetType(mpjac->xsub,VECMPIVIENNACL);CHKERRQ(ierr); 1254 ierr = VecSetType(mpjac->ysub,VECMPIVIENNACL);CHKERRQ(ierr); 1255 } 1256 #endif 1257 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)mpjac->xsub);CHKERRQ(ierr); 1258 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)mpjac->ysub);CHKERRQ(ierr); 1259 1260 pc->ops->reset = PCReset_BJacobi_Multiproc; 1261 pc->ops->destroy = PCDestroy_BJacobi_Multiproc; 1262 pc->ops->apply = PCApply_BJacobi_Multiproc; 1263 } else { /* pc->setupcalled */ 1264 subcomm = PetscSubcommChild(mpjac->psubcomm); 1265 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 1266 /* destroy old matrix blocks, then get new matrix blocks */ 1267 if (mpjac->submats) {ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);} 1268 ierr = 
MatGetMultiProcBlock(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr);
    } else {
      ierr = MatGetMultiProcBlock(pc->pmat,subcomm,MAT_REUSE_MATRIX,&mpjac->submats);CHKERRQ(ierr);
    }
    ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats);CHKERRQ(ierr);
  }

  if (!wasSetup && pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(jac->ksp[0]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
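
/*
   Illustrative sketch (a comment only, not compiled as part of this file): requesting fewer total
   blocks than MPI ranks makes PCSetUp_BJacobi() dispatch to the multiproc path above, so each block
   is solved by a sub-KSP living on a subcommunicator spanning several processes. The outer ksp object
   and the block count 2 below are assumptions for the example; the same effect is obtained with
   -pc_type bjacobi -pc_bjacobi_blocks 2 on the command line.

     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCBJACOBI);CHKERRQ(ierr);
     ierr = PCBJacobiSetTotalBlocks(pc,2,NULL);CHKERRQ(ierr);
     ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
*/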