/*
   Defines a block Jacobi preconditioner.
*/

#include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /*I "petscpc.h" I*/

static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC,Mat,Mat);
static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC,Mat,Mat);
static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC);

static PetscErrorCode PCSetUp_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  Mat            mat = pc->mat,pmat = pc->pmat;
  PetscErrorCode ierr;
  PetscBool      hasop;
  PetscInt       N,M,start,i,sum,end;
  PetscInt       bs,i_start=-1,i_end=-1;
  PetscMPIInt    rank,size;
  const char     *pprefix,*mprefix;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
  ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);

  if (jac->n > 0 && jac->n < size) {
    ierr = PCSetUp_BJacobi_Multiproc(pc);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* --------------------------------------------------------------------------
      Determines the number of blocks assigned to each processor
  -----------------------------------------------------------------------------*/

  /* local block count given */
  if (jac->n_local > 0 && jac->n < 0) {
    ierr = MPIU_Allreduce(&jac->n_local,&jac->n,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    if (jac->l_lens) { /* check that user set these correctly */
      sum = 0;
      for (i=0; i<jac->n_local; i++) {
        if (jac->l_lens[i]/bs*bs != jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
        sum += jac->l_lens[i];
      }
      if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local lens set incorrectly");
    } else {
      ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
    }
  } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */
    /* global blocks given: determine which ones are local */
    if (jac->g_lens) {
      /* check if the g_lens has valid entries */
      for (i=0; i<jac->n; i++) {
        if (!jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Zero block not allowed");
        if (jac->g_lens[i]/bs*bs != jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
      }
      if (size == 1) {
        jac->n_local = jac->n;
        ierr         = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
        ierr         = PetscMemcpy(jac->l_lens,jac->g_lens,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
        /* check that user set these correctly */
        sum = 0;
        for (i=0; i<jac->n_local; i++) sum += jac->l_lens[i];
        if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Global lens set incorrectly");
      } else {
        ierr = MatGetOwnershipRange(pc->pmat,&start,&end);CHKERRQ(ierr);
        /* loop over blocks determining first one owned by me */
        sum = 0;
        for (i=0; i<jac->n+1; i++) {
          if (sum == start) { i_start = i; goto start_1;}
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
start_1:
        for (i=i_start; i<jac->n+1; i++) {
          if (sum == end) { i_end = i; goto end_1; }
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
end_1:
        jac->n_local = i_end - i_start;
        ierr         = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
        ierr         = PetscMemcpy(jac->l_lens,jac->g_lens+i_start,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
      }
    } else { /* no global blocks given, determine them using the default layout */
      jac->n_local = jac->n/size + ((jac->n % size) > rank);
      ierr         = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        jac->l_lens[i] = ((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i))*bs;
        if (!jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Too many blocks given");
      }
    }
  } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */
    jac->n         = size;
    jac->n_local   = 1;
    ierr           = PetscMalloc1(1,&jac->l_lens);CHKERRQ(ierr);
    jac->l_lens[0] = M;
  } else { /* jac->n > 0 && jac->n_local > 0 */
    if (!jac->l_lens) {
      ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
    }
  }
  if (jac->n_local < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of blocks is less than number of processors");

  /* -------------------------
     Determines mat and pmat
  ---------------------------*/
  ierr = MatHasOperation(pc->mat,MATOP_GET_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr);
  if (!hasop && size == 1) {
    mat  = pc->mat;
    pmat = pc->pmat;
  } else {
    if (pc->useAmat) {
      /* use block from Amat matrix, not Pmat for local MatMult() */
      ierr = MatGetDiagonalBlock(pc->mat,&mat);CHKERRQ(ierr);
      /* make submatrix have same prefix as entire matrix */
      ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->mat,&mprefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)mat,mprefix);CHKERRQ(ierr);
    }
    if (pc->pmat != pc->mat || !pc->useAmat) {
      ierr = MatGetDiagonalBlock(pc->pmat,&pmat);CHKERRQ(ierr);
      /* make submatrix have same prefix as entire matrix */
      ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)pmat,pprefix);CHKERRQ(ierr);
    } else pmat = mat;
  }

  /* ------
     Setup code depends on the number of blocks
  */
  if (jac->n_local == 1) {
    ierr = PCSetUp_BJacobi_Singleblock(pc,mat,pmat);CHKERRQ(ierr);
  } else {
    ierr = PCSetUp_BJacobi_Multiblock(pc,mat,pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
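/*
   Illustrative worked example of the default block-length layout computed in PCSetUp_BJacobi()
   above (not part of the original source; the numbers are hypothetical).  With M = 10 local
   rows, matrix block size bs = 1 and n_local = 3 local blocks, the formula

       l_lens[i] = bs*((M/bs)/n_local + (((M/bs) % n_local) > i))

   gives l_lens = {4,3,3}: each block receives floor(10/3) = 3 rows and the remainder row is
   assigned to the lowest-numbered block.
*/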
/* Default destroy, if it has never been setup */
static PetscErrorCode PCDestroy_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_BJacobi(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks,i;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"Block Jacobi options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_bjacobi_blocks","Total number of blocks","PCBJacobiSetTotalBlocks",jac->n,&blocks,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCBJacobiSetTotalBlocks(pc,blocks,NULL);CHKERRQ(ierr);}
  ierr = PetscOptionsInt("-pc_bjacobi_local_blocks","Local number of blocks","PCBJacobiSetLocalBlocks",jac->n_local,&blocks,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCBJacobiSetLocalBlocks(pc,blocks,NULL);CHKERRQ(ierr);}
  if (jac->ksp) {
    /* The sub-KSP has already been set up (e.g., PCSetUp_BJacobi_Singleblock), but KSPSetFromOptions was not called
     * unless we had already been called. */
    for (i=0; i<jac->n_local; i++) {
      ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr);
    }
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#include <petscdraw.h>
static PetscErrorCode PCView_BJacobi(PC pc,PetscViewer viewer)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscMPIInt          rank;
  PetscInt             i;
  PetscBool            iascii,isstring,isdraw;
  PetscViewer          sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (iascii) {
    if (pc->useAmat) {
      ierr = PetscViewerASCIIPrintf(viewer," using Amat local matrix, number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer," number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (jac->same_local_solves) {
      ierr = PetscViewerASCIIPrintf(viewer," Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
      if (jac->ksp && !jac->psubcomm) {
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      } else if (mpjac && jac->ksp && mpjac->psubcomm) {
        ierr = PetscViewerGetSubViewer(viewer,mpjac->psubcomm->child,&sviewer);CHKERRQ(ierr);
        if (!mpjac->psubcomm->color) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(*(jac->ksp),sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSubViewer(viewer,mpjac->psubcomm->child,&sviewer);CHKERRQ(ierr);
      }
    } else {
      PetscInt n_global;
      ierr = MPIU_Allreduce(&jac->n_local,&n_global,1,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D, first local block number = %D\n",
                                                rank,jac->n_local,jac->first_local);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr);
        ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blks=%D",jac->n);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x,y,bottom,h;

    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawGetCurrentPoint(draw,&x,&y);CHKERRQ(ierr);
    ierr = PetscSNPrintf(str,25,"Number blocks %D",jac->n);CHKERRQ(ierr);
    ierr = PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);CHKERRQ(ierr);
    bottom = y - h;
    ierr = PetscDrawPushCurrentPoint(draw,x,bottom);CHKERRQ(ierr);
    /* warning: the communicator on the viewer is different than on the ksp in parallel */
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],viewer);CHKERRQ(ierr);}
    ierr = PetscDrawPopCurrentPoint(draw);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/

static PetscErrorCode PCBJacobiGetSubKSP_BJacobi(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_BJacobi *jac = (PC_BJacobi*)pc->data;

  PetscFunctionBegin;
  if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call KSPSetUp() or PCSetUp() first");

  if (n_local) *n_local = jac->n_local;
  if (first_local) *first_local = jac->first_local;
  *ksp = jac->ksp;
  jac->same_local_solves = PETSC_FALSE;        /* Assume that local solves are now different;
                                                  not necessarily true, though!  This flag is
                                                  used only for PCView_BJacobi() */
  PetscFunctionReturn(0);
}
static PetscErrorCode PCBJacobiSetTotalBlocks_BJacobi(PC pc,PetscInt blocks,PetscInt *lens)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (pc->setupcalled > 0 && jac->n!=blocks) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Cannot alter number of blocks after PCSetUp()/KSPSetUp() has been called");
  jac->n = blocks;
  if (!lens) jac->g_lens = 0;
  else {
    ierr = PetscMalloc1(blocks,&jac->g_lens);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(jac->g_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCBJacobiGetTotalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PC_BJacobi *jac = (PC_BJacobi*) pc->data;

  PetscFunctionBegin;
  *blocks = jac->n;
  if (lens) *lens = jac->g_lens;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCBJacobiSetLocalBlocks_BJacobi(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PC_BJacobi     *jac;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  jac = (PC_BJacobi*)pc->data;

  jac->n_local = blocks;
  if (!lens) jac->l_lens = 0;
  else {
    ierr = PetscMalloc1(blocks,&jac->l_lens);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(jac->l_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCBJacobiGetLocalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PC_BJacobi *jac = (PC_BJacobi*) pc->data;

  PetscFunctionBegin;
  *blocks = jac->n_local;
  if (lens) *lens = jac->l_lens;
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/

/*@C
   PCBJacobiGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor, or NULL
.  first_local - the global number of the first block on this processor, or NULL
-  ksp - the array of KSP contexts

   Notes:
   After PCBJacobiGetSubKSP() the array of KSP contexts is not to be freed.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() or PCSetUp() before calling PCBJacobiGetSubKSP().

   Fortran Usage: You must pass in a KSP array that is large enough to contain all the local KSPs.
      You can call PCBJacobiGetSubKSP(pc,nlocal,firstlocal,PETSC_NULL_KSP,ierr) to determine how large the
      KSP array must be.

   Level: advanced

.keywords: block, Jacobi, get, sub, KSP, context

.seealso: PCBJacobiGetSubKSP()
@*/
PetscErrorCode PCBJacobiGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCBJacobiGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
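/*
   Illustrative usage sketch for PCBJacobiGetSubKSP() (not part of the original source; the
   variable names pc, subksp, nlocal, first are hypothetical).  After KSPSetUp() or PCSetUp()
   each local block solver can be customized directly:

       KSP      *subksp;
       PC       subpc;
       PetscInt nlocal,first,i;

       ierr = PCBJacobiGetSubKSP(pc,&nlocal,&first,&subksp);CHKERRQ(ierr);
       for (i=0; i<nlocal; i++) {
         ierr = KSPGetPC(subksp[i],&subpc);CHKERRQ(ierr);
         ierr = PCSetType(subpc,PCILU);CHKERRQ(ierr);
       }

   The array returned in subksp is owned by the preconditioner and must not be freed.
*/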
/*@
   PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block
   Jacobi preconditioner.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  blocks - the number of blocks
-  lens - [optional] integer array containing the size of each block

   Options Database Key:
.  -pc_bjacobi_blocks <blocks> - Sets the number of global blocks

   Notes:
   Currently only a limited number of blocking configurations are supported.
   All processors sharing the PC must call this routine with the same data.

   Level: intermediate

.keywords: set, number, Jacobi, global, total, blocks

.seealso: PCSetUseAmat(), PCBJacobiSetLocalBlocks()
@*/
PetscErrorCode PCBJacobiSetTotalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (blocks <= 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Must have positive blocks");
  ierr = PetscTryMethod(pc,"PCBJacobiSetTotalBlocks_C",(PC,PetscInt,const PetscInt[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block
   Jacobi preconditioner.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  blocks - the number of blocks
-  lens - integer array containing the size of each block

   Level: intermediate

.keywords: get, number, Jacobi, global, total, blocks

.seealso: PCSetUseAmat(), PCBJacobiGetLocalBlocks()
@*/
PetscErrorCode PCBJacobiGetTotalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID,1);
  PetscValidIntPointer(blocks,2);
  ierr = PetscUseMethod(pc,"PCBJacobiGetTotalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block
   Jacobi preconditioner.

   Not Collective

   Input Parameters:
+  pc - the preconditioner context
.  blocks - the number of blocks
-  lens - [optional] integer array containing size of each block

   Options Database Key:
.  -pc_bjacobi_local_blocks <blocks> - Sets the number of local blocks

   Note:
   Currently only a limited number of blocking configurations are supported.

   Level: intermediate

.keywords: PC, set, number, Jacobi, local, blocks

.seealso: PCSetUseAmat(), PCBJacobiSetTotalBlocks()
@*/
PetscErrorCode PCBJacobiSetLocalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (blocks < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have nonnegative blocks");
  ierr = PetscTryMethod(pc,"PCBJacobiSetLocalBlocks_C",(PC,PetscInt,const PetscInt []),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
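/*
   Illustrative usage sketch for PCBJacobiSetTotalBlocks() (not part of the original source;
   the block sizes are hypothetical).  Four global blocks of explicitly chosen sizes for a
   100x100 system (the sizes must sum to the global matrix size and respect the matrix block
   size):

       PetscInt lens[4] = {40,30,20,10};

       ierr = PCSetType(pc,PCBJACOBI);CHKERRQ(ierr);
       ierr = PCBJacobiSetTotalBlocks(pc,4,lens);CHKERRQ(ierr);

   Passing NULL for lens lets PCSetUp_BJacobi() split the matrix into (nearly) equally sized
   blocks using the default layout.
*/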
/*@C
   PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block
   Jacobi preconditioner.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  blocks - the number of blocks
-  lens - [optional] integer array containing size of each block

   Note:
   Currently only a limited number of blocking configurations are supported.

   Level: intermediate

.keywords: PC, get, number, Jacobi, local, blocks

.seealso: PCSetUseAmat(), PCBJacobiGetTotalBlocks()
@*/
PetscErrorCode PCBJacobiGetLocalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID,1);
  PetscValidIntPointer(blocks,2);
  ierr = PetscUseMethod(pc,"PCBJacobiGetLocalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -----------------------------------------------------------------------------------*/

/*MC
   PCBJACOBI - Use block Jacobi preconditioning, each block is (approximately) solved with
           its own KSP object.

   Options Database Keys:
+  -pc_use_amat - use Amat to apply block of operator in inner Krylov method
-  -pc_bjacobi_blocks <n> - use n total blocks

   Notes:
     Each processor can have one or more blocks, or a single block can be shared by several processes. Defaults to one block per processor.

     To set options on the solvers for each block append -sub_ to all the KSP and PC
        options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

     To set the options on the solvers separately for each block call PCBJacobiGetSubKSP()
         and set the options directly on the resulting KSP object (you can access its PC
         with KSPGetPC())

     For GPU-based vectors (CUDA, ViennaCL) it is recommended to use exactly one block per MPI process for best
         performance.  Different block partitioning may lead to additional data transfers
         between host and GPU that lead to degraded performance.

     The options prefix for each block is sub_, for example -sub_pc_type lu.

     When multiple processes share a single block, each block encompasses exactly all the unknowns owned by its set of processes.

   Level: beginner

   Concepts: block Jacobi

.seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC,
          PCASM, PCSetUseAmat(), PCGetUseAmat(), PCBJacobiGetSubKSP(), PCBJacobiSetTotalBlocks(),
          PCBJacobiSetLocalBlocks(), PCSetModifySubmatrices()
M*/
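/*
   Illustrative command-line sketch for PCBJACOBI (not part of the original source; the
   executable name ./app and option values are examples only).  A GMRES solve with 4 blocks,
   each block solved by ILU-preconditioned preonly:

       mpiexec -n 4 ./app -ksp_type gmres -pc_type bjacobi -pc_bjacobi_blocks 4 \
               -sub_ksp_type preonly -sub_pc_type ilu -sub_pc_factor_levels 1

   The -sub_ options are forwarded to the per-block KSP/PC objects created in the
   PCSetUp_BJacobi_*() routines below, because each sub-KSP is given the "sub_" options prefix.
*/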
PETSC_EXTERN PetscErrorCode PCCreate_BJacobi(PC pc)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PC_BJacobi     *jac;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&jac);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);

  pc->ops->apply           = 0;
  pc->ops->applytranspose  = 0;
  pc->ops->setup           = PCSetUp_BJacobi;
  pc->ops->destroy         = PCDestroy_BJacobi;
  pc->ops->setfromoptions  = PCSetFromOptions_BJacobi;
  pc->ops->view            = PCView_BJacobi;
  pc->ops->applyrichardson = 0;

  pc->data               = (void*)jac;
  jac->n                 = -1;
  jac->n_local           = -1;
  jac->first_local       = rank;
  jac->ksp               = 0;
  jac->same_local_solves = PETSC_TRUE;
  jac->g_lens            = 0;
  jac->l_lens            = 0;
  jac->psubcomm          = 0;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetSubKSP_C",PCBJacobiGetSubKSP_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetTotalBlocks_C",PCBJacobiSetTotalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetTotalBlocks_C",PCBJacobiGetTotalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetLocalBlocks_C",PCBJacobiSetLocalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetLocalBlocks_C",PCBJacobiGetLocalBlocks_BJacobi);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* --------------------------------------------------------------------------------------------*/
/*
        These are for a single block per processor; works for AIJ, BAIJ; Seq and MPI
*/
static PetscErrorCode PCReset_BJacobi_Singleblock(PC pc)
{
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);
  ierr = VecDestroy(&bjac->x);CHKERRQ(ierr);
  ierr = VecDestroy(&bjac->y);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_BJacobi_Singleblock(PC pc)
{
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  ierr = PCReset_BJacobi_Singleblock(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr);
  ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  ierr = PetscFree(bjac);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetUpOnBlocks_BJacobi_Singleblock(PC pc)
{
  PetscErrorCode     ierr;
  PC_BJacobi         *jac = (PC_BJacobi*)pc->data;
  KSP                subksp = jac->ksp[0];
  KSPConvergedReason reason;

  PetscFunctionBegin;
  ierr = KSPSetUp(subksp);CHKERRQ(ierr);
  ierr = KSPGetConvergedReason(subksp,&reason);CHKERRQ(ierr);
  if (reason == KSP_DIVERGED_PCSETUP_FAILED) {
    pc->failedreason = PC_SUBPC_ERROR;
  }
  PetscFunctionReturn(0);
}
static PetscErrorCode PCApply_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;

  PetscFunctionBegin;
  ierr = VecGetLocalVectorRead(x, bjac->x);CHKERRQ(ierr);
  ierr = VecGetLocalVector(y, bjac->y);CHKERRQ(ierr);
  /* Since the inner KSP matrix may point directly to the diagonal block of an MPI matrix the inner
     matrix may change even if the outer KSP/PC has not updated the preconditioner; this will trigger a rebuild
     of the inner preconditioner automatically unless we pass down the outer preconditioner's reuse flag. */
  ierr = KSPSetReusePreconditioner(jac->ksp[0],pc->reusepreconditioner);CHKERRQ(ierr);
  ierr = KSPSolve(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreLocalVectorRead(x, bjac->x);CHKERRQ(ierr);
  ierr = VecRestoreLocalVector(y, bjac->y);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApplySymmetricLeft_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *y_array;
  const PetscScalar      *x_array;
  PC                     subpc;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() is to avoid having to copy the
    y vector into the bjac->x vector. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  ierr = VecGetArrayRead(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
  /* apply the symmetric left portion of the inner PC operator */
  /* note this bypasses the inner KSP and its options completely */
  ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
  ierr = PCApplySymmetricLeft(subpc,bjac->x,bjac->y);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
static PetscErrorCode PCApplySymmetricRight_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *y_array;
  const PetscScalar      *x_array;
  PC                     subpc;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() is to avoid having to copy the
    y vector into the bjac->x vector. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  ierr = VecGetArrayRead(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);

  /* apply the symmetric right portion of the inner PC operator */
  /* note this bypasses the inner KSP and its options completely */

  ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
  ierr = PCApplySymmetricRight(subpc,bjac->x,bjac->y);CHKERRQ(ierr);

  /* undo the VecPlaceArray() calls before returning the arrays, mirroring the symmetric-left apply above */
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApplyTranspose_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *y_array;
  const PetscScalar      *x_array;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() is to avoid having to copy the
    y vector into the bjac->x vector. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  ierr = VecGetArrayRead(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
  ierr = KSPSolveTranspose(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC pc,Mat mat,Mat pmat)
{
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode         ierr;
  PetscInt               m;
  KSP                    ksp;
  PC_BJacobi_Singleblock *bjac;
  PetscBool              wasSetup = PETSC_TRUE;
#if defined(PETSC_HAVE_VECCUDA) || defined(PETSC_HAVE_VIENNACL)
  PetscBool              is_gpumatrix = PETSC_FALSE;
#endif

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    const char *prefix;

    if (!jac->ksp) {
      wasSetup = PETSC_FALSE;

      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);

      pc->ops->reset               = PCReset_BJacobi_Singleblock;
      pc->ops->destroy             = PCDestroy_BJacobi_Singleblock;
      pc->ops->apply               = PCApply_BJacobi_Singleblock;
      pc->ops->applysymmetricleft  = PCApplySymmetricLeft_BJacobi_Singleblock;
      pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock;
      pc->ops->applytranspose      = PCApplyTranspose_BJacobi_Singleblock;
      pc->ops->setuponblocks       = PCSetUpOnBlocks_BJacobi_Singleblock;

      ierr = PetscMalloc1(1,&jac->ksp);CHKERRQ(ierr);
      jac->ksp[0] = ksp;

      ierr = PetscNewLog(pc,&bjac);CHKERRQ(ierr);
      jac->data = (void*)bjac;
    } else {
      ksp  = jac->ksp[0];
      bjac = (PC_BJacobi_Singleblock*)jac->data;
    }

    /*
      The reason we need to generate these vectors is to serve
      as the right-hand side and solution vector for the solve on the
      block. We do not need to allocate space for the vectors since
      that is provided via VecPlaceArray() just before the call to
      KSPSolve() on the block.
    */
    ierr = MatGetSize(pmat,&m,&m);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->x);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->y);CHKERRQ(ierr);
#if defined(PETSC_HAVE_VECCUDA)
    ierr = PetscObjectTypeCompareAny((PetscObject)pmat,&is_gpumatrix,MATAIJCUSPARSE,MATSEQAIJCUSPARSE,MATMPIAIJCUSPARSE,"");CHKERRQ(ierr);
    if (is_gpumatrix) {
      ierr = VecSetType(bjac->x,VECCUDA);CHKERRQ(ierr);
      ierr = VecSetType(bjac->y,VECCUDA);CHKERRQ(ierr);
    }
#endif
#if defined(PETSC_HAVE_VIENNACL)
    ierr = PetscObjectTypeCompareAny((PetscObject)pmat,&is_gpumatrix,MATAIJVIENNACL,MATSEQAIJVIENNACL,MATMPIAIJVIENNACL,"");CHKERRQ(ierr);
    if (is_gpumatrix) {
      ierr = VecSetType(bjac->x,VECVIENNACL);CHKERRQ(ierr);
      ierr = VecSetType(bjac->y,VECVIENNACL);CHKERRQ(ierr);
    }
#endif
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->x);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->y);CHKERRQ(ierr);
  } else {
    ksp  = jac->ksp[0];
    bjac = (PC_BJacobi_Singleblock*)jac->data;
  }
  if (pc->useAmat) {
    ierr = KSPSetOperators(ksp,mat,pmat);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(ksp,pmat,pmat);CHKERRQ(ierr);
  }
  if (!wasSetup && pc->setfromoptionscalled) {
    /* If PCSetFromOptions_BJacobi is called later, KSPSetFromOptions will be called at that time. */
    ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* ---------------------------------------------------------------------------------------------*/
static PetscErrorCode PCReset_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi            *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscErrorCode        ierr;
  PetscInt              i;

  PetscFunctionBegin;
  if (bjac && bjac->pmat) {
    ierr = MatDestroyMatrices(jac->n_local,&bjac->pmat);CHKERRQ(ierr);
    if (pc->useAmat) {
      ierr = MatDestroyMatrices(jac->n_local,&bjac->mat);CHKERRQ(ierr);
    }
  }

  for (i=0; i<jac->n_local; i++) {
    ierr = KSPReset(jac->ksp[i]);CHKERRQ(ierr);
    if (bjac && bjac->x) {
      ierr = VecDestroy(&bjac->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&bjac->y[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&bjac->is[i]);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi            *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscErrorCode        ierr;
  PetscInt              i;

  PetscFunctionBegin;
  ierr = PCReset_BJacobi_Multiblock(pc);CHKERRQ(ierr);
  if (bjac) {
    ierr = PetscFree2(bjac->x,bjac->y);CHKERRQ(ierr);
    ierr = PetscFree(bjac->starts);CHKERRQ(ierr);
    ierr = PetscFree(bjac->is);CHKERRQ(ierr);
  }
  ierr = PetscFree(jac->data);CHKERRQ(ierr);
  for (i=0; i<jac->n_local; i++) {
    ierr = KSPDestroy(&jac->ksp[i]);CHKERRQ(ierr);
  }
  ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
static PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi         *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode     ierr;
  PetscInt           i,n_local = jac->n_local;
  KSPConvergedReason reason;

  PetscFunctionBegin;
  for (i=0; i<n_local; i++) {
    ierr = KSPSetUp(jac->ksp[i]);CHKERRQ(ierr);
    ierr = KSPGetConvergedReason(jac->ksp[i],&reason);CHKERRQ(ierr);
    if (reason == KSP_DIVERGED_PCSETUP_FAILED) {
      pc->failedreason = PC_SUBPC_ERROR;
    }
  }
  PetscFunctionReturn(0);
}

/*
      Preconditioner for block Jacobi
*/
static PetscErrorCode PCApply_BJacobi_Multiblock(PC pc,Vec x,Vec y)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode        ierr;
  PetscInt              i,n_local = jac->n_local;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscScalar           *yin;
  const PetscScalar     *xin;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(x,&xin);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
  for (i=0; i<n_local; i++) {
    /*
       To avoid copying the subvector from x into a workspace we instead
       make the workspace vector array point to the subpart of the array of
       the global vector.
    */
    ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
    ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);

    ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
    ierr = KSPSolve(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);

    ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
    ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
  }
  ierr = VecRestoreArrayRead(x,&xin);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*
      Preconditioner for block Jacobi
*/
static PetscErrorCode PCApplyTranspose_BJacobi_Multiblock(PC pc,Vec x,Vec y)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode        ierr;
  PetscInt              i,n_local = jac->n_local;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscScalar           *yin;
  const PetscScalar     *xin;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(x,&xin);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
  for (i=0; i<n_local; i++) {
    /*
       To avoid copying the subvector from x into a workspace we instead
       make the workspace vector array point to the subpart of the array of
       the global vector.
    */
    ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
    ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);

    ierr = PetscLogEventBegin(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);

    ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
    ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
  }
  ierr = VecRestoreArrayRead(x,&xin);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC pc,Mat mat,Mat pmat)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode        ierr;
  PetscInt              m,n_local,N,M,start,i;
  const char            *prefix,*pprefix,*mprefix;
  KSP                   ksp;
  Vec                   x,y;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PC                    subpc;
  IS                    is;
  MatReuse              scall;
#if defined(PETSC_HAVE_VECCUDA) || defined(PETSC_HAVE_VIENNACL)
  PetscBool             is_gpumatrix = PETSC_FALSE;
#endif

  PetscFunctionBegin;
  ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);

  n_local = jac->n_local;

  if (pc->useAmat) {
    PetscBool same;
    ierr = PetscObjectTypeCompare((PetscObject)mat,((PetscObject)pmat)->type_name,&same);CHKERRQ(ierr);
    if (!same) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_INCOMP,"Matrices not of same type");
  }

  if (!pc->setupcalled) {
    scall = MAT_INITIAL_MATRIX;

    if (!jac->ksp) {
      pc->ops->reset         = PCReset_BJacobi_Multiblock;
      pc->ops->destroy       = PCDestroy_BJacobi_Multiblock;
      pc->ops->apply         = PCApply_BJacobi_Multiblock;
      pc->ops->applytranspose= PCApplyTranspose_BJacobi_Multiblock;
      pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Multiblock;

      ierr = PetscNewLog(pc,&bjac);CHKERRQ(ierr);
      ierr = PetscMalloc1(n_local,&jac->ksp);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(n_local*sizeof(KSP)));CHKERRQ(ierr);
      ierr = PetscMalloc2(n_local,&bjac->x,n_local,&bjac->y);CHKERRQ(ierr);
      ierr = PetscMalloc1(n_local,&bjac->starts);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(n_local*sizeof(PetscScalar)));CHKERRQ(ierr);

      jac->data = (void*)bjac;
      ierr      = PetscMalloc1(n_local,&bjac->is);CHKERRQ(ierr);
      ierr      = PetscLogObjectMemory((PetscObject)pc,sizeof(n_local*sizeof(IS)));CHKERRQ(ierr);

      for (i=0; i<n_local; i++) {
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);

        jac->ksp[i] = ksp;
      }
    } else {
      bjac = (PC_BJacobi_Multiblock*)jac->data;
    }
    start = 0;
    for (i=0; i<n_local; i++) {
      m = jac->l_lens[i];
      /*
      The reason we need to generate these vectors is to serve
      as the right-hand side and solution vector for the solve on the
      block. We do not need to allocate space for the vectors since
      that is provided via VecPlaceArray() just before the call to
      KSPSolve() on the block.
      */
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&x);CHKERRQ(ierr);
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&y);CHKERRQ(ierr);
#if defined(PETSC_HAVE_VECCUDA)
      ierr = PetscObjectTypeCompareAny((PetscObject)pmat,&is_gpumatrix,MATAIJCUSPARSE,MATSEQAIJCUSPARSE,MATMPIAIJCUSPARSE,"");CHKERRQ(ierr);
      if (is_gpumatrix) {
        ierr = VecSetType(x,VECCUDA);CHKERRQ(ierr);
        ierr = VecSetType(y,VECCUDA);CHKERRQ(ierr);
      }
#endif
#if defined(PETSC_HAVE_VIENNACL)
      ierr = PetscObjectTypeCompareAny((PetscObject)pmat,&is_gpumatrix,MATAIJVIENNACL,MATSEQAIJVIENNACL,MATMPIAIJVIENNACL,"");CHKERRQ(ierr);
      if (is_gpumatrix) {
        ierr = VecSetType(x,VECVIENNACL);CHKERRQ(ierr);
        ierr = VecSetType(y,VECVIENNACL);CHKERRQ(ierr);
      }
#endif
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)x);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)y);CHKERRQ(ierr);

      bjac->x[i]      = x;
      bjac->y[i]      = y;
      bjac->starts[i] = start;

      ierr        = ISCreateStride(PETSC_COMM_SELF,m,start,1,&is);CHKERRQ(ierr);
      bjac->is[i] = is;
      ierr        = PetscLogObjectParent((PetscObject)pc,(PetscObject)is);CHKERRQ(ierr);

      start += m;
    }
  } else {
    bjac = (PC_BJacobi_Multiblock*)jac->data;
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(n_local,&bjac->pmat);CHKERRQ(ierr);
      if (pc->useAmat) {
        ierr = MatDestroyMatrices(n_local,&bjac->mat);CHKERRQ(ierr);
      }
      scall = MAT_INITIAL_MATRIX;
    } else scall = MAT_REUSE_MATRIX;
  }

  ierr = MatCreateSubMatrices(pmat,n_local,bjac->is,bjac->is,scall,&bjac->pmat);CHKERRQ(ierr);
  if (pc->useAmat) {
    ierr = MatCreateSubMatrices(mat,n_local,bjac->is,bjac->is,scall,&bjac->mat);CHKERRQ(ierr);
  }
  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,n_local,bjac->is,bjac->is,bjac->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  ierr = PetscObjectGetOptionsPrefix((PetscObject)pmat,&pprefix);CHKERRQ(ierr);
  for (i=0; i<n_local; i++) {
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->pmat[i]);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->pmat[i],pprefix);CHKERRQ(ierr);
    if (pc->useAmat) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->mat[i]);CHKERRQ(ierr);
      ierr = PetscObjectGetOptionsPrefix((PetscObject)mat,&mprefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->mat[i],mprefix);CHKERRQ(ierr);
      ierr = KSPSetOperators(jac->ksp[i],bjac->mat[i],bjac->pmat[i]);CHKERRQ(ierr);
    } else {
      ierr = KSPSetOperators(jac->ksp[i],bjac->pmat[i],bjac->pmat[i]);CHKERRQ(ierr);
    }
    if (pc->setfromoptionscalled) {
      ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

/* ---------------------------------------------------------------------------------------------*/
/*
      These are for a single block with multiple processes;
*/
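/*
   Illustrative sketch of when the multiproc path is taken (not part of the original source;
   the executable name ./app and the process/block counts are examples only).  PCSetUp_BJacobi()
   dispatches to PCSetUp_BJacobi_Multiproc() whenever fewer blocks than MPI processes are
   requested, so several processes share one block, e.g.

       mpiexec -n 8 ./app -pc_type bjacobi -pc_bjacobi_blocks 2 -sub_ksp_type gmres -sub_pc_type asm

   creates 2 contiguous subcommunicators of 4 processes each; every subcommunicator owns one
   diagonal block and solves it with its own parallel sub-KSP.
*/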
static PetscErrorCode PCReset_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;

  PetscFunctionBegin;
  ierr = VecDestroy(&mpjac->ysub);CHKERRQ(ierr);
  ierr = VecDestroy(&mpjac->xsub);CHKERRQ(ierr);
  ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);
  if (jac->ksp) {ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;

  PetscFunctionBegin;
  ierr = PCReset_BJacobi_Multiproc(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr);
  ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&mpjac->psubcomm);CHKERRQ(ierr);

  ierr = PetscFree(mpjac);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApply_BJacobi_Multiproc(PC pc,Vec x,Vec y)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscScalar          *yarray;
  const PetscScalar    *xarray;
  KSPConvergedReason   reason;

  PetscFunctionBegin;
  /* place x's and y's local arrays into xsub and ysub */
  ierr = VecGetArrayRead(x,&xarray);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yarray);CHKERRQ(ierr);
  ierr = VecPlaceArray(mpjac->xsub,xarray);CHKERRQ(ierr);
  ierr = VecPlaceArray(mpjac->ysub,yarray);CHKERRQ(ierr);

  /* apply preconditioner on each matrix block */
  ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr);
  ierr = KSPSolve(jac->ksp[0],mpjac->xsub,mpjac->ysub);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr);
  ierr = KSPGetConvergedReason(jac->ksp[0],&reason);CHKERRQ(ierr);
  if (reason == KSP_DIVERGED_PCSETUP_FAILED) {
    pc->failedreason = PC_SUBPC_ERROR;
  }

  ierr = VecResetArray(mpjac->xsub);CHKERRQ(ierr);
  ierr = VecResetArray(mpjac->ysub);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(x,&xarray);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscInt             m,n;
  MPI_Comm             comm,subcomm=0;
  const char           *prefix;
  PetscBool            wasSetup = PETSC_TRUE;
#if defined(PETSC_HAVE_VECCUDA) || defined(PETSC_HAVE_VIENNACL)
  PetscBool            is_gpumatrix = PETSC_FALSE;
#endif

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  if (jac->n_local > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only a single block in a subcommunicator is supported");
  jac->n_local = 1; /* currently only a single block is supported for a subcommunicator */
  if (!pc->setupcalled) {
    wasSetup  = PETSC_FALSE;
    ierr      = PetscNewLog(pc,&mpjac);CHKERRQ(ierr);
    jac->data = (void*)mpjac;

    /* initialize the data structure mpjac */
    if (!jac->psubcomm) {
      /* Create default contiguous subcommunicators if the user does not provide them */
      ierr = PetscSubcommCreate(comm,&jac->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(jac->psubcomm,jac->n);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(jac->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
    }
    mpjac->psubcomm = jac->psubcomm;
    subcomm         = PetscSubcommChild(mpjac->psubcomm);

    /* Get matrix blocks of pmat */
    ierr = MatGetMultiProcBlock(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr);

    /* create a new KSP (and PC) that the processes in each subcomm have a copy of */
    ierr = PetscMalloc1(1,&jac->ksp);CHKERRQ(ierr);
    ierr = KSPCreate(subcomm,&jac->ksp[0]);CHKERRQ(ierr);
    ierr = KSPSetErrorIfNotConverged(jac->ksp[0],pc->erroriffailure);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)jac->ksp[0],(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)jac->ksp[0]);CHKERRQ(ierr);
    ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats);CHKERRQ(ierr);
    ierr = KSPGetPC(jac->ksp[0],&mpjac->pc);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(jac->ksp[0],prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(jac->ksp[0],"sub_");CHKERRQ(ierr);
    /*
      PetscMPIInt rank,subsize,subrank;
      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
      ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
      ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);

      ierr = MatGetLocalSize(mpjac->submats,&m,NULL);CHKERRQ(ierr);
      ierr = MatGetSize(mpjac->submats,&n,NULL);CHKERRQ(ierr);
      ierr = PetscSynchronizedPrintf(comm,"[%d], sub-size %d,sub-rank %d\n",rank,subsize,subrank);
      ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
    */

    /* create dummy vectors xsub and ysub */
    ierr = MatGetLocalSize(mpjac->submats,&m,&n);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(subcomm,1,n,PETSC_DECIDE,NULL,&mpjac->xsub);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(subcomm,1,m,PETSC_DECIDE,NULL,&mpjac->ysub);CHKERRQ(ierr);
#if defined(PETSC_HAVE_VECCUDA)
    ierr = PetscObjectTypeCompareAny((PetscObject)mpjac->submats,&is_gpumatrix,MATAIJCUSPARSE,MATMPIAIJCUSPARSE,"");CHKERRQ(ierr);
    if (is_gpumatrix) {
      ierr = VecSetType(mpjac->xsub,VECMPICUDA);CHKERRQ(ierr);
      ierr = VecSetType(mpjac->ysub,VECMPICUDA);CHKERRQ(ierr);
    }
#endif
#if defined(PETSC_HAVE_VIENNACL)
    ierr = PetscObjectTypeCompareAny((PetscObject)mpjac->submats,&is_gpumatrix,MATAIJVIENNACL,MATMPIAIJVIENNACL,"");CHKERRQ(ierr);
    if (is_gpumatrix) {
      ierr = VecSetType(mpjac->xsub,VECMPIVIENNACL);CHKERRQ(ierr);
      ierr = VecSetType(mpjac->ysub,VECMPIVIENNACL);CHKERRQ(ierr);
    }
#endif
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)mpjac->xsub);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)mpjac->ysub);CHKERRQ(ierr);

    pc->ops->reset   = PCReset_BJacobi_Multiproc;
    pc->ops->destroy = PCDestroy_BJacobi_Multiproc;
    pc->ops->apply   = PCApply_BJacobi_Multiproc;
  } else { /* pc->setupcalled */
    subcomm = PetscSubcommChild(mpjac->psubcomm);
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrix blocks, then get new matrix blocks */
      if (mpjac->submats) {ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);}
      ierr = MatGetMultiProcBlock(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr);
    } else {
      ierr = MatGetMultiProcBlock(pc->pmat,subcomm,MAT_REUSE_MATRIX,&mpjac->submats);CHKERRQ(ierr);
    }
    ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats);CHKERRQ(ierr);
  }

  if (!wasSetup && pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(jac->ksp[0]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}