1 2 /* 3 Defines a block Jacobi preconditioner. 4 */ 5 #include <petsc/private/pcimpl.h> /*I "petscpc.h" I*/ 6 #include <../src/ksp/pc/impls/bjacobi/bjacobi.h> 7 8 static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC,Mat,Mat); 9 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC,Mat,Mat); 10 static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC); 11 12 #undef __FUNCT__ 13 #define __FUNCT__ "PCSetUp_BJacobi" 14 static PetscErrorCode PCSetUp_BJacobi(PC pc) 15 { 16 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 17 Mat mat = pc->mat,pmat = pc->pmat; 18 PetscErrorCode ierr,(*f)(Mat,Mat*); 19 PetscInt N,M,start,i,sum,end; 20 PetscInt bs,i_start=-1,i_end=-1; 21 PetscMPIInt rank,size; 22 const char *pprefix,*mprefix; 23 24 PetscFunctionBegin; 25 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr); 26 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr); 27 ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr); 28 ierr = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr); 29 30 if (jac->n > 0 && jac->n < size) { 31 ierr = PCSetUp_BJacobi_Multiproc(pc);CHKERRQ(ierr); 32 PetscFunctionReturn(0); 33 } 34 35 /* -------------------------------------------------------------------------- 36 Determines the number of blocks assigned to each processor 37 -----------------------------------------------------------------------------*/ 38 39 /* local block count given */ 40 if (jac->n_local > 0 && jac->n < 0) { 41 ierr = MPIU_Allreduce(&jac->n_local,&jac->n,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr); 42 if (jac->l_lens) { /* check that user set these correctly */ 43 sum = 0; 44 for (i=0; i<jac->n_local; i++) { 45 if (jac->l_lens[i]/bs*bs !=jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout"); 46 sum += jac->l_lens[i]; 47 } 48 if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local lens set incorrectly"); 49 } else { 50 ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr); 51 for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i)); 52 } 53 } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */ 54 /* global blocks given: determine which ones are local */ 55 if (jac->g_lens) { 56 /* check if the g_lens is has valid entries */ 57 for (i=0; i<jac->n; i++) { 58 if (!jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Zero block not allowed"); 59 if (jac->g_lens[i]/bs*bs != jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout"); 60 } 61 if (size == 1) { 62 jac->n_local = jac->n; 63 ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr); 64 ierr = PetscMemcpy(jac->l_lens,jac->g_lens,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr); 65 /* check that user set these correctly */ 66 sum = 0; 67 for (i=0; i<jac->n_local; i++) sum += jac->l_lens[i]; 68 if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Global lens set incorrectly"); 69 } else { 70 ierr = MatGetOwnershipRange(pc->pmat,&start,&end);CHKERRQ(ierr); 71 /* loop over blocks determing first one owned by me */ 72 sum = 0; 73 for (i=0; i<jac->n+1; i++) { 74 if (sum == start) { i_start = i; goto start_1;} 75 if (i < jac->n) sum += jac->g_lens[i]; 76 } 77 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout"); 78 start_1: 79 for (i=i_start; i<jac->n+1; i++) { 80 if (sum == end) { i_end = i; goto end_1; } 81 if (i < jac->n) sum += jac->g_lens[i]; 82 } 83 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout"); 84 end_1: 85 jac->n_local = i_end - i_start; 86 ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr); 87 ierr = PetscMemcpy(jac->l_lens,jac->g_lens+i_start,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr); 88 } 89 } else { /* no global blocks given, determine then using default layout */ 90 jac->n_local = jac->n/size + ((jac->n % size) > rank); 91 ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr); 92 for (i=0; i<jac->n_local; i++) { 93 jac->l_lens[i] = ((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i))*bs; 94 if (!jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Too many blocks given"); 95 } 96 } 97 } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */ 98 jac->n = size; 99 jac->n_local = 1; 100 ierr = PetscMalloc1(1,&jac->l_lens);CHKERRQ(ierr); 101 jac->l_lens[0] = M; 102 } else { /* jac->n > 0 && jac->n_local > 0 */ 103 if (!jac->l_lens) { 104 ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr); 105 for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i)); 106 } 107 } 108 if (jac->n_local < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of blocks is less than number of processors"); 109 110 /* ------------------------- 111 Determines mat and pmat 112 ---------------------------*/ 113 ierr = PetscObjectQueryFunction((PetscObject)pc->mat,"MatGetDiagonalBlock_C",&f);CHKERRQ(ierr); 114 if (!f && size == 1) { 115 mat = pc->mat; 116 pmat = pc->pmat; 117 } else { 118 if (pc->useAmat) { 119 /* use block from Amat matrix, not Pmat for local MatMult() */ 120 ierr = MatGetDiagonalBlock(pc->mat,&mat);CHKERRQ(ierr); 121 /* make submatrix have same prefix as entire matrix */ 122 ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->mat,&mprefix);CHKERRQ(ierr); 123 ierr = PetscObjectSetOptionsPrefix((PetscObject)mat,mprefix);CHKERRQ(ierr); 124 } 125 if (pc->pmat != pc->mat || !pc->useAmat) { 126 ierr = MatGetDiagonalBlock(pc->pmat,&pmat);CHKERRQ(ierr); 127 /* make submatrix have same prefix as entire matrix */ 128 ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr); 129 ierr = PetscObjectSetOptionsPrefix((PetscObject)pmat,pprefix);CHKERRQ(ierr); 130 } else pmat = mat; 131 } 132 133 /* ------ 134 Setup code depends on the number of blocks 135 */ 136 if (jac->n_local == 1) { 137 ierr = PCSetUp_BJacobi_Singleblock(pc,mat,pmat);CHKERRQ(ierr); 138 } else { 139 ierr = PCSetUp_BJacobi_Multiblock(pc,mat,pmat);CHKERRQ(ierr); 140 } 141 PetscFunctionReturn(0); 142 } 143 144 /* Default destroy, if it has never been setup */ 145 #undef __FUNCT__ 146 #define __FUNCT__ "PCDestroy_BJacobi" 147 static PetscErrorCode PCDestroy_BJacobi(PC pc) 148 { 149 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 150 PetscErrorCode ierr; 151 152 PetscFunctionBegin; 153 ierr = PetscFree(jac->g_lens);CHKERRQ(ierr); 154 ierr = PetscFree(jac->l_lens);CHKERRQ(ierr); 155 ierr = PetscFree(pc->data);CHKERRQ(ierr); 156 PetscFunctionReturn(0); 157 } 158 159 #undef __FUNCT__ 160 #define __FUNCT__ "PCSetFromOptions_BJacobi" 161 162 static PetscErrorCode PCSetFromOptions_BJacobi(PetscOptionItems *PetscOptionsObject,PC pc) 163 { 164 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 165 PetscErrorCode ierr; 166 PetscInt blocks,i; 167 PetscBool flg; 168 169 PetscFunctionBegin; 170 ierr = PetscOptionsHead(PetscOptionsObject,"Block Jacobi options");CHKERRQ(ierr); 171 ierr = PetscOptionsInt("-pc_bjacobi_blocks","Total number of blocks","PCBJacobiSetTotalBlocks",jac->n,&blocks,&flg);CHKERRQ(ierr); 172 if (flg) { 173 ierr = PCBJacobiSetTotalBlocks(pc,blocks,NULL);CHKERRQ(ierr); 174 } 175 if (jac->ksp) { 176 /* The sub-KSP has already been set up (e.g., PCSetUp_BJacobi_Singleblock), but KSPSetFromOptions was not called 177 * unless we had already been called. */ 178 for (i=0; i<jac->n_local; i++) { 179 ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr); 180 } 181 } 182 ierr = PetscOptionsTail();CHKERRQ(ierr); 183 PetscFunctionReturn(0); 184 } 185 186 #include <petscdraw.h> 187 #undef __FUNCT__ 188 #define __FUNCT__ "PCView_BJacobi" 189 static PetscErrorCode PCView_BJacobi(PC pc,PetscViewer viewer) 190 { 191 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 192 PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data; 193 PetscErrorCode ierr; 194 PetscMPIInt rank; 195 PetscInt i; 196 PetscBool iascii,isstring,isdraw; 197 PetscViewer sviewer; 198 199 PetscFunctionBegin; 200 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 201 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr); 202 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr); 203 if (iascii) { 204 if (pc->useAmat) { 205 ierr = PetscViewerASCIIPrintf(viewer," block Jacobi: using Amat local matrix, number of blocks = %D\n",jac->n);CHKERRQ(ierr); 206 } 207 ierr = PetscViewerASCIIPrintf(viewer," block Jacobi: number of blocks = %D\n",jac->n);CHKERRQ(ierr); 208 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr); 209 if (jac->same_local_solves) { 210 ierr = PetscViewerASCIIPrintf(viewer," Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr); 211 if (jac->ksp && !jac->psubcomm) { 212 ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 213 if (!rank) { 214 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 215 ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr); 216 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 217 } 218 ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 219 } else if (jac->psubcomm && !jac->psubcomm->color) { 220 ierr = PetscViewerASCIIGetStdout(PetscSubcommChild(mpjac->psubcomm),&sviewer);CHKERRQ(ierr); 221 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 222 ierr = KSPView(*(jac->ksp),sviewer);CHKERRQ(ierr); 223 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 224 } 225 } else { 226 PetscInt n_global; 227 ierr = MPIU_Allreduce(&jac->n_local,&n_global,1,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr); 228 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 229 ierr = PetscViewerASCIIPrintf(viewer," Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr); 230 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D, first local block number = %D\n", 231 rank,jac->n_local,jac->first_local);CHKERRQ(ierr); 232 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 233 ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 234 for (i=0; i<jac->n_local; i++) { 235 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr); 236 ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr); 237 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr); 238 } 239 ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 240 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 241 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 242 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 243 } 244 } else if (isstring) { 245 ierr = PetscViewerStringSPrintf(viewer," blks=%D",jac->n);CHKERRQ(ierr); 246 ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 247 if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);} 248 ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 249 } else if (isdraw) { 250 PetscDraw draw; 251 char str[25]; 252 PetscReal x,y,bottom,h; 253 254 ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr); 255 ierr = PetscDrawGetCurrentPoint(draw,&x,&y);CHKERRQ(ierr); 256 ierr = PetscSNPrintf(str,25,"Number blocks %D",jac->n);CHKERRQ(ierr); 257 ierr = PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);CHKERRQ(ierr); 258 bottom = y - h; 259 ierr = PetscDrawPushCurrentPoint(draw,x,bottom);CHKERRQ(ierr); 260 /* warning the communicator on viewer is different then on ksp in parallel */ 261 if (jac->ksp) {ierr = KSPView(jac->ksp[0],viewer);CHKERRQ(ierr);} 262 ierr = PetscDrawPopCurrentPoint(draw);CHKERRQ(ierr); 263 } 264 PetscFunctionReturn(0); 265 } 266 267 /* -------------------------------------------------------------------------------------*/ 268 269 #undef __FUNCT__ 270 #define __FUNCT__ "PCBJacobiGetSubKSP_BJacobi" 271 static PetscErrorCode PCBJacobiGetSubKSP_BJacobi(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp) 272 { 273 PC_BJacobi *jac = (PC_BJacobi*)pc->data;; 274 275 PetscFunctionBegin; 276 if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call KSPSetUp() or PCSetUp() first"); 277 278 if (n_local) *n_local = jac->n_local; 279 if (first_local) *first_local = jac->first_local; 280 *ksp = jac->ksp; 281 jac->same_local_solves = PETSC_FALSE; /* Assume that local solves are now different; 282 not necessarily true though! This flag is 283 used only for PCView_BJacobi() */ 284 PetscFunctionReturn(0); 285 } 286 287 #undef __FUNCT__ 288 #define __FUNCT__ "PCBJacobiSetTotalBlocks_BJacobi" 289 static PetscErrorCode PCBJacobiSetTotalBlocks_BJacobi(PC pc,PetscInt blocks,PetscInt *lens) 290 { 291 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 292 PetscErrorCode ierr; 293 294 PetscFunctionBegin; 295 if (pc->setupcalled > 0 && jac->n!=blocks) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Cannot alter number of blocks after PCSetUp()/KSPSetUp() has been called"); 296 jac->n = blocks; 297 if (!lens) jac->g_lens = 0; 298 else { 299 ierr = PetscMalloc1(blocks,&jac->g_lens);CHKERRQ(ierr); 300 ierr = PetscLogObjectMemory((PetscObject)pc,blocks*sizeof(PetscInt));CHKERRQ(ierr); 301 ierr = PetscMemcpy(jac->g_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr); 302 } 303 PetscFunctionReturn(0); 304 } 305 306 #undef __FUNCT__ 307 #define __FUNCT__ "PCBJacobiGetTotalBlocks_BJacobi" 308 static PetscErrorCode PCBJacobiGetTotalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[]) 309 { 310 PC_BJacobi *jac = (PC_BJacobi*) pc->data; 311 312 PetscFunctionBegin; 313 *blocks = jac->n; 314 if (lens) *lens = jac->g_lens; 315 PetscFunctionReturn(0); 316 } 317 318 #undef __FUNCT__ 319 #define __FUNCT__ "PCBJacobiSetLocalBlocks_BJacobi" 320 static PetscErrorCode PCBJacobiSetLocalBlocks_BJacobi(PC pc,PetscInt blocks,const PetscInt lens[]) 321 { 322 PC_BJacobi *jac; 323 PetscErrorCode ierr; 324 325 PetscFunctionBegin; 326 jac = (PC_BJacobi*)pc->data; 327 328 jac->n_local = blocks; 329 if (!lens) jac->l_lens = 0; 330 else { 331 ierr = PetscMalloc1(blocks,&jac->l_lens);CHKERRQ(ierr); 332 ierr = PetscLogObjectMemory((PetscObject)pc,blocks*sizeof(PetscInt));CHKERRQ(ierr); 333 ierr = PetscMemcpy(jac->l_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr); 334 } 335 PetscFunctionReturn(0); 336 } 337 338 #undef __FUNCT__ 339 #define __FUNCT__ "PCBJacobiGetLocalBlocks_BJacobi" 340 static PetscErrorCode PCBJacobiGetLocalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[]) 341 { 342 PC_BJacobi *jac = (PC_BJacobi*) pc->data; 343 344 PetscFunctionBegin; 345 *blocks = jac->n_local; 346 if (lens) *lens = jac->l_lens; 347 PetscFunctionReturn(0); 348 } 349 350 /* -------------------------------------------------------------------------------------*/ 351 352 #undef __FUNCT__ 353 #define __FUNCT__ "PCBJacobiGetSubKSP" 354 /*@C 355 PCBJacobiGetSubKSP - Gets the local KSP contexts for all blocks on 356 this processor. 357 358 Note Collective 359 360 Input Parameter: 361 . pc - the preconditioner context 362 363 Output Parameters: 364 + n_local - the number of blocks on this processor, or NULL 365 . first_local - the global number of the first block on this processor, or NULL 366 - ksp - the array of KSP contexts 367 368 Notes: 369 After PCBJacobiGetSubKSP() the array of KSP contexts is not to be freed. 370 371 Currently for some matrix implementations only 1 block per processor 372 is supported. 373 374 You must call KSPSetUp() or PCSetUp() before calling PCBJacobiGetSubKSP(). 375 376 Fortran Usage: You must pass in a KSP array that is large enough to contain all the local KSPs. 377 You can call PCBJacobiGetSubKSP(pc,nlocal,firstlocal,NULL_OBJECT,ierr) to determine how large the 378 KSP array must be. 379 380 Level: advanced 381 382 .keywords: block, Jacobi, get, sub, KSP, context 383 384 .seealso: PCBJacobiGetSubKSP() 385 @*/ 386 PetscErrorCode PCBJacobiGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[]) 387 { 388 PetscErrorCode ierr; 389 390 PetscFunctionBegin; 391 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 392 ierr = PetscUseMethod(pc,"PCBJacobiGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr); 393 PetscFunctionReturn(0); 394 } 395 396 #undef __FUNCT__ 397 #define __FUNCT__ "PCBJacobiSetTotalBlocks" 398 /*@ 399 PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block 400 Jacobi preconditioner. 401 402 Collective on PC 403 404 Input Parameters: 405 + pc - the preconditioner context 406 . blocks - the number of blocks 407 - lens - [optional] integer array containing the size of each block 408 409 Options Database Key: 410 . -pc_bjacobi_blocks <blocks> - Sets the number of global blocks 411 412 Notes: 413 Currently only a limited number of blocking configurations are supported. 414 All processors sharing the PC must call this routine with the same data. 415 416 Level: intermediate 417 418 .keywords: set, number, Jacobi, global, total, blocks 419 420 .seealso: PCSetUseAmat(), PCBJacobiSetLocalBlocks() 421 @*/ 422 PetscErrorCode PCBJacobiSetTotalBlocks(PC pc,PetscInt blocks,const PetscInt lens[]) 423 { 424 PetscErrorCode ierr; 425 426 PetscFunctionBegin; 427 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 428 if (blocks <= 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Must have positive blocks"); 429 ierr = PetscTryMethod(pc,"PCBJacobiSetTotalBlocks_C",(PC,PetscInt,const PetscInt[]),(pc,blocks,lens));CHKERRQ(ierr); 430 PetscFunctionReturn(0); 431 } 432 433 #undef __FUNCT__ 434 #define __FUNCT__ "PCBJacobiGetTotalBlocks" 435 /*@C 436 PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block 437 Jacobi preconditioner. 438 439 Not Collective 440 441 Input Parameter: 442 . pc - the preconditioner context 443 444 Output parameters: 445 + blocks - the number of blocks 446 - lens - integer array containing the size of each block 447 448 Level: intermediate 449 450 .keywords: get, number, Jacobi, global, total, blocks 451 452 .seealso: PCSetUseAmat(), PCBJacobiGetLocalBlocks() 453 @*/ 454 PetscErrorCode PCBJacobiGetTotalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[]) 455 { 456 PetscErrorCode ierr; 457 458 PetscFunctionBegin; 459 PetscValidHeaderSpecific(pc, PC_CLASSID,1); 460 PetscValidIntPointer(blocks,2); 461 ierr = PetscUseMethod(pc,"PCBJacobiGetTotalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr); 462 PetscFunctionReturn(0); 463 } 464 465 #undef __FUNCT__ 466 #define __FUNCT__ "PCBJacobiSetLocalBlocks" 467 /*@ 468 PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block 469 Jacobi preconditioner. 470 471 Not Collective 472 473 Input Parameters: 474 + pc - the preconditioner context 475 . blocks - the number of blocks 476 - lens - [optional] integer array containing size of each block 477 478 Note: 479 Currently only a limited number of blocking configurations are supported. 480 481 Level: intermediate 482 483 .keywords: PC, set, number, Jacobi, local, blocks 484 485 .seealso: PCSetUseAmat(), PCBJacobiSetTotalBlocks() 486 @*/ 487 PetscErrorCode PCBJacobiSetLocalBlocks(PC pc,PetscInt blocks,const PetscInt lens[]) 488 { 489 PetscErrorCode ierr; 490 491 PetscFunctionBegin; 492 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 493 if (blocks < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have nonegative blocks"); 494 ierr = PetscTryMethod(pc,"PCBJacobiSetLocalBlocks_C",(PC,PetscInt,const PetscInt []),(pc,blocks,lens));CHKERRQ(ierr); 495 PetscFunctionReturn(0); 496 } 497 498 #undef __FUNCT__ 499 #define __FUNCT__ "PCBJacobiGetLocalBlocks" 500 /*@C 501 PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block 502 Jacobi preconditioner. 503 504 Not Collective 505 506 Input Parameters: 507 + pc - the preconditioner context 508 . blocks - the number of blocks 509 - lens - [optional] integer array containing size of each block 510 511 Note: 512 Currently only a limited number of blocking configurations are supported. 513 514 Level: intermediate 515 516 .keywords: PC, get, number, Jacobi, local, blocks 517 518 .seealso: PCSetUseAmat(), PCBJacobiGetTotalBlocks() 519 @*/ 520 PetscErrorCode PCBJacobiGetLocalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[]) 521 { 522 PetscErrorCode ierr; 523 524 PetscFunctionBegin; 525 PetscValidHeaderSpecific(pc, PC_CLASSID,1); 526 PetscValidIntPointer(blocks,2); 527 ierr = PetscUseMethod(pc,"PCBJacobiGetLocalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr); 528 PetscFunctionReturn(0); 529 } 530 531 /* -----------------------------------------------------------------------------------*/ 532 533 /*MC 534 PCBJACOBI - Use block Jacobi preconditioning, each block is (approximately) solved with 535 its own KSP object. 536 537 Options Database Keys: 538 . -pc_use_amat - use Amat to apply block of operator in inner Krylov method 539 540 Notes: Each processor can have one or more blocks, but a block cannot be shared by more 541 than one processor. Defaults to one block per processor. 542 543 To set options on the solvers for each block append -sub_ to all the KSP, KSP, and PC 544 options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly 545 546 To set the options on the solvers separate for each block call PCBJacobiGetSubKSP() 547 and set the options directly on the resulting KSP object (you can access its PC 548 KSPGetPC()) 549 550 For CUSP vectors it is recommended to use exactly one block per MPI process for best 551 performance. Different block partitioning may lead to additional data transfers 552 between host and GPU that lead to degraded performance. 553 554 Level: beginner 555 556 Concepts: block Jacobi 557 558 559 .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC, 560 PCASM, PCSetUseAmat(), PCGetUseAmat(), PCBJacobiGetSubKSP(), PCBJacobiSetTotalBlocks(), 561 PCBJacobiSetLocalBlocks(), PCSetModifySubmatrices() 562 M*/ 563 564 #undef __FUNCT__ 565 #define __FUNCT__ "PCCreate_BJacobi" 566 PETSC_EXTERN PetscErrorCode PCCreate_BJacobi(PC pc) 567 { 568 PetscErrorCode ierr; 569 PetscMPIInt rank; 570 PC_BJacobi *jac; 571 572 PetscFunctionBegin; 573 ierr = PetscNewLog(pc,&jac);CHKERRQ(ierr); 574 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr); 575 576 pc->ops->apply = 0; 577 pc->ops->applytranspose = 0; 578 pc->ops->setup = PCSetUp_BJacobi; 579 pc->ops->destroy = PCDestroy_BJacobi; 580 pc->ops->setfromoptions = PCSetFromOptions_BJacobi; 581 pc->ops->view = PCView_BJacobi; 582 pc->ops->applyrichardson = 0; 583 584 pc->data = (void*)jac; 585 jac->n = -1; 586 jac->n_local = -1; 587 jac->first_local = rank; 588 jac->ksp = 0; 589 jac->same_local_solves = PETSC_TRUE; 590 jac->g_lens = 0; 591 jac->l_lens = 0; 592 jac->psubcomm = 0; 593 594 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetSubKSP_C",PCBJacobiGetSubKSP_BJacobi);CHKERRQ(ierr); 595 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetTotalBlocks_C",PCBJacobiSetTotalBlocks_BJacobi);CHKERRQ(ierr); 596 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetTotalBlocks_C",PCBJacobiGetTotalBlocks_BJacobi);CHKERRQ(ierr); 597 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetLocalBlocks_C",PCBJacobiSetLocalBlocks_BJacobi);CHKERRQ(ierr); 598 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetLocalBlocks_C",PCBJacobiGetLocalBlocks_BJacobi);CHKERRQ(ierr); 599 PetscFunctionReturn(0); 600 } 601 602 /* --------------------------------------------------------------------------------------------*/ 603 /* 604 These are for a single block per processor; works for AIJ, BAIJ; Seq and MPI 605 */ 606 #undef __FUNCT__ 607 #define __FUNCT__ "PCReset_BJacobi_Singleblock" 608 static PetscErrorCode PCReset_BJacobi_Singleblock(PC pc) 609 { 610 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 611 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 612 PetscErrorCode ierr; 613 614 PetscFunctionBegin; 615 ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr); 616 ierr = VecDestroy(&bjac->x);CHKERRQ(ierr); 617 ierr = VecDestroy(&bjac->y);CHKERRQ(ierr); 618 PetscFunctionReturn(0); 619 } 620 621 #undef __FUNCT__ 622 #define __FUNCT__ "PCDestroy_BJacobi_Singleblock" 623 static PetscErrorCode PCDestroy_BJacobi_Singleblock(PC pc) 624 { 625 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 626 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 627 PetscErrorCode ierr; 628 629 PetscFunctionBegin; 630 ierr = PCReset_BJacobi_Singleblock(pc);CHKERRQ(ierr); 631 ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr); 632 ierr = PetscFree(jac->ksp);CHKERRQ(ierr); 633 ierr = PetscFree(jac->l_lens);CHKERRQ(ierr); 634 ierr = PetscFree(jac->g_lens);CHKERRQ(ierr); 635 ierr = PetscFree(bjac);CHKERRQ(ierr); 636 ierr = PetscFree(pc->data);CHKERRQ(ierr); 637 PetscFunctionReturn(0); 638 } 639 640 #include <petsc/private/kspimpl.h> 641 #undef __FUNCT__ 642 #define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Singleblock" 643 static PetscErrorCode PCSetUpOnBlocks_BJacobi_Singleblock(PC pc) 644 { 645 PetscErrorCode ierr; 646 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 647 KSP subksp = jac->ksp[0]; 648 649 PetscFunctionBegin; 650 ierr = KSPSetUp(subksp);CHKERRQ(ierr); 651 if (subksp->reason == KSP_DIVERGED_PCSETUP_FAILED) { 652 pc->failedreason = PC_SUBPC_ERROR; 653 } 654 PetscFunctionReturn(0); 655 } 656 657 #undef __FUNCT__ 658 #define __FUNCT__ "PCApply_BJacobi_Singleblock" 659 static PetscErrorCode PCApply_BJacobi_Singleblock(PC pc,Vec x,Vec y) 660 { 661 PetscErrorCode ierr; 662 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 663 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 664 665 PetscFunctionBegin; 666 ierr = VecGetLocalVectorRead(x, bjac->x);CHKERRQ(ierr); 667 ierr = VecGetLocalVector(y, bjac->y);CHKERRQ(ierr); 668 /* Since the inner KSP matrix may point directly to the diagonal block of an MPI matrix the inner 669 matrix may change even if the outter KSP/PC has not updated the preconditioner, this will trigger a rebuild 670 of the inner preconditioner automatically unless we pass down the outter preconditioners reuse flag.*/ 671 ierr = KSPSetReusePreconditioner(jac->ksp[0],pc->reusepreconditioner);CHKERRQ(ierr); 672 ierr = KSPSolve(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr); 673 ierr = VecRestoreLocalVectorRead(x, bjac->x);CHKERRQ(ierr); 674 ierr = VecRestoreLocalVector(y, bjac->y);CHKERRQ(ierr); 675 PetscFunctionReturn(0); 676 } 677 678 #undef __FUNCT__ 679 #define __FUNCT__ "PCApplySymmetricLeft_BJacobi_Singleblock" 680 static PetscErrorCode PCApplySymmetricLeft_BJacobi_Singleblock(PC pc,Vec x,Vec y) 681 { 682 PetscErrorCode ierr; 683 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 684 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 685 PetscScalar *y_array; 686 const PetscScalar *x_array; 687 PC subpc; 688 689 PetscFunctionBegin; 690 /* 691 The VecPlaceArray() is to avoid having to copy the 692 y vector into the bjac->x vector. The reason for 693 the bjac->x vector is that we need a sequential vector 694 for the sequential solve. 695 */ 696 ierr = VecGetArrayRead(x,&x_array);CHKERRQ(ierr); 697 ierr = VecGetArray(y,&y_array);CHKERRQ(ierr); 698 ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr); 699 ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr); 700 /* apply the symmetric left portion of the inner PC operator */ 701 /* note this by-passes the inner KSP and its options completely */ 702 ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr); 703 ierr = PCApplySymmetricLeft(subpc,bjac->x,bjac->y);CHKERRQ(ierr); 704 ierr = VecResetArray(bjac->x);CHKERRQ(ierr); 705 ierr = VecResetArray(bjac->y);CHKERRQ(ierr); 706 ierr = VecRestoreArrayRead(x,&x_array);CHKERRQ(ierr); 707 ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr); 708 PetscFunctionReturn(0); 709 } 710 711 #undef __FUNCT__ 712 #define __FUNCT__ "PCApplySymmetricRight_BJacobi_Singleblock" 713 static PetscErrorCode PCApplySymmetricRight_BJacobi_Singleblock(PC pc,Vec x,Vec y) 714 { 715 PetscErrorCode ierr; 716 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 717 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 718 PetscScalar *y_array; 719 const PetscScalar *x_array; 720 PC subpc; 721 722 PetscFunctionBegin; 723 /* 724 The VecPlaceArray() is to avoid having to copy the 725 y vector into the bjac->x vector. The reason for 726 the bjac->x vector is that we need a sequential vector 727 for the sequential solve. 728 */ 729 ierr = VecGetArrayRead(x,&x_array);CHKERRQ(ierr); 730 ierr = VecGetArray(y,&y_array);CHKERRQ(ierr); 731 ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr); 732 ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr); 733 734 /* apply the symmetric right portion of the inner PC operator */ 735 /* note this by-passes the inner KSP and its options completely */ 736 737 ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr); 738 ierr = PCApplySymmetricRight(subpc,bjac->x,bjac->y);CHKERRQ(ierr); 739 740 ierr = VecRestoreArrayRead(x,&x_array);CHKERRQ(ierr); 741 ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr); 742 PetscFunctionReturn(0); 743 } 744 745 #undef __FUNCT__ 746 #define __FUNCT__ "PCApplyTranspose_BJacobi_Singleblock" 747 static PetscErrorCode PCApplyTranspose_BJacobi_Singleblock(PC pc,Vec x,Vec y) 748 { 749 PetscErrorCode ierr; 750 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 751 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 752 PetscScalar *y_array; 753 const PetscScalar *x_array; 754 755 PetscFunctionBegin; 756 /* 757 The VecPlaceArray() is to avoid having to copy the 758 y vector into the bjac->x vector. The reason for 759 the bjac->x vector is that we need a sequential vector 760 for the sequential solve. 761 */ 762 ierr = VecGetArrayRead(x,&x_array);CHKERRQ(ierr); 763 ierr = VecGetArray(y,&y_array);CHKERRQ(ierr); 764 ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr); 765 ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr); 766 ierr = KSPSolveTranspose(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr); 767 ierr = VecResetArray(bjac->x);CHKERRQ(ierr); 768 ierr = VecResetArray(bjac->y);CHKERRQ(ierr); 769 ierr = VecRestoreArrayRead(x,&x_array);CHKERRQ(ierr); 770 ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr); 771 PetscFunctionReturn(0); 772 } 773 774 #undef __FUNCT__ 775 #define __FUNCT__ "PCSetUp_BJacobi_Singleblock" 776 static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC pc,Mat mat,Mat pmat) 777 { 778 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 779 PetscErrorCode ierr; 780 PetscInt m; 781 KSP ksp; 782 PC_BJacobi_Singleblock *bjac; 783 PetscBool wasSetup = PETSC_TRUE; 784 785 PetscFunctionBegin; 786 if (!pc->setupcalled) { 787 const char *prefix; 788 789 if (!jac->ksp) { 790 wasSetup = PETSC_FALSE; 791 792 ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr); 793 ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr); 794 ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr); 795 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr); 796 ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr); 797 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 798 ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr); 799 ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr); 800 801 pc->ops->reset = PCReset_BJacobi_Singleblock; 802 pc->ops->destroy = PCDestroy_BJacobi_Singleblock; 803 pc->ops->apply = PCApply_BJacobi_Singleblock; 804 pc->ops->applysymmetricleft = PCApplySymmetricLeft_BJacobi_Singleblock; 805 pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock; 806 pc->ops->applytranspose = PCApplyTranspose_BJacobi_Singleblock; 807 pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Singleblock; 808 809 ierr = PetscMalloc1(1,&jac->ksp);CHKERRQ(ierr); 810 jac->ksp[0] = ksp; 811 812 ierr = PetscNewLog(pc,&bjac);CHKERRQ(ierr); 813 jac->data = (void*)bjac; 814 } else { 815 ksp = jac->ksp[0]; 816 bjac = (PC_BJacobi_Singleblock*)jac->data; 817 } 818 819 /* 820 The reason we need to generate these vectors is to serve 821 as the right-hand side and solution vector for the solve on the 822 block. We do not need to allocate space for the vectors since 823 that is provided via VecPlaceArray() just before the call to 824 KSPSolve() on the block. 825 */ 826 ierr = MatGetSize(pmat,&m,&m);CHKERRQ(ierr); 827 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->x);CHKERRQ(ierr); 828 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->y);CHKERRQ(ierr); 829 #if defined(PETSC_HAVE_CUSP) 830 ierr = VecSetType(bjac->x,VECCUSP);CHKERRQ(ierr); 831 ierr = VecSetType(bjac->y,VECCUSP);CHKERRQ(ierr); 832 #elif defined(PETSC_HAVE_VECCUDA) 833 ierr = VecSetType(bjac->x,VECCUDA);CHKERRQ(ierr); 834 ierr = VecSetType(bjac->y,VECCUDA);CHKERRQ(ierr); 835 #endif 836 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->x);CHKERRQ(ierr); 837 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->y);CHKERRQ(ierr); 838 } else { 839 ksp = jac->ksp[0]; 840 bjac = (PC_BJacobi_Singleblock*)jac->data; 841 } 842 if (pc->useAmat) { 843 ierr = KSPSetOperators(ksp,mat,pmat);CHKERRQ(ierr); 844 } else { 845 ierr = KSPSetOperators(ksp,pmat,pmat);CHKERRQ(ierr); 846 } 847 if (!wasSetup && pc->setfromoptionscalled) { 848 /* If PCSetFromOptions_BJacobi is called later, KSPSetFromOptions will be called at that time. */ 849 ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr); 850 } 851 PetscFunctionReturn(0); 852 } 853 854 /* ---------------------------------------------------------------------------------------------*/ 855 #undef __FUNCT__ 856 #define __FUNCT__ "PCReset_BJacobi_Multiblock" 857 static PetscErrorCode PCReset_BJacobi_Multiblock(PC pc) 858 { 859 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 860 PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data; 861 PetscErrorCode ierr; 862 PetscInt i; 863 864 PetscFunctionBegin; 865 if (bjac && bjac->pmat) { 866 ierr = MatDestroyMatrices(jac->n_local,&bjac->pmat);CHKERRQ(ierr); 867 if (pc->useAmat) { 868 ierr = MatDestroyMatrices(jac->n_local,&bjac->mat);CHKERRQ(ierr); 869 } 870 } 871 872 for (i=0; i<jac->n_local; i++) { 873 ierr = KSPReset(jac->ksp[i]);CHKERRQ(ierr); 874 if (bjac && bjac->x) { 875 ierr = VecDestroy(&bjac->x[i]);CHKERRQ(ierr); 876 ierr = VecDestroy(&bjac->y[i]);CHKERRQ(ierr); 877 ierr = ISDestroy(&bjac->is[i]);CHKERRQ(ierr); 878 } 879 } 880 ierr = PetscFree(jac->l_lens);CHKERRQ(ierr); 881 ierr = PetscFree(jac->g_lens);CHKERRQ(ierr); 882 PetscFunctionReturn(0); 883 } 884 885 #undef __FUNCT__ 886 #define __FUNCT__ "PCDestroy_BJacobi_Multiblock" 887 static PetscErrorCode PCDestroy_BJacobi_Multiblock(PC pc) 888 { 889 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 890 PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data; 891 PetscErrorCode ierr; 892 PetscInt i; 893 894 PetscFunctionBegin; 895 ierr = PCReset_BJacobi_Multiblock(pc);CHKERRQ(ierr); 896 if (bjac) { 897 ierr = PetscFree2(bjac->x,bjac->y);CHKERRQ(ierr); 898 ierr = PetscFree(bjac->starts);CHKERRQ(ierr); 899 ierr = PetscFree(bjac->is);CHKERRQ(ierr); 900 } 901 ierr = PetscFree(jac->data);CHKERRQ(ierr); 902 for (i=0; i<jac->n_local; i++) { 903 ierr = KSPDestroy(&jac->ksp[i]);CHKERRQ(ierr); 904 } 905 ierr = PetscFree(jac->ksp);CHKERRQ(ierr); 906 ierr = PetscFree(pc->data);CHKERRQ(ierr); 907 PetscFunctionReturn(0); 908 } 909 910 #undef __FUNCT__ 911 #define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Multiblock" 912 static PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiblock(PC pc) 913 { 914 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 915 PetscErrorCode ierr; 916 PetscInt i,n_local = jac->n_local; 917 918 PetscFunctionBegin; 919 for (i=0; i<n_local; i++) { 920 ierr = KSPSetUp(jac->ksp[i]);CHKERRQ(ierr); 921 if (jac->ksp[i]->reason == KSP_DIVERGED_PCSETUP_FAILED) { 922 pc->failedreason = PC_SUBPC_ERROR; 923 } 924 } 925 PetscFunctionReturn(0); 926 } 927 928 /* 929 Preconditioner for block Jacobi 930 */ 931 #undef __FUNCT__ 932 #define __FUNCT__ "PCApply_BJacobi_Multiblock" 933 static PetscErrorCode PCApply_BJacobi_Multiblock(PC pc,Vec x,Vec y) 934 { 935 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 936 PetscErrorCode ierr; 937 PetscInt i,n_local = jac->n_local; 938 PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data; 939 PetscScalar *yin; 940 const PetscScalar *xin; 941 942 PetscFunctionBegin; 943 ierr = VecGetArrayRead(x,&xin);CHKERRQ(ierr); 944 ierr = VecGetArray(y,&yin);CHKERRQ(ierr); 945 for (i=0; i<n_local; i++) { 946 /* 947 To avoid copying the subvector from x into a workspace we instead 948 make the workspace vector array point to the subpart of the array of 949 the global vector. 950 */ 951 ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr); 952 ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr); 953 954 ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr); 955 ierr = KSPSolve(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr); 956 ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr); 957 958 ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr); 959 ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr); 960 } 961 ierr = VecRestoreArrayRead(x,&xin);CHKERRQ(ierr); 962 ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr); 963 PetscFunctionReturn(0); 964 } 965 966 /* 967 Preconditioner for block Jacobi 968 */ 969 #undef __FUNCT__ 970 #define __FUNCT__ "PCApplyTranspose_BJacobi_Multiblock" 971 static PetscErrorCode PCApplyTranspose_BJacobi_Multiblock(PC pc,Vec x,Vec y) 972 { 973 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 974 PetscErrorCode ierr; 975 PetscInt i,n_local = jac->n_local; 976 PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data; 977 PetscScalar *yin; 978 const PetscScalar *xin; 979 980 PetscFunctionBegin; 981 ierr = VecGetArrayRead(x,&xin);CHKERRQ(ierr); 982 ierr = VecGetArray(y,&yin);CHKERRQ(ierr); 983 for (i=0; i<n_local; i++) { 984 /* 985 To avoid copying the subvector from x into a workspace we instead 986 make the workspace vector array point to the subpart of the array of 987 the global vector. 988 */ 989 ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr); 990 ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr); 991 992 ierr = PetscLogEventBegin(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr); 993 ierr = KSPSolveTranspose(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr); 994 ierr = PetscLogEventEnd(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr); 995 996 ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr); 997 ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr); 998 } 999 ierr = VecRestoreArrayRead(x,&xin);CHKERRQ(ierr); 1000 ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr); 1001 PetscFunctionReturn(0); 1002 } 1003 1004 #undef __FUNCT__ 1005 #define __FUNCT__ "PCSetUp_BJacobi_Multiblock" 1006 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC pc,Mat mat,Mat pmat) 1007 { 1008 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 1009 PetscErrorCode ierr; 1010 PetscInt m,n_local,N,M,start,i; 1011 const char *prefix,*pprefix,*mprefix; 1012 KSP ksp; 1013 Vec x,y; 1014 PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data; 1015 PC subpc; 1016 IS is; 1017 MatReuse scall; 1018 1019 PetscFunctionBegin; 1020 ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr); 1021 1022 n_local = jac->n_local; 1023 1024 if (pc->useAmat) { 1025 PetscBool same; 1026 ierr = PetscObjectTypeCompare((PetscObject)mat,((PetscObject)pmat)->type_name,&same);CHKERRQ(ierr); 1027 if (!same) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_INCOMP,"Matrices not of same type"); 1028 } 1029 1030 if (!pc->setupcalled) { 1031 scall = MAT_INITIAL_MATRIX; 1032 1033 if (!jac->ksp) { 1034 pc->ops->reset = PCReset_BJacobi_Multiblock; 1035 pc->ops->destroy = PCDestroy_BJacobi_Multiblock; 1036 pc->ops->apply = PCApply_BJacobi_Multiblock; 1037 pc->ops->applytranspose= PCApplyTranspose_BJacobi_Multiblock; 1038 pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Multiblock; 1039 1040 ierr = PetscNewLog(pc,&bjac);CHKERRQ(ierr); 1041 ierr = PetscMalloc1(n_local,&jac->ksp);CHKERRQ(ierr); 1042 ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(n_local*sizeof(KSP)));CHKERRQ(ierr); 1043 ierr = PetscMalloc2(n_local,&bjac->x,n_local,&bjac->y);CHKERRQ(ierr); 1044 ierr = PetscMalloc1(n_local,&bjac->starts);CHKERRQ(ierr); 1045 ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(n_local*sizeof(PetscScalar)));CHKERRQ(ierr); 1046 1047 jac->data = (void*)bjac; 1048 ierr = PetscMalloc1(n_local,&bjac->is);CHKERRQ(ierr); 1049 ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(n_local*sizeof(IS)));CHKERRQ(ierr); 1050 1051 for (i=0; i<n_local; i++) { 1052 ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr); 1053 ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr); 1054 ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr); 1055 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr); 1056 ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr); 1057 ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr); 1058 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 1059 ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr); 1060 ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr); 1061 1062 jac->ksp[i] = ksp; 1063 } 1064 } else { 1065 bjac = (PC_BJacobi_Multiblock*)jac->data; 1066 } 1067 1068 start = 0; 1069 for (i=0; i<n_local; i++) { 1070 m = jac->l_lens[i]; 1071 /* 1072 The reason we need to generate these vectors is to serve 1073 as the right-hand side and solution vector for the solve on the 1074 block. We do not need to allocate space for the vectors since 1075 that is provided via VecPlaceArray() just before the call to 1076 KSPSolve() on the block. 1077 1078 */ 1079 ierr = VecCreateSeq(PETSC_COMM_SELF,m,&x);CHKERRQ(ierr); 1080 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&y);CHKERRQ(ierr); 1081 #if defined(PETSC_HAVE_CUSP) 1082 ierr = VecSetType(x,VECCUSP);CHKERRQ(ierr); 1083 ierr = VecSetType(y,VECCUSP);CHKERRQ(ierr); 1084 #elif defined(PETSC_HAVE_VECCUDA) 1085 ierr = VecSetType(x,VECCUDA);CHKERRQ(ierr); 1086 ierr = VecSetType(y,VECCUDA);CHKERRQ(ierr); 1087 #endif 1088 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)x);CHKERRQ(ierr); 1089 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)y);CHKERRQ(ierr); 1090 1091 bjac->x[i] = x; 1092 bjac->y[i] = y; 1093 bjac->starts[i] = start; 1094 1095 ierr = ISCreateStride(PETSC_COMM_SELF,m,start,1,&is);CHKERRQ(ierr); 1096 bjac->is[i] = is; 1097 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)is);CHKERRQ(ierr); 1098 1099 start += m; 1100 } 1101 } else { 1102 bjac = (PC_BJacobi_Multiblock*)jac->data; 1103 /* 1104 Destroy the blocks from the previous iteration 1105 */ 1106 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 1107 ierr = MatDestroyMatrices(n_local,&bjac->pmat);CHKERRQ(ierr); 1108 if (pc->useAmat) { 1109 ierr = MatDestroyMatrices(n_local,&bjac->mat);CHKERRQ(ierr); 1110 } 1111 scall = MAT_INITIAL_MATRIX; 1112 } else scall = MAT_REUSE_MATRIX; 1113 } 1114 1115 ierr = MatGetSubMatrices(pmat,n_local,bjac->is,bjac->is,scall,&bjac->pmat);CHKERRQ(ierr); 1116 if (pc->useAmat) { 1117 ierr = PetscObjectGetOptionsPrefix((PetscObject)mat,&mprefix);CHKERRQ(ierr); 1118 ierr = MatGetSubMatrices(mat,n_local,bjac->is,bjac->is,scall,&bjac->mat);CHKERRQ(ierr); 1119 } 1120 /* Return control to the user so that the submatrices can be modified (e.g., to apply 1121 different boundary conditions for the submatrices than for the global problem) */ 1122 ierr = PCModifySubMatrices(pc,n_local,bjac->is,bjac->is,bjac->pmat,pc->modifysubmatricesP);CHKERRQ(ierr); 1123 1124 ierr = PetscObjectGetOptionsPrefix((PetscObject)pmat,&pprefix);CHKERRQ(ierr); 1125 for (i=0; i<n_local; i++) { 1126 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->pmat[i]);CHKERRQ(ierr); 1127 ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->pmat[i],pprefix);CHKERRQ(ierr); 1128 if (pc->useAmat) { 1129 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->mat[i]);CHKERRQ(ierr); 1130 ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->mat[i],mprefix);CHKERRQ(ierr); 1131 ierr = KSPSetOperators(jac->ksp[i],bjac->mat[i],bjac->pmat[i]);CHKERRQ(ierr); 1132 } else { 1133 ierr = KSPSetOperators(jac->ksp[i],bjac->pmat[i],bjac->pmat[i]);CHKERRQ(ierr); 1134 } 1135 if (pc->setfromoptionscalled) { 1136 ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr); 1137 } 1138 } 1139 PetscFunctionReturn(0); 1140 } 1141 1142 /* ---------------------------------------------------------------------------------------------*/ 1143 /* 1144 These are for a single block with multiple processes; 1145 */ 1146 #undef __FUNCT__ 1147 #define __FUNCT__ "PCReset_BJacobi_Multiproc" 1148 static PetscErrorCode PCReset_BJacobi_Multiproc(PC pc) 1149 { 1150 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 1151 PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data; 1152 PetscErrorCode ierr; 1153 1154 PetscFunctionBegin; 1155 ierr = VecDestroy(&mpjac->ysub);CHKERRQ(ierr); 1156 ierr = VecDestroy(&mpjac->xsub);CHKERRQ(ierr); 1157 ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr); 1158 if (jac->ksp) {ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);} 1159 PetscFunctionReturn(0); 1160 } 1161 1162 #undef __FUNCT__ 1163 #define __FUNCT__ "PCDestroy_BJacobi_Multiproc" 1164 static PetscErrorCode PCDestroy_BJacobi_Multiproc(PC pc) 1165 { 1166 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 1167 PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data; 1168 PetscErrorCode ierr; 1169 1170 PetscFunctionBegin; 1171 ierr = PCReset_BJacobi_Multiproc(pc);CHKERRQ(ierr); 1172 ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr); 1173 ierr = PetscFree(jac->ksp);CHKERRQ(ierr); 1174 ierr = PetscSubcommDestroy(&mpjac->psubcomm);CHKERRQ(ierr); 1175 1176 ierr = PetscFree(mpjac);CHKERRQ(ierr); 1177 ierr = PetscFree(pc->data);CHKERRQ(ierr); 1178 PetscFunctionReturn(0); 1179 } 1180 1181 #undef __FUNCT__ 1182 #define __FUNCT__ "PCApply_BJacobi_Multiproc" 1183 static PetscErrorCode PCApply_BJacobi_Multiproc(PC pc,Vec x,Vec y) 1184 { 1185 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 1186 PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data; 1187 PetscErrorCode ierr; 1188 PetscScalar *yarray; 1189 const PetscScalar *xarray; 1190 1191 PetscFunctionBegin; 1192 /* place x's and y's local arrays into xsub and ysub */ 1193 ierr = VecGetArrayRead(x,&xarray);CHKERRQ(ierr); 1194 ierr = VecGetArray(y,&yarray);CHKERRQ(ierr); 1195 ierr = VecPlaceArray(mpjac->xsub,xarray);CHKERRQ(ierr); 1196 ierr = VecPlaceArray(mpjac->ysub,yarray);CHKERRQ(ierr); 1197 1198 /* apply preconditioner on each matrix block */ 1199 ierr = PetscLogEventBegin(PC_ApplyOnMproc,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr); 1200 ierr = KSPSolve(jac->ksp[0],mpjac->xsub,mpjac->ysub);CHKERRQ(ierr); 1201 ierr = PetscLogEventEnd(PC_ApplyOnMproc,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr); 1202 1203 if (jac->ksp[0]->reason == KSP_DIVERGED_PCSETUP_FAILED) { 1204 pc->failedreason = PC_SUBPC_ERROR; 1205 } 1206 1207 ierr = VecResetArray(mpjac->xsub);CHKERRQ(ierr); 1208 ierr = VecResetArray(mpjac->ysub);CHKERRQ(ierr); 1209 ierr = VecRestoreArrayRead(x,&xarray);CHKERRQ(ierr); 1210 ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr); 1211 PetscFunctionReturn(0); 1212 } 1213 1214 #include <petsc/private/matimpl.h> 1215 #undef __FUNCT__ 1216 #define __FUNCT__ "PCSetUp_BJacobi_Multiproc" 1217 static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC pc) 1218 { 1219 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 1220 PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data; 1221 PetscErrorCode ierr; 1222 PetscInt m,n; 1223 MPI_Comm comm,subcomm=0; 1224 const char *prefix; 1225 PetscBool wasSetup = PETSC_TRUE; 1226 1227 PetscFunctionBegin; 1228 ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr); 1229 if (jac->n_local > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only a single block in a subcommunicator is supported"); 1230 jac->n_local = 1; /* currently only a single block is supported for a subcommunicator */ 1231 if (!pc->setupcalled) { 1232 wasSetup = PETSC_FALSE; 1233 ierr = PetscNewLog(pc,&mpjac);CHKERRQ(ierr); 1234 jac->data = (void*)mpjac; 1235 1236 /* initialize datastructure mpjac */ 1237 if (!jac->psubcomm) { 1238 /* Create default contiguous subcommunicatiors if user does not provide them */ 1239 ierr = PetscSubcommCreate(comm,&jac->psubcomm);CHKERRQ(ierr); 1240 ierr = PetscSubcommSetNumber(jac->psubcomm,jac->n);CHKERRQ(ierr); 1241 ierr = PetscSubcommSetType(jac->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr); 1242 ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr); 1243 } 1244 mpjac->psubcomm = jac->psubcomm; 1245 subcomm = PetscSubcommChild(mpjac->psubcomm); 1246 1247 /* Get matrix blocks of pmat */ 1248 if (!pc->pmat->ops->getmultiprocblock) SETERRQ(PetscObjectComm((PetscObject)pc->pmat),PETSC_ERR_SUP,"No support for the requested operation"); 1249 ierr = (*pc->pmat->ops->getmultiprocblock)(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr); 1250 1251 /* create a new PC that processors in each subcomm have copy of */ 1252 ierr = PetscMalloc1(1,&jac->ksp);CHKERRQ(ierr); 1253 ierr = KSPCreate(subcomm,&jac->ksp[0]);CHKERRQ(ierr); 1254 ierr = KSPSetErrorIfNotConverged(jac->ksp[0],pc->erroriffailure);CHKERRQ(ierr); 1255 ierr = PetscObjectIncrementTabLevel((PetscObject)jac->ksp[0],(PetscObject)pc,1);CHKERRQ(ierr); 1256 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)jac->ksp[0]);CHKERRQ(ierr); 1257 ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats);CHKERRQ(ierr); 1258 ierr = KSPGetPC(jac->ksp[0],&mpjac->pc);CHKERRQ(ierr); 1259 1260 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 1261 ierr = KSPSetOptionsPrefix(jac->ksp[0],prefix);CHKERRQ(ierr); 1262 ierr = KSPAppendOptionsPrefix(jac->ksp[0],"sub_");CHKERRQ(ierr); 1263 /* 1264 PetscMPIInt rank,subsize,subrank; 1265 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 1266 ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr); 1267 ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr); 1268 1269 ierr = MatGetLocalSize(mpjac->submats,&m,NULL);CHKERRQ(ierr); 1270 ierr = MatGetSize(mpjac->submats,&n,NULL);CHKERRQ(ierr); 1271 ierr = PetscSynchronizedPrintf(comm,"[%d], sub-size %d,sub-rank %d\n",rank,subsize,subrank); 1272 ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr); 1273 */ 1274 1275 /* create dummy vectors xsub and ysub */ 1276 ierr = MatGetLocalSize(mpjac->submats,&m,&n);CHKERRQ(ierr); 1277 ierr = VecCreateMPIWithArray(subcomm,1,n,PETSC_DECIDE,NULL,&mpjac->xsub);CHKERRQ(ierr); 1278 ierr = VecCreateMPIWithArray(subcomm,1,m,PETSC_DECIDE,NULL,&mpjac->ysub);CHKERRQ(ierr); 1279 #if defined(PETSC_HAVE_CUSP) 1280 ierr = VecSetType(mpjac->xsub,VECMPICUSP);CHKERRQ(ierr); 1281 ierr = VecSetType(mpjac->ysub,VECMPICUSP);CHKERRQ(ierr); 1282 #elif defined(PETSC_HAVE_VECCUDA) 1283 ierr = VecSetType(mpjac->xsub,VECMPICUDA);CHKERRQ(ierr); 1284 ierr = VecSetType(mpjac->ysub,VECMPICUDA);CHKERRQ(ierr); 1285 #endif 1286 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)mpjac->xsub);CHKERRQ(ierr); 1287 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)mpjac->ysub);CHKERRQ(ierr); 1288 1289 pc->ops->reset = PCReset_BJacobi_Multiproc; 1290 pc->ops->destroy = PCDestroy_BJacobi_Multiproc; 1291 pc->ops->apply = PCApply_BJacobi_Multiproc; 1292 } else { /* pc->setupcalled */ 1293 subcomm = PetscSubcommChild(mpjac->psubcomm); 1294 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 1295 /* destroy old matrix blocks, then get new matrix blocks */ 1296 if (mpjac->submats) {ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);} 1297 ierr = (*pc->pmat->ops->getmultiprocblock)(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr); 1298 } else { 1299 ierr = (*pc->pmat->ops->getmultiprocblock)(pc->pmat,subcomm,MAT_REUSE_MATRIX,&mpjac->submats);CHKERRQ(ierr); 1300 } 1301 ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats);CHKERRQ(ierr); 1302 } 1303 1304 if (!wasSetup && pc->setfromoptionscalled) { 1305 ierr = KSPSetFromOptions(jac->ksp[0]);CHKERRQ(ierr); 1306 } 1307 PetscFunctionReturn(0); 1308 } 1309