/*
   Defines a block Jacobi preconditioner.
*/

#include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /*I "petscpc.h" I*/

static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC,Mat,Mat);
static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC,Mat,Mat);
static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC);

static PetscErrorCode PCSetUp_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  Mat            mat  = pc->mat,pmat = pc->pmat;
  PetscErrorCode ierr;
  void           (*f)(void);
  PetscInt       N,M,start,i,sum,end;
  PetscInt       bs,i_start=-1,i_end=-1;
  PetscMPIInt    rank,size;
  const char     *pprefix,*mprefix;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
  ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);

  if (jac->n > 0 && jac->n < size) {
    ierr = PCSetUp_BJacobi_Multiproc(pc);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* --------------------------------------------------------------------------
      Determines the number of blocks assigned to each processor
  -----------------------------------------------------------------------------*/

  /* local block count given */
  if (jac->n_local > 0 && jac->n < 0) {
    ierr = MPIU_Allreduce(&jac->n_local,&jac->n,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    if (jac->l_lens) { /* check that user set these correctly */
      sum = 0;
      for (i=0; i<jac->n_local; i++) {
        if (jac->l_lens[i]/bs*bs != jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
        sum += jac->l_lens[i];
      }
      if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local lens set incorrectly");
    } else {
      ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
    }
  } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */
    /* global blocks given: determine which ones are local */
    if (jac->g_lens) {
      /* check that g_lens has valid entries */
      for (i=0; i<jac->n; i++) {
        if (!jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Zero block not allowed");
        if (jac->g_lens[i]/bs*bs != jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
      }
      if (size == 1) {
        jac->n_local = jac->n;
        ierr         = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
        ierr         = PetscMemcpy(jac->l_lens,jac->g_lens,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
        /* check that user set these correctly */
        sum = 0;
        for (i=0; i<jac->n_local; i++) sum += jac->l_lens[i];
        if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Global lens set incorrectly");
      } else {
        ierr = MatGetOwnershipRange(pc->pmat,&start,&end);CHKERRQ(ierr);
        /* loop over blocks determining the first one owned by me */
        sum = 0;
        for (i=0; i<jac->n+1; i++) {
          if (sum == start) { i_start = i; goto start_1;}
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
start_1:
        for (i=i_start; i<jac->n+1; i++) {
          if (sum == end) { i_end = i; goto end_1; }
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
end_1:
        jac->n_local = i_end - i_start;
        ierr         = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
        ierr         = PetscMemcpy(jac->l_lens,jac->g_lens+i_start,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
      }
    } else { /* no global block lengths given, determine them using the default layout */
      jac->n_local = jac->n/size + ((jac->n % size) > rank);
      ierr         = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        jac->l_lens[i] = ((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i))*bs;
        if (!jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Too many blocks given");
      }
    }
  } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */
    jac->n         = size;
    jac->n_local   = 1;
    ierr           = PetscMalloc1(1,&jac->l_lens);CHKERRQ(ierr);
    jac->l_lens[0] = M;
  } else { /* jac->n > 0 && jac->n_local > 0 */
    if (!jac->l_lens) {
      ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
    }
  }
  if (jac->n_local < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of blocks is less than number of processors");

  /* -------------------------
     Determines mat and pmat
  ---------------------------*/
  ierr = MatShellGetOperation(pc->pmat,MATOP_GET_DIAGONAL_BLOCK,&f);CHKERRQ(ierr);
  if (!f && size == 1) {
    mat  = pc->mat;
    pmat = pc->pmat;
  } else {
    if (pc->useAmat) {
      /* use block from Amat matrix, not Pmat for local MatMult() */
      ierr = MatGetDiagonalBlock(pc->mat,&mat);CHKERRQ(ierr);
      /* make submatrix have same prefix as entire matrix */
      ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->mat,&mprefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)mat,mprefix);CHKERRQ(ierr);
    }
    if (pc->pmat != pc->mat || !pc->useAmat) {
      ierr = MatGetDiagonalBlock(pc->pmat,&pmat);CHKERRQ(ierr);
      /* make submatrix have same prefix as entire matrix */
      ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)pmat,pprefix);CHKERRQ(ierr);
    } else pmat = mat;
  }

  /* ------
     Setup code depends on the number of blocks
  */
  if (jac->n_local == 1) {
    ierr = PCSetUp_BJacobi_Singleblock(pc,mat,pmat);CHKERRQ(ierr);
  } else {
    ierr = PCSetUp_BJacobi_Multiblock(pc,mat,pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
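/*
   Worked example of the default block-length formula used above (an
   illustrative comment, not library code).  The M local rows are treated as
   M/bs block rows that are spread as evenly as possible over the n_local
   blocks, with the remainder going to the lowest-numbered blocks:

       l_lens[i] = bs*((M/bs)/n_local + (((M/bs) % n_local) > i))

   For instance, with M = 25, bs = 5 and n_local = 2: M/bs = 5 block rows,
   5/2 = 2 per block with remainder 1, so l_lens = {5*3, 5*2} = {15, 10},
   which sums back to M = 25 as the checks above require.
*/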
/* Default destroy, used if the PC has never been set up */
static PetscErrorCode PCDestroy_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_BJacobi(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks,i;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"Block Jacobi options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_bjacobi_blocks","Total number of blocks","PCBJacobiSetTotalBlocks",jac->n,&blocks,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCBJacobiSetTotalBlocks(pc,blocks,NULL);CHKERRQ(ierr);
  }
  if (jac->ksp) {
    /* The sub-KSPs have already been created (e.g., by PCSetUp_BJacobi_Singleblock()), but
       KSPSetFromOptions() was not called on them unless this routine had already run, so call it now. */
    for (i=0; i<jac->n_local; i++) {
      ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr);
    }
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
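/*
   Example (illustrative; the executable name is hypothetical): typical
   options-database usage of this preconditioner from the command line,

     mpiexec -n 4 ./myapp -ksp_type gmres -pc_type bjacobi \
             -pc_bjacobi_blocks 4 -sub_ksp_type preonly -sub_pc_type ilu

   The "sub_" prefix appended to each block's KSP elsewhere in this file is
   what routes -sub_ksp_type and -sub_pc_type to every inner solver.
*/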
#include <petscdraw.h>
static PetscErrorCode PCView_BJacobi(PC pc,PetscViewer viewer)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscMPIInt          rank;
  PetscInt             i;
  PetscBool            iascii,isstring,isdraw;
  PetscViewer          sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (iascii) {
    if (pc->useAmat) {
      ierr = PetscViewerASCIIPrintf(viewer,"  block Jacobi: using Amat local matrix, number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer,"  block Jacobi: number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (jac->same_local_solves) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
      if (jac->ksp && !jac->psubcomm) {
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      } else if (mpjac && jac->ksp && mpjac->psubcomm) {
        ierr = PetscViewerGetSubViewer(viewer,mpjac->psubcomm->child,&sviewer);CHKERRQ(ierr);
        if (!mpjac->psubcomm->color) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(*(jac->ksp),sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSubViewer(viewer,mpjac->psubcomm->child,&sviewer);CHKERRQ(ierr);
      }
    } else {
      PetscInt n_global;
      ierr = MPIU_Allreduce(&jac->n_local,&n_global,1,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D, first local block number = %D\n",
                                                rank,jac->n_local,jac->first_local);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr);
        ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blks=%D",jac->n);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x,y,bottom,h;

    ierr   = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr   = PetscDrawGetCurrentPoint(draw,&x,&y);CHKERRQ(ierr);
    ierr   = PetscSNPrintf(str,25,"Number blocks %D",jac->n);CHKERRQ(ierr);
    ierr   = PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);CHKERRQ(ierr);
    bottom = y - h;
    ierr   = PetscDrawPushCurrentPoint(draw,x,bottom);CHKERRQ(ierr);
    /* warning: the communicator on the viewer is different than on the KSP in parallel */
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],viewer);CHKERRQ(ierr);}
    ierr = PetscDrawPopCurrentPoint(draw);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
/* -------------------------------------------------------------------------------------*/

static PetscErrorCode PCBJacobiGetSubKSP_BJacobi(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_BJacobi *jac = (PC_BJacobi*)pc->data;

  PetscFunctionBegin;
  if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call KSPSetUp() or PCSetUp() first");

  if (n_local) *n_local = jac->n_local;
  if (first_local) *first_local = jac->first_local;
  *ksp = jac->ksp;
  jac->same_local_solves = PETSC_FALSE;        /* Assume that local solves are now different;
                                                  not necessarily true, though!  This flag is
                                                  used only for PCView_BJacobi() */
  PetscFunctionReturn(0);
}
static PetscErrorCode PCBJacobiSetTotalBlocks_BJacobi(PC pc,PetscInt blocks,PetscInt *lens)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (pc->setupcalled > 0 && jac->n != blocks) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Cannot alter number of blocks after PCSetUp()/KSPSetUp() has been called");
  jac->n = blocks;
  if (!lens) jac->g_lens = 0;
  else {
    ierr = PetscMalloc1(blocks,&jac->g_lens);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(jac->g_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCBJacobiGetTotalBlocks_BJacobi(PC pc,PetscInt *blocks,const PetscInt *lens[])
{
  PC_BJacobi *jac = (PC_BJacobi*)pc->data;

  PetscFunctionBegin;
  *blocks = jac->n;
  if (lens) *lens = jac->g_lens;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCBJacobiSetLocalBlocks_BJacobi(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PC_BJacobi     *jac;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  jac = (PC_BJacobi*)pc->data;

  jac->n_local = blocks;
  if (!lens) jac->l_lens = 0;
  else {
    ierr = PetscMalloc1(blocks,&jac->l_lens);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(jac->l_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCBJacobiGetLocalBlocks_BJacobi(PC pc,PetscInt *blocks,const PetscInt *lens[])
{
  PC_BJacobi *jac = (PC_BJacobi*)pc->data;

  PetscFunctionBegin;
  *blocks = jac->n_local;
  if (lens) *lens = jac->l_lens;
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/

/*@C
   PCBJacobiGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor, or NULL
.  first_local - the global number of the first block on this processor, or NULL
-  ksp - the array of KSP contexts

   Notes:
   The array of KSP contexts returned by PCBJacobiGetSubKSP() must not be freed by the caller.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() or PCSetUp() before calling PCBJacobiGetSubKSP().

   Fortran Usage: You must pass in a KSP array that is large enough to contain all the local KSPs.
      You can call PCBJacobiGetSubKSP(pc,nlocal,firstlocal,PETSC_NULL_KSP,ierr) to determine how large the
      KSP array must be.

   Level: advanced

.keywords: block, Jacobi, get, sub, KSP, context

.seealso: PCBJACOBI, PCBJacobiSetTotalBlocks(), PCBJacobiSetLocalBlocks()
@*/
PetscErrorCode PCBJacobiGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCBJacobiGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP**),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
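/*
   Example (an illustrative sketch, not library code): configuring each block's
   solver through PCBJacobiGetSubKSP(); "ksp" is a hypothetical outer solver
   whose operators have already been set.

     KSP      *subksp;
     PC       pc,subpc;
     PetscInt i,nlocal,first;

     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCBJACOBI);CHKERRQ(ierr);
     ierr = KSPSetUp(ksp);CHKERRQ(ierr);            <-- required before PCBJacobiGetSubKSP()
     ierr = PCBJacobiGetSubKSP(pc,&nlocal,&first,&subksp);CHKERRQ(ierr);
     for (i=0; i<nlocal; i++) {
       ierr = KSPSetType(subksp[i],KSPPREONLY);CHKERRQ(ierr);
       ierr = KSPGetPC(subksp[i],&subpc);CHKERRQ(ierr);
       ierr = PCSetType(subpc,PCILU);CHKERRQ(ierr);
     }
*/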
/*@
   PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block
   Jacobi preconditioner.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  blocks - the number of blocks
-  lens - [optional] integer array containing the size of each block

   Options Database Key:
.  -pc_bjacobi_blocks <blocks> - Sets the number of global blocks

   Notes:
   Currently only a limited number of blocking configurations are supported.
   All processors sharing the PC must call this routine with the same data.

   Level: intermediate

.keywords: set, number, Jacobi, global, total, blocks

.seealso: PCSetUseAmat(), PCBJacobiSetLocalBlocks()
@*/
PetscErrorCode PCBJacobiSetTotalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (blocks <= 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Must have positive blocks");
  ierr = PetscTryMethod(pc,"PCBJacobiSetTotalBlocks_C",(PC,PetscInt,const PetscInt[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block
   Jacobi preconditioner.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  blocks - the number of blocks
-  lens - integer array containing the size of each block

   Level: intermediate

.keywords: get, number, Jacobi, global, total, blocks

.seealso: PCSetUseAmat(), PCBJacobiGetLocalBlocks()
@*/
PetscErrorCode PCBJacobiGetTotalBlocks(PC pc,PetscInt *blocks,const PetscInt *lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidIntPointer(blocks,2);
  ierr = PetscUseMethod(pc,"PCBJacobiGetTotalBlocks_C",(PC,PetscInt*,const PetscInt*[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block
   Jacobi preconditioner.

   Not Collective

   Input Parameters:
+  pc - the preconditioner context
.  blocks - the number of blocks
-  lens - [optional] integer array containing size of each block

   Note:
   Currently only a limited number of blocking configurations are supported.

   Level: intermediate

.keywords: PC, set, number, Jacobi, local, blocks

.seealso: PCSetUseAmat(), PCBJacobiSetTotalBlocks()
@*/
PetscErrorCode PCBJacobiSetLocalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (blocks < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have nonnegative blocks");
  ierr = PetscTryMethod(pc,"PCBJacobiSetLocalBlocks_C",(PC,PetscInt,const PetscInt[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
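/*
   Example (illustrative sketch): requesting three uneven global blocks for a
   hypothetical 100-row system.  The lengths must sum to the matrix size and
   respect the matrix block size (see the checks in PCSetUp_BJacobi() above);
   passing lens = NULL instead lets PETSc size the blocks as evenly as possible.

     PetscInt lens[3] = {50,30,20};

     ierr = PCBJacobiSetTotalBlocks(pc,3,lens);CHKERRQ(ierr);
*/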
/*@C
   PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block
   Jacobi preconditioner.

   Not Collective

   Input Parameters:
+  pc - the preconditioner context
.  blocks - the number of blocks
-  lens - [optional] integer array containing size of each block

   Note:
   Currently only a limited number of blocking configurations are supported.

   Level: intermediate

.keywords: PC, get, number, Jacobi, local, blocks

.seealso: PCSetUseAmat(), PCBJacobiGetTotalBlocks()
@*/
PetscErrorCode PCBJacobiGetLocalBlocks(PC pc,PetscInt *blocks,const PetscInt *lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidIntPointer(blocks,2);
  ierr = PetscUseMethod(pc,"PCBJacobiGetLocalBlocks_C",(PC,PetscInt*,const PetscInt*[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -----------------------------------------------------------------------------------*/

/*MC
   PCBJACOBI - Use block Jacobi preconditioning, where each block is (approximately) solved with
           its own KSP object.

   Options Database Keys:
.  -pc_use_amat - use Amat to apply block of operator in inner Krylov method

   Notes:
     Each processor can have one or more blocks, but a block cannot be shared by more
     than one processor. Defaults to one block per processor.

     To set options on the solvers for each block, append -sub_ to all the KSP and PC
     options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

     To set the options on the solvers separately for each block, call PCBJacobiGetSubKSP()
     and set the options directly on the resulting KSP object (you can access its PC
     with KSPGetPC()).

     For GPU-based vectors (CUDA, CUSP, ViennaCL) it is recommended to use exactly one block per MPI process for best
     performance.  Different block partitioning may lead to additional data transfers
     between host and GPU that degrade performance.

   Level: beginner

   Concepts: block Jacobi

.seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC,
           PCASM, PCSetUseAmat(), PCGetUseAmat(), PCBJacobiGetSubKSP(), PCBJacobiSetTotalBlocks(),
           PCBJacobiSetLocalBlocks(), PCSetModifySubmatrices()
M*/

PETSC_EXTERN PetscErrorCode PCCreate_BJacobi(PC pc)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PC_BJacobi     *jac;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&jac);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);

  pc->ops->apply           = 0;
  pc->ops->applytranspose  = 0;
  pc->ops->setup           = PCSetUp_BJacobi;
  pc->ops->destroy         = PCDestroy_BJacobi;
  pc->ops->setfromoptions  = PCSetFromOptions_BJacobi;
  pc->ops->view            = PCView_BJacobi;
  pc->ops->applyrichardson = 0;

  pc->data               = (void*)jac;
  jac->n                 = -1;
  jac->n_local           = -1;
  jac->first_local       = rank;
  jac->ksp               = 0;
  jac->same_local_solves = PETSC_TRUE;
  jac->g_lens            = 0;
  jac->l_lens            = 0;
  jac->psubcomm          = 0;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetSubKSP_C",PCBJacobiGetSubKSP_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetTotalBlocks_C",PCBJacobiSetTotalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetTotalBlocks_C",PCBJacobiGetTotalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetLocalBlocks_C",PCBJacobiSetLocalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetLocalBlocks_C",PCBJacobiGetLocalBlocks_BJacobi);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
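/*
   Example (illustrative sketch): selecting this preconditioner from application
   code; "A", "b" and "x" are hypothetical, already-assembled objects.

     KSP ksp;
     PC  pc;

     ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
     ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCBJACOBI);CHKERRQ(ierr);
     ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);   <-- keeps -pc_bjacobi_blocks etc. working
     ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
*/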
/* --------------------------------------------------------------------------------------------*/
/*
        These are for a single block per processor; works for AIJ, BAIJ; Seq and MPI
*/
static PetscErrorCode PCReset_BJacobi_Singleblock(PC pc)
{
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);
  ierr = VecDestroy(&bjac->x);CHKERRQ(ierr);
  ierr = VecDestroy(&bjac->y);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_BJacobi_Singleblock(PC pc)
{
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  ierr = PCReset_BJacobi_Singleblock(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr);
  ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  ierr = PetscFree(bjac);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetUpOnBlocks_BJacobi_Singleblock(PC pc)
{
  PetscErrorCode     ierr;
  PC_BJacobi         *jac   = (PC_BJacobi*)pc->data;
  KSP                subksp = jac->ksp[0];
  KSPConvergedReason reason;

  PetscFunctionBegin;
  ierr = KSPSetUp(subksp);CHKERRQ(ierr);
  ierr = KSPGetConvergedReason(subksp,&reason);CHKERRQ(ierr);
  if (reason == KSP_DIVERGED_PCSETUP_FAILED) {
    pc->failedreason = PC_SUBPC_ERROR;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApply_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;

  PetscFunctionBegin;
  ierr = VecGetLocalVectorRead(x,bjac->x);CHKERRQ(ierr);
  ierr = VecGetLocalVector(y,bjac->y);CHKERRQ(ierr);
  /* Since the inner KSP matrix may point directly to the diagonal block of an MPI matrix, the inner
     matrix may change even if the outer KSP/PC has not updated the preconditioner; this would trigger a rebuild
     of the inner preconditioner automatically unless we pass down the outer preconditioner's reuse flag. */
  ierr = KSPSetReusePreconditioner(jac->ksp[0],pc->reusepreconditioner);CHKERRQ(ierr);
  ierr = KSPSolve(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreLocalVectorRead(x,bjac->x);CHKERRQ(ierr);
  ierr = VecRestoreLocalVector(y,bjac->y);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
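/*
   Illustrative sketch of the zero-copy pattern used in PCApply_BJacobi_Singleblock()
   above (not part of the library): a sequential Vec aliases the local part of
   an MPI Vec so a serial solver can be applied without copying; "X" is a
   hypothetical MPI vector.

     Vec      X,Xloc;
     PetscInt nloc;

     ierr = VecGetLocalSize(X,&nloc);CHKERRQ(ierr);
     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,nloc,NULL,&Xloc);CHKERRQ(ierr);
     ierr = VecGetLocalVectorRead(X,Xloc);CHKERRQ(ierr);
        ... use Xloc with a PETSC_COMM_SELF solver ...
     ierr = VecRestoreLocalVectorRead(X,Xloc);CHKERRQ(ierr);
     ierr = VecDestroy(&Xloc);CHKERRQ(ierr);
*/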
static PetscErrorCode PCApplySymmetricLeft_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *y_array;
  const PetscScalar      *x_array;
  PC                     subpc;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() is to avoid having to copy the
    y vector into the bjac->x vector. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  ierr = VecGetArrayRead(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
  /* apply the symmetric left portion of the inner PC operator */
  /* note this bypasses the inner KSP and its options completely */
  ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
  ierr = PCApplySymmetricLeft(subpc,bjac->x,bjac->y);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApplySymmetricRight_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *y_array;
  const PetscScalar      *x_array;
  PC                     subpc;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() is to avoid having to copy the
    y vector into the bjac->x vector. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  ierr = VecGetArrayRead(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);

  /* apply the symmetric right portion of the inner PC operator */
  /* note this bypasses the inner KSP and its options completely */

  ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
  ierr = PCApplySymmetricRight(subpc,bjac->x,bjac->y);CHKERRQ(ierr);

  /* unplace the borrowed arrays before returning them to the caller */
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
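/*
   Minimal sketch of the VecPlaceArray()/VecResetArray() idiom used in the
   routines above (illustrative only): a workspace Vec created with a NULL
   array temporarily borrows caller-owned storage, so no data is copied.
   All names here are hypothetical.

     Vec               work;
     const PetscScalar *a;

     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n,NULL,&work);CHKERRQ(ierr);
     ierr = VecGetArrayRead(v,&a);CHKERRQ(ierr);
     ierr = VecPlaceArray(work,a);CHKERRQ(ierr);
        ... operate on "work", which now aliases v's storage ...
     ierr = VecResetArray(work);CHKERRQ(ierr);       <-- always reset before restoring
     ierr = VecRestoreArrayRead(v,&a);CHKERRQ(ierr);
*/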
static PetscErrorCode PCApplyTranspose_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *y_array;
  const PetscScalar      *x_array;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() is to avoid having to copy the
    y vector into the bjac->x vector. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  ierr = VecGetArrayRead(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
  ierr = KSPSolveTranspose(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
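/*
   Illustrative note on the option-prefix wiring in PCSetUp_BJacobi_Singleblock()
   below (the prefixes here are hypothetical): each block KSP inherits the outer
   PC's prefix and then appends "sub_", so with an outer prefix "outer_" the
   block solver is controlled by keys such as

     -outer_sub_ksp_type richardson -outer_sub_pc_type sor

   and with no outer prefix by plain -sub_ksp_type, -sub_pc_type, ...
*/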
static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC pc,Mat mat,Mat pmat)
{
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode         ierr;
  PetscInt               m;
  KSP                    ksp;
  PC_BJacobi_Singleblock *bjac;
  PetscBool              wasSetup = PETSC_TRUE;

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    const char *prefix;

    if (!jac->ksp) {
      wasSetup = PETSC_FALSE;

      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);

      pc->ops->reset               = PCReset_BJacobi_Singleblock;
      pc->ops->destroy             = PCDestroy_BJacobi_Singleblock;
      pc->ops->apply               = PCApply_BJacobi_Singleblock;
      pc->ops->applysymmetricleft  = PCApplySymmetricLeft_BJacobi_Singleblock;
      pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock;
      pc->ops->applytranspose      = PCApplyTranspose_BJacobi_Singleblock;
      pc->ops->setuponblocks       = PCSetUpOnBlocks_BJacobi_Singleblock;

      ierr        = PetscMalloc1(1,&jac->ksp);CHKERRQ(ierr);
      jac->ksp[0] = ksp;

      ierr      = PetscNewLog(pc,&bjac);CHKERRQ(ierr);
      jac->data = (void*)bjac;
    } else {
      ksp  = jac->ksp[0];
      bjac = (PC_BJacobi_Singleblock*)jac->data;
    }

    /*
      The reason we need to generate these vectors is to serve
      as the right-hand side and solution vector for the solve on the
      block. We do not need to allocate space for the vectors since
      that is provided via VecPlaceArray() just before the call to
      KSPSolve() on the block.
    */
    ierr = MatGetSize(pmat,&m,&m);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->x);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->y);CHKERRQ(ierr);
#if defined(PETSC_HAVE_CUSP)
    ierr = VecSetType(bjac->x,VECCUSP);CHKERRQ(ierr);
    ierr = VecSetType(bjac->y,VECCUSP);CHKERRQ(ierr);
#elif defined(PETSC_HAVE_VECCUDA)
    ierr = VecSetType(bjac->x,VECCUDA);CHKERRQ(ierr);
    ierr = VecSetType(bjac->y,VECCUDA);CHKERRQ(ierr);
#elif defined(PETSC_HAVE_VIENNACL)
    ierr = VecSetType(bjac->x,VECVIENNACL);CHKERRQ(ierr);
    ierr = VecSetType(bjac->y,VECVIENNACL);CHKERRQ(ierr);
#endif
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->x);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->y);CHKERRQ(ierr);
  } else {
    ksp  = jac->ksp[0];
    bjac = (PC_BJacobi_Singleblock*)jac->data;
  }
  if (pc->useAmat) {
    ierr = KSPSetOperators(ksp,mat,pmat);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(ksp,pmat,pmat);CHKERRQ(ierr);
  }
  if (!wasSetup && pc->setfromoptionscalled) {
    /* If PCSetFromOptions_BJacobi() is called later, KSPSetFromOptions() will be called at that time. */
    ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* ---------------------------------------------------------------------------------------------*/
static PetscErrorCode PCReset_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi            *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscErrorCode        ierr;
  PetscInt              i;

  PetscFunctionBegin;
  if (bjac && bjac->pmat) {
    ierr = MatDestroyMatrices(jac->n_local,&bjac->pmat);CHKERRQ(ierr);
    if (pc->useAmat) {
      ierr = MatDestroyMatrices(jac->n_local,&bjac->mat);CHKERRQ(ierr);
    }
  }

  for (i=0; i<jac->n_local; i++) {
    ierr = KSPReset(jac->ksp[i]);CHKERRQ(ierr);
    if (bjac && bjac->x) {
      ierr = VecDestroy(&bjac->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&bjac->y[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&bjac->is[i]);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi            *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscErrorCode        ierr;
  PetscInt              i;

  PetscFunctionBegin;
  ierr = PCReset_BJacobi_Multiblock(pc);CHKERRQ(ierr);
  if (bjac) {
    ierr = PetscFree2(bjac->x,bjac->y);CHKERRQ(ierr);
    ierr = PetscFree(bjac->starts);CHKERRQ(ierr);
    ierr = PetscFree(bjac->is);CHKERRQ(ierr);
  }
  ierr = PetscFree(jac->data);CHKERRQ(ierr);
  for (i=0; i<jac->n_local; i++) {
    ierr = KSPDestroy(&jac->ksp[i]);CHKERRQ(ierr);
  }
  ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi         *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode     ierr;
  PetscInt           i,n_local = jac->n_local;
  KSPConvergedReason reason;

  PetscFunctionBegin;
  for (i=0; i<n_local; i++) {
    ierr = KSPSetUp(jac->ksp[i]);CHKERRQ(ierr);
    ierr = KSPGetConvergedReason(jac->ksp[i],&reason);CHKERRQ(ierr);
    if (reason == KSP_DIVERGED_PCSETUP_FAILED) {
      pc->failedreason = PC_SUBPC_ERROR;
    }
  }
  PetscFunctionReturn(0);
}
/*
      Preconditioner for block Jacobi
*/
static PetscErrorCode PCApply_BJacobi_Multiblock(PC pc,Vec x,Vec y)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode        ierr;
  PetscInt              i,n_local = jac->n_local;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscScalar           *yin;
  const PetscScalar     *xin;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(x,&xin);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
  for (i=0; i<n_local; i++) {
    /*
       To avoid copying the subvector from x into a workspace we instead
       make the workspace vector array point to the subpart of the array of
       the global vector.
    */
    ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
    ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);

    ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
    ierr = KSPSolve(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);

    ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
    ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
  }
  ierr = VecRestoreArrayRead(x,&xin);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*
      Transpose preconditioner for block Jacobi
*/
static PetscErrorCode PCApplyTranspose_BJacobi_Multiblock(PC pc,Vec x,Vec y)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode        ierr;
  PetscInt              i,n_local = jac->n_local;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscScalar           *yin;
  const PetscScalar     *xin;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(x,&xin);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
  for (i=0; i<n_local; i++) {
    /*
       To avoid copying the subvector from x into a workspace we instead
       make the workspace vector array point to the subpart of the array of
       the global vector.
    */
    ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
    ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);

    ierr = PetscLogEventBegin(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);

    ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
    ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
  }
  ierr = VecRestoreArrayRead(x,&xin);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
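/*
   Example (illustrative sketch): a user callback registered with
   PCSetModifySubMatrices(), which PCSetUp_BJacobi_Multiblock() below invokes
   (via PCModifySubMatrices()) after extracting the diagonal blocks, e.g. to
   impose different boundary conditions on the blocks than on the global
   problem.  The callback body shown here is hypothetical.

     PetscErrorCode MyModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
     {
       PetscErrorCode ierr;
       PetscInt       i;

       PetscFunctionBegin;
       for (i=0; i<nsub; i++) {
          ... modify entries of submat[i] with MatSetValues() etc. ...
       }
       PetscFunctionReturn(0);
     }

     ierr = PCSetModifySubMatrices(pc,MyModifySubMatrices,NULL);CHKERRQ(ierr);
*/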
static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC pc,Mat mat,Mat pmat)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode        ierr;
  PetscInt              m,n_local,N,M,start,i;
  const char            *prefix,*pprefix,*mprefix;
  KSP                   ksp;
  Vec                   x,y;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PC                    subpc;
  IS                    is;
  MatReuse              scall;

  PetscFunctionBegin;
  ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);

  n_local = jac->n_local;

  if (pc->useAmat) {
    PetscBool same;
    ierr = PetscObjectTypeCompare((PetscObject)mat,((PetscObject)pmat)->type_name,&same);CHKERRQ(ierr);
    if (!same) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_INCOMP,"Matrices not of same type");
  }

  if (!pc->setupcalled) {
    scall = MAT_INITIAL_MATRIX;

    if (!jac->ksp) {
      pc->ops->reset         = PCReset_BJacobi_Multiblock;
      pc->ops->destroy       = PCDestroy_BJacobi_Multiblock;
      pc->ops->apply         = PCApply_BJacobi_Multiblock;
      pc->ops->applytranspose= PCApplyTranspose_BJacobi_Multiblock;
      pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Multiblock;

      ierr = PetscNewLog(pc,&bjac);CHKERRQ(ierr);
      ierr = PetscMalloc1(n_local,&jac->ksp);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory((PetscObject)pc,n_local*sizeof(KSP));CHKERRQ(ierr);
      ierr = PetscMalloc2(n_local,&bjac->x,n_local,&bjac->y);CHKERRQ(ierr);
      ierr = PetscMalloc1(n_local,&bjac->starts);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory((PetscObject)pc,n_local*(2*sizeof(Vec)+sizeof(PetscInt)));CHKERRQ(ierr);

      jac->data = (void*)bjac;
      ierr      = PetscMalloc1(n_local,&bjac->is);CHKERRQ(ierr);
      ierr      = PetscLogObjectMemory((PetscObject)pc,n_local*sizeof(IS));CHKERRQ(ierr);

      for (i=0; i<n_local; i++) {
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);

        jac->ksp[i] = ksp;
      }
    } else {
      bjac = (PC_BJacobi_Multiblock*)jac->data;
    }

    start = 0;
    for (i=0; i<n_local; i++) {
      m = jac->l_lens[i];
      /*
        The reason we need to generate these vectors is to serve
        as the right-hand side and solution vector for the solve on the
        block. We do not need to allocate space for the vectors since
        that is provided via VecPlaceArray() just before the call to
        KSPSolve() on the block.
      */
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&x);CHKERRQ(ierr);
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&y);CHKERRQ(ierr);
#if defined(PETSC_HAVE_CUSP)
      ierr = VecSetType(x,VECCUSP);CHKERRQ(ierr);
      ierr = VecSetType(y,VECCUSP);CHKERRQ(ierr);
#elif defined(PETSC_HAVE_VECCUDA)
      ierr = VecSetType(x,VECCUDA);CHKERRQ(ierr);
      ierr = VecSetType(y,VECCUDA);CHKERRQ(ierr);
#elif defined(PETSC_HAVE_VIENNACL)
      ierr = VecSetType(x,VECVIENNACL);CHKERRQ(ierr);
      ierr = VecSetType(y,VECVIENNACL);CHKERRQ(ierr);
#endif
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)x);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)y);CHKERRQ(ierr);

      bjac->x[i]      = x;
      bjac->y[i]      = y;
      bjac->starts[i] = start;

      ierr        = ISCreateStride(PETSC_COMM_SELF,m,start,1,&is);CHKERRQ(ierr);
      bjac->is[i] = is;
      ierr        = PetscLogObjectParent((PetscObject)pc,(PetscObject)is);CHKERRQ(ierr);

      start += m;
    }
  } else {
    bjac = (PC_BJacobi_Multiblock*)jac->data;
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(n_local,&bjac->pmat);CHKERRQ(ierr);
      if (pc->useAmat) {
        ierr = MatDestroyMatrices(n_local,&bjac->mat);CHKERRQ(ierr);
      }
      scall = MAT_INITIAL_MATRIX;
    } else scall = MAT_REUSE_MATRIX;
  }

  ierr = MatCreateSubMatrices(pmat,n_local,bjac->is,bjac->is,scall,&bjac->pmat);CHKERRQ(ierr);
  if (pc->useAmat) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)mat,&mprefix);CHKERRQ(ierr);
    ierr = MatCreateSubMatrices(mat,n_local,bjac->is,bjac->is,scall,&bjac->mat);CHKERRQ(ierr);
  }
  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,n_local,bjac->is,bjac->is,bjac->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  ierr = PetscObjectGetOptionsPrefix((PetscObject)pmat,&pprefix);CHKERRQ(ierr);
  for (i=0; i<n_local; i++) {
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->pmat[i]);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->pmat[i],pprefix);CHKERRQ(ierr);
    if (pc->useAmat) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->mat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->mat[i],mprefix);CHKERRQ(ierr);
      ierr = KSPSetOperators(jac->ksp[i],bjac->mat[i],bjac->pmat[i]);CHKERRQ(ierr);
    } else {
      ierr = KSPSetOperators(jac->ksp[i],bjac->pmat[i],bjac->pmat[i]);CHKERRQ(ierr);
    }
    if (pc->setfromoptionscalled) {
      ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

/* ---------------------------------------------------------------------------------------------*/
/*
      These are for a single block spanning multiple processes
*/
static PetscErrorCode PCReset_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;

  PetscFunctionBegin;
  ierr = VecDestroy(&mpjac->ysub);CHKERRQ(ierr);
  ierr = VecDestroy(&mpjac->xsub);CHKERRQ(ierr);
  ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);
  if (jac->ksp) {ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
static PetscErrorCode PCDestroy_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;

  PetscFunctionBegin;
  ierr = PCReset_BJacobi_Multiproc(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr);
  ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&mpjac->psubcomm);CHKERRQ(ierr);

  ierr = PetscFree(mpjac);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApply_BJacobi_Multiproc(PC pc,Vec x,Vec y)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscScalar          *yarray;
  const PetscScalar    *xarray;
  KSPConvergedReason   reason;

  PetscFunctionBegin;
  /* place x's and y's local arrays into xsub and ysub */
  ierr = VecGetArrayRead(x,&xarray);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yarray);CHKERRQ(ierr);
  ierr = VecPlaceArray(mpjac->xsub,xarray);CHKERRQ(ierr);
  ierr = VecPlaceArray(mpjac->ysub,yarray);CHKERRQ(ierr);

  /* apply preconditioner on each matrix block */
  ierr = PetscLogEventBegin(PC_ApplyOnMproc,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr);
  ierr = KSPSolve(jac->ksp[0],mpjac->xsub,mpjac->ysub);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_ApplyOnMproc,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr);
  ierr = KSPGetConvergedReason(jac->ksp[0],&reason);CHKERRQ(ierr);
  if (reason == KSP_DIVERGED_PCSETUP_FAILED) {
    pc->failedreason = PC_SUBPC_ERROR;
  }

  ierr = VecResetArray(mpjac->xsub);CHKERRQ(ierr);
  ierr = VecResetArray(mpjac->ysub);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(x,&xarray);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
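/*
   Example (illustrative; the executable name is hypothetical): requesting
   fewer blocks than MPI ranks activates this multi-process path (see the
   jac->n > 0 && jac->n < size branch in PCSetUp_BJacobi() above), so each
   block is solved by a parallel KSP on a subcommunicator:

     mpiexec -n 8 ./myapp -pc_type bjacobi -pc_bjacobi_blocks 2 \
             -sub_ksp_type gmres -sub_pc_type asm

   Here the 8 ranks are split into 2 contiguous subcommunicators of 4 ranks,
   each owning one diagonal block (see PCSetUp_BJacobi_Multiproc() below).
*/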
static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscInt             m,n;
  MPI_Comm             comm,subcomm=0;
  const char           *prefix;
  PetscBool            wasSetup = PETSC_TRUE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  if (jac->n_local > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only a single block in a subcommunicator is supported");
  jac->n_local = 1; /* currently only a single block is supported for a subcommunicator */
  if (!pc->setupcalled) {
    wasSetup  = PETSC_FALSE;
    ierr      = PetscNewLog(pc,&mpjac);CHKERRQ(ierr);
    jac->data = (void*)mpjac;

    /* initialize the data structure mpjac */
    if (!jac->psubcomm) {
      /* Create default contiguous subcommunicators if the user does not provide them */
      ierr = PetscSubcommCreate(comm,&jac->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(jac->psubcomm,jac->n);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(jac->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
    }
    mpjac->psubcomm = jac->psubcomm;
    subcomm         = PetscSubcommChild(mpjac->psubcomm);

    /* Get matrix blocks of pmat */
    ierr = MatGetMultiProcBlock(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr);

    /* create a new KSP, of which the processes of each subcomm share a copy */
    ierr = PetscMalloc1(1,&jac->ksp);CHKERRQ(ierr);
    ierr = KSPCreate(subcomm,&jac->ksp[0]);CHKERRQ(ierr);
    ierr = KSPSetErrorIfNotConverged(jac->ksp[0],pc->erroriffailure);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)jac->ksp[0],(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)jac->ksp[0]);CHKERRQ(ierr);
    ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats);CHKERRQ(ierr);
    ierr = KSPGetPC(jac->ksp[0],&mpjac->pc);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(jac->ksp[0],prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(jac->ksp[0],"sub_");CHKERRQ(ierr);
    /*
      PetscMPIInt rank,subsize,subrank;
      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
      ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
      ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);

      ierr = MatGetLocalSize(mpjac->submats,&m,NULL);CHKERRQ(ierr);
      ierr = MatGetSize(mpjac->submats,&n,NULL);CHKERRQ(ierr);
      ierr = PetscSynchronizedPrintf(comm,"[%d], sub-size %d,sub-rank %d\n",rank,subsize,subrank);
      ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
    */

    /* create dummy vectors xsub and ysub */
    ierr = MatGetLocalSize(mpjac->submats,&m,&n);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(subcomm,1,n,PETSC_DECIDE,NULL,&mpjac->xsub);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(subcomm,1,m,PETSC_DECIDE,NULL,&mpjac->ysub);CHKERRQ(ierr);
#if defined(PETSC_HAVE_CUSP)
    ierr = VecSetType(mpjac->xsub,VECMPICUSP);CHKERRQ(ierr);
    ierr = VecSetType(mpjac->ysub,VECMPICUSP);CHKERRQ(ierr);
#elif defined(PETSC_HAVE_VECCUDA)
    ierr = VecSetType(mpjac->xsub,VECMPICUDA);CHKERRQ(ierr);
    ierr = VecSetType(mpjac->ysub,VECMPICUDA);CHKERRQ(ierr);
#elif defined(PETSC_HAVE_VIENNACL)
    ierr = VecSetType(mpjac->xsub,VECMPIVIENNACL);CHKERRQ(ierr);
    ierr = VecSetType(mpjac->ysub,VECMPIVIENNACL);CHKERRQ(ierr);
#endif
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)mpjac->xsub);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)mpjac->ysub);CHKERRQ(ierr);

    pc->ops->reset   = PCReset_BJacobi_Multiproc;
    pc->ops->destroy = PCDestroy_BJacobi_Multiproc;
    pc->ops->apply   = PCApply_BJacobi_Multiproc;
  } else { /* pc->setupcalled */
    subcomm = PetscSubcommChild(mpjac->psubcomm);
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrix blocks, then get new matrix blocks */
      if (mpjac->submats) {ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);}
      ierr = MatGetMultiProcBlock(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr);
    } else {
      ierr = MatGetMultiProcBlock(pc->pmat,subcomm,MAT_REUSE_MATRIX,&mpjac->submats);CHKERRQ(ierr);
    }
    ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats);CHKERRQ(ierr);
  }

  if (!wasSetup && pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(jac->ksp[0]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}