1 #include <petsc-private/dmdaimpl.h> /*I "petscdmda.h" I*/ 2 3 4 #undef __FUNCT__ 5 #define __FUNCT__ "DMDASetDim" 6 /*@ 7 DMDASetDim - Sets the dimension 8 9 Collective on DMDA 10 11 Input Parameters: 12 + da - the DMDA 13 - dim - the dimension (or PETSC_DECIDE) 14 15 Level: intermediate 16 17 .seealso: DaGetDim(), DMDASetSizes() 18 @*/ 19 PetscErrorCode DMDASetDim(DM da, PetscInt dim) 20 { 21 DM_DA *dd = (DM_DA*)da->data; 22 23 PetscFunctionBegin; 24 PetscValidHeaderSpecific(da, DM_CLASSID, 1); 25 if (dd->dim > 0 && dim != dd->dim) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot change DMDA dim from %D after it was set to %D",dd->dim,dim); 26 dd->dim = dim; 27 PetscFunctionReturn(0); 28 } 29 30 #undef __FUNCT__ 31 #define __FUNCT__ "DMDASetSizes" 32 /*@ 33 DMDASetSizes - Sets the global sizes 34 35 Logically Collective on DMDA 36 37 Input Parameters: 38 + da - the DMDA 39 . M - the global X size (or PETSC_DECIDE) 40 . N - the global Y size (or PETSC_DECIDE) 41 - P - the global Z size (or PETSC_DECIDE) 42 43 Level: intermediate 44 45 .seealso: DMDAGetSize(), PetscSplitOwnership() 46 @*/ 47 PetscErrorCode DMDASetSizes(DM da, PetscInt M, PetscInt N, PetscInt P) 48 { 49 DM_DA *dd = (DM_DA*)da->data; 50 51 PetscFunctionBegin; 52 PetscValidHeaderSpecific(da, DM_CLASSID, 1); 53 PetscValidLogicalCollectiveInt(da,M,2); 54 PetscValidLogicalCollectiveInt(da,N,3); 55 PetscValidLogicalCollectiveInt(da,P,4); 56 if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()"); 57 58 dd->M = M; 59 dd->N = N; 60 dd->P = P; 61 PetscFunctionReturn(0); 62 } 63 64 #undef __FUNCT__ 65 #define __FUNCT__ "DMDASetNumProcs" 66 /*@ 67 DMDASetNumProcs - Sets the number of processes in each dimension 68 69 Logically Collective on DMDA 70 71 Input Parameters: 72 + da - the DMDA 73 . m - the number of X procs (or PETSC_DECIDE) 74 . n - the number of Y procs (or PETSC_DECIDE) 75 - p - the number of Z procs (or PETSC_DECIDE) 76 77 Level: intermediate 78 79 .seealso: DMDASetSizes(), DMDAGetSize(), PetscSplitOwnership() 80 @*/ 81 PetscErrorCode DMDASetNumProcs(DM da, PetscInt m, PetscInt n, PetscInt p) 82 { 83 DM_DA *dd = (DM_DA*)da->data; 84 PetscErrorCode ierr; 85 86 PetscFunctionBegin; 87 PetscValidHeaderSpecific(da, DM_CLASSID, 1); 88 PetscValidLogicalCollectiveInt(da,m,2); 89 PetscValidLogicalCollectiveInt(da,n,3); 90 PetscValidLogicalCollectiveInt(da,p,4); 91 if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()"); 92 dd->m = m; 93 dd->n = n; 94 dd->p = p; 95 if (dd->dim == 2) { 96 PetscMPIInt size; 97 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)da),&size);CHKERRQ(ierr); 98 if ((dd->m > 0) && (dd->n < 0)) { 99 dd->n = size/dd->m; 100 if (dd->n*dd->m != size) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%D processes in X direction not divisible into comm size %d",m,size); 101 } 102 if ((dd->n > 0) && (dd->m < 0)) { 103 dd->m = size/dd->n; 104 if (dd->n*dd->m != size) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%D processes in Y direction not divisible into comm size %d",n,size); 105 } 106 } 107 PetscFunctionReturn(0); 108 } 109 110 #undef __FUNCT__ 111 #define __FUNCT__ "DMDASetBoundaryType" 112 /*@ 113 DMDASetBoundaryType - Sets the type of ghost nodes on domain boundaries. 114 115 Not collective 116 117 Input Parameter: 118 + da - The DMDA 119 - bx,by,bz - One of DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_GHOSTED, DMDA_BOUNDARY_PERIODIC 120 121 Level: intermediate 122 123 .keywords: distributed array, periodicity 124 .seealso: DMDACreate(), DMDestroy(), DMDA, DMDABoundaryType 125 @*/ 126 PetscErrorCode DMDASetBoundaryType(DM da,DMDABoundaryType bx,DMDABoundaryType by,DMDABoundaryType bz) 127 { 128 DM_DA *dd = (DM_DA*)da->data; 129 130 PetscFunctionBegin; 131 PetscValidHeaderSpecific(da,DM_CLASSID,1); 132 PetscValidLogicalCollectiveEnum(da,bx,2); 133 PetscValidLogicalCollectiveEnum(da,by,3); 134 PetscValidLogicalCollectiveEnum(da,bz,4); 135 if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()"); 136 dd->bx = bx; 137 dd->by = by; 138 dd->bz = bz; 139 PetscFunctionReturn(0); 140 } 141 142 #undef __FUNCT__ 143 #define __FUNCT__ "DMDASetDof" 144 /*@ 145 DMDASetDof - Sets the number of degrees of freedom per vertex 146 147 Not collective 148 149 Input Parameter: 150 + da - The DMDA 151 - dof - Number of degrees of freedom 152 153 Level: intermediate 154 155 .keywords: distributed array, degrees of freedom 156 .seealso: DMDACreate(), DMDestroy(), DMDA 157 @*/ 158 PetscErrorCode DMDASetDof(DM da, PetscInt dof) 159 { 160 DM_DA *dd = (DM_DA*)da->data; 161 162 PetscFunctionBegin; 163 PetscValidHeaderSpecific(da,DM_CLASSID,1); 164 PetscValidLogicalCollectiveInt(da,dof,2); 165 if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()"); 166 dd->w = dof; 167 da->bs = dof; 168 PetscFunctionReturn(0); 169 } 170 171 #undef __FUNCT__ 172 #define __FUNCT__ "DMDAGetOverlap" 173 /*@ 174 DMDAGetOverlap - Gets the size of the per-processor overlap. 175 176 Not collective 177 178 Input Parameters: 179 . da - The DMDA 180 181 Output Parameters: 182 + x - Overlap in the x direction 183 . y - Overlap in the y direction 184 - z - Overlap in the z direction 185 186 Level: intermediate 187 188 .keywords: distributed array, overlap, domain decomposition 189 .seealso: DMDACreateDomainDecomposition(), DMDASetOverlap(), DMDA 190 @*/ 191 PetscErrorCode DMDAGetOverlap(DM da,PetscInt *x,PetscInt *y,PetscInt *z) 192 { 193 DM_DA *dd = (DM_DA*)da->data; 194 195 PetscFunctionBegin; 196 PetscValidHeaderSpecific(da,DM_CLASSID,1); 197 if (x) *x = dd->xol; 198 if (y) *y = dd->yol; 199 if (z) *z = dd->zol; 200 PetscFunctionReturn(0); 201 } 202 203 #undef __FUNCT__ 204 #define __FUNCT__ "DMDASetOverlap" 205 /*@ 206 DMDASetOverlap - Sets the size of the per-processor overlap. 207 208 Not collective 209 210 Input Parameters: 211 + da - The DMDA 212 . x - Overlap in the x direction 213 . y - Overlap in the y direction 214 - z - Overlap in the z direction 215 216 Level: intermediate 217 218 .keywords: distributed array, overlap, domain decomposition 219 .seealso: DMDACreateDomainDecomposition(), DMDAGetOverlap(), DMDA 220 @*/ 221 PetscErrorCode DMDASetOverlap(DM da,PetscInt x,PetscInt y,PetscInt z) 222 { 223 DM_DA *dd = (DM_DA*)da->data; 224 225 PetscFunctionBegin; 226 PetscValidHeaderSpecific(da,DM_CLASSID,1); 227 PetscValidLogicalCollectiveInt(da,x,2); 228 PetscValidLogicalCollectiveInt(da,y,3); 229 PetscValidLogicalCollectiveInt(da,z,4); 230 dd->xol = x; 231 dd->yol = y; 232 dd->zol = z; 233 PetscFunctionReturn(0); 234 } 235 236 237 #undef __FUNCT__ 238 #define __FUNCT__ "DMDAGetNumLocalSubDomains" 239 /*@ 240 DMDAGetNumLocalSubDomains - Gets the number of local subdomains created upon decomposition. 241 242 Not collective 243 244 Input Parameters: 245 . da - The DMDA 246 247 Output Parameters: 248 + Nsub - Number of local subdomains created upon decomposition 249 250 Level: intermediate 251 252 .keywords: distributed array, domain decomposition 253 .seealso: DMDACreateDomainDecomposition(), DMDASetNumLocalSubDomains(), DMDA 254 @*/ 255 PetscErrorCode DMDAGetNumLocalSubDomains(DM da,PetscInt *Nsub) 256 { 257 DM_DA *dd = (DM_DA*)da->data; 258 259 PetscFunctionBegin; 260 PetscValidHeaderSpecific(da,DM_CLASSID,1); 261 if (Nsub) *Nsub = dd->Nsub; 262 PetscFunctionReturn(0); 263 } 264 265 #undef __FUNCT__ 266 #define __FUNCT__ "DMDASetNumLocalSubDomains" 267 /*@ 268 DMDASetNumLocalSubDomains - Sets the number of local subdomains created upon decomposition. 269 270 Not collective 271 272 Input Parameters: 273 + da - The DMDA 274 - Nsub - The number of local subdomains requested 275 276 Level: intermediate 277 278 .keywords: distributed array, domain decomposition 279 .seealso: DMDACreateDomainDecomposition(), DMDAGetNumLocalSubDomains(), DMDA 280 @*/ 281 PetscErrorCode DMDASetNumLocalSubDomains(DM da,PetscInt Nsub) 282 { 283 DM_DA *dd = (DM_DA*)da->data; 284 285 PetscFunctionBegin; 286 PetscValidHeaderSpecific(da,DM_CLASSID,1); 287 PetscValidLogicalCollectiveInt(da,Nsub,2); 288 dd->Nsub = Nsub; 289 PetscFunctionReturn(0); 290 } 291 292 #undef __FUNCT__ 293 #define __FUNCT__ "DMDASetOffset" 294 /*@ 295 DMDASetOffset - Sets the index offset of the DA. 296 297 Collective on DA 298 299 Input Parameter: 300 + da - The DMDA 301 . xo - The offset in the x direction 302 . yo - The offset in the y direction 303 - zo - The offset in the z direction 304 305 Level: intermediate 306 307 Notes: This is used primarily to overlap a computation on a local DA with that on a global DA without 308 changing boundary conditions or subdomain features that depend upon the global offsets. 309 310 .keywords: distributed array, degrees of freedom 311 .seealso: DMDAGetOffset(), DMDAVecGetArray() 312 @*/ 313 PetscErrorCode DMDASetOffset(DM da, PetscInt xo, PetscInt yo, PetscInt zo, PetscInt Mo, PetscInt No, PetscInt Po) 314 { 315 PetscErrorCode ierr; 316 DM_DA *dd = (DM_DA*)da->data; 317 318 PetscFunctionBegin; 319 PetscValidHeaderSpecific(da,DM_CLASSID,1); 320 PetscValidLogicalCollectiveInt(da,xo,2); 321 PetscValidLogicalCollectiveInt(da,yo,3); 322 PetscValidLogicalCollectiveInt(da,zo,4); 323 PetscValidLogicalCollectiveInt(da,Mo,5); 324 PetscValidLogicalCollectiveInt(da,No,6); 325 PetscValidLogicalCollectiveInt(da,Po,7); 326 dd->xo = xo; 327 dd->yo = yo; 328 dd->zo = zo; 329 dd->Mo = Mo; 330 dd->No = No; 331 dd->Po = Po; 332 333 if (da->coordinateDM) { 334 ierr = DMDASetOffset(da->coordinateDM,xo,yo,zo,Mo,No,Po);CHKERRQ(ierr); 335 } 336 PetscFunctionReturn(0); 337 } 338 339 #undef __FUNCT__ 340 #define __FUNCT__ "DMDAGetOffset" 341 /*@ 342 DMDAGetOffset - Gets the index offset of the DA. 343 344 Not collective 345 346 Input Parameter: 347 . da - The DMDA 348 349 Output Parameters: 350 + xo - The offset in the x direction 351 . yo - The offset in the y direction 352 . zo - The offset in the z direction 353 . Mo - The global size in the x direction 354 . No - The global size in the y direction 355 - Po - The global size in the z direction 356 357 Level: intermediate 358 359 .keywords: distributed array, degrees of freedom 360 .seealso: DMDASetOffset(), DMDAVecGetArray() 361 @*/ 362 PetscErrorCode DMDAGetOffset(DM da,PetscInt *xo,PetscInt *yo,PetscInt *zo,PetscInt *Mo,PetscInt *No,PetscInt *Po) 363 { 364 DM_DA *dd = (DM_DA*)da->data; 365 366 PetscFunctionBegin; 367 PetscValidHeaderSpecific(da,DM_CLASSID,1); 368 if (xo) *xo = dd->xo; 369 if (yo) *yo = dd->yo; 370 if (zo) *zo = dd->zo; 371 if (Mo) *Mo = dd->Mo; 372 if (No) *No = dd->No; 373 if (Po) *Po = dd->Po; 374 PetscFunctionReturn(0); 375 } 376 377 378 #undef __FUNCT__ 379 #define __FUNCT__ "DMDASetStencilType" 380 /*@ 381 DMDASetStencilType - Sets the type of the communication stencil 382 383 Logically Collective on DMDA 384 385 Input Parameter: 386 + da - The DMDA 387 - stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR. 388 389 Level: intermediate 390 391 .keywords: distributed array, stencil 392 .seealso: DMDACreate(), DMDestroy(), DMDA 393 @*/ 394 PetscErrorCode DMDASetStencilType(DM da, DMDAStencilType stype) 395 { 396 DM_DA *dd = (DM_DA*)da->data; 397 398 PetscFunctionBegin; 399 PetscValidHeaderSpecific(da,DM_CLASSID,1); 400 PetscValidLogicalCollectiveEnum(da,stype,2); 401 if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()"); 402 dd->stencil_type = stype; 403 PetscFunctionReturn(0); 404 } 405 406 #undef __FUNCT__ 407 #define __FUNCT__ "DMDASetStencilWidth" 408 /*@ 409 DMDASetStencilWidth - Sets the width of the communication stencil 410 411 Logically Collective on DMDA 412 413 Input Parameter: 414 + da - The DMDA 415 - width - The stencil width 416 417 Level: intermediate 418 419 .keywords: distributed array, stencil 420 .seealso: DMDACreate(), DMDestroy(), DMDA 421 @*/ 422 PetscErrorCode DMDASetStencilWidth(DM da, PetscInt width) 423 { 424 DM_DA *dd = (DM_DA*)da->data; 425 426 PetscFunctionBegin; 427 PetscValidHeaderSpecific(da,DM_CLASSID,1); 428 PetscValidLogicalCollectiveInt(da,width,2); 429 if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()"); 430 dd->s = width; 431 PetscFunctionReturn(0); 432 } 433 434 #undef __FUNCT__ 435 #define __FUNCT__ "DMDACheckOwnershipRanges_Private" 436 static PetscErrorCode DMDACheckOwnershipRanges_Private(DM da,PetscInt M,PetscInt m,const PetscInt lx[]) 437 { 438 PetscInt i,sum; 439 440 PetscFunctionBegin; 441 if (M < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Global dimension not set"); 442 for (i=sum=0; i<m; i++) sum += lx[i]; 443 if (sum != M) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_INCOMP,"Ownership ranges sum to %D but global dimension is %D",sum,M); 444 PetscFunctionReturn(0); 445 } 446 447 #undef __FUNCT__ 448 #define __FUNCT__ "DMDASetOwnershipRanges" 449 /*@ 450 DMDASetOwnershipRanges - Sets the number of nodes in each direction on each process 451 452 Logically Collective on DMDA 453 454 Input Parameter: 455 + da - The DMDA 456 . lx - array containing number of nodes in the X direction on each process, or NULL. If non-null, must be of length da->m 457 . ly - array containing number of nodes in the Y direction on each process, or NULL. If non-null, must be of length da->n 458 - lz - array containing number of nodes in the Z direction on each process, or NULL. If non-null, must be of length da->p. 459 460 Level: intermediate 461 462 Note: these numbers are NOT multiplied by the number of dof per node. 463 464 .keywords: distributed array 465 .seealso: DMDACreate(), DMDestroy(), DMDA 466 @*/ 467 PetscErrorCode DMDASetOwnershipRanges(DM da, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[]) 468 { 469 PetscErrorCode ierr; 470 DM_DA *dd = (DM_DA*)da->data; 471 472 PetscFunctionBegin; 473 PetscValidHeaderSpecific(da,DM_CLASSID,1); 474 if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()"); 475 if (lx) { 476 if (dd->m < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs"); 477 ierr = DMDACheckOwnershipRanges_Private(da,dd->M,dd->m,lx);CHKERRQ(ierr); 478 if (!dd->lx) { 479 ierr = PetscMalloc(dd->m*sizeof(PetscInt), &dd->lx);CHKERRQ(ierr); 480 } 481 ierr = PetscMemcpy(dd->lx, lx, dd->m*sizeof(PetscInt));CHKERRQ(ierr); 482 } 483 if (ly) { 484 if (dd->n < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs"); 485 ierr = DMDACheckOwnershipRanges_Private(da,dd->N,dd->n,ly);CHKERRQ(ierr); 486 if (!dd->ly) { 487 ierr = PetscMalloc(dd->n*sizeof(PetscInt), &dd->ly);CHKERRQ(ierr); 488 } 489 ierr = PetscMemcpy(dd->ly, ly, dd->n*sizeof(PetscInt));CHKERRQ(ierr); 490 } 491 if (lz) { 492 if (dd->p < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs"); 493 ierr = DMDACheckOwnershipRanges_Private(da,dd->P,dd->p,lz);CHKERRQ(ierr); 494 if (!dd->lz) { 495 ierr = PetscMalloc(dd->p*sizeof(PetscInt), &dd->lz);CHKERRQ(ierr); 496 } 497 ierr = PetscMemcpy(dd->lz, lz, dd->p*sizeof(PetscInt));CHKERRQ(ierr); 498 } 499 PetscFunctionReturn(0); 500 } 501 502 #undef __FUNCT__ 503 #define __FUNCT__ "DMDASetInterpolationType" 504 /*@ 505 DMDASetInterpolationType - Sets the type of interpolation that will be 506 returned by DMCreateInterpolation() 507 508 Logically Collective on DMDA 509 510 Input Parameter: 511 + da - initial distributed array 512 . ctype - DMDA_Q1 and DMDA_Q0 are currently the only supported forms 513 514 Level: intermediate 515 516 Notes: you should call this on the coarser of the two DMDAs you pass to DMCreateInterpolation() 517 518 .keywords: distributed array, interpolation 519 520 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType 521 @*/ 522 PetscErrorCode DMDASetInterpolationType(DM da,DMDAInterpolationType ctype) 523 { 524 DM_DA *dd = (DM_DA*)da->data; 525 526 PetscFunctionBegin; 527 PetscValidHeaderSpecific(da,DM_CLASSID,1); 528 PetscValidLogicalCollectiveEnum(da,ctype,2); 529 dd->interptype = ctype; 530 PetscFunctionReturn(0); 531 } 532 533 #undef __FUNCT__ 534 #define __FUNCT__ "DMDAGetInterpolationType" 535 /*@ 536 DMDAGetInterpolationType - Gets the type of interpolation that will be 537 used by DMCreateInterpolation() 538 539 Not Collective 540 541 Input Parameter: 542 . da - distributed array 543 544 Output Parameter: 545 . ctype - interpolation type (DMDA_Q1 and DMDA_Q0 are currently the only supported forms) 546 547 Level: intermediate 548 549 .keywords: distributed array, interpolation 550 551 .seealso: DMDA, DMDAInterpolationType, DMDASetInterpolationType(), DMCreateInterpolation() 552 @*/ 553 PetscErrorCode DMDAGetInterpolationType(DM da,DMDAInterpolationType *ctype) 554 { 555 DM_DA *dd = (DM_DA*)da->data; 556 557 PetscFunctionBegin; 558 PetscValidHeaderSpecific(da,DM_CLASSID,1); 559 PetscValidPointer(ctype,2); 560 *ctype = dd->interptype; 561 PetscFunctionReturn(0); 562 } 563 564 #undef __FUNCT__ 565 #define __FUNCT__ "DMDAGetNeighbors" 566 /*@C 567 DMDAGetNeighbors - Gets an array containing the MPI rank of all the current 568 processes neighbors. 569 570 Not Collective 571 572 Input Parameter: 573 . da - the DMDA object 574 575 Output Parameters: 576 . ranks - the neighbors ranks, stored with the x index increasing most rapidly. 577 this process itself is in the list 578 579 Notes: In 2d the array is of length 9, in 3d of length 27 580 Not supported in 1d 581 Do not free the array, it is freed when the DMDA is destroyed. 582 583 Fortran Notes: In fortran you must pass in an array of the appropriate length. 584 585 Level: intermediate 586 587 @*/ 588 PetscErrorCode DMDAGetNeighbors(DM da,const PetscMPIInt *ranks[]) 589 { 590 DM_DA *dd = (DM_DA*)da->data; 591 592 PetscFunctionBegin; 593 PetscValidHeaderSpecific(da,DM_CLASSID,1); 594 *ranks = dd->neighbors; 595 PetscFunctionReturn(0); 596 } 597 598 #undef __FUNCT__ 599 #define __FUNCT__ "DMDAGetOwnershipRanges" 600 /*@C 601 DMDAGetOwnershipRanges - Gets the ranges of indices in the x, y and z direction that are owned by each process 602 603 Not Collective 604 605 Input Parameter: 606 . da - the DMDA object 607 608 Output Parameter: 609 + lx - ownership along x direction (optional) 610 . ly - ownership along y direction (optional) 611 - lz - ownership along z direction (optional) 612 613 Level: intermediate 614 615 Note: these correspond to the optional final arguments passed to DMDACreate(), DMDACreate2d(), DMDACreate3d() 616 617 In Fortran one must pass in arrays lx, ly, and lz that are long enough to hold the values; the sixth, seventh and 618 eighth arguments from DMDAGetInfo() 619 620 In C you should not free these arrays, nor change the values in them. They will only have valid values while the 621 DMDA they came from still exists (has not been destroyed). 622 623 These numbers are NOT multiplied by the number of dof per node. 624 625 .seealso: DMDAGetCorners(), DMDAGetGhostCorners(), DMDACreate(), DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), VecGetOwnershipRanges() 626 @*/ 627 PetscErrorCode DMDAGetOwnershipRanges(DM da,const PetscInt *lx[],const PetscInt *ly[],const PetscInt *lz[]) 628 { 629 DM_DA *dd = (DM_DA*)da->data; 630 631 PetscFunctionBegin; 632 PetscValidHeaderSpecific(da,DM_CLASSID,1); 633 if (lx) *lx = dd->lx; 634 if (ly) *ly = dd->ly; 635 if (lz) *lz = dd->lz; 636 PetscFunctionReturn(0); 637 } 638 639 #undef __FUNCT__ 640 #define __FUNCT__ "DMDASetRefinementFactor" 641 /*@ 642 DMDASetRefinementFactor - Set the ratios that the DMDA grid is refined 643 644 Logically Collective on DMDA 645 646 Input Parameters: 647 + da - the DMDA object 648 . refine_x - ratio of fine grid to coarse in x direction (2 by default) 649 . refine_y - ratio of fine grid to coarse in y direction (2 by default) 650 - refine_z - ratio of fine grid to coarse in z direction (2 by default) 651 652 Options Database: 653 + -da_refine_x - refinement ratio in x direction 654 . -da_refine_y - refinement ratio in y direction 655 - -da_refine_z - refinement ratio in z direction 656 657 Level: intermediate 658 659 Notes: Pass PETSC_IGNORE to leave a value unchanged 660 661 .seealso: DMRefine(), DMDAGetRefinementFactor() 662 @*/ 663 PetscErrorCode DMDASetRefinementFactor(DM da, PetscInt refine_x, PetscInt refine_y,PetscInt refine_z) 664 { 665 DM_DA *dd = (DM_DA*)da->data; 666 667 PetscFunctionBegin; 668 PetscValidHeaderSpecific(da,DM_CLASSID,1); 669 PetscValidLogicalCollectiveInt(da,refine_x,2); 670 PetscValidLogicalCollectiveInt(da,refine_y,3); 671 PetscValidLogicalCollectiveInt(da,refine_z,4); 672 673 if (refine_x > 0) dd->refine_x = refine_x; 674 if (refine_y > 0) dd->refine_y = refine_y; 675 if (refine_z > 0) dd->refine_z = refine_z; 676 PetscFunctionReturn(0); 677 } 678 679 #undef __FUNCT__ 680 #define __FUNCT__ "DMDAGetRefinementFactor" 681 /*@C 682 DMDAGetRefinementFactor - Gets the ratios that the DMDA grid is refined 683 684 Not Collective 685 686 Input Parameter: 687 . da - the DMDA object 688 689 Output Parameters: 690 + refine_x - ratio of fine grid to coarse in x direction (2 by default) 691 . refine_y - ratio of fine grid to coarse in y direction (2 by default) 692 - refine_z - ratio of fine grid to coarse in z direction (2 by default) 693 694 Level: intermediate 695 696 Notes: Pass NULL for values you do not need 697 698 .seealso: DMRefine(), DMDASetRefinementFactor() 699 @*/ 700 PetscErrorCode DMDAGetRefinementFactor(DM da, PetscInt *refine_x, PetscInt *refine_y,PetscInt *refine_z) 701 { 702 DM_DA *dd = (DM_DA*)da->data; 703 704 PetscFunctionBegin; 705 PetscValidHeaderSpecific(da,DM_CLASSID,1); 706 if (refine_x) *refine_x = dd->refine_x; 707 if (refine_y) *refine_y = dd->refine_y; 708 if (refine_z) *refine_z = dd->refine_z; 709 PetscFunctionReturn(0); 710 } 711 712 #undef __FUNCT__ 713 #define __FUNCT__ "DMDASetGetMatrix" 714 /*@C 715 DMDASetGetMatrix - Sets the routine used by the DMDA to allocate a matrix. 716 717 Logically Collective on DMDA 718 719 Input Parameters: 720 + da - the DMDA object 721 - f - the function that allocates the matrix for that specific DMDA 722 723 Level: developer 724 725 Notes: See DMDASetBlockFills() that provides a simple way to provide the nonzero structure for 726 the diagonal and off-diagonal blocks of the matrix 727 728 .seealso: DMCreateMatrix(), DMDASetBlockFills() 729 @*/ 730 PetscErrorCode DMDASetGetMatrix(DM da,PetscErrorCode (*f)(DM, MatType,Mat*)) 731 { 732 PetscFunctionBegin; 733 PetscValidHeaderSpecific(da,DM_CLASSID,1); 734 da->ops->creatematrix = f; 735 PetscFunctionReturn(0); 736 } 737 738 #undef __FUNCT__ 739 #define __FUNCT__ "DMDARefineOwnershipRanges" 740 /* 741 Creates "balanced" ownership ranges after refinement, constrained by the need for the 742 fine grid boundaries to fall within one stencil width of the coarse partition. 743 744 Uses a greedy algorithm to handle non-ideal layouts, could probably do something better. 745 */ 746 static PetscErrorCode DMDARefineOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lc[],PetscInt lf[]) 747 { 748 PetscInt i,totalc = 0,remaining,startc = 0,startf = 0; 749 PetscErrorCode ierr; 750 751 PetscFunctionBegin; 752 if (ratio < 1) SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %D must be at least 1",ratio); 753 if (ratio == 1) { 754 ierr = PetscMemcpy(lf,lc,m*sizeof(lc[0]));CHKERRQ(ierr); 755 PetscFunctionReturn(0); 756 } 757 for (i=0; i<m; i++) totalc += lc[i]; 758 remaining = (!periodic) + ratio * (totalc - (!periodic)); 759 for (i=0; i<m; i++) { 760 PetscInt want = remaining/(m-i) + !!(remaining%(m-i)); 761 if (i == m-1) lf[i] = want; 762 else { 763 const PetscInt nextc = startc + lc[i]; 764 /* Move the first fine node of the next subdomain to the right until the coarse node on its left is within one 765 * coarse stencil width of the first coarse node in the next subdomain. */ 766 while ((startf+want)/ratio < nextc - stencil_width) want++; 767 /* Move the last fine node in the current subdomain to the left until the coarse node on its right is within one 768 * coarse stencil width of the last coarse node in the current subdomain. */ 769 while ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width) want--; 770 /* Make sure all constraints are satisfied */ 771 if (want < 0 || want > remaining || ((startf+want)/ratio < nextc - stencil_width) 772 || ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible refined ownership range"); 773 } 774 lf[i] = want; 775 startc += lc[i]; 776 startf += lf[i]; 777 remaining -= lf[i]; 778 } 779 PetscFunctionReturn(0); 780 } 781 782 #undef __FUNCT__ 783 #define __FUNCT__ "DMDACoarsenOwnershipRanges" 784 /* 785 Creates "balanced" ownership ranges after coarsening, constrained by the need for the 786 fine grid boundaries to fall within one stencil width of the coarse partition. 787 788 Uses a greedy algorithm to handle non-ideal layouts, could probably do something better. 789 */ 790 static PetscErrorCode DMDACoarsenOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lf[],PetscInt lc[]) 791 { 792 PetscInt i,totalf,remaining,startc,startf; 793 PetscErrorCode ierr; 794 795 PetscFunctionBegin; 796 if (ratio < 1) SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %D must be at least 1",ratio); 797 if (ratio == 1) { 798 ierr = PetscMemcpy(lc,lf,m*sizeof(lf[0]));CHKERRQ(ierr); 799 PetscFunctionReturn(0); 800 } 801 for (i=0,totalf=0; i<m; i++) totalf += lf[i]; 802 remaining = (!periodic) + (totalf - (!periodic)) / ratio; 803 for (i=0,startc=0,startf=0; i<m; i++) { 804 PetscInt want = remaining/(m-i) + !!(remaining%(m-i)); 805 if (i == m-1) lc[i] = want; 806 else { 807 const PetscInt nextf = startf+lf[i]; 808 /* Slide first coarse node of next subdomain to the left until the coarse node to the left of the first fine 809 * node is within one stencil width. */ 810 while (nextf/ratio < startc+want-stencil_width) want--; 811 /* Slide the last coarse node of the current subdomain to the right until the coarse node to the right of the last 812 * fine node is within one stencil width. */ 813 while ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width) want++; 814 if (want < 0 || want > remaining 815 || (nextf/ratio < startc+want-stencil_width) || ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible coarsened ownership range"); 816 } 817 lc[i] = want; 818 startc += lc[i]; 819 startf += lf[i]; 820 remaining -= lc[i]; 821 } 822 PetscFunctionReturn(0); 823 } 824 825 #undef __FUNCT__ 826 #define __FUNCT__ "DMRefine_DA" 827 PetscErrorCode DMRefine_DA(DM da,MPI_Comm comm,DM *daref) 828 { 829 PetscErrorCode ierr; 830 PetscInt M,N,P,i; 831 DM da2; 832 DM_DA *dd = (DM_DA*)da->data,*dd2; 833 834 PetscFunctionBegin; 835 PetscValidHeaderSpecific(da,DM_CLASSID,1); 836 PetscValidPointer(daref,3); 837 838 if (dd->bx == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) { 839 M = dd->refine_x*dd->M; 840 } else { 841 M = 1 + dd->refine_x*(dd->M - 1); 842 } 843 if (dd->by == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) { 844 if (dd->dim > 1) { 845 N = dd->refine_y*dd->N; 846 } else { 847 N = 1; 848 } 849 } else { 850 N = 1 + dd->refine_y*(dd->N - 1); 851 } 852 if (dd->bz == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) { 853 if (dd->dim > 2) { 854 P = dd->refine_z*dd->P; 855 } else { 856 P = 1; 857 } 858 } else { 859 P = 1 + dd->refine_z*(dd->P - 1); 860 } 861 ierr = DMDACreate(PetscObjectComm((PetscObject)da),&da2);CHKERRQ(ierr); 862 ierr = DMSetOptionsPrefix(da2,((PetscObject)da)->prefix);CHKERRQ(ierr); 863 ierr = DMDASetDim(da2,dd->dim);CHKERRQ(ierr); 864 ierr = DMDASetSizes(da2,M,N,P);CHKERRQ(ierr); 865 ierr = DMDASetNumProcs(da2,dd->m,dd->n,dd->p);CHKERRQ(ierr); 866 ierr = DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz);CHKERRQ(ierr); 867 ierr = DMDASetDof(da2,dd->w);CHKERRQ(ierr); 868 ierr = DMDASetStencilType(da2,dd->stencil_type);CHKERRQ(ierr); 869 ierr = DMDASetStencilWidth(da2,dd->s);CHKERRQ(ierr); 870 if (dd->dim == 3) { 871 PetscInt *lx,*ly,*lz; 872 ierr = PetscMalloc3(dd->m,PetscInt,&lx,dd->n,PetscInt,&ly,dd->p,PetscInt,&lz);CHKERRQ(ierr); 873 ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr); 874 ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly);CHKERRQ(ierr); 875 ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bz == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_z,dd->p,dd->lz,lz);CHKERRQ(ierr); 876 ierr = DMDASetOwnershipRanges(da2,lx,ly,lz);CHKERRQ(ierr); 877 ierr = PetscFree3(lx,ly,lz);CHKERRQ(ierr); 878 } else if (dd->dim == 2) { 879 PetscInt *lx,*ly; 880 ierr = PetscMalloc2(dd->m,PetscInt,&lx,dd->n,PetscInt,&ly);CHKERRQ(ierr); 881 ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr); 882 ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly);CHKERRQ(ierr); 883 ierr = DMDASetOwnershipRanges(da2,lx,ly,NULL);CHKERRQ(ierr); 884 ierr = PetscFree2(lx,ly);CHKERRQ(ierr); 885 } else if (dd->dim == 1) { 886 PetscInt *lx; 887 ierr = PetscMalloc(dd->m*sizeof(PetscInt),&lx);CHKERRQ(ierr); 888 ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr); 889 ierr = DMDASetOwnershipRanges(da2,lx,NULL,NULL);CHKERRQ(ierr); 890 ierr = PetscFree(lx);CHKERRQ(ierr); 891 } 892 dd2 = (DM_DA*)da2->data; 893 894 /* allow overloaded (user replaced) operations to be inherited by refinement clones */ 895 da2->ops->creatematrix = da->ops->creatematrix; 896 /* da2->ops->createinterpolation = da->ops->createinterpolation; this causes problem with SNESVI */ 897 da2->ops->getcoloring = da->ops->getcoloring; 898 dd2->interptype = dd->interptype; 899 900 /* copy fill information if given */ 901 if (dd->dfill) { 902 ierr = PetscMalloc((dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt),&dd2->dfill);CHKERRQ(ierr); 903 ierr = PetscMemcpy(dd2->dfill,dd->dfill,(dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr); 904 } 905 if (dd->ofill) { 906 ierr = PetscMalloc((dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt),&dd2->ofill);CHKERRQ(ierr); 907 ierr = PetscMemcpy(dd2->ofill,dd->ofill,(dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr); 908 } 909 /* copy the refine information */ 910 dd2->coarsen_x = dd2->refine_x = dd->refine_x; 911 dd2->coarsen_y = dd2->refine_y = dd->refine_y; 912 dd2->coarsen_z = dd2->refine_z = dd->refine_z; 913 914 /* copy vector type information */ 915 ierr = PetscFree(da2->vectype);CHKERRQ(ierr); 916 ierr = PetscStrallocpy(da->vectype,(char**)&da2->vectype);CHKERRQ(ierr); 917 918 dd2->lf = dd->lf; 919 dd2->lj = dd->lj; 920 921 da2->leveldown = da->leveldown; 922 da2->levelup = da->levelup + 1; 923 924 ierr = DMSetFromOptions(da2);CHKERRQ(ierr); 925 ierr = DMSetUp(da2);CHKERRQ(ierr); 926 ierr = DMViewFromOptions(da2,"-dm_view");CHKERRQ(ierr); 927 928 /* interpolate coordinates if they are set on the coarse grid */ 929 if (da->coordinates) { 930 DM cdaf,cdac; 931 Vec coordsc,coordsf; 932 Mat II; 933 934 ierr = DMGetCoordinateDM(da,&cdac);CHKERRQ(ierr); 935 ierr = DMGetCoordinates(da,&coordsc);CHKERRQ(ierr); 936 ierr = DMGetCoordinateDM(da2,&cdaf);CHKERRQ(ierr); 937 /* force creation of the coordinate vector */ 938 ierr = DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0);CHKERRQ(ierr); 939 ierr = DMGetCoordinates(da2,&coordsf);CHKERRQ(ierr); 940 ierr = DMCreateInterpolation(cdac,cdaf,&II,NULL);CHKERRQ(ierr); 941 ierr = MatInterpolate(II,coordsc,coordsf);CHKERRQ(ierr); 942 ierr = MatDestroy(&II);CHKERRQ(ierr); 943 } 944 945 for (i=0; i<da->bs; i++) { 946 const char *fieldname; 947 ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr); 948 ierr = DMDASetFieldName(da2,i,fieldname);CHKERRQ(ierr); 949 } 950 951 *daref = da2; 952 PetscFunctionReturn(0); 953 } 954 955 956 #undef __FUNCT__ 957 #define __FUNCT__ "DMCoarsen_DA" 958 PetscErrorCode DMCoarsen_DA(DM da, MPI_Comm comm,DM *daref) 959 { 960 PetscErrorCode ierr; 961 PetscInt M,N,P,i; 962 DM da2; 963 DM_DA *dd = (DM_DA*)da->data,*dd2; 964 965 PetscFunctionBegin; 966 PetscValidHeaderSpecific(da,DM_CLASSID,1); 967 PetscValidPointer(daref,3); 968 969 if (dd->bx == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) { 970 M = dd->M / dd->coarsen_x; 971 } else { 972 M = 1 + (dd->M - 1) / dd->coarsen_x; 973 } 974 if (dd->by == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) { 975 if (dd->dim > 1) { 976 N = dd->N / dd->coarsen_y; 977 } else { 978 N = 1; 979 } 980 } else { 981 N = 1 + (dd->N - 1) / dd->coarsen_y; 982 } 983 if (dd->bz == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) { 984 if (dd->dim > 2) { 985 P = dd->P / dd->coarsen_z; 986 } else { 987 P = 1; 988 } 989 } else { 990 P = 1 + (dd->P - 1) / dd->coarsen_z; 991 } 992 ierr = DMDACreate(PetscObjectComm((PetscObject)da),&da2);CHKERRQ(ierr); 993 ierr = DMSetOptionsPrefix(da2,((PetscObject)da)->prefix);CHKERRQ(ierr); 994 ierr = DMDASetDim(da2,dd->dim);CHKERRQ(ierr); 995 ierr = DMDASetSizes(da2,M,N,P);CHKERRQ(ierr); 996 ierr = DMDASetNumProcs(da2,dd->m,dd->n,dd->p);CHKERRQ(ierr); 997 ierr = DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz);CHKERRQ(ierr); 998 ierr = DMDASetDof(da2,dd->w);CHKERRQ(ierr); 999 ierr = DMDASetStencilType(da2,dd->stencil_type);CHKERRQ(ierr); 1000 ierr = DMDASetStencilWidth(da2,dd->s);CHKERRQ(ierr); 1001 if (dd->dim == 3) { 1002 PetscInt *lx,*ly,*lz; 1003 ierr = PetscMalloc3(dd->m,PetscInt,&lx,dd->n,PetscInt,&ly,dd->p,PetscInt,&lz);CHKERRQ(ierr); 1004 ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr); 1005 ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->by == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly);CHKERRQ(ierr); 1006 ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bz == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_z,dd->p,dd->lz,lz);CHKERRQ(ierr); 1007 ierr = DMDASetOwnershipRanges(da2,lx,ly,lz);CHKERRQ(ierr); 1008 ierr = PetscFree3(lx,ly,lz);CHKERRQ(ierr); 1009 } else if (dd->dim == 2) { 1010 PetscInt *lx,*ly; 1011 ierr = PetscMalloc2(dd->m,PetscInt,&lx,dd->n,PetscInt,&ly);CHKERRQ(ierr); 1012 ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr); 1013 ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->by == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly);CHKERRQ(ierr); 1014 ierr = DMDASetOwnershipRanges(da2,lx,ly,NULL);CHKERRQ(ierr); 1015 ierr = PetscFree2(lx,ly);CHKERRQ(ierr); 1016 } else if (dd->dim == 1) { 1017 PetscInt *lx; 1018 ierr = PetscMalloc(dd->m*sizeof(PetscInt),&lx);CHKERRQ(ierr); 1019 ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr); 1020 ierr = DMDASetOwnershipRanges(da2,lx,NULL,NULL);CHKERRQ(ierr); 1021 ierr = PetscFree(lx);CHKERRQ(ierr); 1022 } 1023 dd2 = (DM_DA*)da2->data; 1024 1025 /* allow overloaded (user replaced) operations to be inherited by refinement clones; why are only some inherited and not all? */ 1026 /* da2->ops->createinterpolation = da->ops->createinterpolation; copying this one causes trouble for DMSetVI */ 1027 da2->ops->creatematrix = da->ops->creatematrix; 1028 da2->ops->getcoloring = da->ops->getcoloring; 1029 dd2->interptype = dd->interptype; 1030 1031 /* copy fill information if given */ 1032 if (dd->dfill) { 1033 ierr = PetscMalloc((dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt),&dd2->dfill);CHKERRQ(ierr); 1034 ierr = PetscMemcpy(dd2->dfill,dd->dfill,(dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr); 1035 } 1036 if (dd->ofill) { 1037 ierr = PetscMalloc((dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt),&dd2->ofill);CHKERRQ(ierr); 1038 ierr = PetscMemcpy(dd2->ofill,dd->ofill,(dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr); 1039 } 1040 /* copy the refine information */ 1041 dd2->coarsen_x = dd2->refine_x = dd->coarsen_x; 1042 dd2->coarsen_y = dd2->refine_y = dd->coarsen_y; 1043 dd2->coarsen_z = dd2->refine_z = dd->coarsen_z; 1044 1045 /* copy vector type information */ 1046 ierr = PetscFree(da2->vectype);CHKERRQ(ierr); 1047 ierr = PetscStrallocpy(da->vectype,(char**)&da2->vectype);CHKERRQ(ierr); 1048 1049 dd2->lf = dd->lf; 1050 dd2->lj = dd->lj; 1051 1052 da2->leveldown = da->leveldown + 1; 1053 da2->levelup = da->levelup; 1054 1055 ierr = DMSetFromOptions(da2);CHKERRQ(ierr); 1056 ierr = DMSetUp(da2);CHKERRQ(ierr); 1057 ierr = DMViewFromOptions(da2,"-dm_view");CHKERRQ(ierr); 1058 1059 /* inject coordinates if they are set on the fine grid */ 1060 if (da->coordinates) { 1061 DM cdaf,cdac; 1062 Vec coordsc,coordsf; 1063 VecScatter inject; 1064 1065 ierr = DMGetCoordinateDM(da,&cdaf);CHKERRQ(ierr); 1066 ierr = DMGetCoordinates(da,&coordsf);CHKERRQ(ierr); 1067 ierr = DMGetCoordinateDM(da2,&cdac);CHKERRQ(ierr); 1068 /* force creation of the coordinate vector */ 1069 ierr = DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0);CHKERRQ(ierr); 1070 ierr = DMGetCoordinates(da2,&coordsc);CHKERRQ(ierr); 1071 1072 ierr = DMCreateInjection(cdac,cdaf,&inject);CHKERRQ(ierr); 1073 ierr = VecScatterBegin(inject,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1074 ierr = VecScatterEnd(inject,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1075 ierr = VecScatterDestroy(&inject);CHKERRQ(ierr); 1076 } 1077 1078 for (i=0; i<da->bs; i++) { 1079 const char *fieldname; 1080 ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr); 1081 ierr = DMDASetFieldName(da2,i,fieldname);CHKERRQ(ierr); 1082 } 1083 1084 *daref = da2; 1085 PetscFunctionReturn(0); 1086 } 1087 1088 #undef __FUNCT__ 1089 #define __FUNCT__ "DMRefineHierarchy_DA" 1090 PetscErrorCode DMRefineHierarchy_DA(DM da,PetscInt nlevels,DM daf[]) 1091 { 1092 PetscErrorCode ierr; 1093 PetscInt i,n,*refx,*refy,*refz; 1094 1095 PetscFunctionBegin; 1096 PetscValidHeaderSpecific(da,DM_CLASSID,1); 1097 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 1098 if (nlevels == 0) PetscFunctionReturn(0); 1099 PetscValidPointer(daf,3); 1100 1101 /* Get refinement factors, defaults taken from the coarse DMDA */ 1102 ierr = PetscMalloc3(nlevels,PetscInt,&refx,nlevels,PetscInt,&refy,nlevels,PetscInt,&refz);CHKERRQ(ierr); 1103 for (i=0; i<nlevels; i++) { 1104 ierr = DMDAGetRefinementFactor(da,&refx[i],&refy[i],&refz[i]);CHKERRQ(ierr); 1105 } 1106 n = nlevels; 1107 ierr = PetscOptionsGetIntArray(((PetscObject)da)->prefix,"-da_refine_hierarchy_x",refx,&n,NULL);CHKERRQ(ierr); 1108 n = nlevels; 1109 ierr = PetscOptionsGetIntArray(((PetscObject)da)->prefix,"-da_refine_hierarchy_y",refy,&n,NULL);CHKERRQ(ierr); 1110 n = nlevels; 1111 ierr = PetscOptionsGetIntArray(((PetscObject)da)->prefix,"-da_refine_hierarchy_z",refz,&n,NULL);CHKERRQ(ierr); 1112 1113 ierr = DMDASetRefinementFactor(da,refx[0],refy[0],refz[0]);CHKERRQ(ierr); 1114 ierr = DMRefine(da,PetscObjectComm((PetscObject)da),&daf[0]);CHKERRQ(ierr); 1115 for (i=1; i<nlevels; i++) { 1116 ierr = DMDASetRefinementFactor(daf[i-1],refx[i],refy[i],refz[i]);CHKERRQ(ierr); 1117 ierr = DMRefine(daf[i-1],PetscObjectComm((PetscObject)da),&daf[i]);CHKERRQ(ierr); 1118 } 1119 ierr = PetscFree3(refx,refy,refz);CHKERRQ(ierr); 1120 PetscFunctionReturn(0); 1121 } 1122 1123 #undef __FUNCT__ 1124 #define __FUNCT__ "DMCoarsenHierarchy_DA" 1125 PetscErrorCode DMCoarsenHierarchy_DA(DM da,PetscInt nlevels,DM dac[]) 1126 { 1127 PetscErrorCode ierr; 1128 PetscInt i; 1129 1130 PetscFunctionBegin; 1131 PetscValidHeaderSpecific(da,DM_CLASSID,1); 1132 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 1133 if (nlevels == 0) PetscFunctionReturn(0); 1134 PetscValidPointer(dac,3); 1135 ierr = DMCoarsen(da,PetscObjectComm((PetscObject)da),&dac[0]);CHKERRQ(ierr); 1136 for (i=1; i<nlevels; i++) { 1137 ierr = DMCoarsen(dac[i-1],PetscObjectComm((PetscObject)da),&dac[i]);CHKERRQ(ierr); 1138 } 1139 PetscFunctionReturn(0); 1140 } 1141