#include <petsc/private/dmdaimpl.h>    /*I   "petscdmda.h"   I*/

/*@
  DMDASetSizes - Sets the global sizes

  Logically Collective on DMDA

  Input Parameters:
+ da - the DMDA
. M - the global X size (or PETSC_DECIDE)
. N - the global Y size (or PETSC_DECIDE)
- P - the global Z size (or PETSC_DECIDE)

  Level: intermediate

.seealso: DMDAGetSize(), PetscSplitOwnership()
@*/
PetscErrorCode DMDASetSizes(DM da, PetscInt M, PetscInt N, PetscInt P)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(da,M,2);
  PetscValidLogicalCollectiveInt(da,N,3);
  PetscValidLogicalCollectiveInt(da,P,4);
  /* the global sizes fix the parallel layout that DMSetUp() builds, so they may not change afterwards */
  if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");

  dd->M = M;
  dd->N = N;
  dd->P = P;
  PetscFunctionReturn(0);
}

/*@
  DMDASetNumProcs - Sets the number of processes in each dimension

  Logically Collective on DMDA

  Input Parameters:
+ da - the DMDA
. m - the number of X procs (or PETSC_DECIDE)
. n - the number of Y procs (or PETSC_DECIDE)
- p - the number of Z procs (or PETSC_DECIDE)

  Level: intermediate

.seealso: DMDASetSizes(), DMDAGetSize(), PetscSplitOwnership()
@*/
PetscErrorCode DMDASetNumProcs(DM da, PetscInt m, PetscInt n, PetscInt p)
{
  DM_DA          *dd = (DM_DA*)da->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(da,m,2);
  PetscValidLogicalCollectiveInt(da,n,3);
  PetscValidLogicalCollectiveInt(da,p,4);
  if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
  dd->m = m;
  dd->n = n;
  dd->p = p;
  if (da->dim == 2) {
    PetscMPIInt size;
    ierr = MPI_Comm_size(PetscObjectComm((PetscObject)da),&size);CHKERRQ(ierr);
    /* in 2d, when exactly one direction is given, derive the other so that m*n covers the whole communicator */
    if ((dd->m > 0) && (dd->n < 0)) {
      dd->n = size/dd->m;
      if (dd->n*dd->m != size) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%D processes in X direction not divisible into comm size %d",m,size);
    }
    if ((dd->n > 0) && (dd->m < 0)) {
      dd->m = size/dd->n;
      if (dd->n*dd->m != size) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%D processes in Y direction not divisible into comm size %d",n,size);
    }
  }
  PetscFunctionReturn(0);
}

/*@
  DMDASetBoundaryType - Sets the type of ghost nodes on domain boundaries.
  Logically Collective on DMDA

  Input Parameters:
+ da - The DMDA
- bx,by,bz - One of DM_BOUNDARY_NONE, DM_BOUNDARY_GHOSTED, DM_BOUNDARY_PERIODIC

  Level: intermediate

.keywords: distributed array, periodicity
.seealso: DMDACreate(), DMDestroy(), DMDA, DMBoundaryType
@*/
PetscErrorCode DMDASetBoundaryType(DM da,DMBoundaryType bx,DMBoundaryType by,DMBoundaryType bz)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveEnum(da,bx,2);
  PetscValidLogicalCollectiveEnum(da,by,3);
  PetscValidLogicalCollectiveEnum(da,bz,4);
  if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
  dd->bx = bx;
  dd->by = by;
  dd->bz = bz;
  PetscFunctionReturn(0);
}

/*@
  DMDASetDof - Sets the number of degrees of freedom per vertex

  Logically Collective on DMDA

  Input Parameters:
+ da - The DMDA
- dof - Number of degrees of freedom

  Level: intermediate

.keywords: distributed array, degrees of freedom
.seealso: DMDAGetDof(), DMDACreate(), DMDestroy(), DMDA
@*/
PetscErrorCode DMDASetDof(DM da, PetscInt dof)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveInt(da,dof,2);
  if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
  /* dof is stored both in the DMDA data and as the DM block size */
  dd->w  = dof;
  da->bs = dof;
  PetscFunctionReturn(0);
}

/*@
  DMDAGetDof - Gets the number of degrees of freedom per vertex

  Not collective

  Input Parameter:
. da - The DMDA

  Output Parameter:
.
 dof - Number of degrees of freedom

  Level: intermediate

.keywords: distributed array, degrees of freedom
.seealso: DMDASetDof(), DMDACreate(), DMDestroy(), DMDA
@*/
PetscErrorCode DMDAGetDof(DM da, PetscInt *dof)
{
  DM_DA *dd = (DM_DA *) da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(dof,2);
  *dof = dd->w;
  PetscFunctionReturn(0);
}

/*@
  DMDAGetOverlap - Gets the size of the per-processor overlap.

  Not collective

  Input Parameter:
. da - The DMDA

  Output Parameters:
+ x - Overlap in the x direction
. y - Overlap in the y direction
- z - Overlap in the z direction

  Level: intermediate

.keywords: distributed array, overlap, domain decomposition
.seealso: DMDACreateDomainDecomposition(), DMDASetOverlap(), DMDA
@*/
PetscErrorCode DMDAGetOverlap(DM da,PetscInt *x,PetscInt *y,PetscInt *z)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* each output is optional; pass NULL for values not needed */
  if (x) *x = dd->xol;
  if (y) *y = dd->yol;
  if (z) *z = dd->zol;
  PetscFunctionReturn(0);
}

/*@
  DMDASetOverlap - Sets the size of the per-processor overlap.

  Logically Collective on DMDA

  Input Parameters:
+ da - The DMDA
. x - Overlap in the x direction
.
 y - Overlap in the y direction
- z - Overlap in the z direction

  Level: intermediate

.keywords: distributed array, overlap, domain decomposition
.seealso: DMDACreateDomainDecomposition(), DMDAGetOverlap(), DMDA
@*/
PetscErrorCode DMDASetOverlap(DM da,PetscInt x,PetscInt y,PetscInt z)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveInt(da,x,2);
  PetscValidLogicalCollectiveInt(da,y,3);
  PetscValidLogicalCollectiveInt(da,z,4);
  dd->xol = x;
  dd->yol = y;
  dd->zol = z;
  PetscFunctionReturn(0);
}


/*@
  DMDAGetNumLocalSubDomains - Gets the number of local subdomains created upon decomposition.

  Not collective

  Input Parameter:
. da - The DMDA

  Output Parameter:
. Nsub - Number of local subdomains created upon decomposition

  Level: intermediate

.keywords: distributed array, domain decomposition
.seealso: DMDACreateDomainDecomposition(), DMDASetNumLocalSubDomains(), DMDA
@*/
PetscErrorCode DMDAGetNumLocalSubDomains(DM da,PetscInt *Nsub)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  if (Nsub) *Nsub = dd->Nsub;
  PetscFunctionReturn(0);
}

/*@
  DMDASetNumLocalSubDomains - Sets the number of local subdomains created upon decomposition.
  Logically Collective on DMDA

  Input Parameters:
+ da - The DMDA
- Nsub - The number of local subdomains requested

  Level: intermediate

.keywords: distributed array, domain decomposition
.seealso: DMDACreateDomainDecomposition(), DMDAGetNumLocalSubDomains(), DMDA
@*/
PetscErrorCode DMDASetNumLocalSubDomains(DM da,PetscInt Nsub)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveInt(da,Nsub,2);
  dd->Nsub = Nsub;
  PetscFunctionReturn(0);
}

/*@
  DMDASetOffset - Sets the index offset of the DA.

  Collective on DA

  Input Parameters:
+ da - The DMDA
. xo - The offset in the x direction
. yo - The offset in the y direction
. zo - The offset in the z direction
. Mo - The global size in the x direction
. No - The global size in the y direction
- Po - The global size in the z direction

  Level: intermediate

  Notes: This is used primarily to overlap a computation on a local DA with that on a global DA without
  changing boundary conditions or subdomain features that depend upon the global offsets.
.keywords: distributed array, degrees of freedom
.seealso: DMDAGetOffset(), DMDAVecGetArray()
@*/
PetscErrorCode DMDASetOffset(DM da, PetscInt xo, PetscInt yo, PetscInt zo, PetscInt Mo, PetscInt No, PetscInt Po)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveInt(da,xo,2);
  PetscValidLogicalCollectiveInt(da,yo,3);
  PetscValidLogicalCollectiveInt(da,zo,4);
  PetscValidLogicalCollectiveInt(da,Mo,5);
  PetscValidLogicalCollectiveInt(da,No,6);
  PetscValidLogicalCollectiveInt(da,Po,7);
  dd->xo = xo;
  dd->yo = yo;
  dd->zo = zo;
  dd->Mo = Mo;
  dd->No = No;
  dd->Po = Po;

  /* keep the coordinate DM (if already created) consistent with the same offsets */
  if (da->coordinateDM) {
    ierr = DMDASetOffset(da->coordinateDM,xo,yo,zo,Mo,No,Po);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
  DMDAGetOffset - Gets the index offset of the DA.

  Not collective

  Input Parameter:
. da - The DMDA

  Output Parameters:
+ xo - The offset in the x direction
. yo - The offset in the y direction
. zo - The offset in the z direction
. Mo - The global size in the x direction
. No - The global size in the y direction
- Po - The global size in the z direction

  Level: intermediate

.keywords: distributed array, degrees of freedom
.seealso: DMDASetOffset(), DMDAVecGetArray()
@*/
PetscErrorCode DMDAGetOffset(DM da,PetscInt *xo,PetscInt *yo,PetscInt *zo,PetscInt *Mo,PetscInt *No,PetscInt *Po)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* all outputs are optional; pass NULL for values not needed */
  if (xo) *xo = dd->xo;
  if (yo) *yo = dd->yo;
  if (zo) *zo = dd->zo;
  if (Mo) *Mo = dd->Mo;
  if (No) *No = dd->No;
  if (Po) *Po = dd->Po;
  PetscFunctionReturn(0);
}

/*@
  DMDAGetNonOverlappingRegion - Gets the indices of the nonoverlapping region of a subdomain DM.
  Not collective

  Input Parameter:
. da - The DMDA

  Output Parameters:
+ xs - The start of the region in x
. ys - The start of the region in y
. zs - The start of the region in z
. xm - The size of the region in x
. ym - The size of the region in y
. zm - The size of the region in z

  Level: intermediate

.keywords: distributed array, degrees of freedom
.seealso: DMDAGetOffset(), DMDAVecGetArray()
@*/
PetscErrorCode DMDAGetNonOverlappingRegion(DM da, PetscInt *xs, PetscInt *ys, PetscInt *zs, PetscInt *xm, PetscInt *ym, PetscInt *zm)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* all outputs are optional; pass NULL for values not needed */
  if (xs) *xs = dd->nonxs;
  if (ys) *ys = dd->nonys;
  if (zs) *zs = dd->nonzs;
  if (xm) *xm = dd->nonxm;
  if (ym) *ym = dd->nonym;
  if (zm) *zm = dd->nonzm;
  PetscFunctionReturn(0);
}


/*@
  DMDASetNonOverlappingRegion - Sets the indices of the nonoverlapping region of a subdomain DM.

  Collective on DA

  Input Parameters:
+ da - The DMDA
. xs - The start of the region in x
. ys - The start of the region in y
. zs - The start of the region in z
. xm - The size of the region in x
. ym - The size of the region in y
.
 zm - The size of the region in z

  Level: intermediate

.keywords: distributed array, degrees of freedom
.seealso: DMDAGetOffset(), DMDAVecGetArray()
@*/
PetscErrorCode DMDASetNonOverlappingRegion(DM da, PetscInt xs, PetscInt ys, PetscInt zs, PetscInt xm, PetscInt ym, PetscInt zm)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveInt(da,xs,2);
  PetscValidLogicalCollectiveInt(da,ys,3);
  PetscValidLogicalCollectiveInt(da,zs,4);
  PetscValidLogicalCollectiveInt(da,xm,5);
  PetscValidLogicalCollectiveInt(da,ym,6);
  PetscValidLogicalCollectiveInt(da,zm,7);
  dd->nonxs = xs;
  dd->nonys = ys;
  dd->nonzs = zs;
  dd->nonxm = xm;
  dd->nonym = ym;
  dd->nonzm = zm;

  PetscFunctionReturn(0);
}

/*@
  DMDASetStencilType - Sets the type of the communication stencil

  Logically Collective on DMDA

  Input Parameters:
+ da - The DMDA
- stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR.

  Level: intermediate

.keywords: distributed array, stencil
.seealso: DMDACreate(), DMDestroy(), DMDA
@*/
PetscErrorCode DMDASetStencilType(DM da, DMDAStencilType stype)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveEnum(da,stype,2);
  if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
  dd->stencil_type = stype;
  PetscFunctionReturn(0);
}

/*@
  DMDAGetStencilType - Gets the type of the communication stencil

  Not collective

  Input Parameter:
. da - The DMDA

  Output Parameter:
. stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR.
  Level: intermediate

.keywords: distributed array, stencil
.seealso: DMDACreate(), DMDestroy(), DMDA
@*/
PetscErrorCode DMDAGetStencilType(DM da, DMDAStencilType *stype)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(stype,2);
  *stype = dd->stencil_type;
  PetscFunctionReturn(0);
}

/*@
  DMDASetStencilWidth - Sets the width of the communication stencil

  Logically Collective on DMDA

  Input Parameters:
+ da - The DMDA
- width - The stencil width

  Level: intermediate

.keywords: distributed array, stencil
.seealso: DMDACreate(), DMDestroy(), DMDA
@*/
PetscErrorCode DMDASetStencilWidth(DM da, PetscInt width)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveInt(da,width,2);
  if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
  dd->s = width;
  PetscFunctionReturn(0);
}

/*@
  DMDAGetStencilWidth - Gets the width of the communication stencil

  Not collective

  Input Parameter:
. da - The DMDA

  Output Parameter:
.
 width - The stencil width

  Level: intermediate

.keywords: distributed array, stencil
.seealso: DMDACreate(), DMDestroy(), DMDA
@*/
PetscErrorCode DMDAGetStencilWidth(DM da, PetscInt *width)
{
  DM_DA *dd = (DM_DA *) da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(width,2);
  *width = dd->s;
  PetscFunctionReturn(0);
}

/* Checks that the per-process ownership counts lx[0..m-1] sum to the global dimension M. */
static PetscErrorCode DMDACheckOwnershipRanges_Private(DM da,PetscInt M,PetscInt m,const PetscInt lx[])
{
  PetscInt i,sum;

  PetscFunctionBegin;
  /* M < 0 means the global size is still PETSC_DECIDE, so there is nothing to check against */
  if (M < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Global dimension not set");
  for (i=sum=0; i<m; i++) sum += lx[i];
  if (sum != M) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_INCOMP,"Ownership ranges sum to %D but global dimension is %D",sum,M);
  PetscFunctionReturn(0);
}

/*@
  DMDASetOwnershipRanges - Sets the number of nodes in each direction on each process

  Logically Collective on DMDA

  Input Parameters:
+ da - The DMDA
. lx - array containing number of nodes in the X direction on each process, or NULL. If non-null, must be of length da->m
. ly - array containing number of nodes in the Y direction on each process, or NULL. If non-null, must be of length da->n
- lz - array containing number of nodes in the Z direction on each process, or NULL. If non-null, must be of length da->p.

  Level: intermediate

  Note: these numbers are NOT multiplied by the number of dof per node.
.keywords: distributed array
.seealso: DMDACreate(), DMDestroy(), DMDA
@*/
PetscErrorCode DMDASetOwnershipRanges(DM da, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[])
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
  /* each direction is handled independently: validate the counts, then allocate (first call only) and copy */
  if (lx) {
    if (dd->m < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
    ierr = DMDACheckOwnershipRanges_Private(da,dd->M,dd->m,lx);CHKERRQ(ierr);
    if (!dd->lx) {
      ierr = PetscMalloc1(dd->m, &dd->lx);CHKERRQ(ierr);
    }
    ierr = PetscMemcpy(dd->lx, lx, dd->m*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (ly) {
    if (dd->n < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
    ierr = DMDACheckOwnershipRanges_Private(da,dd->N,dd->n,ly);CHKERRQ(ierr);
    if (!dd->ly) {
      ierr = PetscMalloc1(dd->n, &dd->ly);CHKERRQ(ierr);
    }
    ierr = PetscMemcpy(dd->ly, ly, dd->n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (lz) {
    if (dd->p < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
    ierr = DMDACheckOwnershipRanges_Private(da,dd->P,dd->p,lz);CHKERRQ(ierr);
    if (!dd->lz) {
      ierr = PetscMalloc1(dd->p, &dd->lz);CHKERRQ(ierr);
    }
    ierr = PetscMemcpy(dd->lz, lz, dd->p*sizeof(PetscInt));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
  DMDASetInterpolationType - Sets the type of interpolation that will be
  returned by DMCreateInterpolation()

  Logically Collective on DMDA

  Input Parameters:
+ da - initial distributed array
.
 ctype - DMDA_Q1 and DMDA_Q0 are currently the only supported forms

  Level: intermediate

  Notes: you should call this on the coarser of the two DMDAs you pass to DMCreateInterpolation()

.keywords: distributed array, interpolation

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType
@*/
PetscErrorCode DMDASetInterpolationType(DM da,DMDAInterpolationType ctype)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveEnum(da,ctype,2);
  dd->interptype = ctype;
  PetscFunctionReturn(0);
}

/*@
  DMDAGetInterpolationType - Gets the type of interpolation that will be
  used by DMCreateInterpolation()

  Not Collective

  Input Parameter:
. da - distributed array

  Output Parameter:
. ctype - interpolation type (DMDA_Q1 and DMDA_Q0 are currently the only supported forms)

  Level: intermediate

.keywords: distributed array, interpolation

.seealso: DMDA, DMDAInterpolationType, DMDASetInterpolationType(), DMCreateInterpolation()
@*/
PetscErrorCode DMDAGetInterpolationType(DM da,DMDAInterpolationType *ctype)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(ctype,2);
  *ctype = dd->interptype;
  PetscFunctionReturn(0);
}

/*@C
  DMDAGetNeighbors - Gets an array containing the MPI rank of all the current
  processes neighbors.

  Not Collective

  Input Parameter:
. da - the DMDA object

  Output Parameters:
. ranks - the neighbors ranks, stored with the x index increasing most rapidly.
          this process itself is in the list

  Notes: In 2d the array is of length 9, in 3d of length 27
         Not supported in 1d
         Do not free the array, it is freed when the DMDA is destroyed.
679 680 Fortran Notes: In fortran you must pass in an array of the appropriate length. 681 682 Level: intermediate 683 684 @*/ 685 PetscErrorCode DMDAGetNeighbors(DM da,const PetscMPIInt *ranks[]) 686 { 687 DM_DA *dd = (DM_DA*)da->data; 688 689 PetscFunctionBegin; 690 PetscValidHeaderSpecific(da,DM_CLASSID,1); 691 *ranks = dd->neighbors; 692 PetscFunctionReturn(0); 693 } 694 695 /*@C 696 DMDAGetOwnershipRanges - Gets the ranges of indices in the x, y and z direction that are owned by each process 697 698 Not Collective 699 700 Input Parameter: 701 . da - the DMDA object 702 703 Output Parameter: 704 + lx - ownership along x direction (optional) 705 . ly - ownership along y direction (optional) 706 - lz - ownership along z direction (optional) 707 708 Level: intermediate 709 710 Note: these correspond to the optional final arguments passed to DMDACreate(), DMDACreate2d(), DMDACreate3d() 711 712 In Fortran one must pass in arrays lx, ly, and lz that are long enough to hold the values; the sixth, seventh and 713 eighth arguments from DMDAGetInfo() 714 715 In C you should not free these arrays, nor change the values in them. They will only have valid values while the 716 DMDA they came from still exists (has not been destroyed). 717 718 These numbers are NOT multiplied by the number of dof per node. 
  Not available from Fortran

.seealso: DMDAGetCorners(), DMDAGetGhostCorners(), DMDACreate(), DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), VecGetOwnershipRanges()
@*/
PetscErrorCode DMDAGetOwnershipRanges(DM da,const PetscInt *lx[],const PetscInt *ly[],const PetscInt *lz[])
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* each output is optional; the returned arrays are owned by the DMDA */
  if (lx) *lx = dd->lx;
  if (ly) *ly = dd->ly;
  if (lz) *lz = dd->lz;
  PetscFunctionReturn(0);
}

/*@
  DMDASetRefinementFactor - Set the ratios that the DMDA grid is refined

  Logically Collective on DMDA

  Input Parameters:
+ da - the DMDA object
. refine_x - ratio of fine grid to coarse in x direction (2 by default)
. refine_y - ratio of fine grid to coarse in y direction (2 by default)
- refine_z - ratio of fine grid to coarse in z direction (2 by default)

  Options Database:
+ -da_refine_x - refinement ratio in x direction
. -da_refine_y - refinement ratio in y direction
- -da_refine_z - refinement ratio in z direction

  Level: intermediate

  Notes: Pass PETSC_IGNORE to leave a value unchanged

.seealso: DMRefine(), DMDAGetRefinementFactor()
@*/
PetscErrorCode DMDASetRefinementFactor(DM da, PetscInt refine_x, PetscInt refine_y,PetscInt refine_z)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveInt(da,refine_x,2);
  PetscValidLogicalCollectiveInt(da,refine_y,3);
  PetscValidLogicalCollectiveInt(da,refine_z,4);

  /* non-positive values (e.g. PETSC_IGNORE) leave the corresponding factor unchanged */
  if (refine_x > 0) dd->refine_x = refine_x;
  if (refine_y > 0) dd->refine_y = refine_y;
  if (refine_z > 0) dd->refine_z = refine_z;
  PetscFunctionReturn(0);
}

/*@C
  DMDAGetRefinementFactor - Gets the ratios that the DMDA grid is refined

  Not Collective

  Input Parameter:
.
 da - the DMDA object

  Output Parameters:
+ refine_x - ratio of fine grid to coarse in x direction (2 by default)
. refine_y - ratio of fine grid to coarse in y direction (2 by default)
- refine_z - ratio of fine grid to coarse in z direction (2 by default)

  Level: intermediate

  Notes: Pass NULL for values you do not need

.seealso: DMRefine(), DMDASetRefinementFactor()
@*/
PetscErrorCode DMDAGetRefinementFactor(DM da, PetscInt *refine_x, PetscInt *refine_y,PetscInt *refine_z)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  if (refine_x) *refine_x = dd->refine_x;
  if (refine_y) *refine_y = dd->refine_y;
  if (refine_z) *refine_z = dd->refine_z;
  PetscFunctionReturn(0);
}

/*@C
  DMDASetGetMatrix - Sets the routine used by the DMDA to allocate a matrix.

  Logically Collective on DMDA

  Input Parameters:
+ da - the DMDA object
- f - the function that allocates the matrix for that specific DMDA

  Level: developer

  Notes: See DMDASetBlockFills() that provides a simple way to provide the nonzero structure for
  the diagonal and off-diagonal blocks of the matrix

  Not supported from Fortran

.seealso: DMCreateMatrix(), DMDASetBlockFills()
@*/
PetscErrorCode DMDASetGetMatrix(DM da,PetscErrorCode (*f)(DM, Mat*))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  da->ops->creatematrix = f;
  PetscFunctionReturn(0);
}

/*
  Creates "balanced" ownership ranges after refinement, constrained by the need for the
  fine grid boundaries to fall within one stencil width of the coarse partition.

  Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.
*/
static PetscErrorCode DMDARefineOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lc[],PetscInt lf[])
{
  PetscInt       i,totalc = 0,remaining,startc = 0,startf = 0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (ratio < 1) SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %D must be at least 1",ratio);
  if (ratio == 1) {
    /* no refinement: fine ownership equals coarse ownership */
    ierr = PetscMemcpy(lf,lc,m*sizeof(lc[0]));CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  for (i=0; i<m; i++) totalc += lc[i];
  /* total fine nodes to distribute; for non-periodic grids the shared endpoint adds one */
  remaining = (!periodic) + ratio * (totalc - (!periodic));
  for (i=0; i<m; i++) {
    /* ideal balanced share of the remaining fine nodes, rounded up */
    PetscInt want = remaining/(m-i) + !!(remaining%(m-i));
    if (i == m-1) lf[i] = want;
    else {
      const PetscInt nextc = startc + lc[i];
      /* Move the first fine node of the next subdomain to the right until the coarse node on its left is within one
       * coarse stencil width of the first coarse node in the next subdomain. */
      while ((startf+want)/ratio < nextc - stencil_width) want++;
      /* Move the last fine node in the current subdomain to the left until the coarse node on its right is within one
       * coarse stencil width of the last coarse node in the current subdomain. */
      while ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width) want--;
      /* Make sure all constraints are satisfied */
      if (want < 0 || want > remaining || ((startf+want)/ratio < nextc - stencil_width)
          || ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible refined ownership range");
    }
    lf[i] = want;
    startc += lc[i];
    startf += lf[i];
    remaining -= lf[i];
  }
  PetscFunctionReturn(0);
}

/*
  Creates "balanced" ownership ranges after coarsening, constrained by the need for the
  fine grid boundaries to fall within one stencil width of the coarse partition.
  Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.
*/
static PetscErrorCode DMDACoarsenOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lf[],PetscInt lc[])
{
  PetscInt       i,totalf,remaining,startc,startf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (ratio < 1) SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %D must be at least 1",ratio);
  if (ratio == 1) {
    /* no coarsening: coarse ownership equals fine ownership */
    ierr = PetscMemcpy(lc,lf,m*sizeof(lf[0]));CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  for (i=0,totalf=0; i<m; i++) totalf += lf[i];
  /* total coarse nodes to distribute; for non-periodic grids the shared endpoint adds one */
  remaining = (!periodic) + (totalf - (!periodic)) / ratio;
  for (i=0,startc=0,startf=0; i<m; i++) {
    /* ideal balanced share of the remaining coarse nodes, rounded up */
    PetscInt want = remaining/(m-i) + !!(remaining%(m-i));
    if (i == m-1) lc[i] = want;
    else {
      const PetscInt nextf = startf+lf[i];
      /* Slide first coarse node of next subdomain to the left until the coarse node to the left of the first fine
       * node is within one stencil width. */
      while (nextf/ratio < startc+want-stencil_width) want--;
      /* Slide the last coarse node of the current subdomain to the right until the coarse node to the right of the last
       * fine node is within one stencil width. */
      while ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width) want++;
      /* Make sure all constraints are satisfied */
      if (want < 0 || want > remaining
          || (nextf/ratio < startc+want-stencil_width) || ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible coarsened ownership range");
    }
    lc[i] = want;
    startc += lc[i];
    startf += lf[i];
    remaining -= lc[i];
  }
  PetscFunctionReturn(0);
}

/* Builds the refined DM in *daref: computes the refined global sizes, carries over the layout,
   inherited operations, fill and refinement metadata, coordinates, and field names. */
PetscErrorCode DMRefine_DA(DM da,MPI_Comm comm,DM *daref)
{
  PetscErrorCode ierr;
  PetscInt       M,N,P,i,dim;
  DM             da2;
  DM_DA          *dd = (DM_DA*)da->data,*dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(daref,3);

  /* refined global size in each direction: periodic or Q0 grids refine all nodes,
     otherwise the shared endpoints are not duplicated */
  ierr = DMGetDimension(da, &dim);CHKERRQ(ierr);
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->refine_x*dd->M;
  } else {
    M = 1 + dd->refine_x*(dd->M - 1);
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->refine_y*dd->N;
    } else {
      N = 1;
    }
  } else {
    N = 1 + dd->refine_y*(dd->N - 1);
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->refine_z*dd->P;
    } else {
      P = 1;
    }
  } else {
    P = 1 + dd->refine_z*(dd->P - 1);
  }
  /* create the refined DMDA with the same process grid, boundary types, dof and stencil */
  ierr = DMDACreate(PetscObjectComm((PetscObject)da),&da2);CHKERRQ(ierr);
  ierr = DMSetOptionsPrefix(da2,((PetscObject)da)->prefix);CHKERRQ(ierr);
  ierr = DMSetDimension(da2,dim);CHKERRQ(ierr);
  ierr = DMDASetSizes(da2,M,N,P);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(da2,dd->m,dd->n,dd->p);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz);CHKERRQ(ierr);
  ierr = DMDASetDof(da2,dd->w);CHKERRQ(ierr);
  ierr = DMDASetStencilType(da2,dd->stencil_type);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(da2,dd->s);CHKERRQ(ierr);
  /* derive compatible fine-grid ownership ranges from the coarse ones, per active dimension */
  if (dim == 3) {
    PetscInt *lx,*ly,*lz;
    ierr = PetscMalloc3(dd->m,&lx,dd->n,&ly,dd->p,&lz);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_z,dd->p,dd->lz,lz);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,lz);CHKERRQ(ierr);
    ierr = PetscFree3(lx,ly,lz);CHKERRQ(ierr);
  } else if (dim == 2) {
    PetscInt *lx,*ly;
    ierr = PetscMalloc2(dd->m,&lx,dd->n,&ly);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,NULL);CHKERRQ(ierr);
    ierr = PetscFree2(lx,ly);CHKERRQ(ierr);
  } else if (dim == 1) {
    PetscInt *lx;
    ierr = PetscMalloc1(dd->m,&lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,NULL,NULL);CHKERRQ(ierr);
    ierr = PetscFree(lx);CHKERRQ(ierr);
  }
  dd2 = (DM_DA*)da2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones */
  da2->ops->creatematrix = da->ops->creatematrix;
  /* da2->ops->createinterpolation = da->ops->createinterpolation; this causes problem with SNESVI */
  da2->ops->getcoloring = da->ops->getcoloring;
  dd2->interptype = dd->interptype;

  /* copy fill information if given */
  if (dd->dfill) {
    ierr = PetscMalloc1(dd->dfill[dd->w]+dd->w+1,&dd2->dfill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->dfill,dd->dfill,(dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->ofill) {
    ierr = PetscMalloc1(dd->ofill[dd->w]+dd->w+1,&dd2->ofill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->ofill,dd->ofill,(dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  /* copy the refine information */
  dd2->coarsen_x = dd2->refine_x = dd->refine_x;
  dd2->coarsen_y = dd2->refine_y = dd->refine_y;
  dd2->coarsen_z = dd2->refine_z = dd->refine_z;

  /* per-level refinement factors: pick the factor for the new level from the hierarchy
     arrays when the level index falls inside them */
  if (dd->refine_z_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_z_hier_n) {
      dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_z_hier_n) {
      dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown];
    }
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    ierr = PetscMalloc1(dd2->refine_z_hier_n,&dd2->refine_z_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_z_hier,dd->refine_z_hier,dd2->refine_z_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_y_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_y_hier_n) {
      dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_y_hier_n) {
      dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown];
    }
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    ierr = PetscMalloc1(dd2->refine_y_hier_n,&dd2->refine_y_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_y_hier,dd->refine_y_hier,dd2->refine_y_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_x_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_x_hier_n) {
      dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_x_hier_n) {
      dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown];
    }
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    ierr = PetscMalloc1(dd2->refine_x_hier_n,&dd2->refine_x_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_x_hier,dd->refine_x_hier,dd2->refine_x_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }


  /* copy vector type information */
  ierr = DMSetVecType(da2,da->vectype);CHKERRQ(ierr);

  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  da2->leveldown = da->leveldown;
  da2->levelup   = da->levelup + 1;

  ierr = DMSetUp(da2);CHKERRQ(ierr);

  /* interpolate coordinates if they are set on the coarse grid */
  if (da->coordinates) {
    DM  cdaf,cdac;
    Vec coordsc,coordsf;
    Mat II;

    ierr = DMGetCoordinateDM(da,&cdac);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da,&coordsc);CHKERRQ(ierr);
    ierr = DMGetCoordinateDM(da2,&cdaf);CHKERRQ(ierr);
    /* force creation of the coordinate vector */
    ierr = DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da2,&coordsf);CHKERRQ(ierr);
    ierr = DMCreateInterpolation(cdac,cdaf,&II,NULL);CHKERRQ(ierr);
    ierr = MatInterpolate(II,coordsc,coordsf);CHKERRQ(ierr);
    ierr = MatDestroy(&II);CHKERRQ(ierr);
  }

  /* carry the per-field names over to the refined DM */
  for (i=0; i<da->bs; i++) {
    const char *fieldname;
    ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr);
    ierr = DMDASetFieldName(da2,i,fieldname);CHKERRQ(ierr);
  }

  *daref = da2;
  PetscFunctionReturn(0);
}


PetscErrorCode DMCoarsen_DA(DM da, MPI_Comm comm,DM *daref)
{
  PetscErrorCode ierr;
  PetscInt       M,N,P,i,dim;
  DM             da2;
  DM_DA          *dd = (DM_DA*)da->data,*dd2;

PetscFunctionBegin; 1084 PetscValidHeaderSpecific(da,DM_CLASSID,1); 1085 PetscValidPointer(daref,3); 1086 1087 ierr = DMGetDimension(da, &dim);CHKERRQ(ierr); 1088 if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) { 1089 M = dd->M / dd->coarsen_x; 1090 } else { 1091 M = 1 + (dd->M - 1) / dd->coarsen_x; 1092 } 1093 if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) { 1094 if (dim > 1) { 1095 N = dd->N / dd->coarsen_y; 1096 } else { 1097 N = 1; 1098 } 1099 } else { 1100 N = 1 + (dd->N - 1) / dd->coarsen_y; 1101 } 1102 if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) { 1103 if (dim > 2) { 1104 P = dd->P / dd->coarsen_z; 1105 } else { 1106 P = 1; 1107 } 1108 } else { 1109 P = 1 + (dd->P - 1) / dd->coarsen_z; 1110 } 1111 ierr = DMDACreate(PetscObjectComm((PetscObject)da),&da2);CHKERRQ(ierr); 1112 ierr = DMSetOptionsPrefix(da2,((PetscObject)da)->prefix);CHKERRQ(ierr); 1113 ierr = DMSetDimension(da2,dim);CHKERRQ(ierr); 1114 ierr = DMDASetSizes(da2,M,N,P);CHKERRQ(ierr); 1115 ierr = DMDASetNumProcs(da2,dd->m,dd->n,dd->p);CHKERRQ(ierr); 1116 ierr = DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz);CHKERRQ(ierr); 1117 ierr = DMDASetDof(da2,dd->w);CHKERRQ(ierr); 1118 ierr = DMDASetStencilType(da2,dd->stencil_type);CHKERRQ(ierr); 1119 ierr = DMDASetStencilWidth(da2,dd->s);CHKERRQ(ierr); 1120 if (dim == 3) { 1121 PetscInt *lx,*ly,*lz; 1122 ierr = PetscMalloc3(dd->m,&lx,dd->n,&ly,dd->p,&lz);CHKERRQ(ierr); 1123 ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr); 1124 ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly);CHKERRQ(ierr); 1125 ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_z,dd->p,dd->lz,lz);CHKERRQ(ierr); 1126 ierr = 
DMDASetOwnershipRanges(da2,lx,ly,lz);CHKERRQ(ierr); 1127 ierr = PetscFree3(lx,ly,lz);CHKERRQ(ierr); 1128 } else if (dim == 2) { 1129 PetscInt *lx,*ly; 1130 ierr = PetscMalloc2(dd->m,&lx,dd->n,&ly);CHKERRQ(ierr); 1131 ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr); 1132 ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly);CHKERRQ(ierr); 1133 ierr = DMDASetOwnershipRanges(da2,lx,ly,NULL);CHKERRQ(ierr); 1134 ierr = PetscFree2(lx,ly);CHKERRQ(ierr); 1135 } else if (dim == 1) { 1136 PetscInt *lx; 1137 ierr = PetscMalloc1(dd->m,&lx);CHKERRQ(ierr); 1138 ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr); 1139 ierr = DMDASetOwnershipRanges(da2,lx,NULL,NULL);CHKERRQ(ierr); 1140 ierr = PetscFree(lx);CHKERRQ(ierr); 1141 } 1142 dd2 = (DM_DA*)da2->data; 1143 1144 /* allow overloaded (user replaced) operations to be inherited by refinement clones; why are only some inherited and not all? 
*/ 1145 /* da2->ops->createinterpolation = da->ops->createinterpolation; copying this one causes trouble for DMSetVI */ 1146 da2->ops->creatematrix = da->ops->creatematrix; 1147 da2->ops->getcoloring = da->ops->getcoloring; 1148 dd2->interptype = dd->interptype; 1149 1150 /* copy fill information if given */ 1151 if (dd->dfill) { 1152 ierr = PetscMalloc1(dd->dfill[dd->w]+dd->w+1,&dd2->dfill);CHKERRQ(ierr); 1153 ierr = PetscMemcpy(dd2->dfill,dd->dfill,(dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr); 1154 } 1155 if (dd->ofill) { 1156 ierr = PetscMalloc1(dd->ofill[dd->w]+dd->w+1,&dd2->ofill);CHKERRQ(ierr); 1157 ierr = PetscMemcpy(dd2->ofill,dd->ofill,(dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr); 1158 } 1159 /* copy the refine information */ 1160 dd2->coarsen_x = dd2->refine_x = dd->coarsen_x; 1161 dd2->coarsen_y = dd2->refine_y = dd->coarsen_y; 1162 dd2->coarsen_z = dd2->refine_z = dd->coarsen_z; 1163 1164 if (dd->refine_z_hier) { 1165 if (da->levelup - da->leveldown -1 > -1 && da->levelup - da->leveldown - 1< dd->refine_z_hier_n) { 1166 dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown - 1]; 1167 } 1168 if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_z_hier_n) { 1169 dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown - 2]; 1170 } 1171 dd2->refine_z_hier_n = dd->refine_z_hier_n; 1172 ierr = PetscMalloc1(dd2->refine_z_hier_n,&dd2->refine_z_hier);CHKERRQ(ierr); 1173 ierr = PetscMemcpy(dd2->refine_z_hier,dd->refine_z_hier,dd2->refine_z_hier_n*sizeof(PetscInt));CHKERRQ(ierr); 1174 } 1175 if (dd->refine_y_hier) { 1176 if (da->levelup - da->leveldown - 1 > -1 && da->levelup - da->leveldown - 1< dd->refine_y_hier_n) { 1177 dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown - 1]; 1178 } 1179 if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_y_hier_n) { 1180 dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown - 2]; 1181 } 1182 
dd2->refine_y_hier_n = dd->refine_y_hier_n; 1183 ierr = PetscMalloc1(dd2->refine_y_hier_n,&dd2->refine_y_hier);CHKERRQ(ierr); 1184 ierr = PetscMemcpy(dd2->refine_y_hier,dd->refine_y_hier,dd2->refine_y_hier_n*sizeof(PetscInt));CHKERRQ(ierr); 1185 } 1186 if (dd->refine_x_hier) { 1187 if (da->levelup - da->leveldown - 1 > -1 && da->levelup - da->leveldown - 1 < dd->refine_x_hier_n) { 1188 dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown - 1]; 1189 } 1190 if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_x_hier_n) { 1191 dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown - 2]; 1192 } 1193 dd2->refine_x_hier_n = dd->refine_x_hier_n; 1194 ierr = PetscMalloc1(dd2->refine_x_hier_n,&dd2->refine_x_hier);CHKERRQ(ierr); 1195 ierr = PetscMemcpy(dd2->refine_x_hier,dd->refine_x_hier,dd2->refine_x_hier_n*sizeof(PetscInt));CHKERRQ(ierr); 1196 } 1197 1198 /* copy vector type information */ 1199 ierr = DMSetVecType(da2,da->vectype);CHKERRQ(ierr); 1200 1201 dd2->lf = dd->lf; 1202 dd2->lj = dd->lj; 1203 1204 da2->leveldown = da->leveldown + 1; 1205 da2->levelup = da->levelup; 1206 1207 ierr = DMSetUp(da2);CHKERRQ(ierr); 1208 1209 /* inject coordinates if they are set on the fine grid */ 1210 if (da->coordinates) { 1211 DM cdaf,cdac; 1212 Vec coordsc,coordsf; 1213 Mat inject; 1214 VecScatter vscat; 1215 1216 ierr = DMGetCoordinateDM(da,&cdaf);CHKERRQ(ierr); 1217 ierr = DMGetCoordinates(da,&coordsf);CHKERRQ(ierr); 1218 ierr = DMGetCoordinateDM(da2,&cdac);CHKERRQ(ierr); 1219 /* force creation of the coordinate vector */ 1220 ierr = DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0);CHKERRQ(ierr); 1221 ierr = DMGetCoordinates(da2,&coordsc);CHKERRQ(ierr); 1222 1223 ierr = DMCreateInjection(cdac,cdaf,&inject);CHKERRQ(ierr); 1224 ierr = MatScatterGetVecScatter(inject,&vscat);CHKERRQ(ierr); 1225 ierr = VecScatterBegin(vscat,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1226 ierr = 
VecScatterEnd(vscat,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1227 ierr = MatDestroy(&inject);CHKERRQ(ierr); 1228 } 1229 1230 for (i=0; i<da->bs; i++) { 1231 const char *fieldname; 1232 ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr); 1233 ierr = DMDASetFieldName(da2,i,fieldname);CHKERRQ(ierr); 1234 } 1235 1236 *daref = da2; 1237 PetscFunctionReturn(0); 1238 } 1239 1240 PetscErrorCode DMRefineHierarchy_DA(DM da,PetscInt nlevels,DM daf[]) 1241 { 1242 PetscErrorCode ierr; 1243 PetscInt i,n,*refx,*refy,*refz; 1244 1245 PetscFunctionBegin; 1246 PetscValidHeaderSpecific(da,DM_CLASSID,1); 1247 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 1248 if (nlevels == 0) PetscFunctionReturn(0); 1249 PetscValidPointer(daf,3); 1250 1251 /* Get refinement factors, defaults taken from the coarse DMDA */ 1252 ierr = PetscMalloc3(nlevels,&refx,nlevels,&refy,nlevels,&refz);CHKERRQ(ierr); 1253 for (i=0; i<nlevels; i++) { 1254 ierr = DMDAGetRefinementFactor(da,&refx[i],&refy[i],&refz[i]);CHKERRQ(ierr); 1255 } 1256 n = nlevels; 1257 ierr = PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_x",refx,&n,NULL);CHKERRQ(ierr); 1258 n = nlevels; 1259 ierr = PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_y",refy,&n,NULL);CHKERRQ(ierr); 1260 n = nlevels; 1261 ierr = PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_z",refz,&n,NULL);CHKERRQ(ierr); 1262 1263 ierr = DMDASetRefinementFactor(da,refx[0],refy[0],refz[0]);CHKERRQ(ierr); 1264 ierr = DMRefine(da,PetscObjectComm((PetscObject)da),&daf[0]);CHKERRQ(ierr); 1265 for (i=1; i<nlevels; i++) { 1266 ierr = DMDASetRefinementFactor(daf[i-1],refx[i],refy[i],refz[i]);CHKERRQ(ierr); 1267 ierr = DMRefine(daf[i-1],PetscObjectComm((PetscObject)da),&daf[i]);CHKERRQ(ierr); 1268 } 1269 ierr = 
PetscFree3(refx,refy,refz);CHKERRQ(ierr); 1270 PetscFunctionReturn(0); 1271 } 1272 1273 PetscErrorCode DMCoarsenHierarchy_DA(DM da,PetscInt nlevels,DM dac[]) 1274 { 1275 PetscErrorCode ierr; 1276 PetscInt i; 1277 1278 PetscFunctionBegin; 1279 PetscValidHeaderSpecific(da,DM_CLASSID,1); 1280 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 1281 if (nlevels == 0) PetscFunctionReturn(0); 1282 PetscValidPointer(dac,3); 1283 ierr = DMCoarsen(da,PetscObjectComm((PetscObject)da),&dac[0]);CHKERRQ(ierr); 1284 for (i=1; i<nlevels; i++) { 1285 ierr = DMCoarsen(dac[i-1],PetscObjectComm((PetscObject)da),&dac[i]);CHKERRQ(ierr); 1286 } 1287 PetscFunctionReturn(0); 1288 } 1289 1290 #include <petscgll.h> 1291 1292 PetscErrorCode DMDASetGLLCoordinates_1d(DM dm,PetscGLL *gll) 1293 { 1294 PetscErrorCode ierr; 1295 PetscInt i,j,n = gll->n,xs,xn,q; 1296 PetscScalar *xx; 1297 PetscReal h; 1298 Vec x; 1299 DM_DA *da = (DM_DA*)dm->data; 1300 1301 PetscFunctionBegin; 1302 if (da->bx != DM_BOUNDARY_PERIODIC) { 1303 ierr = DMDAGetInfo(dm,NULL,&q,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);CHKERRQ(ierr); 1304 q = (q-1)/(n-1); /* number of spectral elements */ 1305 h = 2.0/q; 1306 ierr = DMDAGetCorners(dm,&xs,NULL,NULL,&xn,NULL,NULL);CHKERRQ(ierr); 1307 xs = xs/(n-1); 1308 xn = xn/(n-1); 1309 ierr = DMDASetUniformCoordinates(dm,-1.,1.,0.,0.,0.,0.);CHKERRQ(ierr); 1310 ierr = DMGetCoordinates(dm,&x);CHKERRQ(ierr); 1311 ierr = DMDAVecGetArray(dm,x,&xx);CHKERRQ(ierr); 1312 1313 /* loop over local spectral elements */ 1314 for (j=xs; j<xs+xn; j++) { 1315 /* 1316 Except for the first process, each process starts on the second GLL point of the first element on that process 1317 */ 1318 for (i= (j == xs && xs > 0)? 
1 : 0; i<n; i++) { 1319 xx[j*(n-1) + i] = -1.0 + h*j + h*(gll->nodes[i]+1.0)/2.; 1320 } 1321 } 1322 ierr = DMDAVecRestoreArray(dm,x,&xx);CHKERRQ(ierr); 1323 } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented for periodic"); 1324 PetscFunctionReturn(0); 1325 } 1326 1327 /*@ 1328 1329 DMDASetGLLCoordinates - Sets the global coordinates from -1 to 1 to the GLL points of as many GLL elements that fit the number of grid points 1330 1331 Collective on DM 1332 1333 Input Parameters: 1334 + da - the DMDA object 1335 - gll - the GLL object 1336 1337 Notes: the parallel decomposition of grid points must correspond to the degree of the GLL. That is, the number of grid points 1338 on each process much be divisible by the number of GLL elements needed per process. This depends on whether the DM is 1339 periodic or not. 1340 1341 Level: advanced 1342 1343 .seealso: DMDACreate(), PetscGLLCreate(), DMGetCoordinates() 1344 @*/ 1345 PetscErrorCode DMDASetGLLCoordinates(DM da,PetscGLL *gll) 1346 { 1347 PetscErrorCode ierr; 1348 1349 PetscFunctionBegin; 1350 if (da->dim == 1) { 1351 ierr = DMDASetGLLCoordinates_1d(da,gll);CHKERRQ(ierr); 1352 } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented for 2 or 3d"); 1353 PetscFunctionReturn(0); 1354 } 1355