1 #include <petsc/private/dmdaimpl.h> /*I "petscdmda.h" I*/ 2 3 /*@ 4 DMDASetSizes - Sets the number of grid points in the three dimensional directions 5 6 Logically Collective on da 7 8 Input Parameters: 9 + da - the DMDA 10 . M - the global X size 11 . N - the global Y size 12 - P - the global Z size 13 14 Level: intermediate 15 16 Developer Notes: 17 Since the dimension may not yet have been set the code cannot error check for non-positive Y and Z number of grid points 18 19 .seealso: `PetscSplitOwnership()` 20 @*/ 21 PetscErrorCode DMDASetSizes(DM da, PetscInt M, PetscInt N, PetscInt P) 22 { 23 DM_DA *dd = (DM_DA*)da->data; 24 25 PetscFunctionBegin; 26 PetscValidHeaderSpecificType(da, DM_CLASSID, 1,DMDA); 27 PetscValidLogicalCollectiveInt(da,M,2); 28 PetscValidLogicalCollectiveInt(da,N,3); 29 PetscValidLogicalCollectiveInt(da,P,4); 30 PetscCheck(!da->setupcalled,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()"); 31 PetscCheck(M >= 0,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Number of grid points in X direction must be positive"); 32 PetscCheck(N >= 0,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Number of grid points in Y direction must be positive"); 33 PetscCheck(P >= 0,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Number of grid points in Z direction must be positive"); 34 35 dd->M = M; 36 dd->N = N; 37 dd->P = P; 38 PetscFunctionReturn(0); 39 } 40 41 /*@ 42 DMDASetNumProcs - Sets the number of processes in each dimension 43 44 Logically Collective on da 45 46 Input Parameters: 47 + da - the DMDA 48 . m - the number of X procs (or PETSC_DECIDE) 49 . 
n - the number of Y procs (or PETSC_DECIDE) 50 - p - the number of Z procs (or PETSC_DECIDE) 51 52 Level: intermediate 53 54 .seealso: `DMDASetSizes()`, `PetscSplitOwnership()` 55 @*/ 56 PetscErrorCode DMDASetNumProcs(DM da, PetscInt m, PetscInt n, PetscInt p) 57 { 58 DM_DA *dd = (DM_DA*)da->data; 59 60 PetscFunctionBegin; 61 PetscValidHeaderSpecificType(da, DM_CLASSID, 1,DMDA); 62 PetscValidLogicalCollectiveInt(da,m,2); 63 PetscValidLogicalCollectiveInt(da,n,3); 64 PetscValidLogicalCollectiveInt(da,p,4); 65 PetscCheck(!da->setupcalled,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()"); 66 dd->m = m; 67 dd->n = n; 68 dd->p = p; 69 if (da->dim == 2) { 70 PetscMPIInt size; 71 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)da),&size)); 72 if ((dd->m > 0) && (dd->n < 0)) { 73 dd->n = size/dd->m; 74 PetscCheck(dd->n*dd->m == size,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%" PetscInt_FMT " processes in X direction not divisible into comm size %d",m,size); 75 } 76 if ((dd->n > 0) && (dd->m < 0)) { 77 dd->m = size/dd->n; 78 PetscCheck(dd->n*dd->m == size,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%" PetscInt_FMT " processes in Y direction not divisible into comm size %d",n,size); 79 } 80 } 81 PetscFunctionReturn(0); 82 } 83 84 /*@ 85 DMDASetBoundaryType - Sets the type of ghost nodes on domain boundaries. 
86 87 Not collective 88 89 Input Parameters: 90 + da - The DMDA 91 - bx,by,bz - One of DM_BOUNDARY_NONE, DM_BOUNDARY_GHOSTED, DM_BOUNDARY_PERIODIC 92 93 Level: intermediate 94 95 .seealso: `DMDACreate()`, `DMDestroy()`, `DMDA`, `DMBoundaryType` 96 @*/ 97 PetscErrorCode DMDASetBoundaryType(DM da,DMBoundaryType bx,DMBoundaryType by,DMBoundaryType bz) 98 { 99 DM_DA *dd = (DM_DA*)da->data; 100 101 PetscFunctionBegin; 102 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 103 PetscValidLogicalCollectiveEnum(da,bx,2); 104 PetscValidLogicalCollectiveEnum(da,by,3); 105 PetscValidLogicalCollectiveEnum(da,bz,4); 106 PetscCheck(!da->setupcalled,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()"); 107 dd->bx = bx; 108 dd->by = by; 109 dd->bz = bz; 110 PetscFunctionReturn(0); 111 } 112 113 /*@ 114 DMDASetDof - Sets the number of degrees of freedom per vertex 115 116 Not collective 117 118 Input Parameters: 119 + da - The DMDA 120 - dof - Number of degrees of freedom 121 122 Level: intermediate 123 124 .seealso: `DMDAGetDof()`, `DMDACreate()`, `DMDestroy()`, `DMDA` 125 @*/ 126 PetscErrorCode DMDASetDof(DM da, PetscInt dof) 127 { 128 DM_DA *dd = (DM_DA*)da->data; 129 130 PetscFunctionBegin; 131 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 132 PetscValidLogicalCollectiveInt(da,dof,2); 133 PetscCheck(!da->setupcalled,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()"); 134 dd->w = dof; 135 da->bs = dof; 136 PetscFunctionReturn(0); 137 } 138 139 /*@ 140 DMDAGetDof - Gets the number of degrees of freedom per vertex 141 142 Not collective 143 144 Input Parameter: 145 . da - The DMDA 146 147 Output Parameter: 148 . 
dof - Number of degrees of freedom 149 150 Level: intermediate 151 152 .seealso: `DMDASetDof()`, `DMDACreate()`, `DMDestroy()`, `DMDA` 153 @*/ 154 PetscErrorCode DMDAGetDof(DM da, PetscInt *dof) 155 { 156 DM_DA *dd = (DM_DA *) da->data; 157 158 PetscFunctionBegin; 159 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 160 PetscValidIntPointer(dof,2); 161 *dof = dd->w; 162 PetscFunctionReturn(0); 163 } 164 165 /*@ 166 DMDAGetOverlap - Gets the size of the per-processor overlap. 167 168 Not collective 169 170 Input Parameter: 171 . da - The DMDA 172 173 Output Parameters: 174 + x - Overlap in the x direction 175 . y - Overlap in the y direction 176 - z - Overlap in the z direction 177 178 Level: intermediate 179 180 .seealso: `DMDACreateDomainDecomposition()`, `DMDASetOverlap()`, `DMDA` 181 @*/ 182 PetscErrorCode DMDAGetOverlap(DM da,PetscInt *x,PetscInt *y,PetscInt *z) 183 { 184 DM_DA *dd = (DM_DA*)da->data; 185 186 PetscFunctionBegin; 187 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 188 if (x) *x = dd->xol; 189 if (y) *y = dd->yol; 190 if (z) *z = dd->zol; 191 PetscFunctionReturn(0); 192 } 193 194 /*@ 195 DMDASetOverlap - Sets the size of the per-processor overlap. 196 197 Not collective 198 199 Input Parameters: 200 + da - The DMDA 201 . x - Overlap in the x direction 202 . 
y - Overlap in the y direction 203 - z - Overlap in the z direction 204 205 Level: intermediate 206 207 .seealso: `DMDACreateDomainDecomposition()`, `DMDAGetOverlap()`, `DMDA` 208 @*/ 209 PetscErrorCode DMDASetOverlap(DM da,PetscInt x,PetscInt y,PetscInt z) 210 { 211 DM_DA *dd = (DM_DA*)da->data; 212 213 PetscFunctionBegin; 214 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 215 PetscValidLogicalCollectiveInt(da,x,2); 216 PetscValidLogicalCollectiveInt(da,y,3); 217 PetscValidLogicalCollectiveInt(da,z,4); 218 dd->xol = x; 219 dd->yol = y; 220 dd->zol = z; 221 PetscFunctionReturn(0); 222 } 223 224 /*@ 225 DMDAGetNumLocalSubDomains - Gets the number of local subdomains created upon decomposition. 226 227 Not collective 228 229 Input Parameters: 230 . da - The DMDA 231 232 Output Parameters: 233 . Nsub - Number of local subdomains created upon decomposition 234 235 Level: intermediate 236 237 .seealso: `DMDACreateDomainDecomposition()`, `DMDASetNumLocalSubDomains()`, `DMDA` 238 @*/ 239 PetscErrorCode DMDAGetNumLocalSubDomains(DM da,PetscInt *Nsub) 240 { 241 DM_DA *dd = (DM_DA*)da->data; 242 243 PetscFunctionBegin; 244 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 245 if (Nsub) *Nsub = dd->Nsub; 246 PetscFunctionReturn(0); 247 } 248 249 /*@ 250 DMDASetNumLocalSubDomains - Sets the number of local subdomains created upon decomposition. 251 252 Not collective 253 254 Input Parameters: 255 + da - The DMDA 256 - Nsub - The number of local subdomains requested 257 258 Level: intermediate 259 260 .seealso: `DMDACreateDomainDecomposition()`, `DMDAGetNumLocalSubDomains()`, `DMDA` 261 @*/ 262 PetscErrorCode DMDASetNumLocalSubDomains(DM da,PetscInt Nsub) 263 { 264 DM_DA *dd = (DM_DA*)da->data; 265 266 PetscFunctionBegin; 267 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 268 PetscValidLogicalCollectiveInt(da,Nsub,2); 269 dd->Nsub = Nsub; 270 PetscFunctionReturn(0); 271 } 272 273 /*@ 274 DMDASetOffset - Sets the index offset of the DA. 
275 276 Collective on DA 277 278 Input Parameters: 279 + da - The DMDA 280 . xo - The offset in the x direction 281 . yo - The offset in the y direction 282 - zo - The offset in the z direction 283 284 Level: intermediate 285 286 Notes: 287 This is used primarily to overlap a computation on a local DA with that on a global DA without 288 changing boundary conditions or subdomain features that depend upon the global offsets. 289 290 .seealso: `DMDAGetOffset()`, `DMDAVecGetArray()` 291 @*/ 292 PetscErrorCode DMDASetOffset(DM da, PetscInt xo, PetscInt yo, PetscInt zo, PetscInt Mo, PetscInt No, PetscInt Po) 293 { 294 DM_DA *dd = (DM_DA*)da->data; 295 296 PetscFunctionBegin; 297 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 298 PetscValidLogicalCollectiveInt(da,xo,2); 299 PetscValidLogicalCollectiveInt(da,yo,3); 300 PetscValidLogicalCollectiveInt(da,zo,4); 301 PetscValidLogicalCollectiveInt(da,Mo,5); 302 PetscValidLogicalCollectiveInt(da,No,6); 303 PetscValidLogicalCollectiveInt(da,Po,7); 304 dd->xo = xo; 305 dd->yo = yo; 306 dd->zo = zo; 307 dd->Mo = Mo; 308 dd->No = No; 309 dd->Po = Po; 310 311 if (da->coordinateDM) { 312 PetscCall(DMDASetOffset(da->coordinateDM,xo,yo,zo,Mo,No,Po)); 313 } 314 PetscFunctionReturn(0); 315 } 316 317 /*@ 318 DMDAGetOffset - Gets the index offset of the DA. 319 320 Not collective 321 322 Input Parameter: 323 . da - The DMDA 324 325 Output Parameters: 326 + xo - The offset in the x direction 327 . yo - The offset in the y direction 328 . zo - The offset in the z direction 329 . Mo - The global size in the x direction 330 . 
No - The global size in the y direction 331 - Po - The global size in the z direction 332 333 Level: intermediate 334 335 .seealso: `DMDASetOffset()`, `DMDAVecGetArray()` 336 @*/ 337 PetscErrorCode DMDAGetOffset(DM da,PetscInt *xo,PetscInt *yo,PetscInt *zo,PetscInt *Mo,PetscInt *No,PetscInt *Po) 338 { 339 DM_DA *dd = (DM_DA*)da->data; 340 341 PetscFunctionBegin; 342 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 343 if (xo) *xo = dd->xo; 344 if (yo) *yo = dd->yo; 345 if (zo) *zo = dd->zo; 346 if (Mo) *Mo = dd->Mo; 347 if (No) *No = dd->No; 348 if (Po) *Po = dd->Po; 349 PetscFunctionReturn(0); 350 } 351 352 /*@ 353 DMDAGetNonOverlappingRegion - Gets the indices of the nonoverlapping region of a subdomain DM. 354 355 Not collective 356 357 Input Parameter: 358 . da - The DMDA 359 360 Output Parameters: 361 + xs - The start of the region in x 362 . ys - The start of the region in y 363 . zs - The start of the region in z 364 . xs - The size of the region in x 365 . ys - The size of the region in y 366 - zs - The size of the region in z 367 368 Level: intermediate 369 370 .seealso: `DMDAGetOffset()`, `DMDAVecGetArray()` 371 @*/ 372 PetscErrorCode DMDAGetNonOverlappingRegion(DM da, PetscInt *xs, PetscInt *ys, PetscInt *zs, PetscInt *xm, PetscInt *ym, PetscInt *zm) 373 { 374 DM_DA *dd = (DM_DA*)da->data; 375 376 PetscFunctionBegin; 377 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 378 if (xs) *xs = dd->nonxs; 379 if (ys) *ys = dd->nonys; 380 if (zs) *zs = dd->nonzs; 381 if (xm) *xm = dd->nonxm; 382 if (ym) *ym = dd->nonym; 383 if (zm) *zm = dd->nonzm; 384 PetscFunctionReturn(0); 385 } 386 387 /*@ 388 DMDASetNonOverlappingRegion - Sets the indices of the nonoverlapping region of a subdomain DM. 389 390 Collective on DA 391 392 Input Parameters: 393 + da - The DMDA 394 . xs - The start of the region in x 395 . ys - The start of the region in y 396 . zs - The start of the region in z 397 . xs - The size of the region in x 398 . 
ys - The size of the region in y 399 - zs - The size of the region in z 400 401 Level: intermediate 402 403 .seealso: `DMDAGetOffset()`, `DMDAVecGetArray()` 404 @*/ 405 PetscErrorCode DMDASetNonOverlappingRegion(DM da, PetscInt xs, PetscInt ys, PetscInt zs, PetscInt xm, PetscInt ym, PetscInt zm) 406 { 407 DM_DA *dd = (DM_DA*)da->data; 408 409 PetscFunctionBegin; 410 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 411 PetscValidLogicalCollectiveInt(da,xs,2); 412 PetscValidLogicalCollectiveInt(da,ys,3); 413 PetscValidLogicalCollectiveInt(da,zs,4); 414 PetscValidLogicalCollectiveInt(da,xm,5); 415 PetscValidLogicalCollectiveInt(da,ym,6); 416 PetscValidLogicalCollectiveInt(da,zm,7); 417 dd->nonxs = xs; 418 dd->nonys = ys; 419 dd->nonzs = zs; 420 dd->nonxm = xm; 421 dd->nonym = ym; 422 dd->nonzm = zm; 423 424 PetscFunctionReturn(0); 425 } 426 427 /*@ 428 DMDASetStencilType - Sets the type of the communication stencil 429 430 Logically Collective on da 431 432 Input Parameters: 433 + da - The DMDA 434 - stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR. 435 436 Level: intermediate 437 438 .seealso: `DMDACreate()`, `DMDestroy()`, `DMDA` 439 @*/ 440 PetscErrorCode DMDASetStencilType(DM da, DMDAStencilType stype) 441 { 442 DM_DA *dd = (DM_DA*)da->data; 443 444 PetscFunctionBegin; 445 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 446 PetscValidLogicalCollectiveEnum(da,stype,2); 447 PetscCheck(!da->setupcalled,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()"); 448 dd->stencil_type = stype; 449 PetscFunctionReturn(0); 450 } 451 452 /*@ 453 DMDAGetStencilType - Gets the type of the communication stencil 454 455 Not collective 456 457 Input Parameter: 458 . da - The DMDA 459 460 Output Parameter: 461 . stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR. 
462 463 Level: intermediate 464 465 .seealso: `DMDACreate()`, `DMDestroy()`, `DMDA` 466 @*/ 467 PetscErrorCode DMDAGetStencilType(DM da, DMDAStencilType *stype) 468 { 469 DM_DA *dd = (DM_DA*)da->data; 470 471 PetscFunctionBegin; 472 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 473 PetscValidPointer(stype,2); 474 *stype = dd->stencil_type; 475 PetscFunctionReturn(0); 476 } 477 478 /*@ 479 DMDASetStencilWidth - Sets the width of the communication stencil 480 481 Logically Collective on da 482 483 Input Parameters: 484 + da - The DMDA 485 - width - The stencil width 486 487 Level: intermediate 488 489 .seealso: `DMDACreate()`, `DMDestroy()`, `DMDA` 490 @*/ 491 PetscErrorCode DMDASetStencilWidth(DM da, PetscInt width) 492 { 493 DM_DA *dd = (DM_DA*)da->data; 494 495 PetscFunctionBegin; 496 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 497 PetscValidLogicalCollectiveInt(da,width,2); 498 PetscCheck(!da->setupcalled,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()"); 499 dd->s = width; 500 PetscFunctionReturn(0); 501 } 502 503 /*@ 504 DMDAGetStencilWidth - Gets the width of the communication stencil 505 506 Not collective 507 508 Input Parameter: 509 . da - The DMDA 510 511 Output Parameter: 512 . 
width - The stencil width 513 514 Level: intermediate 515 516 .seealso: `DMDACreate()`, `DMDestroy()`, `DMDA` 517 @*/ 518 PetscErrorCode DMDAGetStencilWidth(DM da, PetscInt *width) 519 { 520 DM_DA *dd = (DM_DA *) da->data; 521 522 PetscFunctionBegin; 523 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 524 PetscValidIntPointer(width,2); 525 *width = dd->s; 526 PetscFunctionReturn(0); 527 } 528 529 static PetscErrorCode DMDACheckOwnershipRanges_Private(DM da,PetscInt M,PetscInt m,const PetscInt lx[]) 530 { 531 PetscInt i,sum; 532 533 PetscFunctionBegin; 534 PetscCheck(M >= 0,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Global dimension not set"); 535 for (i=sum=0; i<m; i++) sum += lx[i]; 536 PetscCheck(sum == M,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_INCOMP,"Ownership ranges sum to %" PetscInt_FMT " but global dimension is %" PetscInt_FMT,sum,M); 537 PetscFunctionReturn(0); 538 } 539 540 /*@ 541 DMDASetOwnershipRanges - Sets the number of nodes in each direction on each process 542 543 Logically Collective on da 544 545 Input Parameters: 546 + da - The DMDA 547 . lx - array containing number of nodes in the X direction on each process, or NULL. If non-null, must be of length da->m 548 . ly - array containing number of nodes in the Y direction on each process, or NULL. If non-null, must be of length da->n 549 - lz - array containing number of nodes in the Z direction on each process, or NULL. If non-null, must be of length da->p. 550 551 Level: intermediate 552 553 Note: these numbers are NOT multiplied by the number of dof per node. 
554 555 .seealso: `DMDACreate()`, `DMDestroy()`, `DMDA` 556 @*/ 557 PetscErrorCode DMDASetOwnershipRanges(DM da, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[]) 558 { 559 DM_DA *dd = (DM_DA*)da->data; 560 561 PetscFunctionBegin; 562 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 563 PetscCheck(!da->setupcalled,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()"); 564 if (lx) { 565 PetscCheck(dd->m >= 0,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs"); 566 PetscCall(DMDACheckOwnershipRanges_Private(da,dd->M,dd->m,lx)); 567 if (!dd->lx) { 568 PetscCall(PetscMalloc1(dd->m, &dd->lx)); 569 } 570 PetscCall(PetscArraycpy(dd->lx, lx, dd->m)); 571 } 572 if (ly) { 573 PetscCheck(dd->n >= 0,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs"); 574 PetscCall(DMDACheckOwnershipRanges_Private(da,dd->N,dd->n,ly)); 575 if (!dd->ly) { 576 PetscCall(PetscMalloc1(dd->n, &dd->ly)); 577 } 578 PetscCall(PetscArraycpy(dd->ly, ly, dd->n)); 579 } 580 if (lz) { 581 PetscCheck(dd->p >= 0,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs"); 582 PetscCall(DMDACheckOwnershipRanges_Private(da,dd->P,dd->p,lz)); 583 if (!dd->lz) { 584 PetscCall(PetscMalloc1(dd->p, &dd->lz)); 585 } 586 PetscCall(PetscArraycpy(dd->lz, lz, dd->p)); 587 } 588 PetscFunctionReturn(0); 589 } 590 591 /*@ 592 DMDASetInterpolationType - Sets the type of interpolation that will be 593 returned by DMCreateInterpolation() 594 595 Logically Collective on da 596 597 Input Parameters: 598 + da - initial distributed array 599 - ctype - DMDA_Q1 and DMDA_Q0 are currently the only supported forms 600 601 Level: intermediate 602 603 Notes: 604 you should call this on the coarser of the two DMDAs you pass to DMCreateInterpolation() 605 606 .seealso: 
`DMDACreate1d()`, `DMDACreate2d()`, `DMDACreate3d()`, `DMDestroy()`, `DMDA`, `DMDAInterpolationType` 607 @*/ 608 PetscErrorCode DMDASetInterpolationType(DM da,DMDAInterpolationType ctype) 609 { 610 DM_DA *dd = (DM_DA*)da->data; 611 612 PetscFunctionBegin; 613 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 614 PetscValidLogicalCollectiveEnum(da,ctype,2); 615 dd->interptype = ctype; 616 PetscFunctionReturn(0); 617 } 618 619 /*@ 620 DMDAGetInterpolationType - Gets the type of interpolation that will be 621 used by DMCreateInterpolation() 622 623 Not Collective 624 625 Input Parameter: 626 . da - distributed array 627 628 Output Parameter: 629 . ctype - interpolation type (DMDA_Q1 and DMDA_Q0 are currently the only supported forms) 630 631 Level: intermediate 632 633 .seealso: `DMDA`, `DMDAInterpolationType`, `DMDASetInterpolationType()`, `DMCreateInterpolation()` 634 @*/ 635 PetscErrorCode DMDAGetInterpolationType(DM da,DMDAInterpolationType *ctype) 636 { 637 DM_DA *dd = (DM_DA*)da->data; 638 639 PetscFunctionBegin; 640 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 641 PetscValidPointer(ctype,2); 642 *ctype = dd->interptype; 643 PetscFunctionReturn(0); 644 } 645 646 /*@C 647 DMDAGetNeighbors - Gets an array containing the MPI rank of all the current 648 processes neighbors. 649 650 Not Collective 651 652 Input Parameter: 653 . da - the DMDA object 654 655 Output Parameters: 656 . ranks - the neighbors ranks, stored with the x index increasing most rapidly. 657 this process itself is in the list 658 659 Notes: 660 In 2d the array is of length 9, in 3d of length 27 661 Not supported in 1d 662 Do not free the array, it is freed when the DMDA is destroyed. 663 664 Fortran Notes: 665 In fortran you must pass in an array of the appropriate length. 
666 667 Level: intermediate 668 669 @*/ 670 PetscErrorCode DMDAGetNeighbors(DM da,const PetscMPIInt *ranks[]) 671 { 672 DM_DA *dd = (DM_DA*)da->data; 673 674 PetscFunctionBegin; 675 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 676 *ranks = dd->neighbors; 677 PetscFunctionReturn(0); 678 } 679 680 /*@C 681 DMDAGetOwnershipRanges - Gets the ranges of indices in the x, y and z direction that are owned by each process 682 683 Not Collective 684 685 Input Parameter: 686 . da - the DMDA object 687 688 Output Parameters: 689 + lx - ownership along x direction (optional) 690 . ly - ownership along y direction (optional) 691 - lz - ownership along z direction (optional) 692 693 Level: intermediate 694 695 Note: these correspond to the optional final arguments passed to DMDACreate(), DMDACreate2d(), DMDACreate3d() 696 697 In Fortran one must pass in arrays lx, ly, and lz that are long enough to hold the values; the sixth, seventh and 698 eighth arguments from DMDAGetInfo() 699 700 In C you should not free these arrays, nor change the values in them. They will only have valid values while the 701 DMDA they came from still exists (has not been destroyed). 702 703 These numbers are NOT multiplied by the number of dof per node. 704 705 .seealso: `DMDAGetCorners()`, `DMDAGetGhostCorners()`, `DMDACreate()`, `DMDACreate1d()`, `DMDACreate2d()`, `DMDACreate3d()`, `VecGetOwnershipRanges()` 706 @*/ 707 PetscErrorCode DMDAGetOwnershipRanges(DM da,const PetscInt *lx[],const PetscInt *ly[],const PetscInt *lz[]) 708 { 709 DM_DA *dd = (DM_DA*)da->data; 710 711 PetscFunctionBegin; 712 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 713 if (lx) *lx = dd->lx; 714 if (ly) *ly = dd->ly; 715 if (lz) *lz = dd->lz; 716 PetscFunctionReturn(0); 717 } 718 719 /*@ 720 DMDASetRefinementFactor - Set the ratios that the DMDA grid is refined 721 722 Logically Collective on da 723 724 Input Parameters: 725 + da - the DMDA object 726 . 
refine_x - ratio of fine grid to coarse in x direction (2 by default) 727 . refine_y - ratio of fine grid to coarse in y direction (2 by default) 728 - refine_z - ratio of fine grid to coarse in z direction (2 by default) 729 730 Options Database: 731 + -da_refine_x refine_x - refinement ratio in x direction 732 . -da_refine_y rafine_y - refinement ratio in y direction 733 . -da_refine_z refine_z - refinement ratio in z direction 734 - -da_refine <n> - refine the DMDA object n times when it is created. 735 736 Level: intermediate 737 738 Notes: 739 Pass PETSC_IGNORE to leave a value unchanged 740 741 .seealso: `DMRefine()`, `DMDAGetRefinementFactor()` 742 @*/ 743 PetscErrorCode DMDASetRefinementFactor(DM da, PetscInt refine_x, PetscInt refine_y,PetscInt refine_z) 744 { 745 DM_DA *dd = (DM_DA*)da->data; 746 747 PetscFunctionBegin; 748 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 749 PetscValidLogicalCollectiveInt(da,refine_x,2); 750 PetscValidLogicalCollectiveInt(da,refine_y,3); 751 PetscValidLogicalCollectiveInt(da,refine_z,4); 752 753 if (refine_x > 0) dd->refine_x = refine_x; 754 if (refine_y > 0) dd->refine_y = refine_y; 755 if (refine_z > 0) dd->refine_z = refine_z; 756 PetscFunctionReturn(0); 757 } 758 759 /*@C 760 DMDAGetRefinementFactor - Gets the ratios that the DMDA grid is refined 761 762 Not Collective 763 764 Input Parameter: 765 . da - the DMDA object 766 767 Output Parameters: 768 + refine_x - ratio of fine grid to coarse in x direction (2 by default) 769 . 
refine_y - ratio of fine grid to coarse in y direction (2 by default) 770 - refine_z - ratio of fine grid to coarse in z direction (2 by default) 771 772 Level: intermediate 773 774 Notes: 775 Pass NULL for values you do not need 776 777 .seealso: `DMRefine()`, `DMDASetRefinementFactor()` 778 @*/ 779 PetscErrorCode DMDAGetRefinementFactor(DM da, PetscInt *refine_x, PetscInt *refine_y,PetscInt *refine_z) 780 { 781 DM_DA *dd = (DM_DA*)da->data; 782 783 PetscFunctionBegin; 784 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 785 if (refine_x) *refine_x = dd->refine_x; 786 if (refine_y) *refine_y = dd->refine_y; 787 if (refine_z) *refine_z = dd->refine_z; 788 PetscFunctionReturn(0); 789 } 790 791 /*@C 792 DMDASetGetMatrix - Sets the routine used by the DMDA to allocate a matrix. 793 794 Logically Collective on da 795 796 Input Parameters: 797 + da - the DMDA object 798 - f - the function that allocates the matrix for that specific DMDA 799 800 Level: developer 801 802 Notes: 803 See DMDASetBlockFills() that provides a simple way to provide the nonzero structure for 804 the diagonal and off-diagonal blocks of the matrix 805 806 Not supported from Fortran 807 808 .seealso: `DMCreateMatrix()`, `DMDASetBlockFills()` 809 @*/ 810 PetscErrorCode DMDASetGetMatrix(DM da,PetscErrorCode (*f)(DM, Mat*)) 811 { 812 PetscFunctionBegin; 813 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 814 da->ops->creatematrix = f; 815 PetscFunctionReturn(0); 816 } 817 818 /*@ 819 DMDAMapMatStencilToGlobal - Map a list of MatStencils on a grid to global indices. 820 821 Not Collective 822 823 Input Parameters: 824 + da - the DMDA object 825 . 
m - number of MatStencils 826 - idxm - grid points (and component number when dof > 1) 827 828 Output Parameters: 829 + gidxm - global row indices 830 831 .seealso: `MatStencil` 832 @*/ 833 PetscErrorCode DMDAMapMatStencilToGlobal(DM da,PetscInt m,const MatStencil idxm[],PetscInt gidxm[]) 834 { 835 const DM_DA *dd = (const DM_DA*)da->data; 836 const PetscInt *dxm = (const PetscInt*)idxm; 837 PetscInt i,j,sdim,tmp,dim; 838 PetscInt dims[4],starts[4],dims2[3],starts2[3],dof = dd->w; 839 ISLocalToGlobalMapping ltog; 840 841 PetscFunctionBegin; 842 if (m <= 0) PetscFunctionReturn(0); 843 dim = da->dim; /* DA dim: 1 to 3 */ 844 sdim = dim + (dof > 1? 1 : 0); /* Dimension in MatStencil's (k,j,i,c) view */ 845 846 starts2[0] = dd->Xs/dof + dd->xo; 847 starts2[1] = dd->Ys + dd->yo; 848 starts2[2] = dd->Zs + dd->zo; 849 dims2[0] = (dd->Xe - dd->Xs)/dof; 850 dims2[1] = (dd->Ye - dd->Ys); 851 dims2[2] = (dd->Ze - dd->Zs); 852 853 for (i=0; i<dim; i++) { 854 dims[i] = dims2[dim-i-1]; /* copy the values in backwards */ 855 starts[i] = starts2[dim-i-1]; 856 } 857 starts[dim] = 0; 858 dims[dim] = dof; 859 860 /* Map stencils to local indices (code adapted from MatSetValuesStencil()) */ 861 for (i=0; i<m; i++) { 862 for (j=0; j<3-dim; j++) dxm++; /* dxm[] is in k,j,i,c order; move to the first significant index */ 863 tmp = *dxm++ - starts[0]; 864 for (j=0; j<sdim-1; j++) { 865 if (tmp < 0 || (*dxm - starts[j+1]) < 0) tmp = -1; /* Beyond the ghost region, therefore ignored with negative indices */ 866 else tmp = tmp*dims[j] + (*dxm - starts[j+1]); 867 dxm++; 868 } 869 if (dof == 1) dxm++; /* If no dof, skip the unused c */ 870 gidxm[i] = tmp; 871 } 872 873 /* Map local indices to global indices */ 874 PetscCall(DMGetLocalToGlobalMapping(da,<og)); 875 PetscCall(ISLocalToGlobalMappingApply(ltog,m,gidxm,gidxm)); 876 PetscFunctionReturn(0); 877 } 878 879 /* 880 Creates "balanced" ownership ranges after refinement, constrained by the need for the 881 fine grid boundaries to fall within 
one stencil width of the coarse partition. 882 883 Uses a greedy algorithm to handle non-ideal layouts, could probably do something better. 884 */ 885 static PetscErrorCode DMDARefineOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lc[],PetscInt lf[]) 886 { 887 PetscInt i,totalc = 0,remaining,startc = 0,startf = 0; 888 889 PetscFunctionBegin; 890 PetscCheck(ratio >= 1,PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %" PetscInt_FMT " must be at least 1",ratio); 891 if (ratio == 1) { 892 PetscCall(PetscArraycpy(lf,lc,m)); 893 PetscFunctionReturn(0); 894 } 895 for (i=0; i<m; i++) totalc += lc[i]; 896 remaining = (!periodic) + ratio * (totalc - (!periodic)); 897 for (i=0; i<m; i++) { 898 PetscInt want = remaining/(m-i) + !!(remaining%(m-i)); 899 if (i == m-1) lf[i] = want; 900 else { 901 const PetscInt nextc = startc + lc[i]; 902 /* Move the first fine node of the next subdomain to the right until the coarse node on its left is within one 903 * coarse stencil width of the first coarse node in the next subdomain. */ 904 while ((startf+want)/ratio < nextc - stencil_width) want++; 905 /* Move the last fine node in the current subdomain to the left until the coarse node on its right is within one 906 * coarse stencil width of the last coarse node in the current subdomain. 
   */
      while ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width) want--;
      /* Make sure all constraints are satisfied */
      if (want < 0 || want > remaining || ((startf+want)/ratio < nextc - stencil_width)
          || ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible refined ownership range");
    }
    lf[i] = want;
    startc += lc[i];
    startf += lf[i];
    remaining -= lf[i];
  }
  PetscFunctionReturn(0);
}

/*
  Creates "balanced" ownership ranges after coarsening, constrained by the need for the
  fine grid boundaries to fall within one stencil width of the coarse partition.

  Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.

  Input: lf[] are the fine-grid local sizes on each of the m ranks; output lc[] are the
  computed coarse-grid local sizes. A ratio of 1 is a plain copy.
*/
static PetscErrorCode DMDACoarsenOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lf[],PetscInt lc[])
{
  PetscInt i,totalf,remaining,startc,startf;

  PetscFunctionBegin;
  PetscCheck(ratio >= 1,PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %" PetscInt_FMT " must be at least 1",ratio);
  if (ratio == 1) {
    /* no coarsening: coarse ownership is identical to fine ownership */
    PetscCall(PetscArraycpy(lc,lf,m));
    PetscFunctionReturn(0);
  }
  for (i=0,totalf=0; i<m; i++) totalf += lf[i];
  /* total number of coarse points; the non-periodic case keeps one extra boundary point */
  remaining = (!periodic) + (totalf - (!periodic)) / ratio;
  for (i=0,startc=0,startf=0; i<m; i++) {
    /* greedy target: an even share of what is left, rounded up */
    PetscInt want = remaining/(m-i) + !!(remaining%(m-i));
    if (i == m-1) lc[i] = want;  /* last rank absorbs whatever remains */
    else {
      const PetscInt nextf = startf+lf[i];
      /* Slide first coarse node of next subdomain to the left until the coarse node to the left of the first fine
       * node is within one stencil width. */
      while (nextf/ratio < startc+want-stencil_width) want--;
      /* Slide the last coarse node of the current subdomain to the right until the coarse node to the right of the last
       * fine node is within one stencil width. */
      while ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width) want++;
      /* the two slides above can conflict; verify all constraints still hold */
      if (want < 0 || want > remaining
          || (nextf/ratio < startc+want-stencil_width) || ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible coarsened ownership range");
    }
    lc[i] = want;
    startc += lc[i];
    startf += lf[i];
    remaining -= lc[i];
  }
  PetscFunctionReturn(0);
}

/*
  Builds the refined DMDA: global sizes are refine*M for periodic boundaries or Q0
  interpolation, and 1 + refine*(M-1) otherwise (vertex-centered refinement); per-rank
  ownership ranges are derived with DMDARefineOwnershipRanges() so the same process
  layout (m,n,p) is kept.  Coordinates (if present) are interpolated onto the fine grid.
*/
PetscErrorCode DMRefine_DA(DM da,MPI_Comm comm,DM *daref)
{
  PetscInt M,N,P,i,dim;
  DM       da2;
  DM_DA    *dd = (DM_DA*)da->data,*dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
  PetscValidPointer(daref,3);

  PetscCall(DMGetDimension(da, &dim));
  /* periodic/Q0 grids refine every cell; otherwise endpoints are shared so M -> 1 + refine_x*(M-1) */
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->refine_x*dd->M;
  } else {
    M = 1 + dd->refine_x*(dd->M - 1);
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->refine_y*dd->N;
    } else {
      N = 1;  /* unused dimension stays at a single point */
    }
  } else {
    N = 1 + dd->refine_y*(dd->N - 1);
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->refine_z*dd->P;
    } else {
      P = 1;
    }
  } else {
    P = 1 + dd->refine_z*(dd->P - 1);
  }
  /* create the fine DMDA with the same layout parameters as the coarse one */
  PetscCall(DMDACreate(PetscObjectComm((PetscObject)da),&da2));
  PetscCall(DMSetOptionsPrefix(da2,((PetscObject)da)->prefix));
  PetscCall(DMSetDimension(da2,dim));
  PetscCall(DMDASetSizes(da2,M,N,P));
  PetscCall(DMDASetNumProcs(da2,dd->m,dd->n,dd->p));
  PetscCall(DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz));
  PetscCall(DMDASetDof(da2,dd->w));
  PetscCall(DMDASetStencilType(da2,dd->stencil_type));
  PetscCall(DMDASetStencilWidth(da2,dd->s));
  /* compute compatible per-rank ownership ranges in each active dimension */
  if (dim == 3) {
    PetscInt *lx,*ly,*lz;
    PetscCall(PetscMalloc3(dd->m,&lx,dd->n,&ly,dd->p,&lz));
    PetscCall(DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx));
    PetscCall(DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly));
    PetscCall(DMDARefineOwnershipRanges(da,(PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_z,dd->p,dd->lz,lz));
    PetscCall(DMDASetOwnershipRanges(da2,lx,ly,lz));
    PetscCall(PetscFree3(lx,ly,lz));
  } else if (dim == 2) {
    PetscInt *lx,*ly;
    PetscCall(PetscMalloc2(dd->m,&lx,dd->n,&ly));
    PetscCall(DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx));
    PetscCall(DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly));
    PetscCall(DMDASetOwnershipRanges(da2,lx,ly,NULL));
    PetscCall(PetscFree2(lx,ly));
  } else if (dim == 1) {
    PetscInt *lx;
    PetscCall(PetscMalloc1(dd->m,&lx));
    PetscCall(DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx));
    PetscCall(DMDASetOwnershipRanges(da2,lx,NULL,NULL));
    PetscCall(PetscFree(lx));
  }
  dd2 = (DM_DA*)da2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones */
  da2->ops->creatematrix = da->ops->creatematrix;
  /* da2->ops->createinterpolation = da->ops->createinterpolation; this causes problem with SNESVI */
  da2->ops->getcoloring = da->ops->getcoloring;
  dd2->interptype = dd->interptype;

  /* copy fill information if given; dfill/ofill are CSR-like tables of length dfill[w]+w+1 */
  if (dd->dfill) {
    PetscCall(PetscMalloc1(dd->dfill[dd->w]+dd->w+1,&dd2->dfill));
    PetscCall(PetscArraycpy(dd2->dfill,dd->dfill,dd->dfill[dd->w]+dd->w+1));
  }
  if (dd->ofill) {
    PetscCall(PetscMalloc1(dd->ofill[dd->w]+dd->w+1,&dd2->ofill));
    PetscCall(PetscArraycpy(dd2->ofill,dd->ofill,dd->ofill[dd->w]+dd->w+1));
  }
  /* copy the refine information */
  dd2->coarsen_x = dd2->refine_x = dd->refine_x;
  dd2->coarsen_y = dd2->refine_y = dd->refine_y;
  dd2->coarsen_z = dd2->refine_z = dd->refine_z;

  /* when a per-level refinement hierarchy was given, pick this level's factors:
     (levelup - leveldown + 1) indexes the factor used to refine the NEW level,
     (levelup - leveldown) the factor used to get here (its coarsen factor) */
  if (dd->refine_z_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_z_hier_n) {
      dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_z_hier_n) {
      dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown];
    }
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_z_hier_n,&dd2->refine_z_hier));
    PetscCall(PetscArraycpy(dd2->refine_z_hier,dd->refine_z_hier,dd2->refine_z_hier_n));
  }
  if (dd->refine_y_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_y_hier_n) {
      dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_y_hier_n) {
      dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown];
    }
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_y_hier_n,&dd2->refine_y_hier));
    PetscCall(PetscArraycpy(dd2->refine_y_hier,dd->refine_y_hier,dd2->refine_y_hier_n));
  }
  if (dd->refine_x_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_x_hier_n) {
      dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_x_hier_n) {
      dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown];
    }
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_x_hier_n,&dd2->refine_x_hier));
    PetscCall(PetscArraycpy(dd2->refine_x_hier,dd->refine_x_hier,dd2->refine_x_hier_n));
  }

  /* copy vector type information */
  PetscCall(DMSetVecType(da2,da->vectype));

  /* NOTE(review): plain assignment of lf/lj — presumably scalar fields of DM_DA, not
     owned arrays; confirm against the DM_DA declaration before changing */
  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  da2->leveldown = da->leveldown;
  da2->levelup   = da->levelup + 1;  /* the clone is one refinement level up */

  PetscCall(DMSetUp(da2));

  /* interpolate coordinates if they are set on the coarse grid */
  if (da->coordinates) {
    DM  cdaf,cdac;
    Vec coordsc,coordsf;
    Mat II;

    PetscCall(DMGetCoordinateDM(da,&cdac));
    PetscCall(DMGetCoordinates(da,&coordsc));
    PetscCall(DMGetCoordinateDM(da2,&cdaf));
    /* force creation of the coordinate vector */
    PetscCall(DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0));
    PetscCall(DMGetCoordinates(da2,&coordsf));
    PetscCall(DMCreateInterpolation(cdac,cdaf,&II,NULL));
    PetscCall(MatInterpolate(II,coordsc,coordsf));
    PetscCall(MatDestroy(&II));
  }

  /* carry field names over to the refined DMDA */
  for (i=0; i<da->bs; i++) {
    const char *fieldname;
    PetscCall(DMDAGetFieldName(da,i,&fieldname));
    PetscCall(DMDASetFieldName(da2,i,fieldname));
  }

  *daref = da2;
  PetscFunctionReturn(0);
}

/*
  Builds the coarsened DMDA: mirror image of DMRefine_DA() — global sizes are M/coarsen_x
  for periodic/Q0 and 1 + (M-1)/coarsen_x otherwise, ownership ranges come from
  DMDACoarsenOwnershipRanges(), and coordinates (if set on the fine grid) are transferred
  by injection rather than interpolation.
*/
PetscErrorCode DMCoarsen_DA(DM dmf, MPI_Comm comm,DM *dmc)
{
  PetscInt M,N,P,i,dim;
  DM       dmc2;
  DM_DA    *dd = (DM_DA*)dmf->data,*dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(dmf,DM_CLASSID,1,DMDA);
  PetscValidPointer(dmc,3);

  PetscCall(DMGetDimension(dmf, &dim));
  /* inverse of the refinement size formulas in DMRefine_DA() */
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->M / dd->coarsen_x;
  } else {
    M = 1 + (dd->M - 1) / dd->coarsen_x;
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->N / dd->coarsen_y;
    } else {
      N = 1;
    }
  } else {
    N = 1 + (dd->N - 1) / dd->coarsen_y;
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->P / dd->coarsen_z;
    } else {
      P = 1;
    }
  } else {
    P = 1 + (dd->P - 1) / dd->coarsen_z;
  }
  PetscCall(DMDACreate(PetscObjectComm((PetscObject)dmf),&dmc2));
  PetscCall(DMSetOptionsPrefix(dmc2,((PetscObject)dmf)->prefix));
  PetscCall(DMSetDimension(dmc2,dim));
  PetscCall(DMDASetSizes(dmc2,M,N,P));
  PetscCall(DMDASetNumProcs(dmc2,dd->m,dd->n,dd->p));
  PetscCall(DMDASetBoundaryType(dmc2,dd->bx,dd->by,dd->bz));
  PetscCall(DMDASetDof(dmc2,dd->w));
  PetscCall(DMDASetStencilType(dmc2,dd->stencil_type));
  PetscCall(DMDASetStencilWidth(dmc2,dd->s));
  /* compute compatible coarse ownership ranges in each active dimension */
  if (dim == 3) {
    PetscInt *lx,*ly,*lz;
    PetscCall(PetscMalloc3(dd->m,&lx,dd->n,&ly,dd->p,&lz));
    PetscCall(DMDACoarsenOwnershipRanges(dmf,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx));
    PetscCall(DMDACoarsenOwnershipRanges(dmf,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly));
    PetscCall(DMDACoarsenOwnershipRanges(dmf,(PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_z,dd->p,dd->lz,lz));
    PetscCall(DMDASetOwnershipRanges(dmc2,lx,ly,lz));
    PetscCall(PetscFree3(lx,ly,lz));
  } else if (dim == 2) {
    PetscInt *lx,*ly;
    PetscCall(PetscMalloc2(dd->m,&lx,dd->n,&ly));
    PetscCall(DMDACoarsenOwnershipRanges(dmf,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx));
    PetscCall(DMDACoarsenOwnershipRanges(dmf,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly));
    PetscCall(DMDASetOwnershipRanges(dmc2,lx,ly,NULL));
    PetscCall(PetscFree2(lx,ly));
  } else if (dim == 1) {
    PetscInt *lx;
    PetscCall(PetscMalloc1(dd->m,&lx));
    PetscCall(DMDACoarsenOwnershipRanges(dmf,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx));
    PetscCall(DMDASetOwnershipRanges(dmc2,lx,NULL,NULL));
    PetscCall(PetscFree(lx));
  }
  dd2 = (DM_DA*)dmc2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones; why are only some inherited and not all? */
  /* dmc2->ops->createinterpolation = dmf->ops->createinterpolation; copying this one causes trouble for DMSetVI */
  dmc2->ops->creatematrix = dmf->ops->creatematrix;
  dmc2->ops->getcoloring  = dmf->ops->getcoloring;
  dd2->interptype = dd->interptype;

  /* copy fill information if given */
  if (dd->dfill) {
    PetscCall(PetscMalloc1(dd->dfill[dd->w]+dd->w+1,&dd2->dfill));
    PetscCall(PetscArraycpy(dd2->dfill,dd->dfill,dd->dfill[dd->w]+dd->w+1));
  }
  if (dd->ofill) {
    PetscCall(PetscMalloc1(dd->ofill[dd->w]+dd->w+1,&dd2->ofill));
    PetscCall(PetscArraycpy(dd2->ofill,dd->ofill,dd->ofill[dd->w]+dd->w+1));
  }
  /* copy the refine information */
  dd2->coarsen_x = dd2->refine_x = dd->coarsen_x;
  dd2->coarsen_y = dd2->refine_y = dd->coarsen_y;
  dd2->coarsen_z = dd2->refine_z = dd->coarsen_z;

  /* when a per-level refinement hierarchy was given, pick this (coarser) level's factors:
     indices are shifted by -1/-2 relative to DMRefine_DA() since we move DOWN a level */
  if (dd->refine_z_hier) {
    if (dmf->levelup - dmf->leveldown -1 > -1 && dmf->levelup - dmf->leveldown - 1< dd->refine_z_hier_n) {
      dd2->refine_z = dd->refine_z_hier[dmf->levelup - dmf->leveldown - 1];
    }
    if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_z_hier_n) {
      dd2->coarsen_z = dd->refine_z_hier[dmf->levelup - dmf->leveldown - 2];
    }
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_z_hier_n,&dd2->refine_z_hier));
    PetscCall(PetscArraycpy(dd2->refine_z_hier,dd->refine_z_hier,dd2->refine_z_hier_n));
  }
  if (dd->refine_y_hier) {
    if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1< dd->refine_y_hier_n) {
      dd2->refine_y = dd->refine_y_hier[dmf->levelup - dmf->leveldown - 1];
    }
    if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_y_hier_n) {
      dd2->coarsen_y = dd->refine_y_hier[dmf->levelup - dmf->leveldown - 2];
    }
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_y_hier_n,&dd2->refine_y_hier));
    PetscCall(PetscArraycpy(dd2->refine_y_hier,dd->refine_y_hier,dd2->refine_y_hier_n));
  }
  if (dd->refine_x_hier) {
    if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_x_hier_n) {
      dd2->refine_x = dd->refine_x_hier[dmf->levelup - dmf->leveldown - 1];
    }
    if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_x_hier_n) {
      dd2->coarsen_x = dd->refine_x_hier[dmf->levelup - dmf->leveldown - 2];
    }
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_x_hier_n,&dd2->refine_x_hier));
    PetscCall(PetscArraycpy(dd2->refine_x_hier,dd->refine_x_hier,dd2->refine_x_hier_n));
  }

  /* copy vector type information */
  PetscCall(DMSetVecType(dmc2,dmf->vectype));

  /* NOTE(review): plain assignment of lf/lj — presumably scalar fields of DM_DA, not
     owned arrays; confirm against the DM_DA declaration before changing */
  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  dmc2->leveldown = dmf->leveldown + 1;  /* the clone is one coarsening level down */
  dmc2->levelup   = dmf->levelup;

  PetscCall(DMSetUp(dmc2));

  /* inject coordinates if they are set on the fine grid */
  if (dmf->coordinates) {
    DM         cdaf,cdac;
    Vec        coordsc,coordsf;
    Mat        inject;
    VecScatter vscat;

    PetscCall(DMGetCoordinateDM(dmf,&cdaf));
    PetscCall(DMGetCoordinates(dmf,&coordsf));
    PetscCall(DMGetCoordinateDM(dmc2,&cdac));
    /* force creation of the coordinate vector */
    PetscCall(DMDASetUniformCoordinates(dmc2,0.0,1.0,0.0,1.0,0.0,1.0));
    PetscCall(DMGetCoordinates(dmc2,&coordsc));

    PetscCall(DMCreateInjection(cdac,cdaf,&inject));
    PetscCall(MatScatterGetVecScatter(inject,&vscat));
    PetscCall(VecScatterBegin(vscat,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD));
    PetscCall(VecScatterEnd(vscat,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD));
    PetscCall(MatDestroy(&inject));
  }

  /* carry field names over to the coarsened DMDA */
  for (i=0; i<dmf->bs; i++) {
    const char *fieldname;
    PetscCall(DMDAGetFieldName(dmf,i,&fieldname));
    PetscCall(DMDASetFieldName(dmc2,i,fieldname));
  }

  *dmc = dmc2;
  PetscFunctionReturn(0);
}

/*
  Produces nlevels successively refined DMDAs in daf[]; per-level refinement factors may
  be overridden with the -da_refine_hierarchy_{x,y,z} options (defaults come from da).
*/
PetscErrorCode DMRefineHierarchy_DA(DM da,PetscInt nlevels,DM daf[])
{
  PetscInt i,n,*refx,*refy,*refz;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscCheck(nlevels >= 0,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
  if (nlevels == 0) PetscFunctionReturn(0);
  PetscValidPointer(daf,3);

  /* Get refinement factors, defaults taken from the coarse DMDA */
  PetscCall(PetscMalloc3(nlevels,&refx,nlevels,&refy,nlevels,&refz));
  for (i=0; i<nlevels; i++) {
    PetscCall(DMDAGetRefinementFactor(da,&refx[i],&refy[i],&refz[i]));
  }
  /* n is reset before each query because PetscOptionsGetIntArray() overwrites it with the count read */
  n = nlevels;
  PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_x",refx,&n,NULL));
  n = nlevels;
  PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_y",refy,&n,NULL));
  n = nlevels;
  PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_z",refz,&n,NULL));

  /* refine level by level, installing that level's factor before each DMRefine() */
  PetscCall(DMDASetRefinementFactor(da,refx[0],refy[0],refz[0]));
  PetscCall(DMRefine(da,PetscObjectComm((PetscObject)da),&daf[0]));
  for (i=1; i<nlevels; i++) {
    PetscCall(DMDASetRefinementFactor(daf[i-1],refx[i],refy[i],refz[i]));
    PetscCall(DMRefine(daf[i-1],PetscObjectComm((PetscObject)da),&daf[i]));
  }
  PetscCall(PetscFree3(refx,refy,refz));
  PetscFunctionReturn(0);
}

/*
  Produces nlevels successively coarsened DMDAs in dac[] by repeated DMCoarsen().
*/
PetscErrorCode DMCoarsenHierarchy_DA(DM da,PetscInt nlevels,DM dac[])
{
  PetscInt i;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscCheck(nlevels >= 0,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
  if (nlevels == 0) PetscFunctionReturn(0);
  PetscValidPointer(dac,3);
  PetscCall(DMCoarsen(da,PetscObjectComm((PetscObject)da),&dac[0]));
  for (i=1; i<nlevels; i++) {
    PetscCall(DMCoarsen(dac[i-1],PetscObjectComm((PetscObject)da),&dac[i]));
  }
  PetscFunctionReturn(0);
}

/*
  1d worker for DMDASetGLLCoordinates(): maps each local spectral element onto [-1,1]
  and places the n GLL nodes (given in `nodes`, on the reference element [-1,1]) inside it.
  NOTE(review): assumes n > 1 and that the global size minus 1 is divisible by n-1
  (otherwise q and the xs/xn divisions truncate) — confirm callers guarantee this.
*/
PetscErrorCode DMDASetGLLCoordinates_1d(DM dm,PetscInt n,PetscReal *nodes)
{
  PetscInt    i,j,xs,xn,q;
  PetscScalar *xx;
  PetscReal   h;
  Vec         x;
  DM_DA       *da = (DM_DA*)dm->data;

  PetscFunctionBegin;
  if (da->bx != DM_BOUNDARY_PERIODIC) {
    PetscCall(DMDAGetInfo(dm,NULL,&q,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL));
    q = (q-1)/(n-1); /* number of spectral elements */
    h = 2.0/q;       /* width of one element on the global domain [-1,1] */
    PetscCall(DMDAGetCorners(dm,&xs,NULL,NULL,&xn,NULL,NULL));
    /* convert grid-point start/count into element start/count */
    xs = xs/(n-1);
    xn = xn/(n-1);
    PetscCall(DMDASetUniformCoordinates(dm,-1.,1.,0.,0.,0.,0.));
    PetscCall(DMGetCoordinates(dm,&x));
    PetscCall(DMDAVecGetArray(dm,x,&xx));

    /* loop over local spectral elements */
    for (j=xs; j<xs+xn; j++) {
      /*
        Except for the first process, each process starts on the second GLL point of the first element on that process
        (the first point is shared with, and owned by, the previous element)
      */
      for (i= (j == xs && xs > 0)? 1 : 0; i<n; i++) {
        xx[j*(n-1) + i] = -1.0 + h*j + h*(nodes[i]+1.0)/2.;
      }
    }
    PetscCall(DMDAVecRestoreArray(dm,x,&xx));
  } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented for periodic");
  PetscFunctionReturn(0);
}

/*@

  DMDASetGLLCoordinates - Sets the global coordinates from -1 to 1 to the GLL points of as many GLL elements that fit the number of grid points

  Collective on da

  Input Parameters:
+ da - the DMDA object
. n - the number of GLL nodes
- nodes - the GLL nodes

  Notes:
  the parallel decomposition of grid points must correspond to the degree of the GLL. That is, the number of grid points
  on each process must be divisible by the number of GLL elements needed per process. This depends on whether the DM is
  periodic or not.

  Level: advanced

.seealso: `DMDACreate()`, `PetscDTGaussLobattoLegendreQuadrature()`, `DMGetCoordinates()`
@*/
PetscErrorCode DMDASetGLLCoordinates(DM da,PetscInt n,PetscReal *nodes)
{
  PetscFunctionBegin;
  /* only the 1d case is implemented; dispatch to the worker */
  if (da->dim == 1) {
    PetscCall(DMDASetGLLCoordinates_1d(da,n,nodes));
  } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented for 2 or 3d");
  PetscFunctionReturn(0);
}

/*
  Determines whether dm2 is "compatible" with da1: same dimension, stencil width,
  global sizes, process grid, boundary types, and identical per-rank ownership ranges
  in every active dimension. Declines (sets *set = PETSC_FALSE) for non-DMDA dm2.
  Note: the result is each rank's local verdict; callers combine across ranks as needed.
*/
PETSC_INTERN PetscErrorCode DMGetCompatibility_DA(DM da1,DM dm2,PetscBool *compatible,PetscBool *set)
{
  DM_DA     *dd1 = (DM_DA*)da1->data,*dd2;
  DM        da2;
  DMType    dmtype2;
  PetscBool isda,compatibleLocal;
  PetscInt  i;

  PetscFunctionBegin;
  PetscCheck(da1->setupcalled,PetscObjectComm((PetscObject)da1),PETSC_ERR_ARG_WRONGSTATE,"DMSetUp() must be called on first DM before DMGetCompatibility()");
  PetscCall(DMGetType(dm2,&dmtype2));
  PetscCall(PetscStrcmp(dmtype2,DMDA,&isda));
  if (isda) {
    da2 = dm2;
    dd2 = (DM_DA*)da2->data;
    PetscCheck(da2->setupcalled,PetscObjectComm((PetscObject)da2),PETSC_ERR_ARG_WRONGSTATE,"DMSetUp() must be called on second DM before DMGetCompatibility()");
    compatibleLocal = (PetscBool)(da1->dim == da2->dim);
    if (compatibleLocal) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->s == dd2->s)); /* Stencil width */
    /*                                                 Global size          ranks               Boundary type */
    if (compatibleLocal)                 compatibleLocal = (PetscBool)(compatibleLocal && (dd1->M == dd2->M) && (dd1->m == dd2->m) && (dd1->bx == dd2->bx));
    if (compatibleLocal && da1->dim > 1) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->N == dd2->N) && (dd1->n == dd2->n) && (dd1->by == dd2->by));
    if (compatibleLocal && da1->dim > 2) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->P == dd2->P) && (dd1->p == dd2->p) && (dd1->bz == dd2->bz));
    if (compatibleLocal) {
      for (i=0; i<dd1->m; ++i) {
        compatibleLocal = (PetscBool)(compatibleLocal && (dd1->lx[i] == dd2->lx[i])); /* Local size */
      }
    }
    if (compatibleLocal && da1->dim > 1) {
      for (i=0; i<dd1->n; ++i) {
        compatibleLocal = (PetscBool)(compatibleLocal && (dd1->ly[i] == dd2->ly[i]));
      }
    }
    if (compatibleLocal && da1->dim > 2) {
      for (i=0; i<dd1->p; ++i) {
        compatibleLocal = (PetscBool)(compatibleLocal && (dd1->lz[i] == dd2->lz[i]));
      }
    }
    *compatible = compatibleLocal;
    *set = PETSC_TRUE;
  } else {
    /* Decline to determine compatibility with other DM types */
    *set = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}