1 #include <petsc/private/dmdaimpl.h> /*I "petscdmda.h" I*/ 2 3 /*@ 4 DMDASetSizes - Sets the number of grid points in the three dimensional directions 5 6 Logically Collective on da 7 8 Input Parameters: 9 + da - the DMDA 10 . M - the global X size 11 . N - the global Y size 12 - P - the global Z size 13 14 Level: intermediate 15 16 Developer Notes: 17 Since the dimension may not yet have been set the code cannot error check for non-positive Y and Z number of grid points 18 19 .seealso: `PetscSplitOwnership()` 20 @*/ 21 PetscErrorCode DMDASetSizes(DM da, PetscInt M, PetscInt N, PetscInt P) { 22 DM_DA *dd = (DM_DA *)da->data; 23 24 PetscFunctionBegin; 25 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 26 PetscValidLogicalCollectiveInt(da, M, 2); 27 PetscValidLogicalCollectiveInt(da, N, 3); 28 PetscValidLogicalCollectiveInt(da, P, 4); 29 PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()"); 30 PetscCheck(M >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in X direction must be positive"); 31 PetscCheck(N >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in Y direction must be positive"); 32 PetscCheck(P >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in Z direction must be positive"); 33 34 dd->M = M; 35 dd->N = N; 36 dd->P = P; 37 PetscFunctionReturn(0); 38 } 39 40 /*@ 41 DMDASetNumProcs - Sets the number of processes in each dimension 42 43 Logically Collective on da 44 45 Input Parameters: 46 + da - the DMDA 47 . m - the number of X procs (or PETSC_DECIDE) 48 . 
n - the number of Y procs (or PETSC_DECIDE) 49 - p - the number of Z procs (or PETSC_DECIDE) 50 51 Level: intermediate 52 53 .seealso: `DMDASetSizes()`, `PetscSplitOwnership()` 54 @*/ 55 PetscErrorCode DMDASetNumProcs(DM da, PetscInt m, PetscInt n, PetscInt p) { 56 DM_DA *dd = (DM_DA *)da->data; 57 58 PetscFunctionBegin; 59 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 60 PetscValidLogicalCollectiveInt(da, m, 2); 61 PetscValidLogicalCollectiveInt(da, n, 3); 62 PetscValidLogicalCollectiveInt(da, p, 4); 63 PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()"); 64 dd->m = m; 65 dd->n = n; 66 dd->p = p; 67 if (da->dim == 2) { 68 PetscMPIInt size; 69 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)da), &size)); 70 if ((dd->m > 0) && (dd->n < 0)) { 71 dd->n = size / dd->m; 72 PetscCheck(dd->n * dd->m == size, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "%" PetscInt_FMT " processes in X direction not divisible into comm size %d", m, size); 73 } 74 if ((dd->n > 0) && (dd->m < 0)) { 75 dd->m = size / dd->n; 76 PetscCheck(dd->n * dd->m == size, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "%" PetscInt_FMT " processes in Y direction not divisible into comm size %d", n, size); 77 } 78 } 79 PetscFunctionReturn(0); 80 } 81 82 /*@ 83 DMDASetBoundaryType - Sets the type of ghost nodes on domain boundaries. 
84 85 Not collective 86 87 Input Parameters: 88 + da - The DMDA 89 - bx,by,bz - One of DM_BOUNDARY_NONE, DM_BOUNDARY_GHOSTED, DM_BOUNDARY_PERIODIC 90 91 Level: intermediate 92 93 .seealso: `DMDACreate()`, `DMDestroy()`, `DMDA`, `DMBoundaryType` 94 @*/ 95 PetscErrorCode DMDASetBoundaryType(DM da, DMBoundaryType bx, DMBoundaryType by, DMBoundaryType bz) { 96 DM_DA *dd = (DM_DA *)da->data; 97 98 PetscFunctionBegin; 99 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 100 PetscValidLogicalCollectiveEnum(da, bx, 2); 101 PetscValidLogicalCollectiveEnum(da, by, 3); 102 PetscValidLogicalCollectiveEnum(da, bz, 4); 103 PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()"); 104 dd->bx = bx; 105 dd->by = by; 106 dd->bz = bz; 107 PetscFunctionReturn(0); 108 } 109 110 /*@ 111 DMDASetDof - Sets the number of degrees of freedom per vertex 112 113 Not collective 114 115 Input Parameters: 116 + da - The DMDA 117 - dof - Number of degrees of freedom 118 119 Level: intermediate 120 121 .seealso: `DMDAGetDof()`, `DMDACreate()`, `DMDestroy()`, `DMDA` 122 @*/ 123 PetscErrorCode DMDASetDof(DM da, PetscInt dof) { 124 DM_DA *dd = (DM_DA *)da->data; 125 126 PetscFunctionBegin; 127 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 128 PetscValidLogicalCollectiveInt(da, dof, 2); 129 PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()"); 130 dd->w = dof; 131 da->bs = dof; 132 PetscFunctionReturn(0); 133 } 134 135 /*@ 136 DMDAGetDof - Gets the number of degrees of freedom per vertex 137 138 Not collective 139 140 Input Parameter: 141 . da - The DMDA 142 143 Output Parameter: 144 . 
dof - Number of degrees of freedom 145 146 Level: intermediate 147 148 .seealso: `DMDASetDof()`, `DMDACreate()`, `DMDestroy()`, `DMDA` 149 @*/ 150 PetscErrorCode DMDAGetDof(DM da, PetscInt *dof) { 151 DM_DA *dd = (DM_DA *)da->data; 152 153 PetscFunctionBegin; 154 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 155 PetscValidIntPointer(dof, 2); 156 *dof = dd->w; 157 PetscFunctionReturn(0); 158 } 159 160 /*@ 161 DMDAGetOverlap - Gets the size of the per-processor overlap. 162 163 Not collective 164 165 Input Parameter: 166 . da - The DMDA 167 168 Output Parameters: 169 + x - Overlap in the x direction 170 . y - Overlap in the y direction 171 - z - Overlap in the z direction 172 173 Level: intermediate 174 175 .seealso: `DMCreateDomainDecomposition()`, `DMDASetOverlap()`, `DMDA` 176 @*/ 177 PetscErrorCode DMDAGetOverlap(DM da, PetscInt *x, PetscInt *y, PetscInt *z) { 178 DM_DA *dd = (DM_DA *)da->data; 179 180 PetscFunctionBegin; 181 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 182 if (x) *x = dd->xol; 183 if (y) *y = dd->yol; 184 if (z) *z = dd->zol; 185 PetscFunctionReturn(0); 186 } 187 188 /*@ 189 DMDASetOverlap - Sets the size of the per-processor overlap. 190 191 Not collective 192 193 Input Parameters: 194 + da - The DMDA 195 . x - Overlap in the x direction 196 . 
y - Overlap in the y direction 197 - z - Overlap in the z direction 198 199 Level: intermediate 200 201 .seealso: `DMCreateDomainDecomposition()`, `DMDAGetOverlap()`, `DMDA` 202 @*/ 203 PetscErrorCode DMDASetOverlap(DM da, PetscInt x, PetscInt y, PetscInt z) { 204 DM_DA *dd = (DM_DA *)da->data; 205 206 PetscFunctionBegin; 207 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 208 PetscValidLogicalCollectiveInt(da, x, 2); 209 PetscValidLogicalCollectiveInt(da, y, 3); 210 PetscValidLogicalCollectiveInt(da, z, 4); 211 dd->xol = x; 212 dd->yol = y; 213 dd->zol = z; 214 PetscFunctionReturn(0); 215 } 216 217 /*@ 218 DMDAGetNumLocalSubDomains - Gets the number of local subdomains created upon decomposition. 219 220 Not collective 221 222 Input Parameters: 223 . da - The DMDA 224 225 Output Parameters: 226 . Nsub - Number of local subdomains created upon decomposition 227 228 Level: intermediate 229 230 .seealso: `DMCreateDomainDecomposition()`, `DMDASetNumLocalSubDomains()`, `DMDA` 231 @*/ 232 PetscErrorCode DMDAGetNumLocalSubDomains(DM da, PetscInt *Nsub) { 233 DM_DA *dd = (DM_DA *)da->data; 234 235 PetscFunctionBegin; 236 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 237 if (Nsub) *Nsub = dd->Nsub; 238 PetscFunctionReturn(0); 239 } 240 241 /*@ 242 DMDASetNumLocalSubDomains - Sets the number of local subdomains created upon decomposition. 243 244 Not collective 245 246 Input Parameters: 247 + da - The DMDA 248 - Nsub - The number of local subdomains requested 249 250 Level: intermediate 251 252 .seealso: `DMCreateDomainDecomposition()`, `DMDAGetNumLocalSubDomains()`, `DMDA` 253 @*/ 254 PetscErrorCode DMDASetNumLocalSubDomains(DM da, PetscInt Nsub) { 255 DM_DA *dd = (DM_DA *)da->data; 256 257 PetscFunctionBegin; 258 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 259 PetscValidLogicalCollectiveInt(da, Nsub, 2); 260 dd->Nsub = Nsub; 261 PetscFunctionReturn(0); 262 } 263 264 /*@ 265 DMDASetOffset - Sets the index offset of the DA. 
266 267 Collective on DA 268 269 Input Parameters: 270 + da - The DMDA 271 . xo - The offset in the x direction 272 . yo - The offset in the y direction 273 - zo - The offset in the z direction 274 275 Level: intermediate 276 277 Notes: 278 This is used primarily to overlap a computation on a local DA with that on a global DA without 279 changing boundary conditions or subdomain features that depend upon the global offsets. 280 281 .seealso: `DMDAGetOffset()`, `DMDAVecGetArray()` 282 @*/ 283 PetscErrorCode DMDASetOffset(DM da, PetscInt xo, PetscInt yo, PetscInt zo, PetscInt Mo, PetscInt No, PetscInt Po) { 284 DM_DA *dd = (DM_DA *)da->data; 285 286 PetscFunctionBegin; 287 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 288 PetscValidLogicalCollectiveInt(da, xo, 2); 289 PetscValidLogicalCollectiveInt(da, yo, 3); 290 PetscValidLogicalCollectiveInt(da, zo, 4); 291 PetscValidLogicalCollectiveInt(da, Mo, 5); 292 PetscValidLogicalCollectiveInt(da, No, 6); 293 PetscValidLogicalCollectiveInt(da, Po, 7); 294 dd->xo = xo; 295 dd->yo = yo; 296 dd->zo = zo; 297 dd->Mo = Mo; 298 dd->No = No; 299 dd->Po = Po; 300 301 if (da->coordinates[0].dm) PetscCall(DMDASetOffset(da->coordinates[0].dm, xo, yo, zo, Mo, No, Po)); 302 PetscFunctionReturn(0); 303 } 304 305 /*@ 306 DMDAGetOffset - Gets the index offset of the DA. 307 308 Not collective 309 310 Input Parameter: 311 . da - The DMDA 312 313 Output Parameters: 314 + xo - The offset in the x direction 315 . yo - The offset in the y direction 316 . zo - The offset in the z direction 317 . Mo - The global size in the x direction 318 . 
No - The global size in the y direction 319 - Po - The global size in the z direction 320 321 Level: intermediate 322 323 .seealso: `DMDASetOffset()`, `DMDAVecGetArray()` 324 @*/ 325 PetscErrorCode DMDAGetOffset(DM da, PetscInt *xo, PetscInt *yo, PetscInt *zo, PetscInt *Mo, PetscInt *No, PetscInt *Po) { 326 DM_DA *dd = (DM_DA *)da->data; 327 328 PetscFunctionBegin; 329 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 330 if (xo) *xo = dd->xo; 331 if (yo) *yo = dd->yo; 332 if (zo) *zo = dd->zo; 333 if (Mo) *Mo = dd->Mo; 334 if (No) *No = dd->No; 335 if (Po) *Po = dd->Po; 336 PetscFunctionReturn(0); 337 } 338 339 /*@ 340 DMDAGetNonOverlappingRegion - Gets the indices of the nonoverlapping region of a subdomain DM. 341 342 Not collective 343 344 Input Parameter: 345 . da - The DMDA 346 347 Output Parameters: 348 + xs - The start of the region in x 349 . ys - The start of the region in y 350 . zs - The start of the region in z 351 . xs - The size of the region in x 352 . ys - The size of the region in y 353 - zs - The size of the region in z 354 355 Level: intermediate 356 357 .seealso: `DMDAGetOffset()`, `DMDAVecGetArray()` 358 @*/ 359 PetscErrorCode DMDAGetNonOverlappingRegion(DM da, PetscInt *xs, PetscInt *ys, PetscInt *zs, PetscInt *xm, PetscInt *ym, PetscInt *zm) { 360 DM_DA *dd = (DM_DA *)da->data; 361 362 PetscFunctionBegin; 363 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 364 if (xs) *xs = dd->nonxs; 365 if (ys) *ys = dd->nonys; 366 if (zs) *zs = dd->nonzs; 367 if (xm) *xm = dd->nonxm; 368 if (ym) *ym = dd->nonym; 369 if (zm) *zm = dd->nonzm; 370 PetscFunctionReturn(0); 371 } 372 373 /*@ 374 DMDASetNonOverlappingRegion - Sets the indices of the nonoverlapping region of a subdomain DM. 375 376 Collective on DA 377 378 Input Parameters: 379 + da - The DMDA 380 . xs - The start of the region in x 381 . ys - The start of the region in y 382 . zs - The start of the region in z 383 . xs - The size of the region in x 384 . 
ys - The size of the region in y 385 - zs - The size of the region in z 386 387 Level: intermediate 388 389 .seealso: `DMDAGetOffset()`, `DMDAVecGetArray()` 390 @*/ 391 PetscErrorCode DMDASetNonOverlappingRegion(DM da, PetscInt xs, PetscInt ys, PetscInt zs, PetscInt xm, PetscInt ym, PetscInt zm) { 392 DM_DA *dd = (DM_DA *)da->data; 393 394 PetscFunctionBegin; 395 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 396 PetscValidLogicalCollectiveInt(da, xs, 2); 397 PetscValidLogicalCollectiveInt(da, ys, 3); 398 PetscValidLogicalCollectiveInt(da, zs, 4); 399 PetscValidLogicalCollectiveInt(da, xm, 5); 400 PetscValidLogicalCollectiveInt(da, ym, 6); 401 PetscValidLogicalCollectiveInt(da, zm, 7); 402 dd->nonxs = xs; 403 dd->nonys = ys; 404 dd->nonzs = zs; 405 dd->nonxm = xm; 406 dd->nonym = ym; 407 dd->nonzm = zm; 408 409 PetscFunctionReturn(0); 410 } 411 412 /*@ 413 DMDASetStencilType - Sets the type of the communication stencil 414 415 Logically Collective on da 416 417 Input Parameters: 418 + da - The DMDA 419 - stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR. 420 421 Level: intermediate 422 423 .seealso: `DMDACreate()`, `DMDestroy()`, `DMDA` 424 @*/ 425 PetscErrorCode DMDASetStencilType(DM da, DMDAStencilType stype) { 426 DM_DA *dd = (DM_DA *)da->data; 427 428 PetscFunctionBegin; 429 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 430 PetscValidLogicalCollectiveEnum(da, stype, 2); 431 PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()"); 432 dd->stencil_type = stype; 433 PetscFunctionReturn(0); 434 } 435 436 /*@ 437 DMDAGetStencilType - Gets the type of the communication stencil 438 439 Not collective 440 441 Input Parameter: 442 . da - The DMDA 443 444 Output Parameter: 445 . stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR. 
446 447 Level: intermediate 448 449 .seealso: `DMDACreate()`, `DMDestroy()`, `DMDA` 450 @*/ 451 PetscErrorCode DMDAGetStencilType(DM da, DMDAStencilType *stype) { 452 DM_DA *dd = (DM_DA *)da->data; 453 454 PetscFunctionBegin; 455 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 456 PetscValidPointer(stype, 2); 457 *stype = dd->stencil_type; 458 PetscFunctionReturn(0); 459 } 460 461 /*@ 462 DMDASetStencilWidth - Sets the width of the communication stencil 463 464 Logically Collective on da 465 466 Input Parameters: 467 + da - The DMDA 468 - width - The stencil width 469 470 Level: intermediate 471 472 .seealso: `DMDACreate()`, `DMDestroy()`, `DMDA` 473 @*/ 474 PetscErrorCode DMDASetStencilWidth(DM da, PetscInt width) { 475 DM_DA *dd = (DM_DA *)da->data; 476 477 PetscFunctionBegin; 478 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 479 PetscValidLogicalCollectiveInt(da, width, 2); 480 PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()"); 481 dd->s = width; 482 PetscFunctionReturn(0); 483 } 484 485 /*@ 486 DMDAGetStencilWidth - Gets the width of the communication stencil 487 488 Not collective 489 490 Input Parameter: 491 . da - The DMDA 492 493 Output Parameter: 494 . 
width - The stencil width 495 496 Level: intermediate 497 498 .seealso: `DMDACreate()`, `DMDestroy()`, `DMDA` 499 @*/ 500 PetscErrorCode DMDAGetStencilWidth(DM da, PetscInt *width) { 501 DM_DA *dd = (DM_DA *)da->data; 502 503 PetscFunctionBegin; 504 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 505 PetscValidIntPointer(width, 2); 506 *width = dd->s; 507 PetscFunctionReturn(0); 508 } 509 510 static PetscErrorCode DMDACheckOwnershipRanges_Private(DM da, PetscInt M, PetscInt m, const PetscInt lx[]) { 511 PetscInt i, sum; 512 513 PetscFunctionBegin; 514 PetscCheck(M >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Global dimension not set"); 515 for (i = sum = 0; i < m; i++) sum += lx[i]; 516 PetscCheck(sum == M, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_INCOMP, "Ownership ranges sum to %" PetscInt_FMT " but global dimension is %" PetscInt_FMT, sum, M); 517 PetscFunctionReturn(0); 518 } 519 520 /*@ 521 DMDASetOwnershipRanges - Sets the number of nodes in each direction on each process 522 523 Logically Collective on da 524 525 Input Parameters: 526 + da - The DMDA 527 . lx - array containing number of nodes in the X direction on each process, or NULL. If non-null, must be of length da->m 528 . ly - array containing number of nodes in the Y direction on each process, or NULL. If non-null, must be of length da->n 529 - lz - array containing number of nodes in the Z direction on each process, or NULL. If non-null, must be of length da->p. 530 531 Level: intermediate 532 533 Note: these numbers are NOT multiplied by the number of dof per node. 
534 535 .seealso: `DMDACreate()`, `DMDestroy()`, `DMDA` 536 @*/ 537 PetscErrorCode DMDASetOwnershipRanges(DM da, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[]) { 538 DM_DA *dd = (DM_DA *)da->data; 539 540 PetscFunctionBegin; 541 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 542 PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()"); 543 if (lx) { 544 PetscCheck(dd->m >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of procs"); 545 PetscCall(DMDACheckOwnershipRanges_Private(da, dd->M, dd->m, lx)); 546 if (!dd->lx) { PetscCall(PetscMalloc1(dd->m, &dd->lx)); } 547 PetscCall(PetscArraycpy(dd->lx, lx, dd->m)); 548 } 549 if (ly) { 550 PetscCheck(dd->n >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of procs"); 551 PetscCall(DMDACheckOwnershipRanges_Private(da, dd->N, dd->n, ly)); 552 if (!dd->ly) { PetscCall(PetscMalloc1(dd->n, &dd->ly)); } 553 PetscCall(PetscArraycpy(dd->ly, ly, dd->n)); 554 } 555 if (lz) { 556 PetscCheck(dd->p >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of procs"); 557 PetscCall(DMDACheckOwnershipRanges_Private(da, dd->P, dd->p, lz)); 558 if (!dd->lz) { PetscCall(PetscMalloc1(dd->p, &dd->lz)); } 559 PetscCall(PetscArraycpy(dd->lz, lz, dd->p)); 560 } 561 PetscFunctionReturn(0); 562 } 563 564 /*@ 565 DMDASetInterpolationType - Sets the type of interpolation that will be 566 returned by DMCreateInterpolation() 567 568 Logically Collective on da 569 570 Input Parameters: 571 + da - initial distributed array 572 - ctype - DMDA_Q1 and DMDA_Q0 are currently the only supported forms 573 574 Level: intermediate 575 576 Notes: 577 you should call this on the coarser of the two DMDAs you pass to DMCreateInterpolation() 578 579 .seealso: 
`DMDACreate1d()`, `DMDACreate2d()`, `DMDACreate3d()`, `DMDestroy()`, `DMDA`, `DMDAInterpolationType` 580 @*/ 581 PetscErrorCode DMDASetInterpolationType(DM da, DMDAInterpolationType ctype) { 582 DM_DA *dd = (DM_DA *)da->data; 583 584 PetscFunctionBegin; 585 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 586 PetscValidLogicalCollectiveEnum(da, ctype, 2); 587 dd->interptype = ctype; 588 PetscFunctionReturn(0); 589 } 590 591 /*@ 592 DMDAGetInterpolationType - Gets the type of interpolation that will be 593 used by DMCreateInterpolation() 594 595 Not Collective 596 597 Input Parameter: 598 . da - distributed array 599 600 Output Parameter: 601 . ctype - interpolation type (DMDA_Q1 and DMDA_Q0 are currently the only supported forms) 602 603 Level: intermediate 604 605 .seealso: `DMDA`, `DMDAInterpolationType`, `DMDASetInterpolationType()`, `DMCreateInterpolation()` 606 @*/ 607 PetscErrorCode DMDAGetInterpolationType(DM da, DMDAInterpolationType *ctype) { 608 DM_DA *dd = (DM_DA *)da->data; 609 610 PetscFunctionBegin; 611 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 612 PetscValidPointer(ctype, 2); 613 *ctype = dd->interptype; 614 PetscFunctionReturn(0); 615 } 616 617 /*@C 618 DMDAGetNeighbors - Gets an array containing the MPI rank of all the current 619 processes neighbors. 620 621 Not Collective 622 623 Input Parameter: 624 . da - the DMDA object 625 626 Output Parameters: 627 . ranks - the neighbors ranks, stored with the x index increasing most rapidly. 628 this process itself is in the list 629 630 Notes: 631 In 2d the array is of length 9, in 3d of length 27 632 Not supported in 1d 633 Do not free the array, it is freed when the DMDA is destroyed. 634 635 Fortran Notes: 636 In fortran you must pass in an array of the appropriate length. 
637 638 Level: intermediate 639 640 @*/ 641 PetscErrorCode DMDAGetNeighbors(DM da, const PetscMPIInt *ranks[]) { 642 DM_DA *dd = (DM_DA *)da->data; 643 644 PetscFunctionBegin; 645 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 646 *ranks = dd->neighbors; 647 PetscFunctionReturn(0); 648 } 649 650 /*@C 651 DMDAGetOwnershipRanges - Gets the ranges of indices in the x, y and z direction that are owned by each process 652 653 Not Collective 654 655 Input Parameter: 656 . da - the DMDA object 657 658 Output Parameters: 659 + lx - ownership along x direction (optional) 660 . ly - ownership along y direction (optional) 661 - lz - ownership along z direction (optional) 662 663 Level: intermediate 664 665 Note: these correspond to the optional final arguments passed to DMDACreate(), DMDACreate2d(), DMDACreate3d() 666 667 In Fortran one must pass in arrays lx, ly, and lz that are long enough to hold the values; the sixth, seventh and 668 eighth arguments from DMDAGetInfo() 669 670 In C you should not free these arrays, nor change the values in them. They will only have valid values while the 671 DMDA they came from still exists (has not been destroyed). 672 673 These numbers are NOT multiplied by the number of dof per node. 674 675 .seealso: `DMDAGetCorners()`, `DMDAGetGhostCorners()`, `DMDACreate()`, `DMDACreate1d()`, `DMDACreate2d()`, `DMDACreate3d()`, `VecGetOwnershipRanges()` 676 @*/ 677 PetscErrorCode DMDAGetOwnershipRanges(DM da, const PetscInt *lx[], const PetscInt *ly[], const PetscInt *lz[]) { 678 DM_DA *dd = (DM_DA *)da->data; 679 680 PetscFunctionBegin; 681 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 682 if (lx) *lx = dd->lx; 683 if (ly) *ly = dd->ly; 684 if (lz) *lz = dd->lz; 685 PetscFunctionReturn(0); 686 } 687 688 /*@ 689 DMDASetRefinementFactor - Set the ratios that the DMDA grid is refined 690 691 Logically Collective on da 692 693 Input Parameters: 694 + da - the DMDA object 695 . 
refine_x - ratio of fine grid to coarse in x direction (2 by default) 696 . refine_y - ratio of fine grid to coarse in y direction (2 by default) 697 - refine_z - ratio of fine grid to coarse in z direction (2 by default) 698 699 Options Database: 700 + -da_refine_x refine_x - refinement ratio in x direction 701 . -da_refine_y rafine_y - refinement ratio in y direction 702 . -da_refine_z refine_z - refinement ratio in z direction 703 - -da_refine <n> - refine the DMDA object n times when it is created. 704 705 Level: intermediate 706 707 Notes: 708 Pass PETSC_IGNORE to leave a value unchanged 709 710 .seealso: `DMRefine()`, `DMDAGetRefinementFactor()` 711 @*/ 712 PetscErrorCode DMDASetRefinementFactor(DM da, PetscInt refine_x, PetscInt refine_y, PetscInt refine_z) { 713 DM_DA *dd = (DM_DA *)da->data; 714 715 PetscFunctionBegin; 716 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 717 PetscValidLogicalCollectiveInt(da, refine_x, 2); 718 PetscValidLogicalCollectiveInt(da, refine_y, 3); 719 PetscValidLogicalCollectiveInt(da, refine_z, 4); 720 721 if (refine_x > 0) dd->refine_x = refine_x; 722 if (refine_y > 0) dd->refine_y = refine_y; 723 if (refine_z > 0) dd->refine_z = refine_z; 724 PetscFunctionReturn(0); 725 } 726 727 /*@C 728 DMDAGetRefinementFactor - Gets the ratios that the DMDA grid is refined 729 730 Not Collective 731 732 Input Parameter: 733 . da - the DMDA object 734 735 Output Parameters: 736 + refine_x - ratio of fine grid to coarse in x direction (2 by default) 737 . 
refine_y - ratio of fine grid to coarse in y direction (2 by default) 738 - refine_z - ratio of fine grid to coarse in z direction (2 by default) 739 740 Level: intermediate 741 742 Notes: 743 Pass NULL for values you do not need 744 745 .seealso: `DMRefine()`, `DMDASetRefinementFactor()` 746 @*/ 747 PetscErrorCode DMDAGetRefinementFactor(DM da, PetscInt *refine_x, PetscInt *refine_y, PetscInt *refine_z) { 748 DM_DA *dd = (DM_DA *)da->data; 749 750 PetscFunctionBegin; 751 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 752 if (refine_x) *refine_x = dd->refine_x; 753 if (refine_y) *refine_y = dd->refine_y; 754 if (refine_z) *refine_z = dd->refine_z; 755 PetscFunctionReturn(0); 756 } 757 758 /*@C 759 DMDASetGetMatrix - Sets the routine used by the DMDA to allocate a matrix. 760 761 Logically Collective on da 762 763 Input Parameters: 764 + da - the DMDA object 765 - f - the function that allocates the matrix for that specific DMDA 766 767 Level: developer 768 769 Notes: 770 See DMDASetBlockFills() that provides a simple way to provide the nonzero structure for 771 the diagonal and off-diagonal blocks of the matrix 772 773 Not supported from Fortran 774 775 .seealso: `DMCreateMatrix()`, `DMDASetBlockFills()` 776 @*/ 777 PetscErrorCode DMDASetGetMatrix(DM da, PetscErrorCode (*f)(DM, Mat *)) { 778 PetscFunctionBegin; 779 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 780 da->ops->creatematrix = f; 781 PetscFunctionReturn(0); 782 } 783 784 /*@ 785 DMDAMapMatStencilToGlobal - Map a list of MatStencils on a grid to global indices. 786 787 Not Collective 788 789 Input Parameters: 790 + da - the DMDA object 791 . m - number of MatStencils 792 - idxm - grid points (and component number when dof > 1) 793 794 Output Parameter: 795 . 
gidxm - global row indices 796 797 Level: intermediate 798 799 .seealso: `MatStencil` 800 @*/ 801 PetscErrorCode DMDAMapMatStencilToGlobal(DM da, PetscInt m, const MatStencil idxm[], PetscInt gidxm[]) { 802 const DM_DA *dd = (const DM_DA *)da->data; 803 const PetscInt *dxm = (const PetscInt *)idxm; 804 PetscInt i, j, sdim, tmp, dim; 805 PetscInt dims[4], starts[4], dims2[3], starts2[3], dof = dd->w; 806 ISLocalToGlobalMapping ltog; 807 808 PetscFunctionBegin; 809 if (m <= 0) PetscFunctionReturn(0); 810 811 /* Code adapted from DMDAGetGhostCorners() */ 812 starts2[0] = dd->Xs / dof + dd->xo; 813 starts2[1] = dd->Ys + dd->yo; 814 starts2[2] = dd->Zs + dd->zo; 815 dims2[0] = (dd->Xe - dd->Xs) / dof; 816 dims2[1] = (dd->Ye - dd->Ys); 817 dims2[2] = (dd->Ze - dd->Zs); 818 819 /* As if we do MatSetStencil() to get dims[]/starts[] of mat->stencil */ 820 dim = da->dim; /* DA dim: 1 to 3 */ 821 sdim = dim + (dof > 1 ? 1 : 0); /* Dimensions in MatStencil's (k,j,i,c) view */ 822 for (i = 0; i < dim; i++) { /* Reverse the order and also skip the unused dimensions */ 823 dims[i] = dims2[dim - i - 1]; /* ex. dims/starts[] are in order of {i} for 1D, {j,i} for 2D and {k,j,i} for 3D */ 824 starts[i] = starts2[dim - i - 1]; 825 } 826 starts[dim] = 0; /* Append the extra dim for dof (won't be used below if dof=1) */ 827 dims[dim] = dof; 828 829 /* Map stencils to local indices (code adapted from MatSetValuesStencil()) */ 830 for (i = 0; i < m; i++) { 831 dxm += 3 - dim; /* Input is {k,j,i,c}; move the pointer to the first used index, e.g., j in 2D */ 832 tmp = 0; 833 for (j = 0; j < sdim; j++) { /* Iter over, ex. 
j,i or j,i,c in 2D */ 834 if (tmp < 0 || dxm[j] < starts[j] || dxm[j] >= (starts[j] + dims[j])) tmp = -1; /* Beyond the ghost region, therefore ignored with negative indices */ 835 else tmp = tmp * dims[j] + (dxm[j] - starts[j]); 836 } 837 gidxm[i] = tmp; 838 /* Move to the next MatStencil point */ 839 if (dof > 1) dxm += sdim; /* c is already counted in sdim */ 840 else dxm += sdim + 1; /* skip the unused c */ 841 } 842 843 /* Map local indices to global indices */ 844 PetscCall(DMGetLocalToGlobalMapping(da, <og)); 845 PetscCall(ISLocalToGlobalMappingApply(ltog, m, gidxm, gidxm)); 846 PetscFunctionReturn(0); 847 } 848 849 /* 850 Creates "balanced" ownership ranges after refinement, constrained by the need for the 851 fine grid boundaries to fall within one stencil width of the coarse partition. 852 853 Uses a greedy algorithm to handle non-ideal layouts, could probably do something better. 854 */ 855 static PetscErrorCode DMDARefineOwnershipRanges(DM da, PetscBool periodic, PetscInt stencil_width, PetscInt ratio, PetscInt m, const PetscInt lc[], PetscInt lf[]) { 856 PetscInt i, totalc = 0, remaining, startc = 0, startf = 0; 857 858 PetscFunctionBegin; 859 PetscCheck(ratio >= 1, PetscObjectComm((PetscObject)da), PETSC_ERR_USER, "Requested refinement ratio %" PetscInt_FMT " must be at least 1", ratio); 860 if (ratio == 1) { 861 PetscCall(PetscArraycpy(lf, lc, m)); 862 PetscFunctionReturn(0); 863 } 864 for (i = 0; i < m; i++) totalc += lc[i]; 865 remaining = (!periodic) + ratio * (totalc - (!periodic)); 866 for (i = 0; i < m; i++) { 867 PetscInt want = remaining / (m - i) + !!(remaining % (m - i)); 868 if (i == m - 1) lf[i] = want; 869 else { 870 const PetscInt nextc = startc + lc[i]; 871 /* Move the first fine node of the next subdomain to the right until the coarse node on its left is within one 872 * coarse stencil width of the first coarse node in the next subdomain. 
*/ 873 while ((startf + want) / ratio < nextc - stencil_width) want++; 874 /* Move the last fine node in the current subdomain to the left until the coarse node on its right is within one 875 * coarse stencil width of the last coarse node in the current subdomain. */ 876 while ((startf + want - 1 + ratio - 1) / ratio > nextc - 1 + stencil_width) want--; 877 /* Make sure all constraints are satisfied */ 878 if (want < 0 || want > remaining || ((startf + want) / ratio < nextc - stencil_width) || ((startf + want - 1 + ratio - 1) / ratio > nextc - 1 + stencil_width)) 879 SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Could not find a compatible refined ownership range"); 880 } 881 lf[i] = want; 882 startc += lc[i]; 883 startf += lf[i]; 884 remaining -= lf[i]; 885 } 886 PetscFunctionReturn(0); 887 } 888 889 /* 890 Creates "balanced" ownership ranges after coarsening, constrained by the need for the 891 fine grid boundaries to fall within one stencil width of the coarse partition. 892 893 Uses a greedy algorithm to handle non-ideal layouts, could probably do something better. 
*/
static PetscErrorCode DMDACoarsenOwnershipRanges(DM da, PetscBool periodic, PetscInt stencil_width, PetscInt ratio, PetscInt m, const PetscInt lf[], PetscInt lc[]) {
  PetscInt i, totalf, remaining, startc, startf;

  PetscFunctionBegin;
  PetscCheck(ratio >= 1, PetscObjectComm((PetscObject)da), PETSC_ERR_USER, "Requested refinement ratio %" PetscInt_FMT " must be at least 1", ratio);
  if (ratio == 1) {
    /* ratio 1: the coarse grid is identical to the fine grid, so the ownership ranges carry over unchanged */
    PetscCall(PetscArraycpy(lc, lf, m));
    PetscFunctionReturn(0);
  }
  /* totalf = total number of fine grid points in this direction across all m subdomains */
  for (i = 0, totalf = 0; i < m; i++) totalf += lf[i];
  /* Total number of coarse points: for non-periodic boundaries the endpoint is kept, hence the (!periodic) correction */
  remaining = (!periodic) + (totalf - (!periodic)) / ratio;
  for (i = 0, startc = 0, startf = 0; i < m; i++) {
    /* Initial guess: distribute the remaining coarse points evenly over the remaining subdomains (rounding up) */
    PetscInt want = remaining / (m - i) + !!(remaining % (m - i));
    if (i == m - 1) lc[i] = want;
    else {
      const PetscInt nextf = startf + lf[i];
      /* Slide first coarse node of next subdomain to the left until the coarse node to the left of the first fine
       * node is within one stencil width. */
      while (nextf / ratio < startc + want - stencil_width) want--;
      /* Slide the last coarse node of the current subdomain to the right until the coarse node to the right of the last
       * fine node is within one stencil width. */
      while ((nextf - 1 + ratio - 1) / ratio > startc + want - 1 + stencil_width) want++;
      /* The two slides above can conflict; verify the final choice actually satisfies both constraints */
      if (want < 0 || want > remaining || (nextf / ratio < startc + want - stencil_width) || ((nextf - 1 + ratio - 1) / ratio > startc + want - 1 + stencil_width))
        SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Could not find a compatible coarsened ownership range");
    }
    lc[i] = want;
    startc += lc[i];
    startf += lf[i];
    remaining -= lc[i];
  }
  PetscFunctionReturn(0);
}

/*
   DMRefine_DA - Creates a refined DMDA from da, copying its layout, fill/refinement metadata,
   field names, and (when present) interpolating the coarse coordinates onto the fine grid.
   The comm argument is part of the DMRefine() interface; the new DM lives on da's communicator.
*/
PetscErrorCode DMRefine_DA(DM da, MPI_Comm comm, DM *daref) {
  PetscInt M, N, P, i, dim;
  Vec coordsc, coordsf;
  DM da2;
  DM_DA *dd = (DM_DA *)da->data, *dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidPointer(daref, 3);

  PetscCall(DMGetDimension(da, &dim));
  /* Periodic boundaries and Q0 (piecewise constant) interpolation refine cell counts (M -> r*M);
     otherwise vertex counts are refined (M -> 1 + r*(M-1)) so endpoints are preserved */
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->refine_x * dd->M;
  } else {
    M = 1 + dd->refine_x * (dd->M - 1);
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->refine_y * dd->N;
    } else {
      N = 1;
    }
  } else {
    N = 1 + dd->refine_y * (dd->N - 1);
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->refine_z * dd->P;
    } else {
      P = 1;
    }
  } else {
    P = 1 + dd->refine_z * (dd->P - 1);
  }
  /* Build the refined DMDA with the same process grid, boundary types, dof, and stencil as da */
  PetscCall(DMDACreate(PetscObjectComm((PetscObject)da), &da2));
  PetscCall(DMSetOptionsPrefix(da2, ((PetscObject)da)->prefix));
  PetscCall(DMSetDimension(da2, dim));
  PetscCall(DMDASetSizes(da2, M, N, P));
  PetscCall(DMDASetNumProcs(da2, dd->m, dd->n, dd->p));
  PetscCall(DMDASetBoundaryType(da2, dd->bx, dd->by, dd->bz));
  PetscCall(DMDASetDof(da2, dd->w));
  PetscCall(DMDASetStencilType(da2, dd->stencil_type));
  PetscCall(DMDASetStencilWidth(da2, dd->s));
  /* Compute per-process ownership ranges on the fine grid compatible with the coarse decomposition */
  if (dim == 3) {
    PetscInt *lx, *ly, *lz;
    PetscCall(PetscMalloc3(dd->m, &lx, dd->n, &ly, dd->p, &lz));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_y, dd->n, dd->ly, ly));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_z, dd->p, dd->lz, lz));
    PetscCall(DMDASetOwnershipRanges(da2, lx, ly, lz));
    PetscCall(PetscFree3(lx, ly, lz));
  } else if (dim == 2) {
    PetscInt *lx, *ly;
    PetscCall(PetscMalloc2(dd->m, &lx, dd->n, &ly));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_y, dd->n, dd->ly, ly));
    PetscCall(DMDASetOwnershipRanges(da2, lx, ly, NULL));
    PetscCall(PetscFree2(lx, ly));
  } else if (dim == 1) {
    PetscInt *lx;
    PetscCall(PetscMalloc1(dd->m, &lx));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx));
    PetscCall(DMDASetOwnershipRanges(da2, lx, NULL, NULL));
    PetscCall(PetscFree(lx));
  }
  dd2 = (DM_DA *)da2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones */
  da2->ops->creatematrix = da->ops->creatematrix;
  /* da2->ops->createinterpolation = da->ops->createinterpolation; this causes problem with SNESVI */
  da2->ops->getcoloring = da->ops->getcoloring;
  dd2->interptype = dd->interptype;

  /* copy fill information if given; dfill/ofill are CSR-like arrays of length dfill[w] + w + 1 */
  if (dd->dfill) {
    PetscCall(PetscMalloc1(dd->dfill[dd->w] + dd->w + 1, &dd2->dfill));
    PetscCall(PetscArraycpy(dd2->dfill, dd->dfill, dd->dfill[dd->w] + dd->w + 1));
  }
  if (dd->ofill) {
    PetscCall(PetscMalloc1(dd->ofill[dd->w] + dd->w + 1, &dd2->ofill));
    PetscCall(PetscArraycpy(dd2->ofill, dd->ofill, dd->ofill[dd->w] + dd->w + 1));
  }
  /* copy the refine information */
  dd2->coarsen_x = dd2->refine_x = dd->refine_x;
  dd2->coarsen_y = dd2->refine_y = dd->refine_y;
  dd2->coarsen_z = dd2->refine_z = dd->refine_z;

  /* If a per-level refinement hierarchy was specified, pick this level's factors from it.
     levelup - leveldown is the current level; +1 indexes the factor for the NEXT refinement. */
  if (dd->refine_z_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_z_hier_n) { dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown + 1]; }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_z_hier_n) { dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown]; }
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_z_hier_n, &dd2->refine_z_hier));
    PetscCall(PetscArraycpy(dd2->refine_z_hier, dd->refine_z_hier, dd2->refine_z_hier_n));
  }
  if (dd->refine_y_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_y_hier_n) { dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown + 1]; }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_y_hier_n) { dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown]; }
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_y_hier_n, &dd2->refine_y_hier));
    PetscCall(PetscArraycpy(dd2->refine_y_hier, dd->refine_y_hier, dd2->refine_y_hier_n));
  }
  if (dd->refine_x_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_x_hier_n) { dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown + 1]; }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_x_hier_n) { dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown]; }
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_x_hier_n, &dd2->refine_x_hier));
    PetscCall(PetscArraycpy(dd2->refine_x_hier, dd->refine_x_hier, dd2->refine_x_hier_n));
  }

  /* copy vector type information */
  PetscCall(DMSetVecType(da2, da->vectype));

  /* NOTE(review): lf/lj appear to be shared (pointer copy, not duplicated) — confirm ownership semantics */
  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  da2->leveldown = da->leveldown;
  da2->levelup = da->levelup + 1;

  PetscCall(DMSetUp(da2));

  /* interpolate coordinates if they are set on the coarse grid */
  PetscCall(DMGetCoordinates(da, &coordsc));
  if (coordsc) {
    DM cdaf, cdac;
    Mat II;

    PetscCall(DMGetCoordinateDM(da, &cdac));
    PetscCall(DMGetCoordinateDM(da2, &cdaf));
    /* force creation of the coordinate vector */
    PetscCall(DMDASetUniformCoordinates(da2, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
    PetscCall(DMGetCoordinates(da2, &coordsf));
    PetscCall(DMCreateInterpolation(cdac, cdaf, &II, NULL));
    PetscCall(MatInterpolate(II, coordsc, coordsf));
    PetscCall(MatDestroy(&II));
  }

  /* carry over per-field names */
  for (i = 0; i < da->bs; i++) {
    const char *fieldname;
    PetscCall(DMDAGetFieldName(da, i, &fieldname));
    PetscCall(DMDASetFieldName(da2, i, fieldname));
  }

  *daref = da2;
  PetscFunctionReturn(0);
}

/*
   DMCoarsen_DA - Creates a coarsened DMDA from dmf; the mirror of DMRefine_DA. Copies layout,
   fill/refinement metadata, and field names, and injects fine-grid coordinates (when set) onto
   the coarse grid. The comm argument is part of the DMCoarsen() interface; the new DM lives on
   dmf's communicator.
*/
PetscErrorCode DMCoarsen_DA(DM dmf, MPI_Comm comm, DM *dmc) {
  PetscInt M, N, P, i, dim;
  Vec coordsc, coordsf;
  DM dmc2;
  DM_DA *dd = (DM_DA *)dmf->data, *dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(dmf, DM_CLASSID, 1, DMDA);
  PetscValidPointer(dmc, 3);

  PetscCall(DMGetDimension(dmf, &dim));
  /* Inverse of the refinement sizing: cell counts for periodic/Q0, vertex counts otherwise */
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->M / dd->coarsen_x;
  } else {
    M = 1 + (dd->M - 1) / dd->coarsen_x;
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->N / dd->coarsen_y;
    } else {
      N = 1;
    }
  } else {
    N = 1 + (dd->N - 1) / dd->coarsen_y;
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->P / dd->coarsen_z;
    } else {
      P = 1;
    }
  } else {
    P = 1 + (dd->P - 1) / dd->coarsen_z;
  }
  /* Build the coarse DMDA with the same process grid, boundary types, dof, and stencil as dmf */
  PetscCall(DMDACreate(PetscObjectComm((PetscObject)dmf), &dmc2));
  PetscCall(DMSetOptionsPrefix(dmc2, ((PetscObject)dmf)->prefix));
  PetscCall(DMSetDimension(dmc2, dim));
  PetscCall(DMDASetSizes(dmc2, M, N, P));
  PetscCall(DMDASetNumProcs(dmc2, dd->m, dd->n, dd->p));
  PetscCall(DMDASetBoundaryType(dmc2, dd->bx, dd->by, dd->bz));
  PetscCall(DMDASetDof(dmc2, dd->w));
  PetscCall(DMDASetStencilType(dmc2, dd->stencil_type));
  PetscCall(DMDASetStencilWidth(dmc2, dd->s));
  /* Compute per-process ownership ranges on the coarse grid compatible with the fine decomposition */
  if (dim == 3) {
    PetscInt *lx, *ly, *lz;
    PetscCall(PetscMalloc3(dd->m, &lx, dd->n, &ly, dd->p, &lz));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_y, dd->n, dd->ly, ly));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_z, dd->p, dd->lz, lz));
    PetscCall(DMDASetOwnershipRanges(dmc2, lx, ly, lz));
    PetscCall(PetscFree3(lx, ly, lz));
  } else if (dim == 2) {
    PetscInt *lx, *ly;
    PetscCall(PetscMalloc2(dd->m, &lx, dd->n, &ly));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_y, dd->n, dd->ly, ly));
    PetscCall(DMDASetOwnershipRanges(dmc2, lx, ly, NULL));
    PetscCall(PetscFree2(lx, ly));
  } else if (dim == 1) {
    PetscInt *lx;
    PetscCall(PetscMalloc1(dd->m, &lx));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx));
    PetscCall(DMDASetOwnershipRanges(dmc2, lx, NULL, NULL));
    PetscCall(PetscFree(lx));
  }
  dd2 = (DM_DA *)dmc2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones; why are only some inherited and not all? */
  /* dmc2->ops->createinterpolation = dmf->ops->createinterpolation; copying this one causes trouble for DMSetVI */
  dmc2->ops->creatematrix = dmf->ops->creatematrix;
  dmc2->ops->getcoloring = dmf->ops->getcoloring;
  dd2->interptype = dd->interptype;

  /* copy fill information if given; dfill/ofill are CSR-like arrays of length dfill[w] + w + 1 */
  if (dd->dfill) {
    PetscCall(PetscMalloc1(dd->dfill[dd->w] + dd->w + 1, &dd2->dfill));
    PetscCall(PetscArraycpy(dd2->dfill, dd->dfill, dd->dfill[dd->w] + dd->w + 1));
  }
  if (dd->ofill) {
    PetscCall(PetscMalloc1(dd->ofill[dd->w] + dd->w + 1, &dd2->ofill));
    PetscCall(PetscArraycpy(dd2->ofill, dd->ofill, dd->ofill[dd->w] + dd->w + 1));
  }
  /* copy the refine information */
  dd2->coarsen_x = dd2->refine_x = dd->coarsen_x;
  dd2->coarsen_y = dd2->refine_y = dd->coarsen_y;
  dd2->coarsen_z = dd2->refine_z = dd->coarsen_z;

  /* If a per-level refinement hierarchy was specified, pick this (coarser) level's factors from it.
     Coarsening moves one level down, hence the -1 (refine factor) and -2 (next coarsen factor) offsets. */
  if (dd->refine_z_hier) {
    if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_z_hier_n) { dd2->refine_z = dd->refine_z_hier[dmf->levelup - dmf->leveldown - 1]; }
    if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_z_hier_n) { dd2->coarsen_z = dd->refine_z_hier[dmf->levelup - dmf->leveldown - 2]; }
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_z_hier_n, &dd2->refine_z_hier));
    PetscCall(PetscArraycpy(dd2->refine_z_hier, dd->refine_z_hier, dd2->refine_z_hier_n));
  }
  if (dd->refine_y_hier) {
    if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_y_hier_n) { dd2->refine_y = dd->refine_y_hier[dmf->levelup - dmf->leveldown - 1]; }
    if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_y_hier_n) { dd2->coarsen_y = dd->refine_y_hier[dmf->levelup - dmf->leveldown - 2]; }
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_y_hier_n, &dd2->refine_y_hier));
    PetscCall(PetscArraycpy(dd2->refine_y_hier, dd->refine_y_hier, dd2->refine_y_hier_n));
  }
  if (dd->refine_x_hier) {
    if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_x_hier_n) { dd2->refine_x = dd->refine_x_hier[dmf->levelup - dmf->leveldown - 1]; }
    if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_x_hier_n) { dd2->coarsen_x = dd->refine_x_hier[dmf->levelup - dmf->leveldown - 2]; }
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_x_hier_n, &dd2->refine_x_hier));
    PetscCall(PetscArraycpy(dd2->refine_x_hier, dd->refine_x_hier, dd2->refine_x_hier_n));
  }

  /* copy vector type information */
  PetscCall(DMSetVecType(dmc2, dmf->vectype));

  /* NOTE(review): lf/lj appear to be shared (pointer copy, not duplicated) — confirm ownership semantics */
  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  dmc2->leveldown = dmf->leveldown + 1;
  dmc2->levelup = dmf->levelup;

  PetscCall(DMSetUp(dmc2));

  /* inject coordinates if they are set on the fine grid */
  PetscCall(DMGetCoordinates(dmf, &coordsf));
  if (coordsf) {
    DM cdaf, cdac;
    Mat inject;
    VecScatter vscat;

    PetscCall(DMGetCoordinateDM(dmf, &cdaf));
    PetscCall(DMGetCoordinateDM(dmc2, &cdac));
    /* force creation of the coordinate vector */
    PetscCall(DMDASetUniformCoordinates(dmc2, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
    PetscCall(DMGetCoordinates(dmc2, &coordsc));

    PetscCall(DMCreateInjection(cdac, cdaf, &inject));
    PetscCall(MatScatterGetVecScatter(inject, &vscat));
    PetscCall(VecScatterBegin(vscat, coordsf, coordsc, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(vscat, coordsf, coordsc, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(MatDestroy(&inject));
  }

  /* carry over per-field names */
  for (i = 0; i < dmf->bs; i++) {
    const char *fieldname;
    PetscCall(DMDAGetFieldName(dmf, i, &fieldname));
    PetscCall(DMDASetFieldName(dmc2, i, fieldname));
  }

  *dmc = dmc2;
  PetscFunctionReturn(0);
}

/*
   DMRefineHierarchy_DA - Builds nlevels successively refined DMDAs starting from da; per-level
   refinement factors may be overridden with the -da_refine_hierarchy_{x,y,z} options.
*/
PetscErrorCode DMRefineHierarchy_DA(DM da, PetscInt nlevels, DM daf[]) {
  PetscInt i, n, *refx, *refy, *refz;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da, DM_CLASSID, 1);
  PetscCheck(nlevels >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "nlevels cannot be negative");
  if (nlevels == 0) PetscFunctionReturn(0);
  PetscValidPointer(daf, 3);

  /* Get refinement factors, defaults taken from the coarse DMDA */
  PetscCall(PetscMalloc3(nlevels, &refx, nlevels, &refy, nlevels, &refz));
  for (i = 0; i < nlevels; i++) { PetscCall(DMDAGetRefinementFactor(da, &refx[i], &refy[i], &refz[i])); }
  /* n is reset before each query because PetscOptionsGetIntArray overwrites it with the count read */
  n = nlevels;
  PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_x", refx, &n, NULL));
  n = nlevels;
  PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_y", refy, &n, NULL));
  n = nlevels;
  PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_z", refz, &n, NULL));

  /* Refine level by level, setting each level's factor before refining from it */
  PetscCall(DMDASetRefinementFactor(da, refx[0], refy[0], refz[0]));
  PetscCall(DMRefine(da, PetscObjectComm((PetscObject)da), &daf[0]));
  for (i = 1; i < nlevels; i++) {
    PetscCall(DMDASetRefinementFactor(daf[i - 1], refx[i], refy[i], refz[i]));
    PetscCall(DMRefine(daf[i - 1], PetscObjectComm((PetscObject)da), &daf[i]));
  }
  PetscCall(PetscFree3(refx, refy, refz));
  PetscFunctionReturn(0);
}

/*
   DMCoarsenHierarchy_DA - Builds nlevels successively coarsened DMDAs starting from da.
*/
PetscErrorCode DMCoarsenHierarchy_DA(DM da, PetscInt nlevels, DM dac[]) {
  PetscInt i;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da, DM_CLASSID, 1);
  PetscCheck(nlevels >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "nlevels cannot be negative");
  if (nlevels == 0) PetscFunctionReturn(0);
  PetscValidPointer(dac, 3);
  PetscCall(DMCoarsen(da, PetscObjectComm((PetscObject)da), &dac[0]));
  for (i = 1; i < nlevels; i++) { PetscCall(DMCoarsen(dac[i - 1], PetscObjectComm((PetscObject)da), &dac[i])); }
  PetscFunctionReturn(0);
}

/*
   DMDASetGLLCoordinates_1d - 1d implementation of DMDASetGLLCoordinates(); maps the n GLL nodes
   (given on the reference element [-1,1]) into each spectral element of the global domain [-1,1].
   Assumes n >= 2 and that the local grid-point counts are divisible by n-1 (see the caller's notes).
*/
PetscErrorCode DMDASetGLLCoordinates_1d(DM dm, PetscInt n, PetscReal *nodes) {
  PetscInt i, j, xs, xn, q;
  PetscScalar *xx;
  PetscReal h;
  Vec x;
  DM_DA *da = (DM_DA *)dm->data;

  PetscFunctionBegin;
  if (da->bx != DM_BOUNDARY_PERIODIC) {
    PetscCall(DMDAGetInfo(dm, NULL, &q, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
    q = (q - 1) / (n - 1); /* number of spectral elements */
    h = 2.0 / q;           /* width of each element in the [-1,1] domain */
    PetscCall(DMDAGetCorners(dm, &xs, NULL, NULL, &xn, NULL, NULL));
    /* convert grid-point start/count into element start/count */
    xs = xs / (n - 1);
    xn = xn / (n - 1);
    /* force creation of the coordinate vector before overwriting it with GLL positions */
    PetscCall(DMDASetUniformCoordinates(dm, -1., 1., 0., 0., 0., 0.));
    PetscCall(DMGetCoordinates(dm, &x));
    PetscCall(DMDAVecGetArray(dm, x, &xx));

    /* loop over local spectral elements */
    for (j = xs; j < xs + xn; j++) {
      /*
        Except for the first process, each process starts on the second GLL point of the first element on that process
      */
      for (i = (j == xs && xs > 0) ? 1 : 0; i < n; i++) { xx[j * (n - 1) + i] = -1.0 + h * j + h * (nodes[i] + 1.0) / 2.; }
    }
    PetscCall(DMDAVecRestoreArray(dm, x, &xx));
  } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented for periodic");
  PetscFunctionReturn(0);
}

/*@

  DMDASetGLLCoordinates - Sets the global coordinates from -1 to 1 to the GLL points of as many GLL elements that fit the number of grid points

  Collective on da

  Input Parameters:
+ da - the DMDA object
. n - the number of GLL nodes
- nodes - the GLL nodes

  Notes:
  the parallel decomposition of grid points must correspond to the degree of the GLL. That is, the number of grid points
  on each process must be divisible by the number of GLL elements needed per process. This depends on whether the DM is
  periodic or not.

  Level: advanced

.seealso: `DMDACreate()`, `PetscDTGaussLobattoLegendreQuadrature()`, `DMGetCoordinates()`
@*/
PetscErrorCode DMDASetGLLCoordinates(DM da, PetscInt n, PetscReal *nodes) {
  PetscFunctionBegin;
  if (da->dim == 1) {
    PetscCall(DMDASetGLLCoordinates_1d(da, n, nodes));
  } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented for 2 or 3d");
  PetscFunctionReturn(0);
}

/*
   DMGetCompatibility_DA - Determines whether dm2 is compatible with da1: same dimension, stencil
   width, global sizes, process grid, boundary types, and identical per-process ownership ranges.
   Declines (*set = PETSC_FALSE) when dm2 is not a DMDA.
*/
PETSC_INTERN PetscErrorCode DMGetCompatibility_DA(DM da1, DM dm2, PetscBool *compatible, PetscBool *set) {
  DM_DA *dd1 = (DM_DA *)da1->data, *dd2;
  DM da2;
  DMType dmtype2;
  PetscBool isda, compatibleLocal;
  PetscInt i;

  PetscFunctionBegin;
  PetscCheck(da1->setupcalled, PetscObjectComm((PetscObject)da1), PETSC_ERR_ARG_WRONGSTATE, "DMSetUp() must be called on first DM before DMGetCompatibility()");
  PetscCall(DMGetType(dm2, &dmtype2));
  PetscCall(PetscStrcmp(dmtype2, DMDA, &isda));
  if (isda) {
    da2 = dm2;
    dd2 = (DM_DA *)da2->data;
    PetscCheck(da2->setupcalled, PetscObjectComm((PetscObject)da2), PETSC_ERR_ARG_WRONGSTATE, "DMSetUp() must be called on second DM before DMGetCompatibility()");
    compatibleLocal = (PetscBool)(da1->dim == da2->dim);
    if (compatibleLocal) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->s == dd2->s)); /* Stencil width */
    /*                                                       Global size   ranks              Boundary type */
    if (compatibleLocal) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->M == dd2->M) && (dd1->m == dd2->m) && (dd1->bx == dd2->bx));
    if (compatibleLocal && da1->dim > 1) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->N == dd2->N) && (dd1->n == dd2->n) && (dd1->by == dd2->by));
    if (compatibleLocal && da1->dim > 2) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->P == dd2->P) && (dd1->p == dd2->p) && (dd1->bz == dd2->bz));
    /* Per-process ownership ranges must match exactly in every used dimension */
    if (compatibleLocal) {
      for (i = 0; i < dd1->m; ++i) { compatibleLocal = (PetscBool)(compatibleLocal && (dd1->lx[i] == dd2->lx[i])); /* Local size */ }
    }
    if (compatibleLocal && da1->dim > 1) {
      for (i = 0; i < dd1->n; ++i) { compatibleLocal = (PetscBool)(compatibleLocal && (dd1->ly[i] == dd2->ly[i])); }
    }
    if (compatibleLocal && da1->dim > 2) {
      for (i = 0; i < dd1->p; ++i) { compatibleLocal = (PetscBool)(compatibleLocal && (dd1->lz[i] == dd2->lz[i])); }
    }
    *compatible = compatibleLocal;
    *set = PETSC_TRUE;
  } else {
    /* Decline to determine compatibility with other DM types */
    *set = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}