#include <petsc/private/dmdaimpl.h> /*I "petscdmda.h" I*/

/*@
  DMDASetSizes - Sets the number of grid points in the three dimensional directions

  Logically Collective

  Input Parameters:
+ da - the `DMDA`
. M - the global X size
. N - the global Y size
- P - the global Z size

  Level: intermediate

  Developer Note:
  Since the dimension may not yet have been set the code cannot error check for non-positive Y and Z number of grid points

.seealso: [](sec_struct), `DM`, `DMDA`, `PetscSplitOwnership()`
@*/
PetscErrorCode DMDASetSizes(DM da, PetscInt M, PetscInt N, PetscInt P)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  /* the global sizes must be identical on every rank of the DMDA's communicator */
  PetscValidLogicalCollectiveInt(da, M, 2);
  PetscValidLogicalCollectiveInt(da, N, 3);
  PetscValidLogicalCollectiveInt(da, P, 4);
  /* the sizes feed the parallel layout computed in DMSetUp(), so they cannot change afterwards */
  PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
  /* NOTE(review): the conditions admit 0 while the messages say "positive" -- confirm whether 0 is intentionally allowed here */
  PetscCheck(M >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in X direction must be positive");
  PetscCheck(N >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in Y direction must be positive");
  PetscCheck(P >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in Z direction must be positive");

  dd->M = M;
  dd->N = N;
  dd->P = P;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMDASetNumProcs - Sets the number of processes in each dimension

  Logically Collective

  Input Parameters:
+ da - the `DMDA`
. m - the number of X processes (or `PETSC_DECIDE`)
. 
n - the number of Y processes (or `PETSC_DECIDE`)
- p - the number of Z processes (or `PETSC_DECIDE`)

  Level: intermediate

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDASetSizes()`, `PetscSplitOwnership()`
@*/
PetscErrorCode DMDASetNumProcs(DM da, PetscInt m, PetscInt n, PetscInt p)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidLogicalCollectiveInt(da, m, 2);
  PetscValidLogicalCollectiveInt(da, n, 3);
  PetscValidLogicalCollectiveInt(da, p, 4);
  /* the process grid fixes the parallel decomposition built in DMSetUp() */
  PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
  dd->m = m;
  dd->n = n;
  dd->p = p;
  /* In 2d, when exactly one of (m,n) was supplied, derive the other from the communicator size;
     the product m*n must then equal the total number of ranks */
  if (da->dim == 2) {
    PetscMPIInt size;
    PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)da), &size));
    if ((dd->m > 0) && (dd->n < 0)) {
      dd->n = size / dd->m;
      PetscCheck(dd->n * dd->m == size, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "%" PetscInt_FMT " processes in X direction not divisible into comm size %d", m, size);
    }
    if ((dd->n > 0) && (dd->m < 0)) {
      dd->m = size / dd->n;
      PetscCheck(dd->n * dd->m == size, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "%" PetscInt_FMT " processes in Y direction not divisible into comm size %d", n, size);
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMDASetBoundaryType - Sets the type of ghost nodes on domain boundaries.

  Logically Collective

  Input Parameters:
+ da - The `DMDA`
. bx - x boundary type, one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`
. 
by - y boundary type, one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`
- bz - z boundary type, one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`

  Level: intermediate

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMBoundaryType`, `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`
@*/
PetscErrorCode DMDASetBoundaryType(DM da, DMBoundaryType bx, DMBoundaryType by, DMBoundaryType bz)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  /* boundary types must match on all ranks of the communicator */
  PetscValidLogicalCollectiveEnum(da, bx, 2);
  PetscValidLogicalCollectiveEnum(da, by, 3);
  PetscValidLogicalCollectiveEnum(da, bz, 4);
  /* boundary types determine the ghost communication pattern created in DMSetUp() */
  PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
  dd->bx = bx;
  dd->by = by;
  dd->bz = bz;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMDASetDof - Sets the number of degrees of freedom per vertex

  Logically Collective

  Input Parameters:
+ da - The `DMDA`
- dof - Number of degrees of freedom per vertex

  Level: intermediate

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetDof()`, `DMDACreate()`, `DMDestroy()`
@*/
PetscErrorCode DMDASetDof(DM da, PetscInt dof)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidLogicalCollectiveInt(da, dof, 2);
  PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
  dd->w = dof;
  /* the DM block size mirrors the per-vertex dof count */
  da->bs = dof;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMDAGetDof - Gets the number of degrees of freedom per vertex

  Not Collective

  Input Parameter:
. da - The `DMDA`

  Output Parameter:
. 
dof - Number of degrees of freedom per vertex 151 152 Level: intermediate 153 154 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDASetDof()`, `DMDACreate()`, `DMDestroy()` 155 @*/ 156 PetscErrorCode DMDAGetDof(DM da, PetscInt *dof) 157 { 158 DM_DA *dd = (DM_DA *)da->data; 159 160 PetscFunctionBegin; 161 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 162 PetscAssertPointer(dof, 2); 163 *dof = dd->w; 164 PetscFunctionReturn(PETSC_SUCCESS); 165 } 166 167 /*@ 168 DMDAGetOverlap - Gets the size of the per-processor overlap. 169 170 Not Collective 171 172 Input Parameter: 173 . da - The `DMDA` 174 175 Output Parameters: 176 + x - Overlap in the x direction 177 . y - Overlap in the y direction 178 - z - Overlap in the z direction 179 180 Level: intermediate 181 182 .seealso: [](sec_struct), `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDASetOverlap()` 183 @*/ 184 PetscErrorCode DMDAGetOverlap(DM da, PetscInt *x, PetscInt *y, PetscInt *z) 185 { 186 DM_DA *dd = (DM_DA *)da->data; 187 188 PetscFunctionBegin; 189 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 190 if (x) *x = dd->xol; 191 if (y) *y = dd->yol; 192 if (z) *z = dd->zol; 193 PetscFunctionReturn(PETSC_SUCCESS); 194 } 195 196 /*@ 197 DMDASetOverlap - Sets the size of the per-processor overlap. 198 199 Not Collective 200 201 Input Parameters: 202 + da - The `DMDA` 203 . x - Overlap in the x direction 204 . 
y - Overlap in the y direction
- z - Overlap in the z direction

  Level: intermediate

.seealso: [](sec_struct), `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDAGetOverlap()`
@*/
PetscErrorCode DMDASetOverlap(DM da, PetscInt x, PetscInt y, PetscInt z)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidLogicalCollectiveInt(da, x, 2);
  PetscValidLogicalCollectiveInt(da, y, 3);
  PetscValidLogicalCollectiveInt(da, z, 4);
  dd->xol = x;
  dd->yol = y;
  dd->zol = z;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMDAGetNumLocalSubDomains - Gets the number of local subdomains that would be created upon decomposition.

  Not Collective

  Input Parameter:
. da - The `DMDA`

  Output Parameter:
. Nsub - Number of local subdomains created upon decomposition

  Level: intermediate

.seealso: [](sec_struct), `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDASetNumLocalSubDomains()`
@*/
PetscErrorCode DMDAGetNumLocalSubDomains(DM da, PetscInt *Nsub)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  /* output is optional */
  if (Nsub) *Nsub = dd->Nsub;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMDASetNumLocalSubDomains - Sets the number of local subdomains to create when decomposing with `DMCreateDomainDecomposition()`

  Logically Collective

  Input Parameters:
+ da - The `DMDA`
- Nsub - The number of local subdomains requested

  Level: intermediate

.seealso: [](sec_struct), `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDAGetNumLocalSubDomains()`
@*/
PetscErrorCode DMDASetNumLocalSubDomains(DM da, PetscInt Nsub)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidLogicalCollectiveInt(da, Nsub, 2);
  dd->Nsub = Nsub;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMDASetOffset - Sets the index offset of the `DMDA`.

  Collective

  Input Parameters:
+ da - The `DMDA`
. xo - The offset in the x direction
. yo - The offset in the y direction
. zo - The offset in the z direction
. Mo - The problem offset in the x direction
. No - The problem offset in the y direction
- Po - The problem offset in the z direction

  Level: developer

  Note:
  This is used primarily to overlap a computation on a local `DMDA` with that on a global `DMDA` without
  changing boundary conditions or subdomain features that depend upon the global offsets.

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetOffset()`, `DMDAVecGetArray()`
@*/
PetscErrorCode DMDASetOffset(DM da, PetscInt xo, PetscInt yo, PetscInt zo, PetscInt Mo, PetscInt No, PetscInt Po)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidLogicalCollectiveInt(da, xo, 2);
  PetscValidLogicalCollectiveInt(da, yo, 3);
  PetscValidLogicalCollectiveInt(da, zo, 4);
  PetscValidLogicalCollectiveInt(da, Mo, 5);
  PetscValidLogicalCollectiveInt(da, No, 6);
  PetscValidLogicalCollectiveInt(da, Po, 7);
  dd->xo = xo;
  dd->yo = yo;
  dd->zo = zo;
  dd->Mo = Mo;
  dd->No = No;
  dd->Po = Po;

  /* propagate the offsets to the coordinate DMDA, if one has been created */
  if (da->coordinates[0].dm) PetscCall(DMDASetOffset(da->coordinates[0].dm, xo, yo, zo, Mo, No, Po));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMDAGetOffset - Gets the index offset of the `DMDA`.

  Not Collective

  Input Parameter:
. da - The `DMDA`

  Output Parameters:
+ xo - The offset in the x direction
. yo - The offset in the y direction
. zo - The offset in the z direction
. Mo - The global size in the x direction
. 
No - The global size in the y direction 334 - Po - The global size in the z direction 335 336 Level: developer 337 338 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDASetOffset()`, `DMDAVecGetArray()` 339 @*/ 340 PetscErrorCode DMDAGetOffset(DM da, PetscInt *xo, PetscInt *yo, PetscInt *zo, PetscInt *Mo, PetscInt *No, PetscInt *Po) 341 { 342 DM_DA *dd = (DM_DA *)da->data; 343 344 PetscFunctionBegin; 345 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 346 if (xo) *xo = dd->xo; 347 if (yo) *yo = dd->yo; 348 if (zo) *zo = dd->zo; 349 if (Mo) *Mo = dd->Mo; 350 if (No) *No = dd->No; 351 if (Po) *Po = dd->Po; 352 PetscFunctionReturn(PETSC_SUCCESS); 353 } 354 355 /*@ 356 DMDAGetNonOverlappingRegion - Gets the indices of the nonoverlapping region of a subdomain `DMDA`. 357 358 Not Collective 359 360 Input Parameter: 361 . da - The `DMDA` 362 363 Output Parameters: 364 + xs - The start of the region in x 365 . ys - The start of the region in y 366 . zs - The start of the region in z 367 . xm - The size of the region in x 368 . ym - The size of the region in y 369 - zm - The size of the region in z 370 371 Level: intermediate 372 373 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetOffset()`, `DMDAVecGetArray()` 374 @*/ 375 PetscErrorCode DMDAGetNonOverlappingRegion(DM da, PetscInt *xs, PetscInt *ys, PetscInt *zs, PetscInt *xm, PetscInt *ym, PetscInt *zm) 376 { 377 DM_DA *dd = (DM_DA *)da->data; 378 379 PetscFunctionBegin; 380 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 381 if (xs) *xs = dd->nonxs; 382 if (ys) *ys = dd->nonys; 383 if (zs) *zs = dd->nonzs; 384 if (xm) *xm = dd->nonxm; 385 if (ym) *ym = dd->nonym; 386 if (zm) *zm = dd->nonzm; 387 PetscFunctionReturn(PETSC_SUCCESS); 388 } 389 390 /*@ 391 DMDASetNonOverlappingRegion - Sets the indices of the nonoverlapping region of a subdomain `DMDA`. 392 393 Collective 394 395 Input Parameters: 396 + da - The `DMDA` 397 . xs - The start of the region in x 398 . ys - The start of the region in y 399 . 
zs - The start of the region in z 400 . xm - The size of the region in x 401 . ym - The size of the region in y 402 - zm - The size of the region in z 403 404 Level: intermediate 405 406 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetOffset()`, `DMDAVecGetArray()` 407 @*/ 408 PetscErrorCode DMDASetNonOverlappingRegion(DM da, PetscInt xs, PetscInt ys, PetscInt zs, PetscInt xm, PetscInt ym, PetscInt zm) 409 { 410 DM_DA *dd = (DM_DA *)da->data; 411 412 PetscFunctionBegin; 413 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 414 PetscValidLogicalCollectiveInt(da, xs, 2); 415 PetscValidLogicalCollectiveInt(da, ys, 3); 416 PetscValidLogicalCollectiveInt(da, zs, 4); 417 PetscValidLogicalCollectiveInt(da, xm, 5); 418 PetscValidLogicalCollectiveInt(da, ym, 6); 419 PetscValidLogicalCollectiveInt(da, zm, 7); 420 dd->nonxs = xs; 421 dd->nonys = ys; 422 dd->nonzs = zs; 423 dd->nonxm = xm; 424 dd->nonym = ym; 425 dd->nonzm = zm; 426 427 PetscFunctionReturn(PETSC_SUCCESS); 428 } 429 430 /*@ 431 DMDASetStencilType - Sets the type of the communication stencil 432 433 Logically Collective 434 435 Input Parameters: 436 + da - The `DMDA` 437 - stype - The stencil type, use either `DMDA_STENCIL_BOX` or `DMDA_STENCIL_STAR`. 
  Level: intermediate

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMDAStencilType`, `DMDA_STENCIL_BOX`, `DMDA_STENCIL_STAR`
@*/
PetscErrorCode DMDASetStencilType(DM da, DMDAStencilType stype)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidLogicalCollectiveEnum(da, stype, 2);
  /* the stencil type fixes the ghost communication pattern built in DMSetUp() */
  PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
  dd->stencil_type = stype;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMDAGetStencilType - Gets the type of the communication stencil

  Not Collective

  Input Parameter:
. da - The `DMDA`

  Output Parameter:
. stype - The stencil type, use either `DMDA_STENCIL_BOX` or `DMDA_STENCIL_STAR`.

  Level: intermediate

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMDAStencilType`, `DMDA_STENCIL_BOX`, `DMDA_STENCIL_STAR`
@*/
PetscErrorCode DMDAGetStencilType(DM da, DMDAStencilType *stype)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscAssertPointer(stype, 2);
  *stype = dd->stencil_type;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMDASetStencilWidth - Sets the width of the communication stencil

  Logically Collective

  Input Parameters:
+ da - The `DMDA`
- width - The stencil width

  Level: intermediate

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMDAStencilType`, `DMDA_STENCIL_BOX`, `DMDA_STENCIL_STAR`
@*/
PetscErrorCode DMDASetStencilWidth(DM da, PetscInt width)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidLogicalCollectiveInt(da, width, 2);
  /* the width determines the ghost-region size computed in DMSetUp() */
  PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
  dd->s = width;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMDAGetStencilWidth - Gets the width of the communication stencil

  Not Collective

  Input Parameter:
. da - The `DMDA`

  Output Parameter:
. width - The stencil width

  Level: intermediate

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMDAStencilType`, `DMDA_STENCIL_BOX`, `DMDA_STENCIL_STAR`
@*/
PetscErrorCode DMDAGetStencilWidth(DM da, PetscInt *width)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscAssertPointer(width, 2);
  *width = dd->s;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Verifies that the entries of lx[] (one per process in a direction) sum to the global size M */
static PetscErrorCode DMDACheckOwnershipRanges_Private(DM da, PetscInt M, PetscInt m, const PetscInt lx[])
{
  PetscInt i, sum;

  PetscFunctionBegin;
  /* M < 0 means DMDASetSizes() has not been called yet */
  PetscCheck(M >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Global dimension not set");
  for (i = sum = 0; i < m; i++) sum += lx[i];
  PetscCheck(sum == M, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_INCOMP, "Ownership ranges sum to %" PetscInt_FMT " but global dimension is %" PetscInt_FMT, sum, M);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMDASetOwnershipRanges - Sets the number of nodes in each direction on each process

  Logically Collective

  Input Parameters:
+ da - The `DMDA`
. lx - array containing number of nodes in the X direction on each process, or `NULL`. If non-null, must be of length da->m
. ly - array containing number of nodes in the Y direction on each process, or `NULL`. If non-null, must be of length da->n
- lz - array containing number of nodes in the Z direction on each process, or `NULL`. If non-null, must be of length da->p. 
  Level: intermediate

  Note:
  These numbers are NOT multiplied by the number of dof per node.

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`
@*/
PetscErrorCode DMDASetOwnershipRanges(DM da, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[])
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
  if (lx) {
    /* dd->m < 0 means the number of processes in x has not been set (PETSC_DECIDE) */
    PetscCheck(dd->m >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of processes");
    /* the entries of lx must sum to the global size dd->M */
    PetscCall(DMDACheckOwnershipRanges_Private(da, dd->M, dd->m, lx));
    /* copy into an internally owned array; reuse a previous allocation if present */
    if (!dd->lx) PetscCall(PetscMalloc1(dd->m, &dd->lx));
    PetscCall(PetscArraycpy(dd->lx, lx, dd->m));
  }
  if (ly) {
    PetscCheck(dd->n >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of processes");
    PetscCall(DMDACheckOwnershipRanges_Private(da, dd->N, dd->n, ly));
    if (!dd->ly) PetscCall(PetscMalloc1(dd->n, &dd->ly));
    PetscCall(PetscArraycpy(dd->ly, ly, dd->n));
  }
  if (lz) {
    PetscCheck(dd->p >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of processes");
    PetscCall(DMDACheckOwnershipRanges_Private(da, dd->P, dd->p, lz));
    if (!dd->lz) PetscCall(PetscMalloc1(dd->p, &dd->lz));
    PetscCall(PetscArraycpy(dd->lz, lz, dd->p));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMDASetInterpolationType - Sets the type of interpolation that will be
  returned by `DMCreateInterpolation()`

  Logically Collective

  Input Parameters:
+ da - initial distributed array
- ctype - `DMDA_Q1` and `DMDA_Q0` are currently the only supported forms

  Level: 
intermediate

  Note:
  You should call this on the coarser of the two `DMDA` you pass to `DMCreateInterpolation()`

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate1d()`, `DMDACreate2d()`, `DMDACreate3d()`, `DMDestroy()`, `DMDAInterpolationType`,
          `DMDA_Q1`, `DMDA_Q0`
@*/
PetscErrorCode DMDASetInterpolationType(DM da, DMDAInterpolationType ctype)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidLogicalCollectiveEnum(da, ctype, 2);
  dd->interptype = ctype;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMDAGetInterpolationType - Gets the type of interpolation that will be
  used by `DMCreateInterpolation()`

  Not Collective

  Input Parameter:
. da - distributed array

  Output Parameter:
. ctype - interpolation type (`DMDA_Q1` and `DMDA_Q0` are currently the only supported forms)

  Level: intermediate

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDAInterpolationType`, `DMDASetInterpolationType()`, `DMCreateInterpolation()`,
          `DMDA_Q1`, `DMDA_Q0`
@*/
PetscErrorCode DMDAGetInterpolationType(DM da, DMDAInterpolationType *ctype)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscAssertPointer(ctype, 2);
  *ctype = dd->interptype;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  DMDAGetNeighbors - Gets an array containing the MPI rank of all the current
  processes neighbors.

  Not Collective

  Input Parameter:
. da - the `DMDA` object

  Output Parameter:
. ranks - the neighbors ranks, stored with the x index increasing most rapidly. 
The process itself is in the list 657 658 Level: intermediate 659 660 Notes: 661 In 2d the array is of length 9, in 3d of length 27 662 663 Not supported in 1d 664 665 Do not free the array, it is freed when the `DMDA` is destroyed. 666 667 Fortran Note: 668 Pass in an array of the appropriate length to contain the values 669 670 .seealso: [](sec_struct), `DMDA`, `DM` 671 @*/ 672 PetscErrorCode DMDAGetNeighbors(DM da, const PetscMPIInt *ranks[]) 673 { 674 DM_DA *dd = (DM_DA *)da->data; 675 676 PetscFunctionBegin; 677 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 678 *ranks = dd->neighbors; 679 PetscFunctionReturn(PETSC_SUCCESS); 680 } 681 682 /*@C 683 DMDAGetOwnershipRanges - Gets the ranges of indices in the x, y and z direction that are owned by each process 684 685 Not Collective 686 687 Input Parameter: 688 . da - the `DMDA` object 689 690 Output Parameters: 691 + lx - ownership along x direction (optional) 692 . ly - ownership along y direction (optional) 693 - lz - ownership along z direction (optional) 694 695 Level: intermediate 696 697 Note: 698 These correspond to the optional final arguments passed to `DMDACreate()`, `DMDACreate2d()`, `DMDACreate3d()` 699 700 In C you should not free these arrays, nor change the values in them. They will only have valid values while the 701 `DMDA` they came from still exists (has not been destroyed). 702 703 These numbers are NOT multiplied by the number of dof per node. 
  Fortran Note:
  Pass in arrays `lx`, `ly`, and `lz` of the appropriate length to hold the values; the sixth, seventh and
  eighth arguments from `DMDAGetInfo()`

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetCorners()`, `DMDAGetGhostCorners()`, `DMDACreate()`, `DMDACreate1d()`, `DMDACreate2d()`, `DMDACreate3d()`, `VecGetOwnershipRanges()`
@*/
PetscErrorCode DMDAGetOwnershipRanges(DM da, const PetscInt *lx[], const PetscInt *ly[], const PetscInt *lz[])
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  /* each output is optional; the returned arrays are owned by the DMDA */
  if (lx) *lx = dd->lx;
  if (ly) *ly = dd->ly;
  if (lz) *lz = dd->lz;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMDASetRefinementFactor - Set the ratios that the `DMDA` grid is refined

  Logically Collective

  Input Parameters:
+ da - the `DMDA` object
. refine_x - ratio of fine grid to coarse in x direction (2 by default)
. refine_y - ratio of fine grid to coarse in y direction (2 by default)
- refine_z - ratio of fine grid to coarse in z direction (2 by default)

  Options Database Keys:
+ -da_refine_x refine_x - refinement ratio in x direction
. -da_refine_y refine_y - refinement ratio in y direction
. -da_refine_z refine_z - refinement ratio in z direction
- -da_refine <n> - refine the `DMDA` object n times when it is created. 
739 740 Level: intermediate 741 742 Note: 743 Pass `PETSC_IGNORE` to leave a value unchanged 744 745 .seealso: [](sec_struct), `DM`, `DMDA`, `DMRefine()`, `DMDAGetRefinementFactor()` 746 @*/ 747 PetscErrorCode DMDASetRefinementFactor(DM da, PetscInt refine_x, PetscInt refine_y, PetscInt refine_z) 748 { 749 DM_DA *dd = (DM_DA *)da->data; 750 751 PetscFunctionBegin; 752 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 753 PetscValidLogicalCollectiveInt(da, refine_x, 2); 754 PetscValidLogicalCollectiveInt(da, refine_y, 3); 755 PetscValidLogicalCollectiveInt(da, refine_z, 4); 756 757 if (refine_x > 0) dd->refine_x = refine_x; 758 if (refine_y > 0) dd->refine_y = refine_y; 759 if (refine_z > 0) dd->refine_z = refine_z; 760 PetscFunctionReturn(PETSC_SUCCESS); 761 } 762 763 /*@C 764 DMDAGetRefinementFactor - Gets the ratios that the `DMDA` grid is refined 765 766 Not Collective 767 768 Input Parameter: 769 . da - the `DMDA` object 770 771 Output Parameters: 772 + refine_x - ratio of fine grid to coarse in x direction (2 by default) 773 . refine_y - ratio of fine grid to coarse in y direction (2 by default) 774 - refine_z - ratio of fine grid to coarse in z direction (2 by default) 775 776 Level: intermediate 777 778 Note: 779 Pass `NULL` for values you do not need 780 781 .seealso: [](sec_struct), `DM`, `DMDA`, `DMRefine()`, `DMDASetRefinementFactor()` 782 @*/ 783 PetscErrorCode DMDAGetRefinementFactor(DM da, PetscInt *refine_x, PetscInt *refine_y, PetscInt *refine_z) 784 { 785 DM_DA *dd = (DM_DA *)da->data; 786 787 PetscFunctionBegin; 788 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 789 if (refine_x) *refine_x = dd->refine_x; 790 if (refine_y) *refine_y = dd->refine_y; 791 if (refine_z) *refine_z = dd->refine_z; 792 PetscFunctionReturn(PETSC_SUCCESS); 793 } 794 795 /*@C 796 DMDASetGetMatrix - Sets the routine used by the `DMDA` to allocate a matrix. 
  Logically Collective; No Fortran Support

  Input Parameters:
+ da - the `DMDA` object
- f - the function that allocates the matrix for that specific `DMDA`

  Calling sequence of `f`:
+ da - the `DMDA` object
- A - the created matrix

  Level: developer

  Notes:
  If the function is not provided a default function is used that uses the `DMDAStencilType`, `DMBoundaryType`, and value of `DMDASetStencilWidth()`
  to construct the matrix.

  See `DMDASetBlockFills()` that provides a simple way to provide the nonzero structure for
  the diagonal and off-diagonal blocks of the matrix without providing a custom function

  Developer Note:
  This should be called `DMDASetCreateMatrix()`

.seealso: [](sec_struct), `DM`, `DMDA`, `DMCreateMatrix()`, `DMDASetBlockFills()`
@*/
PetscErrorCode DMDASetGetMatrix(DM da, PetscErrorCode (*f)(DM da, Mat *A))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  /* install the user callback; DMCreateMatrix() dispatches through this function pointer */
  da->ops->creatematrix = f;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMDAMapMatStencilToGlobal - Map a list of `MatStencil` on a grid to global indices.

  Not Collective

  Input Parameters:
+ da - the `DMDA` object
. m - number of `MatStencil` to map
- idxm - grid points (and component number when dof > 1)

  Output Parameter:
. 
gidxm - global row indices

  Level: intermediate

.seealso: [](sec_struct), `DM`, `DMDA`, `MatStencil`
@*/
PetscErrorCode DMDAMapMatStencilToGlobal(DM da, PetscInt m, const MatStencil idxm[], PetscInt gidxm[])
{
  const DM_DA           *dd  = (const DM_DA *)da->data;
  const PetscInt        *dxm = (const PetscInt *)idxm; /* view each MatStencil as 4 consecutive PetscInt {k,j,i,c} */
  PetscInt               i, j, sdim, tmp, dim;
  PetscInt               dims[4], starts[4], dims2[3], starts2[3], dof = dd->w;
  ISLocalToGlobalMapping ltog;

  PetscFunctionBegin;
  if (m <= 0) PetscFunctionReturn(PETSC_SUCCESS);

  /* Code adapted from DMDAGetGhostCorners(): ghost-region starts and sizes in (x,y,z) order */
  starts2[0] = dd->Xs / dof + dd->xo;
  starts2[1] = dd->Ys + dd->yo;
  starts2[2] = dd->Zs + dd->zo;
  dims2[0]   = (dd->Xe - dd->Xs) / dof;
  dims2[1]   = (dd->Ye - dd->Ys);
  dims2[2]   = (dd->Ze - dd->Zs);

  /* As if we do MatSetStencil() to get dims[]/starts[] of mat->stencil */
  dim  = da->dim;                 /* DA dim: 1 to 3 */
  sdim = dim + (dof > 1 ? 1 : 0); /* Dimensions in MatStencil's (k,j,i,c) view */
  for (i = 0; i < dim; i++) {     /* Reverse the order and also skip the unused dimensions */
    dims[i]   = dims2[dim - i - 1]; /* ex. dims/starts[] are in order of {i} for 1D, {j,i} for 2D and {k,j,i} for 3D */
    starts[i] = starts2[dim - i - 1];
  }
  starts[dim] = 0; /* Append the extra dim for dof (won't be used below if dof=1) */
  dims[dim]   = dof;

  /* Map stencils to local indices (code adapted from MatSetValuesStencil()) */
  for (i = 0; i < m; i++) {
    dxm += 3 - dim; /* Input is {k,j,i,c}; move the pointer to the first used index, e.g., j in 2D */
    tmp = 0;
    for (j = 0; j < sdim; j++) { /* Iter over, ex. j,i or j,i,c in 2D */
      if (tmp < 0 || dxm[j] < starts[j] || dxm[j] >= (starts[j] + dims[j])) tmp = -1; /* Beyond the ghost region, therefore ignored with negative indices */
      else tmp = tmp * dims[j] + (dxm[j] - starts[j]); /* row-major accumulation into a local index */
    }
    gidxm[i] = tmp;
    /* Move to the next MatStencil point */
    if (dof > 1) dxm += sdim; /* c is already counted in sdim */
    else dxm += sdim + 1; /* skip the unused c */
  }

  /* Map local indices to global indices */
  PetscCall(DMGetLocalToGlobalMapping(da, &ltog));
  PetscCall(ISLocalToGlobalMappingApply(ltog, m, gidxm, gidxm));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Creates "balanced" ownership ranges after refinement, constrained by the need for the
  fine grid boundaries to fall within one stencil width of the coarse partition.

  Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.
*/
static PetscErrorCode DMDARefineOwnershipRanges(DM da, PetscBool periodic, PetscInt stencil_width, PetscInt ratio, PetscInt m, const PetscInt lc[], PetscInt lf[])
{
  PetscInt i, totalc = 0, remaining, startc = 0, startf = 0;

  PetscFunctionBegin;
  PetscCheck(ratio >= 1, PetscObjectComm((PetscObject)da), PETSC_ERR_USER, "Requested refinement ratio %" PetscInt_FMT " must be at least 1", ratio);
  /* ratio 1: the fine partition is identical to the coarse one */
  if (ratio == 1) {
    PetscCall(PetscArraycpy(lf, lc, m));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  for (i = 0; i < m; i++) totalc += lc[i];
  /* total fine points: non-periodic grids keep the shared endpoint, hence the (!periodic) terms */
  remaining = (!periodic) + ratio * (totalc - (!periodic));
  for (i = 0; i < m; i++) {
    /* ideal (ceiling-divided) share of the remaining fine points for this rank */
    PetscInt want = remaining / (m - i) + !!(remaining % (m - i));
    if (i == m - 1) lf[i] = want;
    else {
      const PetscInt nextc = startc + lc[i];
      /* Move the first fine node of the next subdomain to the right until the coarse node on its left is within one
       * coarse stencil width of the first coarse node in the next subdomain. */
      while ((startf + want) / ratio < nextc - stencil_width) want++;
      /* Move the last fine node in the current subdomain to the left until the coarse node on its right is within one
       * coarse stencil width of the last coarse node in the current subdomain. */
      while ((startf + want - 1 + ratio - 1) / ratio > nextc - 1 + stencil_width) want--;
      /* Make sure all constraints are satisfied */
      if (want < 0 || want > remaining || ((startf + want) / ratio < nextc - stencil_width) || ((startf + want - 1 + ratio - 1) / ratio > nextc - 1 + stencil_width))
        SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Could not find a compatible refined ownership range");
    }
    lf[i] = want;
    startc += lc[i];
    startf += lf[i];
    remaining -= lf[i];
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Creates "balanced" ownership ranges after coarsening, constrained by the need for the
  fine grid boundaries to fall within one stencil width of the coarse partition.

  Uses a greedy algorithm to handle non-ideal layouts, could probably do something better. 
942 */ 943 static PetscErrorCode DMDACoarsenOwnershipRanges(DM da, PetscBool periodic, PetscInt stencil_width, PetscInt ratio, PetscInt m, const PetscInt lf[], PetscInt lc[]) 944 { 945 PetscInt i, totalf, remaining, startc, startf; 946 947 PetscFunctionBegin; 948 PetscCheck(ratio >= 1, PetscObjectComm((PetscObject)da), PETSC_ERR_USER, "Requested refinement ratio %" PetscInt_FMT " must be at least 1", ratio); 949 if (ratio == 1) { 950 PetscCall(PetscArraycpy(lc, lf, m)); 951 PetscFunctionReturn(PETSC_SUCCESS); 952 } 953 for (i = 0, totalf = 0; i < m; i++) totalf += lf[i]; 954 remaining = (!periodic) + (totalf - (!periodic)) / ratio; 955 for (i = 0, startc = 0, startf = 0; i < m; i++) { 956 PetscInt want = remaining / (m - i) + !!(remaining % (m - i)); 957 if (i == m - 1) lc[i] = want; 958 else { 959 const PetscInt nextf = startf + lf[i]; 960 /* Slide first coarse node of next subdomain to the left until the coarse node to the left of the first fine 961 * node is within one stencil width. */ 962 while (nextf / ratio < startc + want - stencil_width) want--; 963 /* Slide the last coarse node of the current subdomain to the right until the coarse node to the right of the last 964 * fine node is within one stencil width. 
*/ 965 while ((nextf - 1 + ratio - 1) / ratio > startc + want - 1 + stencil_width) want++; 966 if (want < 0 || want > remaining || (nextf / ratio < startc + want - stencil_width) || ((nextf - 1 + ratio - 1) / ratio > startc + want - 1 + stencil_width)) 967 SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Could not find a compatible coarsened ownership range"); 968 } 969 lc[i] = want; 970 startc += lc[i]; 971 startf += lf[i]; 972 remaining -= lc[i]; 973 } 974 PetscFunctionReturn(PETSC_SUCCESS); 975 } 976 977 PetscErrorCode DMRefine_DA(DM da, MPI_Comm comm, DM *daref) 978 { 979 PetscInt M, N, P, i, dim; 980 Vec coordsc, coordsf; 981 DM da2; 982 DM_DA *dd = (DM_DA *)da->data, *dd2; 983 984 PetscFunctionBegin; 985 PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA); 986 PetscAssertPointer(daref, 3); 987 988 PetscCall(DMGetDimension(da, &dim)); 989 if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) { 990 M = dd->refine_x * dd->M; 991 } else { 992 M = 1 + dd->refine_x * (dd->M - 1); 993 } 994 if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) { 995 if (dim > 1) { 996 N = dd->refine_y * dd->N; 997 } else { 998 N = 1; 999 } 1000 } else { 1001 N = 1 + dd->refine_y * (dd->N - 1); 1002 } 1003 if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) { 1004 if (dim > 2) { 1005 P = dd->refine_z * dd->P; 1006 } else { 1007 P = 1; 1008 } 1009 } else { 1010 P = 1 + dd->refine_z * (dd->P - 1); 1011 } 1012 PetscCall(DMDACreate(PetscObjectComm((PetscObject)da), &da2)); 1013 PetscCall(DMSetOptionsPrefix(da2, ((PetscObject)da)->prefix)); 1014 PetscCall(DMSetDimension(da2, dim)); 1015 PetscCall(DMDASetSizes(da2, M, N, P)); 1016 PetscCall(DMDASetNumProcs(da2, dd->m, dd->n, dd->p)); 1017 PetscCall(DMDASetBoundaryType(da2, dd->bx, dd->by, dd->bz)); 1018 PetscCall(DMDASetDof(da2, dd->w)); 1019 PetscCall(DMDASetStencilType(da2, dd->stencil_type)); 1020 PetscCall(DMDASetStencilWidth(da2, dd->s)); 1021 if (dim == 3) { 1022 PetscInt *lx, *ly, 
*lz; 1023 PetscCall(PetscMalloc3(dd->m, &lx, dd->n, &ly, dd->p, &lz)); 1024 PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx)); 1025 PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_y, dd->n, dd->ly, ly)); 1026 PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_z, dd->p, dd->lz, lz)); 1027 PetscCall(DMDASetOwnershipRanges(da2, lx, ly, lz)); 1028 PetscCall(PetscFree3(lx, ly, lz)); 1029 } else if (dim == 2) { 1030 PetscInt *lx, *ly; 1031 PetscCall(PetscMalloc2(dd->m, &lx, dd->n, &ly)); 1032 PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx)); 1033 PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_y, dd->n, dd->ly, ly)); 1034 PetscCall(DMDASetOwnershipRanges(da2, lx, ly, NULL)); 1035 PetscCall(PetscFree2(lx, ly)); 1036 } else if (dim == 1) { 1037 PetscInt *lx; 1038 PetscCall(PetscMalloc1(dd->m, &lx)); 1039 PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx)); 1040 PetscCall(DMDASetOwnershipRanges(da2, lx, NULL, NULL)); 1041 PetscCall(PetscFree(lx)); 1042 } 1043 dd2 = (DM_DA *)da2->data; 1044 1045 /* allow overloaded (user replaced) operations to be inherited by refinement clones */ 1046 da2->ops->creatematrix = da->ops->creatematrix; 1047 /* da2->ops->createinterpolation = da->ops->createinterpolation; this causes problem with SNESVI */ 1048 da2->ops->getcoloring = da->ops->getcoloring; 1049 dd2->interptype = dd->interptype; 1050 1051 /* copy fill information if given */ 1052 if (dd->dfill) { 1053 
PetscCall(PetscMalloc1(dd->dfill[dd->w] + dd->w + 1, &dd2->dfill)); 1054 PetscCall(PetscArraycpy(dd2->dfill, dd->dfill, dd->dfill[dd->w] + dd->w + 1)); 1055 } 1056 if (dd->ofill) { 1057 PetscCall(PetscMalloc1(dd->ofill[dd->w] + dd->w + 1, &dd2->ofill)); 1058 PetscCall(PetscArraycpy(dd2->ofill, dd->ofill, dd->ofill[dd->w] + dd->w + 1)); 1059 } 1060 /* copy the refine information */ 1061 dd2->coarsen_x = dd2->refine_x = dd->refine_x; 1062 dd2->coarsen_y = dd2->refine_y = dd->refine_y; 1063 dd2->coarsen_z = dd2->refine_z = dd->refine_z; 1064 1065 if (dd->refine_z_hier) { 1066 if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_z_hier_n) dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown + 1]; 1067 if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_z_hier_n) dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown]; 1068 dd2->refine_z_hier_n = dd->refine_z_hier_n; 1069 PetscCall(PetscMalloc1(dd2->refine_z_hier_n, &dd2->refine_z_hier)); 1070 PetscCall(PetscArraycpy(dd2->refine_z_hier, dd->refine_z_hier, dd2->refine_z_hier_n)); 1071 } 1072 if (dd->refine_y_hier) { 1073 if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_y_hier_n) dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown + 1]; 1074 if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_y_hier_n) dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown]; 1075 dd2->refine_y_hier_n = dd->refine_y_hier_n; 1076 PetscCall(PetscMalloc1(dd2->refine_y_hier_n, &dd2->refine_y_hier)); 1077 PetscCall(PetscArraycpy(dd2->refine_y_hier, dd->refine_y_hier, dd2->refine_y_hier_n)); 1078 } 1079 if (dd->refine_x_hier) { 1080 if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_x_hier_n) dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown + 1]; 1081 if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < 
dd->refine_x_hier_n) dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown]; 1082 dd2->refine_x_hier_n = dd->refine_x_hier_n; 1083 PetscCall(PetscMalloc1(dd2->refine_x_hier_n, &dd2->refine_x_hier)); 1084 PetscCall(PetscArraycpy(dd2->refine_x_hier, dd->refine_x_hier, dd2->refine_x_hier_n)); 1085 } 1086 1087 /* copy vector type information */ 1088 PetscCall(DMSetVecType(da2, da->vectype)); 1089 1090 dd2->lf = dd->lf; 1091 dd2->lj = dd->lj; 1092 1093 da2->leveldown = da->leveldown; 1094 da2->levelup = da->levelup + 1; 1095 1096 PetscCall(DMSetUp(da2)); 1097 1098 /* interpolate coordinates if they are set on the coarse grid */ 1099 PetscCall(DMGetCoordinates(da, &coordsc)); 1100 if (coordsc) { 1101 DM cdaf, cdac; 1102 Mat II; 1103 1104 PetscCall(DMGetCoordinateDM(da, &cdac)); 1105 PetscCall(DMGetCoordinateDM(da2, &cdaf)); 1106 /* force creation of the coordinate vector */ 1107 PetscCall(DMDASetUniformCoordinates(da2, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0)); 1108 PetscCall(DMGetCoordinates(da2, &coordsf)); 1109 PetscCall(DMCreateInterpolation(cdac, cdaf, &II, NULL)); 1110 PetscCall(MatInterpolate(II, coordsc, coordsf)); 1111 PetscCall(MatDestroy(&II)); 1112 } 1113 1114 for (i = 0; i < da->bs; i++) { 1115 const char *fieldname; 1116 PetscCall(DMDAGetFieldName(da, i, &fieldname)); 1117 PetscCall(DMDASetFieldName(da2, i, fieldname)); 1118 } 1119 1120 *daref = da2; 1121 PetscFunctionReturn(PETSC_SUCCESS); 1122 } 1123 1124 PetscErrorCode DMCoarsen_DA(DM dmf, MPI_Comm comm, DM *dmc) 1125 { 1126 PetscInt M, N, P, i, dim; 1127 Vec coordsc, coordsf; 1128 DM dmc2; 1129 DM_DA *dd = (DM_DA *)dmf->data, *dd2; 1130 1131 PetscFunctionBegin; 1132 PetscValidHeaderSpecificType(dmf, DM_CLASSID, 1, DMDA); 1133 PetscAssertPointer(dmc, 3); 1134 1135 PetscCall(DMGetDimension(dmf, &dim)); 1136 if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) { 1137 M = dd->M / dd->coarsen_x; 1138 } else { 1139 M = 1 + (dd->M - 1) / dd->coarsen_x; 1140 } 1141 if (dd->by == DM_BOUNDARY_PERIODIC 
|| dd->interptype == DMDA_Q0) { 1142 if (dim > 1) { 1143 N = dd->N / dd->coarsen_y; 1144 } else { 1145 N = 1; 1146 } 1147 } else { 1148 N = 1 + (dd->N - 1) / dd->coarsen_y; 1149 } 1150 if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) { 1151 if (dim > 2) { 1152 P = dd->P / dd->coarsen_z; 1153 } else { 1154 P = 1; 1155 } 1156 } else { 1157 P = 1 + (dd->P - 1) / dd->coarsen_z; 1158 } 1159 PetscCall(DMDACreate(PetscObjectComm((PetscObject)dmf), &dmc2)); 1160 PetscCall(DMSetOptionsPrefix(dmc2, ((PetscObject)dmf)->prefix)); 1161 PetscCall(DMSetDimension(dmc2, dim)); 1162 PetscCall(DMDASetSizes(dmc2, M, N, P)); 1163 PetscCall(DMDASetNumProcs(dmc2, dd->m, dd->n, dd->p)); 1164 PetscCall(DMDASetBoundaryType(dmc2, dd->bx, dd->by, dd->bz)); 1165 PetscCall(DMDASetDof(dmc2, dd->w)); 1166 PetscCall(DMDASetStencilType(dmc2, dd->stencil_type)); 1167 PetscCall(DMDASetStencilWidth(dmc2, dd->s)); 1168 if (dim == 3) { 1169 PetscInt *lx, *ly, *lz; 1170 PetscCall(PetscMalloc3(dd->m, &lx, dd->n, &ly, dd->p, &lz)); 1171 PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx)); 1172 PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_y, dd->n, dd->ly, ly)); 1173 PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_z, dd->p, dd->lz, lz)); 1174 PetscCall(DMDASetOwnershipRanges(dmc2, lx, ly, lz)); 1175 PetscCall(PetscFree3(lx, ly, lz)); 1176 } else if (dim == 2) { 1177 PetscInt *lx, *ly; 1178 PetscCall(PetscMalloc2(dd->m, &lx, dd->n, &ly)); 1179 PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx)); 1180 PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == 
DMDA_Q0), dd->s, dd->coarsen_y, dd->n, dd->ly, ly)); 1181 PetscCall(DMDASetOwnershipRanges(dmc2, lx, ly, NULL)); 1182 PetscCall(PetscFree2(lx, ly)); 1183 } else if (dim == 1) { 1184 PetscInt *lx; 1185 PetscCall(PetscMalloc1(dd->m, &lx)); 1186 PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx)); 1187 PetscCall(DMDASetOwnershipRanges(dmc2, lx, NULL, NULL)); 1188 PetscCall(PetscFree(lx)); 1189 } 1190 dd2 = (DM_DA *)dmc2->data; 1191 1192 /* allow overloaded (user replaced) operations to be inherited by refinement clones; why are only some inherited and not all? */ 1193 /* dmc2->ops->createinterpolation = dmf->ops->createinterpolation; copying this one causes trouble for DMSetVI */ 1194 dmc2->ops->creatematrix = dmf->ops->creatematrix; 1195 dmc2->ops->getcoloring = dmf->ops->getcoloring; 1196 dd2->interptype = dd->interptype; 1197 1198 /* copy fill information if given */ 1199 if (dd->dfill) { 1200 PetscCall(PetscMalloc1(dd->dfill[dd->w] + dd->w + 1, &dd2->dfill)); 1201 PetscCall(PetscArraycpy(dd2->dfill, dd->dfill, dd->dfill[dd->w] + dd->w + 1)); 1202 } 1203 if (dd->ofill) { 1204 PetscCall(PetscMalloc1(dd->ofill[dd->w] + dd->w + 1, &dd2->ofill)); 1205 PetscCall(PetscArraycpy(dd2->ofill, dd->ofill, dd->ofill[dd->w] + dd->w + 1)); 1206 } 1207 /* copy the refine information */ 1208 dd2->coarsen_x = dd2->refine_x = dd->coarsen_x; 1209 dd2->coarsen_y = dd2->refine_y = dd->coarsen_y; 1210 dd2->coarsen_z = dd2->refine_z = dd->coarsen_z; 1211 1212 if (dd->refine_z_hier) { 1213 if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_z_hier_n) dd2->refine_z = dd->refine_z_hier[dmf->levelup - dmf->leveldown - 1]; 1214 if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_z_hier_n) dd2->coarsen_z = dd->refine_z_hier[dmf->levelup - dmf->leveldown - 2]; 1215 dd2->refine_z_hier_n = dd->refine_z_hier_n; 
1216 PetscCall(PetscMalloc1(dd2->refine_z_hier_n, &dd2->refine_z_hier)); 1217 PetscCall(PetscArraycpy(dd2->refine_z_hier, dd->refine_z_hier, dd2->refine_z_hier_n)); 1218 } 1219 if (dd->refine_y_hier) { 1220 if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_y_hier_n) dd2->refine_y = dd->refine_y_hier[dmf->levelup - dmf->leveldown - 1]; 1221 if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_y_hier_n) dd2->coarsen_y = dd->refine_y_hier[dmf->levelup - dmf->leveldown - 2]; 1222 dd2->refine_y_hier_n = dd->refine_y_hier_n; 1223 PetscCall(PetscMalloc1(dd2->refine_y_hier_n, &dd2->refine_y_hier)); 1224 PetscCall(PetscArraycpy(dd2->refine_y_hier, dd->refine_y_hier, dd2->refine_y_hier_n)); 1225 } 1226 if (dd->refine_x_hier) { 1227 if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_x_hier_n) dd2->refine_x = dd->refine_x_hier[dmf->levelup - dmf->leveldown - 1]; 1228 if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_x_hier_n) dd2->coarsen_x = dd->refine_x_hier[dmf->levelup - dmf->leveldown - 2]; 1229 dd2->refine_x_hier_n = dd->refine_x_hier_n; 1230 PetscCall(PetscMalloc1(dd2->refine_x_hier_n, &dd2->refine_x_hier)); 1231 PetscCall(PetscArraycpy(dd2->refine_x_hier, dd->refine_x_hier, dd2->refine_x_hier_n)); 1232 } 1233 1234 /* copy vector type information */ 1235 PetscCall(DMSetVecType(dmc2, dmf->vectype)); 1236 1237 dd2->lf = dd->lf; 1238 dd2->lj = dd->lj; 1239 1240 dmc2->leveldown = dmf->leveldown + 1; 1241 dmc2->levelup = dmf->levelup; 1242 1243 PetscCall(DMSetUp(dmc2)); 1244 1245 /* inject coordinates if they are set on the fine grid */ 1246 PetscCall(DMGetCoordinates(dmf, &coordsf)); 1247 if (coordsf) { 1248 DM cdaf, cdac; 1249 Mat inject; 1250 VecScatter vscat; 1251 1252 PetscCall(DMGetCoordinateDM(dmf, &cdaf)); 1253 PetscCall(DMGetCoordinateDM(dmc2, &cdac)); 1254 /* force creation of the coordinate vector 
*/ 1255 PetscCall(DMDASetUniformCoordinates(dmc2, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0)); 1256 PetscCall(DMGetCoordinates(dmc2, &coordsc)); 1257 1258 PetscCall(DMCreateInjection(cdac, cdaf, &inject)); 1259 PetscCall(MatScatterGetVecScatter(inject, &vscat)); 1260 PetscCall(VecScatterBegin(vscat, coordsf, coordsc, INSERT_VALUES, SCATTER_FORWARD)); 1261 PetscCall(VecScatterEnd(vscat, coordsf, coordsc, INSERT_VALUES, SCATTER_FORWARD)); 1262 PetscCall(MatDestroy(&inject)); 1263 } 1264 1265 for (i = 0; i < dmf->bs; i++) { 1266 const char *fieldname; 1267 PetscCall(DMDAGetFieldName(dmf, i, &fieldname)); 1268 PetscCall(DMDASetFieldName(dmc2, i, fieldname)); 1269 } 1270 1271 *dmc = dmc2; 1272 PetscFunctionReturn(PETSC_SUCCESS); 1273 } 1274 1275 PetscErrorCode DMRefineHierarchy_DA(DM da, PetscInt nlevels, DM daf[]) 1276 { 1277 PetscInt i, n, *refx, *refy, *refz; 1278 1279 PetscFunctionBegin; 1280 PetscValidHeaderSpecific(da, DM_CLASSID, 1); 1281 PetscCheck(nlevels >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "nlevels cannot be negative"); 1282 if (nlevels == 0) PetscFunctionReturn(PETSC_SUCCESS); 1283 PetscAssertPointer(daf, 3); 1284 1285 /* Get refinement factors, defaults taken from the coarse DMDA */ 1286 PetscCall(PetscMalloc3(nlevels, &refx, nlevels, &refy, nlevels, &refz)); 1287 for (i = 0; i < nlevels; i++) PetscCall(DMDAGetRefinementFactor(da, &refx[i], &refy[i], &refz[i])); 1288 n = nlevels; 1289 PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_x", refx, &n, NULL)); 1290 n = nlevels; 1291 PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_y", refy, &n, NULL)); 1292 n = nlevels; 1293 PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_z", refz, &n, NULL)); 1294 1295 PetscCall(DMDASetRefinementFactor(da, refx[0], refy[0], refz[0])); 1296 PetscCall(DMRefine(da, 
PetscObjectComm((PetscObject)da), &daf[0])); 1297 for (i = 1; i < nlevels; i++) { 1298 PetscCall(DMDASetRefinementFactor(daf[i - 1], refx[i], refy[i], refz[i])); 1299 PetscCall(DMRefine(daf[i - 1], PetscObjectComm((PetscObject)da), &daf[i])); 1300 } 1301 PetscCall(PetscFree3(refx, refy, refz)); 1302 PetscFunctionReturn(PETSC_SUCCESS); 1303 } 1304 1305 PetscErrorCode DMCoarsenHierarchy_DA(DM da, PetscInt nlevels, DM dac[]) 1306 { 1307 PetscInt i; 1308 1309 PetscFunctionBegin; 1310 PetscValidHeaderSpecific(da, DM_CLASSID, 1); 1311 PetscCheck(nlevels >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "nlevels cannot be negative"); 1312 if (nlevels == 0) PetscFunctionReturn(PETSC_SUCCESS); 1313 PetscAssertPointer(dac, 3); 1314 PetscCall(DMCoarsen(da, PetscObjectComm((PetscObject)da), &dac[0])); 1315 for (i = 1; i < nlevels; i++) PetscCall(DMCoarsen(dac[i - 1], PetscObjectComm((PetscObject)da), &dac[i])); 1316 PetscFunctionReturn(PETSC_SUCCESS); 1317 } 1318 1319 static PetscErrorCode DMDASetGLLCoordinates_1d(DM dm, PetscInt n, PetscReal *nodes) 1320 { 1321 PetscInt i, j, xs, xn, q; 1322 PetscScalar *xx; 1323 PetscReal h; 1324 Vec x; 1325 DM_DA *da = (DM_DA *)dm->data; 1326 1327 PetscFunctionBegin; 1328 if (da->bx != DM_BOUNDARY_PERIODIC) { 1329 PetscCall(DMDAGetInfo(dm, NULL, &q, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL)); 1330 q = (q - 1) / (n - 1); /* number of spectral elements */ 1331 h = 2.0 / q; 1332 PetscCall(DMDAGetCorners(dm, &xs, NULL, NULL, &xn, NULL, NULL)); 1333 xs = xs / (n - 1); 1334 xn = xn / (n - 1); 1335 PetscCall(DMDASetUniformCoordinates(dm, -1., 1., 0., 0., 0., 0.)); 1336 PetscCall(DMGetCoordinates(dm, &x)); 1337 PetscCall(DMDAVecGetArray(dm, x, &xx)); 1338 1339 /* loop over local spectral elements */ 1340 for (j = xs; j < xs + xn; j++) { 1341 /* 1342 Except for the first process, each process starts on the second GLL point of the first element on that process 1343 */ 1344 for (i = (j == xs && xs > 0) ? 
1 : 0; i < n; i++) xx[j * (n - 1) + i] = -1.0 + h * j + h * (nodes[i] + 1.0) / 2.; 1345 } 1346 PetscCall(DMDAVecRestoreArray(dm, x, &xx)); 1347 } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented for periodic"); 1348 PetscFunctionReturn(PETSC_SUCCESS); 1349 } 1350 1351 /*@ 1352 1353 DMDASetGLLCoordinates - Sets the global coordinates from -1 to 1 to the GLL points of as many GLL elements that fit the number of grid points 1354 1355 Collective 1356 1357 Input Parameters: 1358 + da - the `DMDA` object 1359 . n - the number of GLL nodes 1360 - nodes - the GLL nodes 1361 1362 Level: advanced 1363 1364 Note: 1365 The parallel decomposition of grid points must correspond to the degree of the GLL. That is, the number of grid points 1366 on each process much be divisible by the number of GLL elements needed per process. This depends on whether the `DMDA` is 1367 periodic or not. 1368 1369 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `PetscDTGaussLobattoLegendreQuadrature()`, `DMGetCoordinates()` 1370 @*/ 1371 PetscErrorCode DMDASetGLLCoordinates(DM da, PetscInt n, PetscReal *nodes) 1372 { 1373 PetscFunctionBegin; 1374 if (da->dim == 1) { 1375 PetscCall(DMDASetGLLCoordinates_1d(da, n, nodes)); 1376 } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented for 2 or 3d"); 1377 PetscFunctionReturn(PETSC_SUCCESS); 1378 } 1379 1380 PetscErrorCode DMGetCompatibility_DA(DM da1, DM dm2, PetscBool *compatible, PetscBool *set) 1381 { 1382 DM_DA *dd1 = (DM_DA *)da1->data, *dd2; 1383 DM da2; 1384 DMType dmtype2; 1385 PetscBool isda, compatibleLocal; 1386 PetscInt i; 1387 1388 PetscFunctionBegin; 1389 PetscCheck(da1->setupcalled, PetscObjectComm((PetscObject)da1), PETSC_ERR_ARG_WRONGSTATE, "DMSetUp() must be called on first DM before DMGetCompatibility()"); 1390 PetscCall(DMGetType(dm2, &dmtype2)); 1391 PetscCall(PetscStrcmp(dmtype2, DMDA, &isda)); 1392 if (isda) { 1393 da2 = dm2; 1394 dd2 = (DM_DA *)da2->data; 1395 
PetscCheck(da2->setupcalled, PetscObjectComm((PetscObject)da2), PETSC_ERR_ARG_WRONGSTATE, "DMSetUp() must be called on second DM before DMGetCompatibility()"); 1396 compatibleLocal = (PetscBool)(da1->dim == da2->dim); 1397 if (compatibleLocal) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->s == dd2->s)); /* Stencil width */ 1398 /* Global size ranks Boundary type */ 1399 if (compatibleLocal) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->M == dd2->M) && (dd1->m == dd2->m) && (dd1->bx == dd2->bx)); 1400 if (compatibleLocal && da1->dim > 1) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->N == dd2->N) && (dd1->n == dd2->n) && (dd1->by == dd2->by)); 1401 if (compatibleLocal && da1->dim > 2) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->P == dd2->P) && (dd1->p == dd2->p) && (dd1->bz == dd2->bz)); 1402 if (compatibleLocal) { 1403 for (i = 0; i < dd1->m; ++i) { compatibleLocal = (PetscBool)(compatibleLocal && (dd1->lx[i] == dd2->lx[i])); /* Local size */ } 1404 } 1405 if (compatibleLocal && da1->dim > 1) { 1406 for (i = 0; i < dd1->n; ++i) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->ly[i] == dd2->ly[i])); 1407 } 1408 if (compatibleLocal && da1->dim > 2) { 1409 for (i = 0; i < dd1->p; ++i) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->lz[i] == dd2->lz[i])); 1410 } 1411 *compatible = compatibleLocal; 1412 *set = PETSC_TRUE; 1413 } else { 1414 /* Decline to determine compatibility with other DM types */ 1415 *set = PETSC_FALSE; 1416 } 1417 PetscFunctionReturn(PETSC_SUCCESS); 1418 } 1419