xref: /petsc/src/dm/impls/da/da.c (revision 4ad8454beace47809662cdae21ee081016eaa39a)
1 #include <petsc/private/dmdaimpl.h> /*I   "petscdmda.h"   I*/
2 
3 /*@
4   DMDASetSizes - Sets the number of grid points in the three dimensional directions
5 
6   Logically Collective
7 
8   Input Parameters:
9 + da - the `DMDA`
10 . M  - the global X size
11 . N  - the global Y size
12 - P  - the global Z size
13 
14   Level: intermediate
15 
16   Developer Note:
17   Since the dimension may not yet have been set the code cannot error check for non-positive Y and Z number of grid points
18 
19 .seealso: [](sec_struct), `DM`, `DMDA`, `PetscSplitOwnership()`
20 @*/
21 PetscErrorCode DMDASetSizes(DM da, PetscInt M, PetscInt N, PetscInt P)
22 {
23   DM_DA *dd = (DM_DA *)da->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
27   PetscValidLogicalCollectiveInt(da, M, 2);
28   PetscValidLogicalCollectiveInt(da, N, 3);
29   PetscValidLogicalCollectiveInt(da, P, 4);
30   PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
31   PetscCheck(M >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in X direction must be positive");
32   PetscCheck(N >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in Y direction must be positive");
33   PetscCheck(P >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in Z direction must be positive");
34 
35   dd->M = M;
36   dd->N = N;
37   dd->P = P;
38   PetscFunctionReturn(PETSC_SUCCESS);
39 }
40 
41 /*@
42   DMDASetNumProcs - Sets the number of processes in each dimension
43 
44   Logically Collective
45 
46   Input Parameters:
47 + da - the `DMDA`
48 . m  - the number of X processes (or `PETSC_DECIDE`)
49 . n  - the number of Y processes (or `PETSC_DECIDE`)
50 - p  - the number of Z processes (or `PETSC_DECIDE`)
51 
52   Level: intermediate
53 
54 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDASetSizes()`, `PetscSplitOwnership()`
55 @*/
56 PetscErrorCode DMDASetNumProcs(DM da, PetscInt m, PetscInt n, PetscInt p)
57 {
58   DM_DA *dd = (DM_DA *)da->data;
59 
60   PetscFunctionBegin;
61   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
62   PetscValidLogicalCollectiveInt(da, m, 2);
63   PetscValidLogicalCollectiveInt(da, n, 3);
64   PetscValidLogicalCollectiveInt(da, p, 4);
65   PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
66   dd->m = m;
67   dd->n = n;
68   dd->p = p;
69   if (da->dim == 2) {
70     PetscMPIInt size;
71     PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)da), &size));
72     if ((dd->m > 0) && (dd->n < 0)) {
73       dd->n = size / dd->m;
74       PetscCheck(dd->n * dd->m == size, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "%" PetscInt_FMT " processes in X direction not divisible into comm size %d", m, size);
75     }
76     if ((dd->n > 0) && (dd->m < 0)) {
77       dd->m = size / dd->n;
78       PetscCheck(dd->n * dd->m == size, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "%" PetscInt_FMT " processes in Y direction not divisible into comm size %d", n, size);
79     }
80   }
81   PetscFunctionReturn(PETSC_SUCCESS);
82 }
83 
/*@
  DMDASetBoundaryType - Sets the type of ghost nodes on domain boundaries.

  Logically Collective

  Input Parameters:
+ da - The `DMDA`
. bx - x boundary type, one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`
. by - y boundary type, one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`
- bz - z boundary type, one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`

  Level: intermediate

  Note:
  Must be called before `DMSetUp()`; the boundary types are validated as collectively consistent across the communicator.

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMBoundaryType`, `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`
@*/
PetscErrorCode DMDASetBoundaryType(DM da, DMBoundaryType bx, DMBoundaryType by, DMBoundaryType bz)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidLogicalCollectiveEnum(da, bx, 2);
  PetscValidLogicalCollectiveEnum(da, by, 3);
  PetscValidLogicalCollectiveEnum(da, bz, 4);
  PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
  dd->bx = bx;
  dd->by = by;
  dd->bz = bz;
  PetscFunctionReturn(PETSC_SUCCESS);
}
114 
115 /*@
116   DMDASetDof - Sets the number of degrees of freedom per vertex
117 
118   Not Collective
119 
120   Input Parameters:
121 + da  - The `DMDA`
122 - dof - Number of degrees of freedom per vertex
123 
124   Level: intermediate
125 
126 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetDof()`, `DMDACreate()`, `DMDestroy()`
127 @*/
128 PetscErrorCode DMDASetDof(DM da, PetscInt dof)
129 {
130   DM_DA *dd = (DM_DA *)da->data;
131 
132   PetscFunctionBegin;
133   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
134   PetscValidLogicalCollectiveInt(da, dof, 2);
135   PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
136   dd->w  = dof;
137   da->bs = dof;
138   PetscFunctionReturn(PETSC_SUCCESS);
139 }
140 
141 /*@
142   DMDAGetDof - Gets the number of degrees of freedom per vertex
143 
144   Not Collective
145 
146   Input Parameter:
147 . da - The `DMDA`
148 
149   Output Parameter:
150 . dof - Number of degrees of freedom per vertex
151 
152   Level: intermediate
153 
154 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDASetDof()`, `DMDACreate()`, `DMDestroy()`
155 @*/
156 PetscErrorCode DMDAGetDof(DM da, PetscInt *dof)
157 {
158   DM_DA *dd = (DM_DA *)da->data;
159 
160   PetscFunctionBegin;
161   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
162   PetscAssertPointer(dof, 2);
163   *dof = dd->w;
164   PetscFunctionReturn(PETSC_SUCCESS);
165 }
166 
167 /*@
168   DMDAGetOverlap - Gets the size of the per-processor overlap.
169 
170   Not Collective
171 
172   Input Parameter:
173 . da - The `DMDA`
174 
175   Output Parameters:
176 + x - Overlap in the x direction
177 . y - Overlap in the y direction
178 - z - Overlap in the z direction
179 
180   Level: intermediate
181 
182 .seealso: [](sec_struct), `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDASetOverlap()`
183 @*/
184 PetscErrorCode DMDAGetOverlap(DM da, PetscInt *x, PetscInt *y, PetscInt *z)
185 {
186   DM_DA *dd = (DM_DA *)da->data;
187 
188   PetscFunctionBegin;
189   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
190   if (x) *x = dd->xol;
191   if (y) *y = dd->yol;
192   if (z) *z = dd->zol;
193   PetscFunctionReturn(PETSC_SUCCESS);
194 }
195 
/*@
  DMDASetOverlap - Sets the size of the per-processor overlap.

  Logically Collective

  Input Parameters:
+ da - The `DMDA`
. x  - Overlap in the x direction
. y  - Overlap in the y direction
- z  - Overlap in the z direction

  Level: intermediate

  Note:
  The values are validated as collectively consistent across the communicator.

.seealso: [](sec_struct), `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDAGetOverlap()`
@*/
PetscErrorCode DMDASetOverlap(DM da, PetscInt x, PetscInt y, PetscInt z)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidLogicalCollectiveInt(da, x, 2);
  PetscValidLogicalCollectiveInt(da, y, 3);
  PetscValidLogicalCollectiveInt(da, z, 4);
  dd->xol = x;
  dd->yol = y;
  dd->zol = z;
  PetscFunctionReturn(PETSC_SUCCESS);
}
225 
226 /*@
227   DMDAGetNumLocalSubDomains - Gets the number of local subdomains that would be created upon decomposition.
228 
229   Not Collective
230 
231   Input Parameter:
232 . da - The `DMDA`
233 
234   Output Parameter:
235 . Nsub - Number of local subdomains created upon decomposition
236 
237   Level: intermediate
238 
239 .seealso: [](sec_struct), `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDASetNumLocalSubDomains()`
240 @*/
241 PetscErrorCode DMDAGetNumLocalSubDomains(DM da, PetscInt *Nsub)
242 {
243   DM_DA *dd = (DM_DA *)da->data;
244 
245   PetscFunctionBegin;
246   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
247   if (Nsub) *Nsub = dd->Nsub;
248   PetscFunctionReturn(PETSC_SUCCESS);
249 }
250 
/*@
  DMDASetNumLocalSubDomains - Sets the number of local subdomains to create when decomposing with `DMCreateDomainDecomposition()`

  Logically Collective

  Input Parameters:
+ da   - The `DMDA`
- Nsub - The number of local subdomains requested

  Level: intermediate

  Note:
  The value is validated as collectively consistent across the communicator.

.seealso: [](sec_struct), `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDAGetNumLocalSubDomains()`
@*/
PetscErrorCode DMDASetNumLocalSubDomains(DM da, PetscInt Nsub)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidLogicalCollectiveInt(da, Nsub, 2);
  dd->Nsub = Nsub;
  PetscFunctionReturn(PETSC_SUCCESS);
}
274 
/*@
  DMDASetOffset - Sets the index offset of the `DMDA`.

  Collective

  Input Parameters:
+ da - The `DMDA`
. xo - The offset in the x direction
. yo - The offset in the y direction
. zo - The offset in the z direction
. Mo - The problem offset in the x direction
. No - The problem offset in the y direction
- Po - The problem offset in the z direction

  Level: developer

  Note:
  This is used primarily to overlap a computation on a local `DMDA` with that on a global `DMDA` without
  changing boundary conditions or subdomain features that depend upon the global offsets.

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetOffset()`, `DMDAVecGetArray()`
@*/
PetscErrorCode DMDASetOffset(DM da, PetscInt xo, PetscInt yo, PetscInt zo, PetscInt Mo, PetscInt No, PetscInt Po)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidLogicalCollectiveInt(da, xo, 2);
  PetscValidLogicalCollectiveInt(da, yo, 3);
  PetscValidLogicalCollectiveInt(da, zo, 4);
  PetscValidLogicalCollectiveInt(da, Mo, 5);
  PetscValidLogicalCollectiveInt(da, No, 6);
  PetscValidLogicalCollectiveInt(da, Po, 7);
  dd->xo = xo;
  dd->yo = yo;
  dd->zo = zo;
  dd->Mo = Mo;
  dd->No = No;
  dd->Po = Po;

  /* keep the coordinate DMDA (if any) indexed consistently with this DMDA */
  if (da->coordinates[0].dm) PetscCall(DMDASetOffset(da->coordinates[0].dm, xo, yo, zo, Mo, No, Po));
  PetscFunctionReturn(PETSC_SUCCESS);
}
319 
/*@
  DMDAGetOffset - Gets the index offset of the `DMDA`.

  Not Collective

  Input Parameter:
. da - The `DMDA`

  Output Parameters:
+ xo - The offset in the x direction
. yo - The offset in the y direction
. zo - The offset in the z direction
. Mo - The problem offset in the x direction
. No - The problem offset in the y direction
- Po - The problem offset in the z direction

  Level: developer

  Note:
  Any output argument may be `NULL` if that value is not needed; the values are those set with `DMDASetOffset()`.

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDASetOffset()`, `DMDAVecGetArray()`
@*/
PetscErrorCode DMDAGetOffset(DM da, PetscInt *xo, PetscInt *yo, PetscInt *zo, PetscInt *Mo, PetscInt *No, PetscInt *Po)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  if (xo) *xo = dd->xo;
  if (yo) *yo = dd->yo;
  if (zo) *zo = dd->zo;
  if (Mo) *Mo = dd->Mo;
  if (No) *No = dd->No;
  if (Po) *Po = dd->Po;
  PetscFunctionReturn(PETSC_SUCCESS);
}
354 
355 /*@
356   DMDAGetNonOverlappingRegion - Gets the indices of the nonoverlapping region of a subdomain `DMDA`.
357 
358   Not Collective
359 
360   Input Parameter:
361 . da - The `DMDA`
362 
363   Output Parameters:
364 + xs - The start of the region in x
365 . ys - The start of the region in y
366 . zs - The start of the region in z
367 . xm - The size of the region in x
368 . ym - The size of the region in y
369 - zm - The size of the region in z
370 
371   Level: intermediate
372 
373 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetOffset()`, `DMDAVecGetArray()`
374 @*/
375 PetscErrorCode DMDAGetNonOverlappingRegion(DM da, PetscInt *xs, PetscInt *ys, PetscInt *zs, PetscInt *xm, PetscInt *ym, PetscInt *zm)
376 {
377   DM_DA *dd = (DM_DA *)da->data;
378 
379   PetscFunctionBegin;
380   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
381   if (xs) *xs = dd->nonxs;
382   if (ys) *ys = dd->nonys;
383   if (zs) *zs = dd->nonzs;
384   if (xm) *xm = dd->nonxm;
385   if (ym) *ym = dd->nonym;
386   if (zm) *zm = dd->nonzm;
387   PetscFunctionReturn(PETSC_SUCCESS);
388 }
389 
390 /*@
391   DMDASetNonOverlappingRegion - Sets the indices of the nonoverlapping region of a subdomain `DMDA`.
392 
393   Collective
394 
395   Input Parameters:
396 + da - The `DMDA`
397 . xs - The start of the region in x
398 . ys - The start of the region in y
399 . zs - The start of the region in z
400 . xm - The size of the region in x
401 . ym - The size of the region in y
402 - zm - The size of the region in z
403 
404   Level: intermediate
405 
406 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetOffset()`, `DMDAVecGetArray()`
407 @*/
408 PetscErrorCode DMDASetNonOverlappingRegion(DM da, PetscInt xs, PetscInt ys, PetscInt zs, PetscInt xm, PetscInt ym, PetscInt zm)
409 {
410   DM_DA *dd = (DM_DA *)da->data;
411 
412   PetscFunctionBegin;
413   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
414   PetscValidLogicalCollectiveInt(da, xs, 2);
415   PetscValidLogicalCollectiveInt(da, ys, 3);
416   PetscValidLogicalCollectiveInt(da, zs, 4);
417   PetscValidLogicalCollectiveInt(da, xm, 5);
418   PetscValidLogicalCollectiveInt(da, ym, 6);
419   PetscValidLogicalCollectiveInt(da, zm, 7);
420   dd->nonxs = xs;
421   dd->nonys = ys;
422   dd->nonzs = zs;
423   dd->nonxm = xm;
424   dd->nonym = ym;
425   dd->nonzm = zm;
426   PetscFunctionReturn(PETSC_SUCCESS);
427 }
428 
/*@
  DMDASetStencilType - Sets the type of the communication stencil

  Logically Collective

  Input Parameters:
+ da    - The `DMDA`
- stype - The stencil type, use either `DMDA_STENCIL_BOX` or `DMDA_STENCIL_STAR`.

  Level: intermediate

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMDAStencilType`, `DMDA_STENCIL_BOX`, `DMDA_STENCIL_STAR`
@*/
PetscErrorCode DMDASetStencilType(DM da, DMDAStencilType stype)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidLogicalCollectiveEnum(da, stype, 2);
  PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
  dd->stencil_type = stype;
  PetscFunctionReturn(PETSC_SUCCESS);
}
453 
454 /*@
455   DMDAGetStencilType - Gets the type of the communication stencil
456 
457   Not Collective
458 
459   Input Parameter:
460 . da - The `DMDA`
461 
462   Output Parameter:
463 . stype - The stencil type, use either `DMDA_STENCIL_BOX` or `DMDA_STENCIL_STAR`.
464 
465   Level: intermediate
466 
467 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMDAStencilType`, `DMDA_STENCIL_BOX`, `DMDA_STENCIL_STAR.`
468 @*/
469 PetscErrorCode DMDAGetStencilType(DM da, DMDAStencilType *stype)
470 {
471   DM_DA *dd = (DM_DA *)da->data;
472 
473   PetscFunctionBegin;
474   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
475   PetscAssertPointer(stype, 2);
476   *stype = dd->stencil_type;
477   PetscFunctionReturn(PETSC_SUCCESS);
478 }
479 
480 /*@
481   DMDASetStencilWidth - Sets the width of the communication stencil
482 
483   Logically Collective
484 
485   Input Parameters:
486 + da    - The `DMDA`
487 - width - The stencil width
488 
489   Level: intermediate
490 
491 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMDAStencilType`, `DMDA_STENCIL_BOX`, `DMDA_STENCIL_STAR.`
492 @*/
493 PetscErrorCode DMDASetStencilWidth(DM da, PetscInt width)
494 {
495   DM_DA *dd = (DM_DA *)da->data;
496 
497   PetscFunctionBegin;
498   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
499   PetscValidLogicalCollectiveInt(da, width, 2);
500   PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
501   dd->s = width;
502   PetscFunctionReturn(PETSC_SUCCESS);
503 }
504 
505 /*@
506   DMDAGetStencilWidth - Gets the width of the communication stencil
507 
508   Not Collective
509 
510   Input Parameter:
511 . da - The `DMDA`
512 
513   Output Parameter:
514 . width - The stencil width
515 
516   Level: intermediate
517 
518 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMDAStencilType`, `DMDA_STENCIL_BOX`, `DMDA_STENCIL_STAR.`
519 @*/
520 PetscErrorCode DMDAGetStencilWidth(DM da, PetscInt *width)
521 {
522   DM_DA *dd = (DM_DA *)da->data;
523 
524   PetscFunctionBegin;
525   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
526   PetscAssertPointer(width, 2);
527   *width = dd->s;
528   PetscFunctionReturn(PETSC_SUCCESS);
529 }
530 
531 static PetscErrorCode DMDACheckOwnershipRanges_Private(DM da, PetscInt M, PetscInt m, const PetscInt lx[])
532 {
533   PetscInt i, sum;
534 
535   PetscFunctionBegin;
536   PetscCheck(M >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Global dimension not set");
537   for (i = sum = 0; i < m; i++) sum += lx[i];
538   PetscCheck(sum == M, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_INCOMP, "Ownership ranges sum to %" PetscInt_FMT " but global dimension is %" PetscInt_FMT, sum, M);
539   PetscFunctionReturn(PETSC_SUCCESS);
540 }
541 
/*@
  DMDASetOwnershipRanges - Sets the number of nodes in each direction on each process

  Logically Collective

  Input Parameters:
+ da - The `DMDA`
. lx - array containing number of nodes in the X direction on each process, or `NULL`. If non-null, must be of length da->m
. ly - array containing number of nodes in the Y direction on each process, or `NULL`. If non-null, must be of length da->n
- lz - array containing number of nodes in the Z direction on each process, or `NULL`. If non-null, must be of length da->p.

  Level: intermediate

  Notes:
  These numbers are NOT multiplied by the number of dof per node.

  The arrays are copied, so the caller retains ownership of `lx`, `ly`, and `lz`.

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`
@*/
PetscErrorCode DMDASetOwnershipRanges(DM da, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[])
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
  if (lx) {
    /* the process-grid extent must already be known so the array length and sum can be validated */
    PetscCheck(dd->m >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of processes");
    PetscCall(DMDACheckOwnershipRanges_Private(da, dd->M, dd->m, lx));
    /* allocate once; repeated calls reuse the existing buffer */
    if (!dd->lx) PetscCall(PetscMalloc1(dd->m, &dd->lx));
    PetscCall(PetscArraycpy(dd->lx, lx, dd->m));
  }
  if (ly) {
    PetscCheck(dd->n >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of processes");
    PetscCall(DMDACheckOwnershipRanges_Private(da, dd->N, dd->n, ly));
    if (!dd->ly) PetscCall(PetscMalloc1(dd->n, &dd->ly));
    PetscCall(PetscArraycpy(dd->ly, ly, dd->n));
  }
  if (lz) {
    PetscCheck(dd->p >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of processes");
    PetscCall(DMDACheckOwnershipRanges_Private(da, dd->P, dd->p, lz));
    if (!dd->lz) PetscCall(PetscMalloc1(dd->p, &dd->lz));
    PetscCall(PetscArraycpy(dd->lz, lz, dd->p));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
587 
588 /*@
589   DMDASetInterpolationType - Sets the type of interpolation that will be
590   returned by `DMCreateInterpolation()`
591 
592   Logically Collective
593 
594   Input Parameters:
595 + da    - initial distributed array
596 - ctype - `DMDA_Q1` and `DMDA_Q0` are currently the only supported forms
597 
598   Level: intermediate
599 
600   Note:
601   You should call this on the coarser of the two `DMDA` you pass to `DMCreateInterpolation()`
602 
603 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate1d()`, `DMDACreate2d()`, `DMDACreate3d()`, `DMDestroy()`, `DMDAInterpolationType`,
604           `DMDA_Q1`, `DMDA_Q0`
605 @*/
606 PetscErrorCode DMDASetInterpolationType(DM da, DMDAInterpolationType ctype)
607 {
608   DM_DA *dd = (DM_DA *)da->data;
609 
610   PetscFunctionBegin;
611   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
612   PetscValidLogicalCollectiveEnum(da, ctype, 2);
613   dd->interptype = ctype;
614   PetscFunctionReturn(PETSC_SUCCESS);
615 }
616 
617 /*@
618   DMDAGetInterpolationType - Gets the type of interpolation that will be
619   used by `DMCreateInterpolation()`
620 
621   Not Collective
622 
623   Input Parameter:
624 . da - distributed array
625 
626   Output Parameter:
627 . ctype - interpolation type (`DMDA_Q1` and `DMDA_Q0` are currently the only supported forms)
628 
629   Level: intermediate
630 
631 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAInterpolationType`, `DMDASetInterpolationType()`, `DMCreateInterpolation()`,
632           `DMDA_Q1`, `DMDA_Q0`
633 @*/
634 PetscErrorCode DMDAGetInterpolationType(DM da, DMDAInterpolationType *ctype)
635 {
636   DM_DA *dd = (DM_DA *)da->data;
637 
638   PetscFunctionBegin;
639   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
640   PetscAssertPointer(ctype, 2);
641   *ctype = dd->interptype;
642   PetscFunctionReturn(PETSC_SUCCESS);
643 }
644 
645 /*@C
646   DMDAGetNeighbors - Gets an array containing the MPI rank of all the current
647   processes neighbors.
648 
649   Not Collective
650 
651   Input Parameter:
652 . da - the `DMDA` object
653 
654   Output Parameter:
655 . ranks - the neighbors ranks, stored with the x index increasing most rapidly. The process itself is in the list
656 
657   Level: intermediate
658 
659   Notes:
660   In 2d the array is of length 9, in 3d of length 27
661 
662   Not supported in 1d
663 
664   Do not free the array, it is freed when the `DMDA` is destroyed.
665 
666   Fortran Note:
667   Pass in an array of the appropriate length to contain the values
668 
669 .seealso: [](sec_struct), `DMDA`, `DM`
670 @*/
671 PetscErrorCode DMDAGetNeighbors(DM da, const PetscMPIInt *ranks[])
672 {
673   DM_DA *dd = (DM_DA *)da->data;
674 
675   PetscFunctionBegin;
676   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
677   *ranks = dd->neighbors;
678   PetscFunctionReturn(PETSC_SUCCESS);
679 }
680 
681 /*@C
682   DMDAGetOwnershipRanges - Gets the ranges of indices in the x, y and z direction that are owned by each process
683 
684   Not Collective
685 
686   Input Parameter:
687 . da - the `DMDA` object
688 
689   Output Parameters:
690 + lx - ownership along x direction (optional)
691 . ly - ownership along y direction (optional)
692 - lz - ownership along z direction (optional)
693 
694   Level: intermediate
695 
696   Note:
697   These correspond to the optional final arguments passed to `DMDACreate()`, `DMDACreate2d()`, `DMDACreate3d()`
698 
699   In C you should not free these arrays, nor change the values in them. They will only have valid values while the
700   `DMDA` they came from still exists (has not been destroyed).
701 
702   These numbers are NOT multiplied by the number of dof per node.
703 
704   Fortran Note:
705   Pass in arrays `lx`, `ly`, and `lz` of the appropriate length to hold the values; the sixth, seventh and
706   eighth arguments from `DMDAGetInfo()`
707 
708 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetCorners()`, `DMDAGetGhostCorners()`, `DMDACreate()`, `DMDACreate1d()`, `DMDACreate2d()`, `DMDACreate3d()`, `VecGetOwnershipRanges()`
709 @*/
710 PetscErrorCode DMDAGetOwnershipRanges(DM da, const PetscInt *lx[], const PetscInt *ly[], const PetscInt *lz[])
711 {
712   DM_DA *dd = (DM_DA *)da->data;
713 
714   PetscFunctionBegin;
715   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
716   if (lx) *lx = dd->lx;
717   if (ly) *ly = dd->ly;
718   if (lz) *lz = dd->lz;
719   PetscFunctionReturn(PETSC_SUCCESS);
720 }
721 
/*@
  DMDASetRefinementFactor - Set the ratios that the `DMDA` grid is refined

  Logically Collective

  Input Parameters:
+ da       - the `DMDA` object
. refine_x - ratio of fine grid to coarse in x direction (2 by default)
. refine_y - ratio of fine grid to coarse in y direction (2 by default)
- refine_z - ratio of fine grid to coarse in z direction (2 by default)

  Options Database Keys:
+ -da_refine_x refine_x - refinement ratio in x direction
. -da_refine_y refine_y - refinement ratio in y direction
. -da_refine_z refine_z - refinement ratio in z direction
- -da_refine <n>        - refine the `DMDA` object n times when it is created.

  Level: intermediate

  Note:
  Pass `PETSC_IGNORE` to leave a value unchanged

.seealso: [](sec_struct), `DM`, `DMDA`, `DMRefine()`, `DMDAGetRefinementFactor()`
@*/
PetscErrorCode DMDASetRefinementFactor(DM da, PetscInt refine_x, PetscInt refine_y, PetscInt refine_z)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidLogicalCollectiveInt(da, refine_x, 2);
  PetscValidLogicalCollectiveInt(da, refine_y, 3);
  PetscValidLogicalCollectiveInt(da, refine_z, 4);

  /* non-positive values (e.g. PETSC_IGNORE) leave the stored ratio unchanged */
  if (refine_x > 0) dd->refine_x = refine_x;
  if (refine_y > 0) dd->refine_y = refine_y;
  if (refine_z > 0) dd->refine_z = refine_z;
  PetscFunctionReturn(PETSC_SUCCESS);
}
761 
762 /*@C
763   DMDAGetRefinementFactor - Gets the ratios that the `DMDA` grid is refined
764 
765   Not Collective
766 
767   Input Parameter:
768 . da - the `DMDA` object
769 
770   Output Parameters:
771 + refine_x - ratio of fine grid to coarse in x direction (2 by default)
772 . refine_y - ratio of fine grid to coarse in y direction (2 by default)
773 - refine_z - ratio of fine grid to coarse in z direction (2 by default)
774 
775   Level: intermediate
776 
777   Note:
778   Pass `NULL` for values you do not need
779 
780 .seealso: [](sec_struct), `DM`, `DMDA`, `DMRefine()`, `DMDASetRefinementFactor()`
781 @*/
782 PetscErrorCode DMDAGetRefinementFactor(DM da, PetscInt *refine_x, PetscInt *refine_y, PetscInt *refine_z)
783 {
784   DM_DA *dd = (DM_DA *)da->data;
785 
786   PetscFunctionBegin;
787   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
788   if (refine_x) *refine_x = dd->refine_x;
789   if (refine_y) *refine_y = dd->refine_y;
790   if (refine_z) *refine_z = dd->refine_z;
791   PetscFunctionReturn(PETSC_SUCCESS);
792 }
793 
/*@C
  DMDASetGetMatrix - Sets the routine used by the `DMDA` to allocate a matrix.

  Logically Collective; No Fortran Support

  Input Parameters:
+ da - the `DMDA` object
- f  - the function that allocates the matrix for that specific `DMDA`

  Calling sequence of `f`:
+ da - the `DMDA` object
- A  - the created matrix

  Level: developer

  Notes:
  If the function is not provided a default function is used that uses the `DMDAStencilType`, `DMBoundaryType`, and value of `DMDASetStencilWidth()`
  to construct the matrix.

  See `DMDASetBlockFills()` that provides a simple way to provide the nonzero structure for
  the diagonal and off-diagonal blocks of the matrix without providing a custom function

  Developer Note:
  This should be called `DMDASetCreateMatrix()`

.seealso: [](sec_struct), `DM`, `DMDA`, `DMCreateMatrix()`, `DMDASetBlockFills()`
@*/
PetscErrorCode DMDASetGetMatrix(DM da, PetscErrorCode (*f)(DM da, Mat *A))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  /* installed callback is later invoked by DMCreateMatrix() */
  da->ops->creatematrix = f;
  PetscFunctionReturn(PETSC_SUCCESS);
}
828 
/*@
  DMDAMapMatStencilToGlobal - Map a list of `MatStencil` on a grid to global indices.

  Not Collective

  Input Parameters:
+ da   - the `DMDA` object
. m    - number of `MatStencil` to map
- idxm - grid points (and component number when dof > 1)

  Output Parameter:
. gidxm - global row indices

  Level: intermediate

  Note:
  Stencil points that fall outside the local ghost region are mapped to negative
  indices, which downstream consumers can detect and skip.

.seealso: [](sec_struct), `DM`, `DMDA`, `MatStencil`
@*/
PetscErrorCode DMDAMapMatStencilToGlobal(DM da, PetscInt m, const MatStencil idxm[], PetscInt gidxm[])
{
  const DM_DA           *dd  = (const DM_DA *)da->data;
  const PetscInt        *dxm = (const PetscInt *)idxm; /* view each MatStencil as 4 consecutive PetscInt (k,j,i,c) */
  PetscInt               i, j, sdim, tmp, dim;
  PetscInt               dims[4], starts[4], dims2[3], starts2[3], dof = dd->w;
  ISLocalToGlobalMapping ltog;

  PetscFunctionBegin;
  if (m <= 0) PetscFunctionReturn(PETSC_SUCCESS);

  /* Code adapted from DMDAGetGhostCorners(): ghost-region starts and extents in (x,y,z) order */
  starts2[0] = dd->Xs / dof + dd->xo;
  starts2[1] = dd->Ys + dd->yo;
  starts2[2] = dd->Zs + dd->zo;
  dims2[0]   = (dd->Xe - dd->Xs) / dof;
  dims2[1]   = (dd->Ye - dd->Ys);
  dims2[2]   = (dd->Ze - dd->Zs);

  /* As if we do MatSetStencil() to get dims[]/starts[] of mat->stencil */
  dim  = da->dim;                   /* DA dim: 1 to 3 */
  sdim = dim + (dof > 1 ? 1 : 0);   /* Dimensions in MatStencil's (k,j,i,c) view */
  for (i = 0; i < dim; i++) {       /* Reverse the order and also skip the unused dimensions */
    dims[i]   = dims2[dim - i - 1]; /* ex. dims/starts[] are in order of {i} for 1D, {j,i} for 2D and {k,j,i} for 3D */
    starts[i] = starts2[dim - i - 1];
  }
  starts[dim] = 0; /* Append the extra dim for dof (won't be used below if dof=1) */
  dims[dim]   = dof;

  /* Map stencils to local indices (code adapted from MatSetValuesStencil()) */
  for (i = 0; i < m; i++) {
    dxm += 3 - dim; /* Input is {k,j,i,c}; move the pointer to the first used index, e.g., j in 2D */
    tmp = 0;
    for (j = 0; j < sdim; j++) {                                                      /* Iter over, ex. j,i or j,i,c in 2D */
      if (tmp < 0 || dxm[j] < starts[j] || dxm[j] >= (starts[j] + dims[j])) tmp = -1; /* Beyond the ghost region, therefore ignored with negative indices */
      else tmp = tmp * dims[j] + (dxm[j] - starts[j]);                                /* row-major accumulation of the local index */
    }
    gidxm[i] = tmp;
    /* Move to the next MatStencil point */
    if (dof > 1) dxm += sdim; /* c is already counted in sdim */
    else dxm += sdim + 1;     /* skip the unused c */
  }

  /* Map local indices to global indices; negative entries pass through unchanged */
  PetscCall(DMGetLocalToGlobalMapping(da, &ltog));
  PetscCall(ISLocalToGlobalMappingApply(ltog, m, gidxm, gidxm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
894 
/*
  Creates "balanced" ownership ranges after refinement, constrained by the need for the
  fine grid boundaries to fall within one stencil width of the coarse partition.

  Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.

  Input:  lc[] - coarse ownership counts over m processes; ratio - refinement ratio (>= 1)
  Output: lf[] - fine ownership counts over the same m processes
*/
static PetscErrorCode DMDARefineOwnershipRanges(DM da, PetscBool periodic, PetscInt stencil_width, PetscInt ratio, PetscInt m, const PetscInt lc[], PetscInt lf[])
{
  PetscInt i, totalc = 0, remaining, startc = 0, startf = 0;

  PetscFunctionBegin;
  PetscCheck(ratio >= 1, PetscObjectComm((PetscObject)da), PETSC_ERR_USER, "Requested refinement ratio %" PetscInt_FMT " must be at least 1", ratio);
  if (ratio == 1) {
    /* no refinement: fine layout is identical to the coarse layout */
    PetscCall(PetscArraycpy(lf, lc, m));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  for (i = 0; i < m; i++) totalc += lc[i];
  /* total fine points: non-periodic grids keep a shared endpoint, hence the (!periodic) terms */
  remaining = (!periodic) + ratio * (totalc - (!periodic));
  for (i = 0; i < m; i++) {
    /* ideal balanced share of what is left, rounded up */
    PetscInt want = remaining / (m - i) + !!(remaining % (m - i));
    if (i == m - 1) lf[i] = want;
    else {
      const PetscInt nextc = startc + lc[i];
      /* Move the first fine node of the next subdomain to the right until the coarse node on its left is within one
       * coarse stencil width of the first coarse node in the next subdomain. */
      while ((startf + want) / ratio < nextc - stencil_width) want++;
      /* Move the last fine node in the current subdomain to the left until the coarse node on its right is within one
       * coarse stencil width of the last coarse node in the current subdomain. */
      while ((startf + want - 1 + ratio - 1) / ratio > nextc - 1 + stencil_width) want--;
      /* Make sure all constraints are satisfied */
      if (want < 0 || want > remaining || ((startf + want) / ratio < nextc - stencil_width) || ((startf + want - 1 + ratio - 1) / ratio > nextc - 1 + stencil_width))
        SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Could not find a compatible refined ownership range");
    }
    lf[i] = want;
    startc += lc[i];
    startf += lf[i];
    remaining -= lf[i];
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
935 
936 /*
937   Creates "balanced" ownership ranges after coarsening, constrained by the need for the
938   fine grid boundaries to fall within one stencil width of the coarse partition.
939 
940   Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.
941 */
942 static PetscErrorCode DMDACoarsenOwnershipRanges(DM da, PetscBool periodic, PetscInt stencil_width, PetscInt ratio, PetscInt m, const PetscInt lf[], PetscInt lc[])
943 {
944   PetscInt i, totalf, remaining, startc, startf;
945 
946   PetscFunctionBegin;
947   PetscCheck(ratio >= 1, PetscObjectComm((PetscObject)da), PETSC_ERR_USER, "Requested refinement ratio %" PetscInt_FMT " must be at least 1", ratio);
948   if (ratio == 1) {
949     PetscCall(PetscArraycpy(lc, lf, m));
950     PetscFunctionReturn(PETSC_SUCCESS);
951   }
952   for (i = 0, totalf = 0; i < m; i++) totalf += lf[i];
953   remaining = (!periodic) + (totalf - (!periodic)) / ratio;
954   for (i = 0, startc = 0, startf = 0; i < m; i++) {
955     PetscInt want = remaining / (m - i) + !!(remaining % (m - i));
956     if (i == m - 1) lc[i] = want;
957     else {
958       const PetscInt nextf = startf + lf[i];
959       /* Slide first coarse node of next subdomain to the left until the coarse node to the left of the first fine
960        * node is within one stencil width. */
961       while (nextf / ratio < startc + want - stencil_width) want--;
962       /* Slide the last coarse node of the current subdomain to the right until the coarse node to the right of the last
963        * fine node is within one stencil width. */
964       while ((nextf - 1 + ratio - 1) / ratio > startc + want - 1 + stencil_width) want++;
965       if (want < 0 || want > remaining || (nextf / ratio < startc + want - stencil_width) || ((nextf - 1 + ratio - 1) / ratio > startc + want - 1 + stencil_width))
966         SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Could not find a compatible coarsened ownership range");
967     }
968     lc[i] = want;
969     startc += lc[i];
970     startf += lf[i];
971     remaining -= lc[i];
972   }
973   PetscFunctionReturn(PETSC_SUCCESS);
974 }
975 
/* Builds a refined DMDA from da: computes the new global sizes and per-rank ownership
   ranges, clones the configuration (boundary types, dof, stencil, fills, refinement
   factors), interpolates coordinates if present, and returns the new DM in daref.
   The comm argument is unused; the refined DM lives on da's communicator. */
PetscErrorCode DMRefine_DA(DM da, MPI_Comm comm, DM *daref)
{
  PetscInt M, N, P, i, dim;
  Vec      coordsc, coordsf;
  DM       da2;
  DM_DA   *dd = (DM_DA *)da->data, *dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscAssertPointer(daref, 3);

  PetscCall(DMGetDimension(da, &dim));
  /* New global sizes: periodic (or Q0 interpolation) grids refine every interval, so the
     count is refine*old; otherwise endpoints are shared so it is 1 + refine*(old-1) */
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->refine_x * dd->M;
  } else {
    M = 1 + dd->refine_x * (dd->M - 1);
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->refine_y * dd->N;
    } else {
      N = 1; /* unused dimension */
    }
  } else {
    N = 1 + dd->refine_y * (dd->N - 1);
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->refine_z * dd->P;
    } else {
      P = 1; /* unused dimension */
    }
  } else {
    P = 1 + dd->refine_z * (dd->P - 1);
  }
  /* Create the fine DMDA with the same layout parameters as the coarse one */
  PetscCall(DMDACreate(PetscObjectComm((PetscObject)da), &da2));
  PetscCall(DMSetOptionsPrefix(da2, ((PetscObject)da)->prefix));
  PetscCall(DMSetDimension(da2, dim));
  PetscCall(DMDASetSizes(da2, M, N, P));
  PetscCall(DMDASetNumProcs(da2, dd->m, dd->n, dd->p));
  PetscCall(DMDASetBoundaryType(da2, dd->bx, dd->by, dd->bz));
  PetscCall(DMDASetDof(da2, dd->w));
  PetscCall(DMDASetStencilType(da2, dd->stencil_type));
  PetscCall(DMDASetStencilWidth(da2, dd->s));
  /* Compute fine ownership ranges compatible with the coarse partition, per active dimension */
  if (dim == 3) {
    PetscInt *lx, *ly, *lz;
    PetscCall(PetscMalloc3(dd->m, &lx, dd->n, &ly, dd->p, &lz));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_y, dd->n, dd->ly, ly));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_z, dd->p, dd->lz, lz));
    PetscCall(DMDASetOwnershipRanges(da2, lx, ly, lz));
    PetscCall(PetscFree3(lx, ly, lz));
  } else if (dim == 2) {
    PetscInt *lx, *ly;
    PetscCall(PetscMalloc2(dd->m, &lx, dd->n, &ly));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_y, dd->n, dd->ly, ly));
    PetscCall(DMDASetOwnershipRanges(da2, lx, ly, NULL));
    PetscCall(PetscFree2(lx, ly));
  } else if (dim == 1) {
    PetscInt *lx;
    PetscCall(PetscMalloc1(dd->m, &lx));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx));
    PetscCall(DMDASetOwnershipRanges(da2, lx, NULL, NULL));
    PetscCall(PetscFree(lx));
  }
  dd2 = (DM_DA *)da2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones */
  da2->ops->creatematrix = da->ops->creatematrix;
  /* da2->ops->createinterpolation = da->ops->createinterpolation; this causes problem with SNESVI */
  da2->ops->getcoloring = da->ops->getcoloring;
  dd2->interptype       = dd->interptype;

  /* copy fill information if given (arrays are CSR-like, length fill[w] + w + 1) */
  if (dd->dfill) {
    PetscCall(PetscMalloc1(dd->dfill[dd->w] + dd->w + 1, &dd2->dfill));
    PetscCall(PetscArraycpy(dd2->dfill, dd->dfill, dd->dfill[dd->w] + dd->w + 1));
  }
  if (dd->ofill) {
    PetscCall(PetscMalloc1(dd->ofill[dd->w] + dd->w + 1, &dd2->ofill));
    PetscCall(PetscArraycpy(dd2->ofill, dd->ofill, dd->ofill[dd->w] + dd->w + 1));
  }
  /* copy the refine information */
  dd2->coarsen_x = dd2->refine_x = dd->refine_x;
  dd2->coarsen_y = dd2->refine_y = dd->refine_y;
  dd2->coarsen_z = dd2->refine_z = dd->refine_z;

  /* If per-level refinement hierarchies were set, pick the factors for the next level
     (index levelup - leveldown + 1) and for coarsening back (index levelup - leveldown),
     then copy the whole hierarchy to the clone */
  if (dd->refine_z_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_z_hier_n) dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown + 1];
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_z_hier_n) dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown];
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_z_hier_n, &dd2->refine_z_hier));
    PetscCall(PetscArraycpy(dd2->refine_z_hier, dd->refine_z_hier, dd2->refine_z_hier_n));
  }
  if (dd->refine_y_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_y_hier_n) dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown + 1];
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_y_hier_n) dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown];
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_y_hier_n, &dd2->refine_y_hier));
    PetscCall(PetscArraycpy(dd2->refine_y_hier, dd->refine_y_hier, dd2->refine_y_hier_n));
  }
  if (dd->refine_x_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_x_hier_n) dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown + 1];
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_x_hier_n) dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown];
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_x_hier_n, &dd2->refine_x_hier));
    PetscCall(PetscArraycpy(dd2->refine_x_hier, dd->refine_x_hier, dd2->refine_x_hier_n));
  }

  /* copy vector type information */
  PetscCall(DMSetVecType(da2, da->vectype));

  /* shallow-copy hooks; presumably these are user callbacks shared across levels — TODO confirm ownership */
  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  /* the clone is one refinement level above da */
  da2->leveldown = da->leveldown;
  da2->levelup   = da->levelup + 1;

  PetscCall(DMSetUp(da2));

  /* interpolate coordinates if they are set on the coarse grid */
  PetscCall(DMGetCoordinates(da, &coordsc));
  if (coordsc) {
    DM  cdaf, cdac;
    Mat II;

    PetscCall(DMGetCoordinateDM(da, &cdac));
    PetscCall(DMGetCoordinateDM(da2, &cdaf));
    /* force creation of the coordinate vector */
    PetscCall(DMDASetUniformCoordinates(da2, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
    PetscCall(DMGetCoordinates(da2, &coordsf));
    PetscCall(DMCreateInterpolation(cdac, cdaf, &II, NULL));
    PetscCall(MatInterpolate(II, coordsc, coordsf));
    PetscCall(MatDestroy(&II));
  }

  /* propagate per-field names to the refined DM */
  for (i = 0; i < da->bs; i++) {
    const char *fieldname;
    PetscCall(DMDAGetFieldName(da, i, &fieldname));
    PetscCall(DMDASetFieldName(da2, i, fieldname));
  }

  *daref = da2;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1122 
/* Builds a coarsened DMDA from dmf: computes the new global sizes and per-rank ownership
   ranges, clones the configuration (boundary types, dof, stencil, fills, coarsening
   factors), injects coordinates if present, and returns the new DM in dmc.
   The comm argument is unused; the coarse DM lives on dmf's communicator. */
PetscErrorCode DMCoarsen_DA(DM dmf, MPI_Comm comm, DM *dmc)
{
  PetscInt M, N, P, i, dim;
  Vec      coordsc, coordsf;
  DM       dmc2;
  DM_DA   *dd = (DM_DA *)dmf->data, *dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(dmf, DM_CLASSID, 1, DMDA);
  PetscAssertPointer(dmc, 3);

  PetscCall(DMGetDimension(dmf, &dim));
  /* New global sizes: inverse of the refinement formulas — periodic (or Q0) grids divide
     evenly, otherwise endpoints are shared so the count is 1 + (old-1)/coarsen */
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->M / dd->coarsen_x;
  } else {
    M = 1 + (dd->M - 1) / dd->coarsen_x;
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->N / dd->coarsen_y;
    } else {
      N = 1; /* unused dimension */
    }
  } else {
    N = 1 + (dd->N - 1) / dd->coarsen_y;
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->P / dd->coarsen_z;
    } else {
      P = 1; /* unused dimension */
    }
  } else {
    P = 1 + (dd->P - 1) / dd->coarsen_z;
  }
  /* Create the coarse DMDA with the same layout parameters as the fine one */
  PetscCall(DMDACreate(PetscObjectComm((PetscObject)dmf), &dmc2));
  PetscCall(DMSetOptionsPrefix(dmc2, ((PetscObject)dmf)->prefix));
  PetscCall(DMSetDimension(dmc2, dim));
  PetscCall(DMDASetSizes(dmc2, M, N, P));
  PetscCall(DMDASetNumProcs(dmc2, dd->m, dd->n, dd->p));
  PetscCall(DMDASetBoundaryType(dmc2, dd->bx, dd->by, dd->bz));
  PetscCall(DMDASetDof(dmc2, dd->w));
  PetscCall(DMDASetStencilType(dmc2, dd->stencil_type));
  PetscCall(DMDASetStencilWidth(dmc2, dd->s));
  /* Compute coarse ownership ranges compatible with the fine partition, per active dimension */
  if (dim == 3) {
    PetscInt *lx, *ly, *lz;
    PetscCall(PetscMalloc3(dd->m, &lx, dd->n, &ly, dd->p, &lz));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_y, dd->n, dd->ly, ly));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_z, dd->p, dd->lz, lz));
    PetscCall(DMDASetOwnershipRanges(dmc2, lx, ly, lz));
    PetscCall(PetscFree3(lx, ly, lz));
  } else if (dim == 2) {
    PetscInt *lx, *ly;
    PetscCall(PetscMalloc2(dd->m, &lx, dd->n, &ly));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_y, dd->n, dd->ly, ly));
    PetscCall(DMDASetOwnershipRanges(dmc2, lx, ly, NULL));
    PetscCall(PetscFree2(lx, ly));
  } else if (dim == 1) {
    PetscInt *lx;
    PetscCall(PetscMalloc1(dd->m, &lx));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx));
    PetscCall(DMDASetOwnershipRanges(dmc2, lx, NULL, NULL));
    PetscCall(PetscFree(lx));
  }
  dd2 = (DM_DA *)dmc2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones; why are only some inherited and not all? */
  /* dmc2->ops->createinterpolation = dmf->ops->createinterpolation; copying this one causes trouble for DMSetVI */
  dmc2->ops->creatematrix = dmf->ops->creatematrix;
  dmc2->ops->getcoloring  = dmf->ops->getcoloring;
  dd2->interptype         = dd->interptype;

  /* copy fill information if given (arrays are CSR-like, length fill[w] + w + 1) */
  if (dd->dfill) {
    PetscCall(PetscMalloc1(dd->dfill[dd->w] + dd->w + 1, &dd2->dfill));
    PetscCall(PetscArraycpy(dd2->dfill, dd->dfill, dd->dfill[dd->w] + dd->w + 1));
  }
  if (dd->ofill) {
    PetscCall(PetscMalloc1(dd->ofill[dd->w] + dd->w + 1, &dd2->ofill));
    PetscCall(PetscArraycpy(dd2->ofill, dd->ofill, dd->ofill[dd->w] + dd->w + 1));
  }
  /* copy the refine information */
  dd2->coarsen_x = dd2->refine_x = dd->coarsen_x;
  dd2->coarsen_y = dd2->refine_y = dd->coarsen_y;
  dd2->coarsen_z = dd2->refine_z = dd->coarsen_z;

  /* If per-level refinement hierarchies were set, pick the factors for re-refining this level
     (index levelup - leveldown - 1) and for the next coarsening (index levelup - leveldown - 2),
     then copy the whole hierarchy to the clone */
  if (dd->refine_z_hier) {
    if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_z_hier_n) dd2->refine_z = dd->refine_z_hier[dmf->levelup - dmf->leveldown - 1];
    if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_z_hier_n) dd2->coarsen_z = dd->refine_z_hier[dmf->levelup - dmf->leveldown - 2];
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_z_hier_n, &dd2->refine_z_hier));
    PetscCall(PetscArraycpy(dd2->refine_z_hier, dd->refine_z_hier, dd2->refine_z_hier_n));
  }
  if (dd->refine_y_hier) {
    if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_y_hier_n) dd2->refine_y = dd->refine_y_hier[dmf->levelup - dmf->leveldown - 1];
    if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_y_hier_n) dd2->coarsen_y = dd->refine_y_hier[dmf->levelup - dmf->leveldown - 2];
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_y_hier_n, &dd2->refine_y_hier));
    PetscCall(PetscArraycpy(dd2->refine_y_hier, dd->refine_y_hier, dd2->refine_y_hier_n));
  }
  if (dd->refine_x_hier) {
    if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_x_hier_n) dd2->refine_x = dd->refine_x_hier[dmf->levelup - dmf->leveldown - 1];
    if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_x_hier_n) dd2->coarsen_x = dd->refine_x_hier[dmf->levelup - dmf->leveldown - 2];
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_x_hier_n, &dd2->refine_x_hier));
    PetscCall(PetscArraycpy(dd2->refine_x_hier, dd->refine_x_hier, dd2->refine_x_hier_n));
  }

  /* copy vector type information */
  PetscCall(DMSetVecType(dmc2, dmf->vectype));

  /* shallow-copy hooks; presumably these are user callbacks shared across levels — TODO confirm ownership */
  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  /* the clone is one coarsening level below dmf */
  dmc2->leveldown = dmf->leveldown + 1;
  dmc2->levelup   = dmf->levelup;

  PetscCall(DMSetUp(dmc2));

  /* inject coordinates if they are set on the fine grid */
  PetscCall(DMGetCoordinates(dmf, &coordsf));
  if (coordsf) {
    DM         cdaf, cdac;
    Mat        inject;
    VecScatter vscat;

    PetscCall(DMGetCoordinateDM(dmf, &cdaf));
    PetscCall(DMGetCoordinateDM(dmc2, &cdac));
    /* force creation of the coordinate vector */
    PetscCall(DMDASetUniformCoordinates(dmc2, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
    PetscCall(DMGetCoordinates(dmc2, &coordsc));

    PetscCall(DMCreateInjection(cdac, cdaf, &inject));
    PetscCall(MatScatterGetVecScatter(inject, &vscat));
    PetscCall(VecScatterBegin(vscat, coordsf, coordsc, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(vscat, coordsf, coordsc, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(MatDestroy(&inject));
  }

  /* propagate per-field names to the coarse DM */
  for (i = 0; i < dmf->bs; i++) {
    const char *fieldname;
    PetscCall(DMDAGetFieldName(dmf, i, &fieldname));
    PetscCall(DMDASetFieldName(dmc2, i, fieldname));
  }

  *dmc = dmc2;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1273 
1274 PetscErrorCode DMRefineHierarchy_DA(DM da, PetscInt nlevels, DM daf[])
1275 {
1276   PetscInt i, n, *refx, *refy, *refz;
1277 
1278   PetscFunctionBegin;
1279   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
1280   PetscCheck(nlevels >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "nlevels cannot be negative");
1281   if (nlevels == 0) PetscFunctionReturn(PETSC_SUCCESS);
1282   PetscAssertPointer(daf, 3);
1283 
1284   /* Get refinement factors, defaults taken from the coarse DMDA */
1285   PetscCall(PetscMalloc3(nlevels, &refx, nlevels, &refy, nlevels, &refz));
1286   for (i = 0; i < nlevels; i++) PetscCall(DMDAGetRefinementFactor(da, &refx[i], &refy[i], &refz[i]));
1287   n = nlevels;
1288   PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_x", refx, &n, NULL));
1289   n = nlevels;
1290   PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_y", refy, &n, NULL));
1291   n = nlevels;
1292   PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_z", refz, &n, NULL));
1293 
1294   PetscCall(DMDASetRefinementFactor(da, refx[0], refy[0], refz[0]));
1295   PetscCall(DMRefine(da, PetscObjectComm((PetscObject)da), &daf[0]));
1296   for (i = 1; i < nlevels; i++) {
1297     PetscCall(DMDASetRefinementFactor(daf[i - 1], refx[i], refy[i], refz[i]));
1298     PetscCall(DMRefine(daf[i - 1], PetscObjectComm((PetscObject)da), &daf[i]));
1299   }
1300   PetscCall(PetscFree3(refx, refy, refz));
1301   PetscFunctionReturn(PETSC_SUCCESS);
1302 }
1303 
1304 PetscErrorCode DMCoarsenHierarchy_DA(DM da, PetscInt nlevels, DM dac[])
1305 {
1306   PetscInt i;
1307 
1308   PetscFunctionBegin;
1309   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
1310   PetscCheck(nlevels >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "nlevels cannot be negative");
1311   if (nlevels == 0) PetscFunctionReturn(PETSC_SUCCESS);
1312   PetscAssertPointer(dac, 3);
1313   PetscCall(DMCoarsen(da, PetscObjectComm((PetscObject)da), &dac[0]));
1314   for (i = 1; i < nlevels; i++) PetscCall(DMCoarsen(dac[i - 1], PetscObjectComm((PetscObject)da), &dac[i]));
1315   PetscFunctionReturn(PETSC_SUCCESS);
1316 }
1317 
1318 static PetscErrorCode DMDASetGLLCoordinates_1d(DM dm, PetscInt n, PetscReal *nodes)
1319 {
1320   PetscInt     i, j, xs, xn, q;
1321   PetscScalar *xx;
1322   PetscReal    h;
1323   Vec          x;
1324   DM_DA       *da = (DM_DA *)dm->data;
1325 
1326   PetscFunctionBegin;
1327   if (da->bx != DM_BOUNDARY_PERIODIC) {
1328     PetscCall(DMDAGetInfo(dm, NULL, &q, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
1329     q = (q - 1) / (n - 1); /* number of spectral elements */
1330     h = 2.0 / q;
1331     PetscCall(DMDAGetCorners(dm, &xs, NULL, NULL, &xn, NULL, NULL));
1332     xs = xs / (n - 1);
1333     xn = xn / (n - 1);
1334     PetscCall(DMDASetUniformCoordinates(dm, -1., 1., 0., 0., 0., 0.));
1335     PetscCall(DMGetCoordinates(dm, &x));
1336     PetscCall(DMDAVecGetArray(dm, x, &xx));
1337 
1338     /* loop over local spectral elements */
1339     for (j = xs; j < xs + xn; j++) {
1340       /*
1341        Except for the first process, each process starts on the second GLL point of the first element on that process
1342        */
1343       for (i = (j == xs && xs > 0) ? 1 : 0; i < n; i++) xx[j * (n - 1) + i] = -1.0 + h * j + h * (nodes[i] + 1.0) / 2.;
1344     }
1345     PetscCall(DMDAVecRestoreArray(dm, x, &xx));
1346   } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented for periodic");
1347   PetscFunctionReturn(PETSC_SUCCESS);
1348 }
1349 
1350 /*@
1351 
1352   DMDASetGLLCoordinates - Sets the global coordinates from -1 to 1 to the GLL points of as many GLL elements that fit the number of grid points
1353 
1354   Collective
1355 
1356   Input Parameters:
1357 + da    - the `DMDA` object
1358 . n     - the number of GLL nodes
1359 - nodes - the GLL nodes
1360 
1361   Level: advanced
1362 
1363   Note:
1364   The parallel decomposition of grid points must correspond to the degree of the GLL. That is, the number of grid points
1365   on each process much be divisible by the number of GLL elements needed per process. This depends on whether the `DMDA` is
1366   periodic or not.
1367 
1368 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `PetscDTGaussLobattoLegendreQuadrature()`, `DMGetCoordinates()`
1369 @*/
1370 PetscErrorCode DMDASetGLLCoordinates(DM da, PetscInt n, PetscReal *nodes)
1371 {
1372   PetscFunctionBegin;
1373   if (da->dim == 1) {
1374     PetscCall(DMDASetGLLCoordinates_1d(da, n, nodes));
1375   } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented for 2 or 3d");
1376   PetscFunctionReturn(PETSC_SUCCESS);
1377 }
1378 
1379 PetscErrorCode DMGetCompatibility_DA(DM da1, DM dm2, PetscBool *compatible, PetscBool *set)
1380 {
1381   DM_DA    *dd1 = (DM_DA *)da1->data, *dd2;
1382   DM        da2;
1383   DMType    dmtype2;
1384   PetscBool isda, compatibleLocal;
1385   PetscInt  i;
1386 
1387   PetscFunctionBegin;
1388   PetscCheck(da1->setupcalled, PetscObjectComm((PetscObject)da1), PETSC_ERR_ARG_WRONGSTATE, "DMSetUp() must be called on first DM before DMGetCompatibility()");
1389   PetscCall(DMGetType(dm2, &dmtype2));
1390   PetscCall(PetscStrcmp(dmtype2, DMDA, &isda));
1391   if (isda) {
1392     da2 = dm2;
1393     dd2 = (DM_DA *)da2->data;
1394     PetscCheck(da2->setupcalled, PetscObjectComm((PetscObject)da2), PETSC_ERR_ARG_WRONGSTATE, "DMSetUp() must be called on second DM before DMGetCompatibility()");
1395     compatibleLocal = (PetscBool)(da1->dim == da2->dim);
1396     if (compatibleLocal) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->s == dd2->s)); /* Stencil width */
1397     /*                                                                           Global size              ranks               Boundary type */
1398     if (compatibleLocal) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->M == dd2->M) && (dd1->m == dd2->m) && (dd1->bx == dd2->bx));
1399     if (compatibleLocal && da1->dim > 1) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->N == dd2->N) && (dd1->n == dd2->n) && (dd1->by == dd2->by));
1400     if (compatibleLocal && da1->dim > 2) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->P == dd2->P) && (dd1->p == dd2->p) && (dd1->bz == dd2->bz));
1401     if (compatibleLocal) {
1402       for (i = 0; i < dd1->m; ++i) { compatibleLocal = (PetscBool)(compatibleLocal && (dd1->lx[i] == dd2->lx[i])); /* Local size     */ }
1403     }
1404     if (compatibleLocal && da1->dim > 1) {
1405       for (i = 0; i < dd1->n; ++i) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->ly[i] == dd2->ly[i]));
1406     }
1407     if (compatibleLocal && da1->dim > 2) {
1408       for (i = 0; i < dd1->p; ++i) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->lz[i] == dd2->lz[i]));
1409     }
1410     *compatible = compatibleLocal;
1411     *set        = PETSC_TRUE;
1412   } else {
1413     /* Decline to determine compatibility with other DM types */
1414     *set = PETSC_FALSE;
1415   }
1416   PetscFunctionReturn(PETSC_SUCCESS);
1417 }
1418