xref: /petsc/src/dm/impls/da/da.c (revision 5d8720fa41fb4169420198de95a3fb9ffc339d07)
1 #include <petsc/private/dmdaimpl.h> /*I   "petscdmda.h"   I*/
2 
3 /*@
4   DMDASetSizes - Sets the number of grid points in the three dimensional directions
5 
6   Logically Collective
7 
8   Input Parameters:
9 + da - the `DMDA`
10 . M  - the global X size
11 . N  - the global Y size
12 - P  - the global Z size
13 
14   Level: intermediate
15 
16   Developer Note:
17   Since the dimension may not yet have been set the code cannot error check for non-positive Y and Z number of grid points
18 
19 .seealso: [](sec_struct), `DM`, `DMDA`, `PetscSplitOwnership()`
20 @*/
21 PetscErrorCode DMDASetSizes(DM da, PetscInt M, PetscInt N, PetscInt P)
22 {
23   DM_DA *dd = (DM_DA *)da->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
27   PetscValidLogicalCollectiveInt(da, M, 2);
28   PetscValidLogicalCollectiveInt(da, N, 3);
29   PetscValidLogicalCollectiveInt(da, P, 4);
30   PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
31   PetscCheck(M >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in X direction must be positive");
32   PetscCheck(N >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in Y direction must be positive");
33   PetscCheck(P >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in Z direction must be positive");
34 
35   dd->M = M;
36   dd->N = N;
37   dd->P = P;
38   PetscFunctionReturn(PETSC_SUCCESS);
39 }
40 
/*@
  DMDASetNumProcs - Sets the number of processes in each dimension

  Logically Collective

  Input Parameters:
+ da - the `DMDA`
. m  - the number of X processes (or `PETSC_DECIDE`)
. n  - the number of Y processes (or `PETSC_DECIDE`)
- p  - the number of Z processes (or `PETSC_DECIDE`)

  Level: intermediate

  Note:
  For a two dimensional `DMDA`, if exactly one of `m` and `n` is positive, the other is computed
  here by dividing it into the communicator size; it is an error if that size is not evenly
  divisible. All other combinations are resolved later, during `DMSetUp()`.

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDASetSizes()`, `PetscSplitOwnership()`
@*/
PetscErrorCode DMDASetNumProcs(DM da, PetscInt m, PetscInt n, PetscInt p)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidLogicalCollectiveInt(da, m, 2);
  PetscValidLogicalCollectiveInt(da, n, 3);
  PetscValidLogicalCollectiveInt(da, p, 4);
  PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
  dd->m = m;
  dd->n = n;
  dd->p = p;
  if (da->dim == 2) {
    PetscMPIInt size;
    PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)da), &size));
    /* Complete a half-specified 2d process grid; `PETSC_DECIDE` is negative, so a
       negative entry marks the undetermined direction */
    if ((dd->m > 0) && (dd->n < 0)) {
      dd->n = size / dd->m;
      PetscCheck(dd->n * dd->m == size, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "%" PetscInt_FMT " processes in X direction not divisible into comm size %d", m, size);
    }
    if ((dd->n > 0) && (dd->m < 0)) {
      dd->m = size / dd->n;
      PetscCheck(dd->n * dd->m == size, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "%" PetscInt_FMT " processes in Y direction not divisible into comm size %d", n, size);
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
83 
84 /*@
85   DMDAGetBoundaryType - Gets the type of ghost nodes on domain boundaries.
86 
87   Not Collective
88 
89   Input Parameter:
90 . da - The `DMDA`
91 
92   Output Parameters:
93 + bx - x boundary type, one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`
94 . by - y boundary type, one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`
95 - bz - z boundary type, one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`
96 
97   Level: intermediate
98 
99 .seealso: [](sec_struct), `DMDASetBoundaryType()`, `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMBoundaryType`, `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`
100 @*/
101 PetscErrorCode DMDAGetBoundaryType(DM da, DMBoundaryType *bx, DMBoundaryType *by, DMBoundaryType *bz)
102 {
103   DM_DA *dd = (DM_DA *)da->data;
104 
105   PetscFunctionBegin;
106   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
107   if (bx) PetscAssertPointer(bx, 2);
108   if (by) PetscAssertPointer(by, 3);
109   if (bz) PetscAssertPointer(bz, 4);
110   if (bx) *bx = dd->bx;
111   if (by) *by = dd->by;
112   if (bz) *bz = dd->bz;
113   PetscFunctionReturn(PETSC_SUCCESS);
114 }
115 
/*@
  DMDASetBoundaryType - Sets the type of ghost nodes on domain boundaries for a `DMDA` object.

  Logically Collective

  Input Parameters:
+ da - The `DMDA`
. bx - x boundary type, one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`
. by - y boundary type, one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`
- bz - z boundary type, one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`

  Level: intermediate

  Note:
  The default is `DM_BOUNDARY_NONE`

.seealso: [](sec_struct), `DMDAGetBoundaryType()`, `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMBoundaryType`, `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`
@*/
PetscErrorCode DMDASetBoundaryType(DM da, DMBoundaryType bx, DMBoundaryType by, DMBoundaryType bz)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidLogicalCollectiveEnum(da, bx, 2);
  PetscValidLogicalCollectiveEnum(da, by, 3);
  PetscValidLogicalCollectiveEnum(da, bz, 4);
  PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
  dd->bx = bx;
  dd->by = by;
  dd->bz = bz;
  PetscFunctionReturn(PETSC_SUCCESS);
}
149 
/*@
  DMDASetDof - Sets the number of degrees of freedom per vertex

  Logically Collective

  Input Parameters:
+ da  - The `DMDA`
- dof - Number of degrees of freedom per vertex

  Level: intermediate

  Note:
  This also sets the block size of the `DM` to `dof`

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetDof()`, `DMDACreate()`, `DMDestroy()`
@*/
PetscErrorCode DMDASetDof(DM da, PetscInt dof)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidLogicalCollectiveInt(da, dof, 2);
  PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
  dd->w  = dof;
  da->bs = dof; /* vectors/matrices obtained from this DM use dof as their block size */
  PetscFunctionReturn(PETSC_SUCCESS);
}
175 
176 /*@
177   DMDAGetDof - Gets the number of degrees of freedom per vertex
178 
179   Not Collective
180 
181   Input Parameter:
182 . da - The `DMDA`
183 
184   Output Parameter:
185 . dof - Number of degrees of freedom per vertex
186 
187   Level: intermediate
188 
189 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDASetDof()`, `DMDACreate()`, `DMDestroy()`
190 @*/
191 PetscErrorCode DMDAGetDof(DM da, PetscInt *dof)
192 {
193   DM_DA *dd = (DM_DA *)da->data;
194 
195   PetscFunctionBegin;
196   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
197   PetscAssertPointer(dof, 2);
198   *dof = dd->w;
199   PetscFunctionReturn(PETSC_SUCCESS);
200 }
201 
202 /*@
203   DMDAGetOverlap - Gets the size of the per-processor overlap.
204 
205   Not Collective
206 
207   Input Parameter:
208 . da - The `DMDA`
209 
210   Output Parameters:
211 + x - Overlap in the x direction
212 . y - Overlap in the y direction
213 - z - Overlap in the z direction
214 
215   Level: intermediate
216 
217 .seealso: [](sec_struct), `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDASetOverlap()`
218 @*/
219 PetscErrorCode DMDAGetOverlap(DM da, PetscInt *x, PetscInt *y, PetscInt *z)
220 {
221   DM_DA *dd = (DM_DA *)da->data;
222 
223   PetscFunctionBegin;
224   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
225   if (x) *x = dd->xol;
226   if (y) *y = dd->yol;
227   if (z) *z = dd->zol;
228   PetscFunctionReturn(PETSC_SUCCESS);
229 }
230 
/*@
  DMDASetOverlap - Sets the size of the per-processor overlap.

  Logically Collective

  Input Parameters:
+ da - The `DMDA`
. x  - Overlap in the x direction
. y  - Overlap in the y direction
- z  - Overlap in the z direction

  Level: intermediate

.seealso: [](sec_struct), `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDAGetOverlap()`
@*/
PetscErrorCode DMDASetOverlap(DM da, PetscInt x, PetscInt y, PetscInt z)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidLogicalCollectiveInt(da, x, 2);
  PetscValidLogicalCollectiveInt(da, y, 3);
  PetscValidLogicalCollectiveInt(da, z, 4);
  dd->xol = x;
  dd->yol = y;
  dd->zol = z;
  PetscFunctionReturn(PETSC_SUCCESS);
}
260 
261 /*@
262   DMDAGetNumLocalSubDomains - Gets the number of local subdomains that would be created upon decomposition.
263 
264   Not Collective
265 
266   Input Parameter:
267 . da - The `DMDA`
268 
269   Output Parameter:
270 . Nsub - Number of local subdomains created upon decomposition
271 
272   Level: intermediate
273 
274 .seealso: [](sec_struct), `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDASetNumLocalSubDomains()`
275 @*/
276 PetscErrorCode DMDAGetNumLocalSubDomains(DM da, PetscInt *Nsub)
277 {
278   DM_DA *dd = (DM_DA *)da->data;
279 
280   PetscFunctionBegin;
281   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
282   if (Nsub) *Nsub = dd->Nsub;
283   PetscFunctionReturn(PETSC_SUCCESS);
284 }
285 
/*@
  DMDASetNumLocalSubDomains - Sets the number of local subdomains to create when decomposing with `DMCreateDomainDecomposition()`

  Logically Collective

  Input Parameters:
+ da   - The `DMDA`
- Nsub - The number of local subdomains requested

  Level: intermediate

.seealso: [](sec_struct), `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDAGetNumLocalSubDomains()`
@*/
PetscErrorCode DMDASetNumLocalSubDomains(DM da, PetscInt Nsub)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidLogicalCollectiveInt(da, Nsub, 2);
  dd->Nsub = Nsub;
  PetscFunctionReturn(PETSC_SUCCESS);
}
309 
/*@
  DMDASetOffset - Sets the index offset of the `DMDA`.

  Collective

  Input Parameters:
+ da - The `DMDA`
. xo - The offset in the x direction
. yo - The offset in the y direction
. zo - The offset in the z direction
. Mo - The problem offset in the x direction
. No - The problem offset in the y direction
- Po - The problem offset in the z direction

  Level: developer

  Note:
  This is used primarily to overlap a computation on a local `DMDA` with that on a global `DMDA` without
  changing boundary conditions or subdomain features that depend upon the global offsets.

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetOffset()`, `DMDAVecGetArray()`
@*/
PetscErrorCode DMDASetOffset(DM da, PetscInt xo, PetscInt yo, PetscInt zo, PetscInt Mo, PetscInt No, PetscInt Po)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidLogicalCollectiveInt(da, xo, 2);
  PetscValidLogicalCollectiveInt(da, yo, 3);
  PetscValidLogicalCollectiveInt(da, zo, 4);
  PetscValidLogicalCollectiveInt(da, Mo, 5);
  PetscValidLogicalCollectiveInt(da, No, 6);
  PetscValidLogicalCollectiveInt(da, Po, 7);
  dd->xo = xo;
  dd->yo = yo;
  dd->zo = zo;
  dd->Mo = Mo;
  dd->No = No;
  dd->Po = Po;

  /* keep the coordinate DM (if one has been created) consistent with the new offsets */
  if (da->coordinates[0].dm) PetscCall(DMDASetOffset(da->coordinates[0].dm, xo, yo, zo, Mo, No, Po));
  PetscFunctionReturn(PETSC_SUCCESS);
}
354 
/*@
  DMDAGetOffset - Gets the index offset of the `DMDA`.

  Not Collective

  Input Parameter:
. da - The `DMDA`

  Output Parameters:
+ xo - The offset in the x direction
. yo - The offset in the y direction
. zo - The offset in the z direction
. Mo - The problem offset in the x direction
. No - The problem offset in the y direction
- Po - The problem offset in the z direction

  Level: developer

  Note:
  Pass `NULL` for any value that is not needed; the values are those set with `DMDASetOffset()`

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDASetOffset()`, `DMDAVecGetArray()`
@*/
PetscErrorCode DMDAGetOffset(DM da, PetscInt *xo, PetscInt *yo, PetscInt *zo, PetscInt *Mo, PetscInt *No, PetscInt *Po)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  if (xo) *xo = dd->xo;
  if (yo) *yo = dd->yo;
  if (zo) *zo = dd->zo;
  if (Mo) *Mo = dd->Mo;
  if (No) *No = dd->No;
  if (Po) *Po = dd->Po;
  PetscFunctionReturn(PETSC_SUCCESS);
}
389 
390 /*@
391   DMDAGetNonOverlappingRegion - Gets the indices of the nonoverlapping region of a subdomain `DMDA`.
392 
393   Not Collective
394 
395   Input Parameter:
396 . da - The `DMDA`
397 
398   Output Parameters:
399 + xs - The start of the region in x
400 . ys - The start of the region in y
401 . zs - The start of the region in z
402 . xm - The size of the region in x
403 . ym - The size of the region in y
404 - zm - The size of the region in z
405 
406   Level: intermediate
407 
408 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetOffset()`, `DMDAVecGetArray()`
409 @*/
410 PetscErrorCode DMDAGetNonOverlappingRegion(DM da, PetscInt *xs, PetscInt *ys, PetscInt *zs, PetscInt *xm, PetscInt *ym, PetscInt *zm)
411 {
412   DM_DA *dd = (DM_DA *)da->data;
413 
414   PetscFunctionBegin;
415   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
416   if (xs) *xs = dd->nonxs;
417   if (ys) *ys = dd->nonys;
418   if (zs) *zs = dd->nonzs;
419   if (xm) *xm = dd->nonxm;
420   if (ym) *ym = dd->nonym;
421   if (zm) *zm = dd->nonzm;
422   PetscFunctionReturn(PETSC_SUCCESS);
423 }
424 
425 /*@
426   DMDASetNonOverlappingRegion - Sets the indices of the nonoverlapping region of a subdomain `DMDA`.
427 
428   Collective
429 
430   Input Parameters:
431 + da - The `DMDA`
432 . xs - The start of the region in x
433 . ys - The start of the region in y
434 . zs - The start of the region in z
435 . xm - The size of the region in x
436 . ym - The size of the region in y
437 - zm - The size of the region in z
438 
439   Level: intermediate
440 
441 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetOffset()`, `DMDAVecGetArray()`
442 @*/
443 PetscErrorCode DMDASetNonOverlappingRegion(DM da, PetscInt xs, PetscInt ys, PetscInt zs, PetscInt xm, PetscInt ym, PetscInt zm)
444 {
445   DM_DA *dd = (DM_DA *)da->data;
446 
447   PetscFunctionBegin;
448   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
449   PetscValidLogicalCollectiveInt(da, xs, 2);
450   PetscValidLogicalCollectiveInt(da, ys, 3);
451   PetscValidLogicalCollectiveInt(da, zs, 4);
452   PetscValidLogicalCollectiveInt(da, xm, 5);
453   PetscValidLogicalCollectiveInt(da, ym, 6);
454   PetscValidLogicalCollectiveInt(da, zm, 7);
455   dd->nonxs = xs;
456   dd->nonys = ys;
457   dd->nonzs = zs;
458   dd->nonxm = xm;
459   dd->nonym = ym;
460   dd->nonzm = zm;
461   PetscFunctionReturn(PETSC_SUCCESS);
462 }
463 
464 /*@
465   DMDASetStencilType - Sets the type of the communication stencil
466 
467   Logically Collective
468 
469   Input Parameters:
470 + da    - The `DMDA`
471 - stype - The stencil type, use either `DMDA_STENCIL_BOX` or `DMDA_STENCIL_STAR`.
472 
473   Level: intermediate
474 
475 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMDAStencilType`, `DMDA_STENCIL_BOX`, `DMDA_STENCIL_STAR.`
476 @*/
477 PetscErrorCode DMDASetStencilType(DM da, DMDAStencilType stype)
478 {
479   DM_DA *dd = (DM_DA *)da->data;
480 
481   PetscFunctionBegin;
482   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
483   PetscValidLogicalCollectiveEnum(da, stype, 2);
484   PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
485   dd->stencil_type = stype;
486   PetscFunctionReturn(PETSC_SUCCESS);
487 }
488 
489 /*@
490   DMDAGetStencilType - Gets the type of the communication stencil
491 
492   Not Collective
493 
494   Input Parameter:
495 . da - The `DMDA`
496 
497   Output Parameter:
498 . stype - The stencil type, use either `DMDA_STENCIL_BOX` or `DMDA_STENCIL_STAR`.
499 
500   Level: intermediate
501 
502 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMDAStencilType`, `DMDA_STENCIL_BOX`, `DMDA_STENCIL_STAR.`
503 @*/
504 PetscErrorCode DMDAGetStencilType(DM da, DMDAStencilType *stype)
505 {
506   DM_DA *dd = (DM_DA *)da->data;
507 
508   PetscFunctionBegin;
509   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
510   PetscAssertPointer(stype, 2);
511   *stype = dd->stencil_type;
512   PetscFunctionReturn(PETSC_SUCCESS);
513 }
514 
515 /*@
516   DMDASetStencilWidth - Sets the width of the communication stencil
517 
518   Logically Collective
519 
520   Input Parameters:
521 + da    - The `DMDA`
522 - width - The stencil width
523 
524   Level: intermediate
525 
526 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMDAStencilType`, `DMDA_STENCIL_BOX`, `DMDA_STENCIL_STAR.`
527 @*/
528 PetscErrorCode DMDASetStencilWidth(DM da, PetscInt width)
529 {
530   DM_DA *dd = (DM_DA *)da->data;
531 
532   PetscFunctionBegin;
533   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
534   PetscValidLogicalCollectiveInt(da, width, 2);
535   PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
536   dd->s = width;
537   PetscFunctionReturn(PETSC_SUCCESS);
538 }
539 
540 /*@
541   DMDAGetStencilWidth - Gets the width of the communication stencil
542 
543   Not Collective
544 
545   Input Parameter:
546 . da - The `DMDA`
547 
548   Output Parameter:
549 . width - The stencil width
550 
551   Level: intermediate
552 
553 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMDAStencilType`, `DMDA_STENCIL_BOX`, `DMDA_STENCIL_STAR.`
554 @*/
555 PetscErrorCode DMDAGetStencilWidth(DM da, PetscInt *width)
556 {
557   DM_DA *dd = (DM_DA *)da->data;
558 
559   PetscFunctionBegin;
560   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
561   PetscAssertPointer(width, 2);
562   *width = dd->s;
563   PetscFunctionReturn(PETSC_SUCCESS);
564 }
565 
566 static PetscErrorCode DMDACheckOwnershipRanges_Private(DM da, PetscInt M, PetscInt m, const PetscInt lx[])
567 {
568   PetscInt i, sum;
569 
570   PetscFunctionBegin;
571   PetscCheck(M >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Global dimension not set");
572   for (i = sum = 0; i < m; i++) sum += lx[i];
573   PetscCheck(sum == M, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_INCOMP, "Ownership ranges sum to %" PetscInt_FMT " but global dimension is %" PetscInt_FMT, sum, M);
574   PetscFunctionReturn(PETSC_SUCCESS);
575 }
576 
/*@
  DMDASetOwnershipRanges - Sets the number of nodes in each direction on each process

  Logically Collective

  Input Parameters:
+ da - The `DMDA`
. lx - array containing number of nodes in the X direction on each process, or `NULL`. If non-null, must be of length da->m
. ly - array containing number of nodes in the Y direction on each process, or `NULL`. If non-null, must be of length da->n
- lz - array containing number of nodes in the Z direction on each process, or `NULL`. If non-null, must be of length da->p.

  Level: intermediate

  Notes:
  These numbers are NOT multiplied by the number of dof per node.

  The number of processes in each direction must already have been set (for example with
  `DMDASetNumProcs()`), and each array must sum to the corresponding global dimension;
  the arrays are copied, so the caller retains ownership of `lx`, `ly` and `lz`.

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`
@*/
PetscErrorCode DMDASetOwnershipRanges(DM da, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[])
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
  if (lx) {
    /* dd->m < 0 (e.g. PETSC_DECIDE) means the process grid in this direction is not yet known */
    PetscCheck(dd->m >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of processes");
    PetscCall(DMDACheckOwnershipRanges_Private(da, dd->M, dd->m, lx));
    if (!dd->lx) PetscCall(PetscMalloc1(dd->m, &dd->lx)); /* allocated once; reused on subsequent calls */
    PetscCall(PetscArraycpy(dd->lx, lx, dd->m));
  }
  if (ly) {
    PetscCheck(dd->n >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of processes");
    PetscCall(DMDACheckOwnershipRanges_Private(da, dd->N, dd->n, ly));
    if (!dd->ly) PetscCall(PetscMalloc1(dd->n, &dd->ly));
    PetscCall(PetscArraycpy(dd->ly, ly, dd->n));
  }
  if (lz) {
    PetscCheck(dd->p >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of processes");
    PetscCall(DMDACheckOwnershipRanges_Private(da, dd->P, dd->p, lz));
    if (!dd->lz) PetscCall(PetscMalloc1(dd->p, &dd->lz));
    PetscCall(PetscArraycpy(dd->lz, lz, dd->p));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
622 
623 /*@
624   DMDASetInterpolationType - Sets the type of interpolation that will be
625   returned by `DMCreateInterpolation()`
626 
627   Logically Collective
628 
629   Input Parameters:
630 + da    - initial distributed array
631 - ctype - `DMDA_Q1` and `DMDA_Q0` are currently the only supported forms
632 
633   Level: intermediate
634 
635   Note:
636   You should call this on the coarser of the two `DMDA` you pass to `DMCreateInterpolation()`
637 
638 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate1d()`, `DMDACreate2d()`, `DMDACreate3d()`, `DMDestroy()`, `DMDAInterpolationType`,
639           `DMDA_Q1`, `DMDA_Q0`
640 @*/
641 PetscErrorCode DMDASetInterpolationType(DM da, DMDAInterpolationType ctype)
642 {
643   DM_DA *dd = (DM_DA *)da->data;
644 
645   PetscFunctionBegin;
646   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
647   PetscValidLogicalCollectiveEnum(da, ctype, 2);
648   dd->interptype = ctype;
649   PetscFunctionReturn(PETSC_SUCCESS);
650 }
651 
652 /*@
653   DMDAGetInterpolationType - Gets the type of interpolation that will be
654   used by `DMCreateInterpolation()`
655 
656   Not Collective
657 
658   Input Parameter:
659 . da - distributed array
660 
661   Output Parameter:
662 . ctype - interpolation type (`DMDA_Q1` and `DMDA_Q0` are currently the only supported forms)
663 
664   Level: intermediate
665 
666 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAInterpolationType`, `DMDASetInterpolationType()`, `DMCreateInterpolation()`,
667           `DMDA_Q1`, `DMDA_Q0`
668 @*/
669 PetscErrorCode DMDAGetInterpolationType(DM da, DMDAInterpolationType *ctype)
670 {
671   DM_DA *dd = (DM_DA *)da->data;
672 
673   PetscFunctionBegin;
674   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
675   PetscAssertPointer(ctype, 2);
676   *ctype = dd->interptype;
677   PetscFunctionReturn(PETSC_SUCCESS);
678 }
679 
680 /*@C
681   DMDAGetNeighbors - Gets an array containing the MPI rank of all the current
682   processes neighbors.
683 
684   Not Collective
685 
686   Input Parameter:
687 . da - the `DMDA` object
688 
689   Output Parameter:
690 . ranks - the neighbors ranks, stored with the x index increasing most rapidly. The process itself is in the list
691 
692   Level: intermediate
693 
694   Notes:
695   In 2d the `ranks` is of length 9, in 3d of length 27
696 
697   Not supported in 1d
698 
699   Do not free the array, it is freed when the `DMDA` is destroyed.
700 
701   Fortran Note:
702   Pass in an array of the appropriate length to contain the values
703 
704 .seealso: [](sec_struct), `DMDA`, `DM`
705 @*/
706 PetscErrorCode DMDAGetNeighbors(DM da, const PetscMPIInt *ranks[])
707 {
708   DM_DA *dd = (DM_DA *)da->data;
709 
710   PetscFunctionBegin;
711   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
712   *ranks = dd->neighbors;
713   PetscFunctionReturn(PETSC_SUCCESS);
714 }
715 
716 /*@C
717   DMDAGetOwnershipRanges - Gets the ranges of indices in the x, y and z direction that are owned by each process
718 
719   Not Collective
720 
721   Input Parameter:
722 . da - the `DMDA` object
723 
724   Output Parameters:
725 + lx - ownership along x direction (optional)
726 . ly - ownership along y direction (optional)
727 - lz - ownership along z direction (optional)
728 
729   Level: intermediate
730 
731   Note:
732   These correspond to the optional final arguments passed to `DMDACreate()`, `DMDACreate2d()`, `DMDACreate3d()`
733 
734   In C you should not free these arrays, nor change the values in them. They will only have valid values while the
735   `DMDA` they came from still exists (has not been destroyed).
736 
737   These numbers are NOT multiplied by the number of dof per node.
738 
739   Fortran Note:
740   Pass in arrays `lx`, `ly`, and `lz` of the appropriate length to hold the values; the sixth, seventh and
741   eighth arguments from `DMDAGetInfo()`
742 
743 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetCorners()`, `DMDAGetGhostCorners()`, `DMDACreate()`, `DMDACreate1d()`, `DMDACreate2d()`, `DMDACreate3d()`, `VecGetOwnershipRanges()`
744 @*/
745 PetscErrorCode DMDAGetOwnershipRanges(DM da, const PetscInt *lx[], const PetscInt *ly[], const PetscInt *lz[])
746 {
747   DM_DA *dd = (DM_DA *)da->data;
748 
749   PetscFunctionBegin;
750   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
751   if (lx) *lx = dd->lx;
752   if (ly) *ly = dd->ly;
753   if (lz) *lz = dd->lz;
754   PetscFunctionReturn(PETSC_SUCCESS);
755 }
756 
757 /*@
758   DMDASetRefinementFactor - Set the ratios that the `DMDA` grid is refined
759 
760   Logically Collective
761 
762   Input Parameters:
763 + da       - the `DMDA` object
764 . refine_x - ratio of fine grid to coarse in x direction (2 by default)
765 . refine_y - ratio of fine grid to coarse in y direction (2 by default)
766 - refine_z - ratio of fine grid to coarse in z direction (2 by default)
767 
768   Options Database Keys:
769 + -da_refine_x refine_x - refinement ratio in x direction
770 . -da_refine_y rafine_y - refinement ratio in y direction
771 . -da_refine_z refine_z - refinement ratio in z direction
772 - -da_refine <n>        - refine the `DMDA` object n times when it is created.
773 
774   Level: intermediate
775 
776   Note:
777   Pass `PETSC_IGNORE` to leave a value unchanged
778 
779 .seealso: [](sec_struct), `DM`, `DMDA`, `DMRefine()`, `DMDAGetRefinementFactor()`
780 @*/
781 PetscErrorCode DMDASetRefinementFactor(DM da, PetscInt refine_x, PetscInt refine_y, PetscInt refine_z)
782 {
783   DM_DA *dd = (DM_DA *)da->data;
784 
785   PetscFunctionBegin;
786   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
787   PetscValidLogicalCollectiveInt(da, refine_x, 2);
788   PetscValidLogicalCollectiveInt(da, refine_y, 3);
789   PetscValidLogicalCollectiveInt(da, refine_z, 4);
790 
791   if (refine_x > 0) dd->refine_x = refine_x;
792   if (refine_y > 0) dd->refine_y = refine_y;
793   if (refine_z > 0) dd->refine_z = refine_z;
794   PetscFunctionReturn(PETSC_SUCCESS);
795 }
796 
797 /*@
798   DMDAGetRefinementFactor - Gets the ratios that the `DMDA` grid is refined
799 
800   Not Collective
801 
802   Input Parameter:
803 . da - the `DMDA` object
804 
805   Output Parameters:
806 + refine_x - ratio of fine grid to coarse in x direction (2 by default)
807 . refine_y - ratio of fine grid to coarse in y direction (2 by default)
808 - refine_z - ratio of fine grid to coarse in z direction (2 by default)
809 
810   Level: intermediate
811 
812   Note:
813   Pass `NULL` for values you do not need
814 
815 .seealso: [](sec_struct), `DM`, `DMDA`, `DMRefine()`, `DMDASetRefinementFactor()`
816 @*/
817 PetscErrorCode DMDAGetRefinementFactor(DM da, PetscInt *refine_x, PetscInt *refine_y, PetscInt *refine_z)
818 {
819   DM_DA *dd = (DM_DA *)da->data;
820 
821   PetscFunctionBegin;
822   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
823   if (refine_x) *refine_x = dd->refine_x;
824   if (refine_y) *refine_y = dd->refine_y;
825   if (refine_z) *refine_z = dd->refine_z;
826   PetscFunctionReturn(PETSC_SUCCESS);
827 }
828 
/*@C
  DMDASetGetMatrix - Sets the routine used by the `DMDA` to allocate a matrix.

  Logically Collective; No Fortran Support

  Input Parameters:
+ da - the `DMDA` object
- f  - the function that allocates the matrix for that specific `DMDA`

  Calling sequence of `f`:
+ da - the `DMDA` object
- A  - the created matrix

  Level: developer

  Notes:
  If the function is not provided a default function is used that uses the `DMDAStencilType`, `DMBoundaryType`, and value of `DMDASetStencilWidth()`
  to construct the matrix.

  See `DMDASetBlockFills()` that provides a simple way to provide the nonzero structure for
  the diagonal and off-diagonal blocks of the matrix without providing a custom function

  Developer Note:
  This should be called `DMDASetCreateMatrix()`

.seealso: [](sec_struct), `DM`, `DMDA`, `DMCreateMatrix()`, `DMDASetBlockFills()`
@*/
PetscErrorCode DMDASetGetMatrix(DM da, PetscErrorCode (*f)(DM da, Mat *A))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  da->ops->creatematrix = f; /* installed in the DM function table; invoked by DMCreateMatrix() */
  PetscFunctionReturn(PETSC_SUCCESS);
}
863 
/*@
  DMDAMapMatStencilToGlobal - Map a list of `MatStencil` on a grid to global indices.

  Not Collective

  Input Parameters:
+ da   - the `DMDA` object
. m    - number of `MatStencil` to map
- idxm - grid points (and component number when dof > 1)

  Output Parameter:
. gidxm - global row indices

  Level: intermediate

  Note:
  Stencil points that fall outside the local ghost region are mapped to negative indices,
  which routines such as `MatSetValues()` ignore.

.seealso: [](sec_struct), `DM`, `DMDA`, `MatStencil`
@*/
PetscErrorCode DMDAMapMatStencilToGlobal(DM da, PetscInt m, const MatStencil idxm[], PetscInt gidxm[])
{
  const DM_DA           *dd  = (const DM_DA *)da->data;
  const PetscInt        *dxm = (const PetscInt *)idxm; /* view the MatStencil array as a flat array of 4 PetscInt per entry: {k,j,i,c} */
  PetscInt               i, j, sdim, tmp, dim;
  PetscInt               dims[4], starts[4], dims2[3], starts2[3], dof = dd->w;
  ISLocalToGlobalMapping ltog;

  PetscFunctionBegin;
  if (m <= 0) PetscFunctionReturn(PETSC_SUCCESS);

  /* Code adapted from DMDAGetGhostCorners() */
  starts2[0] = dd->Xs / dof + dd->xo;
  starts2[1] = dd->Ys + dd->yo;
  starts2[2] = dd->Zs + dd->zo;
  dims2[0]   = (dd->Xe - dd->Xs) / dof;
  dims2[1]   = (dd->Ye - dd->Ys);
  dims2[2]   = (dd->Ze - dd->Zs);

  /* As if we do MatSetStencil() to get dims[]/starts[] of mat->stencil */
  dim  = da->dim;                   /* DA dim: 1 to 3 */
  sdim = dim + (dof > 1 ? 1 : 0);   /* Dimensions in MatStencil's (k,j,i,c) view */
  for (i = 0; i < dim; i++) {       /* Reverse the order and also skip the unused dimensions */
    dims[i]   = dims2[dim - i - 1]; /* ex. dims/starts[] are in order of {i} for 1D, {j,i} for 2D and {k,j,i} for 3D */
    starts[i] = starts2[dim - i - 1];
  }
  starts[dim] = 0; /* Append the extra dim for dof (won't be used below if dof=1) */
  dims[dim]   = dof;

  /* Map stencils to local indices (code adapted from MatSetValuesStencil()) */
  for (i = 0; i < m; i++) {
    dxm += 3 - dim; /* Input is {k,j,i,c}; move the pointer to the first used index, e.g., j in 2D */
    tmp = 0;
    /* row-major flattening of the (up to sdim-dimensional) local ghosted box */
    for (j = 0; j < sdim; j++) {                                                      /* Iter over, ex. j,i or j,i,c in 2D */
      if (tmp < 0 || dxm[j] < starts[j] || dxm[j] >= (starts[j] + dims[j])) tmp = -1; /* Beyond the ghost region, therefore ignored with negative indices */
      else tmp = tmp * dims[j] + (dxm[j] - starts[j]);
    }
    gidxm[i] = tmp;
    /* Move to the next MatStencil point */
    if (dof > 1) dxm += sdim; /* c is already counted in sdim */
    else dxm += sdim + 1;     /* skip the unused c */
  }

  /* Map local indices to global indices; negative local indices pass through unchanged */
  PetscCall(DMGetLocalToGlobalMapping(da, &ltog));
  PetscCall(ISLocalToGlobalMappingApply(ltog, m, gidxm, gidxm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
929 
930 /*
931   Creates "balanced" ownership ranges after refinement, constrained by the need for the
932   fine grid boundaries to fall within one stencil width of the coarse partition.
933 
934   Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.
935 */
936 static PetscErrorCode DMDARefineOwnershipRanges(DM da, PetscBool periodic, PetscInt stencil_width, PetscInt ratio, PetscInt m, const PetscInt lc[], PetscInt lf[])
937 {
938   PetscInt i, totalc = 0, remaining, startc = 0, startf = 0;
939 
940   PetscFunctionBegin;
941   PetscCheck(ratio >= 1, PetscObjectComm((PetscObject)da), PETSC_ERR_USER, "Requested refinement ratio %" PetscInt_FMT " must be at least 1", ratio);
942   if (ratio == 1) {
943     PetscCall(PetscArraycpy(lf, lc, m));
944     PetscFunctionReturn(PETSC_SUCCESS);
945   }
946   for (i = 0; i < m; i++) totalc += lc[i];
947   remaining = (!periodic) + ratio * (totalc - (!periodic));
948   for (i = 0; i < m; i++) {
949     PetscInt want = remaining / (m - i) + !!(remaining % (m - i));
950     if (i == m - 1) lf[i] = want;
951     else {
952       const PetscInt nextc = startc + lc[i];
953       /* Move the first fine node of the next subdomain to the right until the coarse node on its left is within one
954        * coarse stencil width of the first coarse node in the next subdomain. */
955       while ((startf + want) / ratio < nextc - stencil_width) want++;
956       /* Move the last fine node in the current subdomain to the left until the coarse node on its right is within one
957        * coarse stencil width of the last coarse node in the current subdomain. */
958       while ((startf + want - 1 + ratio - 1) / ratio > nextc - 1 + stencil_width) want--;
959       /* Make sure all constraints are satisfied */
960       if (want < 0 || want > remaining || ((startf + want) / ratio < nextc - stencil_width) || ((startf + want - 1 + ratio - 1) / ratio > nextc - 1 + stencil_width))
961         SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Could not find a compatible refined ownership range");
962     }
963     lf[i] = want;
964     startc += lc[i];
965     startf += lf[i];
966     remaining -= lf[i];
967   }
968   PetscFunctionReturn(PETSC_SUCCESS);
969 }
970 
971 /*
972   Creates "balanced" ownership ranges after coarsening, constrained by the need for the
973   fine grid boundaries to fall within one stencil width of the coarse partition.
974 
975   Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.
976 */
977 static PetscErrorCode DMDACoarsenOwnershipRanges(DM da, PetscBool periodic, PetscInt stencil_width, PetscInt ratio, PetscInt m, const PetscInt lf[], PetscInt lc[])
978 {
979   PetscInt i, totalf, remaining, startc, startf;
980 
981   PetscFunctionBegin;
982   PetscCheck(ratio >= 1, PetscObjectComm((PetscObject)da), PETSC_ERR_USER, "Requested refinement ratio %" PetscInt_FMT " must be at least 1", ratio);
983   if (ratio == 1) {
984     PetscCall(PetscArraycpy(lc, lf, m));
985     PetscFunctionReturn(PETSC_SUCCESS);
986   }
987   for (i = 0, totalf = 0; i < m; i++) totalf += lf[i];
988   remaining = (!periodic) + (totalf - (!periodic)) / ratio;
989   for (i = 0, startc = 0, startf = 0; i < m; i++) {
990     PetscInt want = remaining / (m - i) + !!(remaining % (m - i));
991     if (i == m - 1) lc[i] = want;
992     else {
993       const PetscInt nextf = startf + lf[i];
994       /* Slide first coarse node of next subdomain to the left until the coarse node to the left of the first fine
995        * node is within one stencil width. */
996       while (nextf / ratio < startc + want - stencil_width) want--;
997       /* Slide the last coarse node of the current subdomain to the right until the coarse node to the right of the last
998        * fine node is within one stencil width. */
999       while ((nextf - 1 + ratio - 1) / ratio > startc + want - 1 + stencil_width) want++;
1000       if (want < 0 || want > remaining || (nextf / ratio < startc + want - stencil_width) || ((nextf - 1 + ratio - 1) / ratio > startc + want - 1 + stencil_width))
1001         SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Could not find a compatible coarsened ownership range");
1002     }
1003     lc[i] = want;
1004     startc += lc[i];
1005     startf += lf[i];
1006     remaining -= lc[i];
1007   }
1008   PetscFunctionReturn(PETSC_SUCCESS);
1009 }
1010 
/* Creates a refined DMDA da2 from da: computes the refined global sizes and per-process
   ownership ranges, clones configuration (boundary types, dof, stencil, fill patterns,
   refinement hierarchy), then interpolates coordinates from the coarse grid if present. */
PetscErrorCode DMRefine_DA(DM da, MPI_Comm comm, DM *daref)
{
  PetscInt M, N, P, i, dim;
  Vec      coordsc, coordsf;
  DM       da2;
  DM_DA   *dd = (DM_DA *)da->data, *dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscAssertPointer(daref, 3);

  /* Refined global sizes: periodic (or Q0) grids scale all points by the refinement
     factor; otherwise shared interval endpoints mean only M-1 intervals are refined. */
  PetscCall(DMGetDimension(da, &dim));
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->refine_x * dd->M;
  } else {
    M = 1 + dd->refine_x * (dd->M - 1);
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->refine_y * dd->N;
    } else {
      N = 1; /* unused dimension */
    }
  } else {
    N = 1 + dd->refine_y * (dd->N - 1);
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->refine_z * dd->P;
    } else {
      P = 1; /* unused dimension */
    }
  } else {
    P = 1 + dd->refine_z * (dd->P - 1);
  }
  /* Build the fine DMDA with the same process grid and stencil configuration */
  PetscCall(DMDACreate(PetscObjectComm((PetscObject)da), &da2));
  PetscCall(DMSetOptionsPrefix(da2, ((PetscObject)da)->prefix));
  PetscCall(DMSetDimension(da2, dim));
  PetscCall(DMDASetSizes(da2, M, N, P));
  PetscCall(DMDASetNumProcs(da2, dd->m, dd->n, dd->p));
  PetscCall(DMDASetBoundaryType(da2, dd->bx, dd->by, dd->bz));
  PetscCall(DMDASetDof(da2, dd->w));
  PetscCall(DMDASetStencilType(da2, dd->stencil_type));
  PetscCall(DMDASetStencilWidth(da2, dd->s));
  /* Compute fine ownership ranges compatible with the coarse partition, one direction per active dimension */
  if (dim == 3) {
    PetscInt *lx, *ly, *lz;
    PetscCall(PetscMalloc3(dd->m, &lx, dd->n, &ly, dd->p, &lz));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_y, dd->n, dd->ly, ly));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_z, dd->p, dd->lz, lz));
    PetscCall(DMDASetOwnershipRanges(da2, lx, ly, lz));
    PetscCall(PetscFree3(lx, ly, lz));
  } else if (dim == 2) {
    PetscInt *lx, *ly;
    PetscCall(PetscMalloc2(dd->m, &lx, dd->n, &ly));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_y, dd->n, dd->ly, ly));
    PetscCall(DMDASetOwnershipRanges(da2, lx, ly, NULL));
    PetscCall(PetscFree2(lx, ly));
  } else if (dim == 1) {
    PetscInt *lx;
    PetscCall(PetscMalloc1(dd->m, &lx));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx));
    PetscCall(DMDASetOwnershipRanges(da2, lx, NULL, NULL));
    PetscCall(PetscFree(lx));
  }
  dd2 = (DM_DA *)da2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones */
  da2->ops->creatematrix = da->ops->creatematrix;
  /* da2->ops->createinterpolation = da->ops->createinterpolation; this causes problem with SNESVI */
  da2->ops->getcoloring = da->ops->getcoloring;
  dd2->interptype       = dd->interptype;

  /* copy fill information if given; dfill/ofill arrays have dd->w + 1 offsets followed by dfill[dd->w] column entries */
  if (dd->dfill) {
    PetscCall(PetscMalloc1(dd->dfill[dd->w] + dd->w + 1, &dd2->dfill));
    PetscCall(PetscArraycpy(dd2->dfill, dd->dfill, dd->dfill[dd->w] + dd->w + 1));
  }
  if (dd->ofill) {
    PetscCall(PetscMalloc1(dd->ofill[dd->w] + dd->w + 1, &dd2->ofill));
    PetscCall(PetscArraycpy(dd2->ofill, dd->ofill, dd->ofill[dd->w] + dd->w + 1));
  }
  /* copy the refine information */
  dd2->coarsen_x = dd2->refine_x = dd->refine_x;
  dd2->coarsen_y = dd2->refine_y = dd->refine_y;
  dd2->coarsen_z = dd2->refine_z = dd->refine_z;

  /* If a per-level refinement hierarchy was given, pick the factors for the next level
     (levelup - leveldown + 1) and the factor that undoes this refinement (levelup - leveldown). */
  if (dd->refine_z_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_z_hier_n) dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown + 1];
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_z_hier_n) dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown];
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_z_hier_n, &dd2->refine_z_hier));
    PetscCall(PetscArraycpy(dd2->refine_z_hier, dd->refine_z_hier, dd2->refine_z_hier_n));
  }
  if (dd->refine_y_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_y_hier_n) dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown + 1];
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_y_hier_n) dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown];
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_y_hier_n, &dd2->refine_y_hier));
    PetscCall(PetscArraycpy(dd2->refine_y_hier, dd->refine_y_hier, dd2->refine_y_hier_n));
  }
  if (dd->refine_x_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_x_hier_n) dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown + 1];
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_x_hier_n) dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown];
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_x_hier_n, &dd2->refine_x_hier));
    PetscCall(PetscArraycpy(dd2->refine_x_hier, dd->refine_x_hier, dd2->refine_x_hier_n));
  }

  /* copy vector type information */
  PetscCall(DMSetVecType(da2, da->vectype));

  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  da2->leveldown = da->leveldown;
  da2->levelup   = da->levelup + 1; /* the clone is one refinement level up */

  PetscCall(DMSetUp(da2));

  /* interpolate coordinates if they are set on the coarse grid */
  PetscCall(DMGetCoordinates(da, &coordsc));
  if (coordsc) {
    DM  cdaf, cdac;
    Mat II;

    PetscCall(DMGetCoordinateDM(da, &cdac));
    PetscCall(DMGetCoordinateDM(da2, &cdaf));
    /* force creation of the coordinate vector */
    PetscCall(DMDASetUniformCoordinates(da2, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
    PetscCall(DMGetCoordinates(da2, &coordsf));
    PetscCall(DMCreateInterpolation(cdac, cdaf, &II, NULL));
    PetscCall(MatInterpolate(II, coordsc, coordsf));
    PetscCall(MatDestroy(&II));
  }

  /* carry over the per-field names */
  for (i = 0; i < da->bs; i++) {
    const char *fieldname;
    PetscCall(DMDAGetFieldName(da, i, &fieldname));
    PetscCall(DMDASetFieldName(da2, i, fieldname));
  }

  *daref = da2;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1157 
/* Creates a coarsened DMDA dmc2 from the fine grid dmf: computes the coarse global sizes
   and per-process ownership ranges, clones configuration (boundary types, dof, stencil,
   fill patterns, refinement hierarchy), then injects coordinates from the fine grid if present.
   Mirrors DMRefine_DA. */
PetscErrorCode DMCoarsen_DA(DM dmf, MPI_Comm comm, DM *dmc)
{
  PetscInt M, N, P, i, dim;
  Vec      coordsc, coordsf;
  DM       dmc2;
  DM_DA   *dd = (DM_DA *)dmf->data, *dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(dmf, DM_CLASSID, 1, DMDA);
  PetscAssertPointer(dmc, 3);

  /* Coarse global sizes: the inverse of the size formulas used in DMRefine_DA */
  PetscCall(DMGetDimension(dmf, &dim));
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->M / dd->coarsen_x;
  } else {
    M = 1 + (dd->M - 1) / dd->coarsen_x;
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->N / dd->coarsen_y;
    } else {
      N = 1; /* unused dimension */
    }
  } else {
    N = 1 + (dd->N - 1) / dd->coarsen_y;
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->P / dd->coarsen_z;
    } else {
      P = 1; /* unused dimension */
    }
  } else {
    P = 1 + (dd->P - 1) / dd->coarsen_z;
  }
  /* Build the coarse DMDA with the same process grid and stencil configuration */
  PetscCall(DMDACreate(PetscObjectComm((PetscObject)dmf), &dmc2));
  PetscCall(DMSetOptionsPrefix(dmc2, ((PetscObject)dmf)->prefix));
  PetscCall(DMSetDimension(dmc2, dim));
  PetscCall(DMDASetSizes(dmc2, M, N, P));
  PetscCall(DMDASetNumProcs(dmc2, dd->m, dd->n, dd->p));
  PetscCall(DMDASetBoundaryType(dmc2, dd->bx, dd->by, dd->bz));
  PetscCall(DMDASetDof(dmc2, dd->w));
  PetscCall(DMDASetStencilType(dmc2, dd->stencil_type));
  PetscCall(DMDASetStencilWidth(dmc2, dd->s));
  /* Compute coarse ownership ranges compatible with the fine partition, one direction per active dimension */
  if (dim == 3) {
    PetscInt *lx, *ly, *lz;
    PetscCall(PetscMalloc3(dd->m, &lx, dd->n, &ly, dd->p, &lz));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_y, dd->n, dd->ly, ly));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_z, dd->p, dd->lz, lz));
    PetscCall(DMDASetOwnershipRanges(dmc2, lx, ly, lz));
    PetscCall(PetscFree3(lx, ly, lz));
  } else if (dim == 2) {
    PetscInt *lx, *ly;
    PetscCall(PetscMalloc2(dd->m, &lx, dd->n, &ly));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_y, dd->n, dd->ly, ly));
    PetscCall(DMDASetOwnershipRanges(dmc2, lx, ly, NULL));
    PetscCall(PetscFree2(lx, ly));
  } else if (dim == 1) {
    PetscInt *lx;
    PetscCall(PetscMalloc1(dd->m, &lx));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx));
    PetscCall(DMDASetOwnershipRanges(dmc2, lx, NULL, NULL));
    PetscCall(PetscFree(lx));
  }
  dd2 = (DM_DA *)dmc2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones; why are only some inherited and not all? */
  /* dmc2->ops->createinterpolation = dmf->ops->createinterpolation; copying this one causes trouble for DMSetVI */
  dmc2->ops->creatematrix = dmf->ops->creatematrix;
  dmc2->ops->getcoloring  = dmf->ops->getcoloring;
  dd2->interptype         = dd->interptype;

  /* copy fill information if given; dfill/ofill arrays have dd->w + 1 offsets followed by dfill[dd->w] column entries */
  if (dd->dfill) {
    PetscCall(PetscMalloc1(dd->dfill[dd->w] + dd->w + 1, &dd2->dfill));
    PetscCall(PetscArraycpy(dd2->dfill, dd->dfill, dd->dfill[dd->w] + dd->w + 1));
  }
  if (dd->ofill) {
    PetscCall(PetscMalloc1(dd->ofill[dd->w] + dd->w + 1, &dd2->ofill));
    PetscCall(PetscArraycpy(dd2->ofill, dd->ofill, dd->ofill[dd->w] + dd->w + 1));
  }
  /* copy the refine information */
  dd2->coarsen_x = dd2->refine_x = dd->coarsen_x;
  dd2->coarsen_y = dd2->refine_y = dd->coarsen_y;
  dd2->coarsen_z = dd2->refine_z = dd->coarsen_z;

  /* If a per-level refinement hierarchy was given, pick the factor that undoes this coarsening
     (levelup - leveldown - 1) and the factor for the next coarsening (levelup - leveldown - 2). */
  if (dd->refine_z_hier) {
    if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_z_hier_n) dd2->refine_z = dd->refine_z_hier[dmf->levelup - dmf->leveldown - 1];
    if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_z_hier_n) dd2->coarsen_z = dd->refine_z_hier[dmf->levelup - dmf->leveldown - 2];
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_z_hier_n, &dd2->refine_z_hier));
    PetscCall(PetscArraycpy(dd2->refine_z_hier, dd->refine_z_hier, dd2->refine_z_hier_n));
  }
  if (dd->refine_y_hier) {
    if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_y_hier_n) dd2->refine_y = dd->refine_y_hier[dmf->levelup - dmf->leveldown - 1];
    if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_y_hier_n) dd2->coarsen_y = dd->refine_y_hier[dmf->levelup - dmf->leveldown - 2];
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_y_hier_n, &dd2->refine_y_hier));
    PetscCall(PetscArraycpy(dd2->refine_y_hier, dd->refine_y_hier, dd2->refine_y_hier_n));
  }
  if (dd->refine_x_hier) {
    if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_x_hier_n) dd2->refine_x = dd->refine_x_hier[dmf->levelup - dmf->leveldown - 1];
    if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_x_hier_n) dd2->coarsen_x = dd->refine_x_hier[dmf->levelup - dmf->leveldown - 2];
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_x_hier_n, &dd2->refine_x_hier));
    PetscCall(PetscArraycpy(dd2->refine_x_hier, dd->refine_x_hier, dd2->refine_x_hier_n));
  }

  /* copy vector type information */
  PetscCall(DMSetVecType(dmc2, dmf->vectype));

  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  dmc2->leveldown = dmf->leveldown + 1; /* the clone is one coarsening level down */
  dmc2->levelup   = dmf->levelup;

  PetscCall(DMSetUp(dmc2));

  /* inject coordinates if they are set on the fine grid */
  PetscCall(DMGetCoordinates(dmf, &coordsf));
  if (coordsf) {
    DM         cdaf, cdac;
    Mat        inject;
    VecScatter vscat;

    PetscCall(DMGetCoordinateDM(dmf, &cdaf));
    PetscCall(DMGetCoordinateDM(dmc2, &cdac));
    /* force creation of the coordinate vector */
    PetscCall(DMDASetUniformCoordinates(dmc2, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
    PetscCall(DMGetCoordinates(dmc2, &coordsc));

    /* restrict fine coordinates to the coarse grid via the injection's scatter */
    PetscCall(DMCreateInjection(cdac, cdaf, &inject));
    PetscCall(MatScatterGetVecScatter(inject, &vscat));
    PetscCall(VecScatterBegin(vscat, coordsf, coordsc, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(vscat, coordsf, coordsc, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(MatDestroy(&inject));
  }

  /* carry over the per-field names */
  for (i = 0; i < dmf->bs; i++) {
    const char *fieldname;
    PetscCall(DMDAGetFieldName(dmf, i, &fieldname));
    PetscCall(DMDASetFieldName(dmc2, i, fieldname));
  }

  *dmc = dmc2;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1308 
1309 PetscErrorCode DMRefineHierarchy_DA(DM da, PetscInt nlevels, DM daf[])
1310 {
1311   PetscInt i, n, *refx, *refy, *refz;
1312 
1313   PetscFunctionBegin;
1314   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
1315   PetscCheck(nlevels >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "nlevels cannot be negative");
1316   if (nlevels == 0) PetscFunctionReturn(PETSC_SUCCESS);
1317   PetscAssertPointer(daf, 3);
1318 
1319   /* Get refinement factors, defaults taken from the coarse DMDA */
1320   PetscCall(PetscMalloc3(nlevels, &refx, nlevels, &refy, nlevels, &refz));
1321   for (i = 0; i < nlevels; i++) PetscCall(DMDAGetRefinementFactor(da, &refx[i], &refy[i], &refz[i]));
1322   n = nlevels;
1323   PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_x", refx, &n, NULL));
1324   n = nlevels;
1325   PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_y", refy, &n, NULL));
1326   n = nlevels;
1327   PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_z", refz, &n, NULL));
1328 
1329   PetscCall(DMDASetRefinementFactor(da, refx[0], refy[0], refz[0]));
1330   PetscCall(DMRefine(da, PetscObjectComm((PetscObject)da), &daf[0]));
1331   for (i = 1; i < nlevels; i++) {
1332     PetscCall(DMDASetRefinementFactor(daf[i - 1], refx[i], refy[i], refz[i]));
1333     PetscCall(DMRefine(daf[i - 1], PetscObjectComm((PetscObject)da), &daf[i]));
1334   }
1335   PetscCall(PetscFree3(refx, refy, refz));
1336   PetscFunctionReturn(PETSC_SUCCESS);
1337 }
1338 
1339 PetscErrorCode DMCoarsenHierarchy_DA(DM da, PetscInt nlevels, DM dac[])
1340 {
1341   PetscInt i;
1342 
1343   PetscFunctionBegin;
1344   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
1345   PetscCheck(nlevels >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "nlevels cannot be negative");
1346   if (nlevels == 0) PetscFunctionReturn(PETSC_SUCCESS);
1347   PetscAssertPointer(dac, 3);
1348   PetscCall(DMCoarsen(da, PetscObjectComm((PetscObject)da), &dac[0]));
1349   for (i = 1; i < nlevels; i++) PetscCall(DMCoarsen(dac[i - 1], PetscObjectComm((PetscObject)da), &dac[i]));
1350   PetscFunctionReturn(PETSC_SUCCESS);
1351 }
1352 
1353 static PetscErrorCode DMDASetGLLCoordinates_1d(DM dm, PetscInt n, PetscReal *nodes)
1354 {
1355   PetscInt     i, j, xs, xn, q;
1356   PetscScalar *xx;
1357   PetscReal    h;
1358   Vec          x;
1359   DM_DA       *da = (DM_DA *)dm->data;
1360 
1361   PetscFunctionBegin;
1362   if (da->bx != DM_BOUNDARY_PERIODIC) {
1363     PetscCall(DMDAGetInfo(dm, NULL, &q, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
1364     q = (q - 1) / (n - 1); /* number of spectral elements */
1365     h = 2.0 / q;
1366     PetscCall(DMDAGetCorners(dm, &xs, NULL, NULL, &xn, NULL, NULL));
1367     xs = xs / (n - 1);
1368     xn = xn / (n - 1);
1369     PetscCall(DMDASetUniformCoordinates(dm, -1., 1., 0., 0., 0., 0.));
1370     PetscCall(DMGetCoordinates(dm, &x));
1371     PetscCall(DMDAVecGetArray(dm, x, &xx));
1372 
1373     /* loop over local spectral elements */
1374     for (j = xs; j < xs + xn; j++) {
1375       /*
1376        Except for the first process, each process starts on the second GLL point of the first element on that process
1377        */
1378       for (i = (j == xs && xs > 0) ? 1 : 0; i < n; i++) xx[j * (n - 1) + i] = -1.0 + h * j + h * (nodes[i] + 1.0) / 2.;
1379     }
1380     PetscCall(DMDAVecRestoreArray(dm, x, &xx));
1381   } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented for periodic");
1382   PetscFunctionReturn(PETSC_SUCCESS);
1383 }
1384 
1385 /*@
1386 
1387   DMDASetGLLCoordinates - Sets the global coordinates from -1 to 1 to the GLL points of as many GLL elements that fit the number of grid points
1388 
1389   Collective
1390 
1391   Input Parameters:
1392 + da    - the `DMDA` object
1393 . n     - the number of GLL nodes
1394 - nodes - the GLL nodes
1395 
1396   Level: advanced
1397 
1398   Note:
1399   The parallel decomposition of grid points must correspond to the degree of the GLL. That is, the number of grid points
1400   on each process much be divisible by the number of GLL elements needed per process. This depends on whether the `DMDA` is
1401   periodic or not.
1402 
1403 .seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `PetscDTGaussLobattoLegendreQuadrature()`, `DMGetCoordinates()`
1404 @*/
1405 PetscErrorCode DMDASetGLLCoordinates(DM da, PetscInt n, PetscReal *nodes)
1406 {
1407   PetscFunctionBegin;
1408   if (da->dim == 1) {
1409     PetscCall(DMDASetGLLCoordinates_1d(da, n, nodes));
1410   } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented for 2 or 3d");
1411   PetscFunctionReturn(PETSC_SUCCESS);
1412 }
1413 
1414 PetscErrorCode DMGetCompatibility_DA(DM da1, DM dm2, PetscBool *compatible, PetscBool *set)
1415 {
1416   DM_DA    *dd1 = (DM_DA *)da1->data, *dd2;
1417   DM        da2;
1418   DMType    dmtype2;
1419   PetscBool isda, compatibleLocal;
1420   PetscInt  i;
1421 
1422   PetscFunctionBegin;
1423   PetscCheck(da1->setupcalled, PetscObjectComm((PetscObject)da1), PETSC_ERR_ARG_WRONGSTATE, "DMSetUp() must be called on first DM before DMGetCompatibility()");
1424   PetscCall(DMGetType(dm2, &dmtype2));
1425   PetscCall(PetscStrcmp(dmtype2, DMDA, &isda));
1426   if (isda) {
1427     da2 = dm2;
1428     dd2 = (DM_DA *)da2->data;
1429     PetscCheck(da2->setupcalled, PetscObjectComm((PetscObject)da2), PETSC_ERR_ARG_WRONGSTATE, "DMSetUp() must be called on second DM before DMGetCompatibility()");
1430     compatibleLocal = (PetscBool)(da1->dim == da2->dim);
1431     if (compatibleLocal) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->s == dd2->s)); /* Stencil width */
1432     /*                                                                           Global size              ranks               Boundary type */
1433     if (compatibleLocal) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->M == dd2->M) && (dd1->m == dd2->m) && (dd1->bx == dd2->bx));
1434     if (compatibleLocal && da1->dim > 1) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->N == dd2->N) && (dd1->n == dd2->n) && (dd1->by == dd2->by));
1435     if (compatibleLocal && da1->dim > 2) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->P == dd2->P) && (dd1->p == dd2->p) && (dd1->bz == dd2->bz));
1436     if (compatibleLocal) {
1437       for (i = 0; i < dd1->m; ++i) { compatibleLocal = (PetscBool)(compatibleLocal && (dd1->lx[i] == dd2->lx[i])); /* Local size     */ }
1438     }
1439     if (compatibleLocal && da1->dim > 1) {
1440       for (i = 0; i < dd1->n; ++i) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->ly[i] == dd2->ly[i]));
1441     }
1442     if (compatibleLocal && da1->dim > 2) {
1443       for (i = 0; i < dd1->p; ++i) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->lz[i] == dd2->lz[i]));
1444     }
1445     *compatible = compatibleLocal;
1446     *set        = PETSC_TRUE;
1447   } else {
1448     /* Decline to determine compatibility with other DM types */
1449     *set = PETSC_FALSE;
1450   }
1451   PetscFunctionReturn(PETSC_SUCCESS);
1452 }
1453