xref: /petsc/src/dm/impls/da/da.c (revision 9140fee14acbea959c6ed74f4836a1a89f708038)
1 #include <petsc/private/dmdaimpl.h> /*I   "petscdmda.h"   I*/
2 
3 /*@
4   DMDASetSizes - Sets the number of grid points in the three dimensional directions
5 
6   Logically Collective
7 
8   Input Parameters:
9 + da - the `DMDA`
10 . M  - the global X size
11 . N  - the global Y size
12 - P  - the global Z size
13 
14   Level: intermediate
15 
16   Developer Notes:
17   Since the dimension may not yet have been set the code cannot error check for non-positive Y and Z number of grid points
18 
19 .seealso: `DM`, `DMDA`, `PetscSplitOwnership()`
20 @*/
21 PetscErrorCode DMDASetSizes(DM da, PetscInt M, PetscInt N, PetscInt P)
22 {
23   DM_DA *dd = (DM_DA *)da->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
27   PetscValidLogicalCollectiveInt(da, M, 2);
28   PetscValidLogicalCollectiveInt(da, N, 3);
29   PetscValidLogicalCollectiveInt(da, P, 4);
30   PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
31   PetscCheck(M >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in X direction must be positive");
32   PetscCheck(N >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in Y direction must be positive");
33   PetscCheck(P >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in Z direction must be positive");
34 
35   dd->M = M;
36   dd->N = N;
37   dd->P = P;
38   PetscFunctionReturn(PETSC_SUCCESS);
39 }
40 
/*@
  DMDASetNumProcs - Sets the number of processes in each dimension

  Logically Collective

  Input Parameters:
+ da - the `DMDA`
. m  - the number of X procs (or `PETSC_DECIDE`)
. n  - the number of Y procs (or `PETSC_DECIDE`)
- p  - the number of Z procs (or `PETSC_DECIDE`)

  Level: intermediate

.seealso: `DM`, `DMDA`, `DMDASetSizes()`, `PetscSplitOwnership()`
@*/
PetscErrorCode DMDASetNumProcs(DM da, PetscInt m, PetscInt n, PetscInt p)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidLogicalCollectiveInt(da, m, 2);
  PetscValidLogicalCollectiveInt(da, n, 3);
  PetscValidLogicalCollectiveInt(da, p, 4);
  PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
  dd->m = m;
  dd->n = n;
  dd->p = p;
  /* For a 2d DMDA, if exactly one of m, n was given (PETSC_DECIDE is negative),
     derive the other so that m*n covers the whole communicator */
  if (da->dim == 2) {
    PetscMPIInt size;
    PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)da), &size));
    if ((dd->m > 0) && (dd->n < 0)) {
      dd->n = size / dd->m;
      /* integer division truncates, so require the factorization to be exact */
      PetscCheck(dd->n * dd->m == size, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "%" PetscInt_FMT " processes in X direction not divisible into comm size %d", m, size);
    }
    if ((dd->n > 0) && (dd->m < 0)) {
      dd->m = size / dd->n;
      PetscCheck(dd->n * dd->m == size, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "%" PetscInt_FMT " processes in Y direction not divisible into comm size %d", n, size);
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
83 
/*@
  DMDASetBoundaryType - Sets the type of ghost nodes on domain boundaries.

  Logically Collective

  Input Parameters:
+ da - The `DMDA`
. bx - x boundary type, one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`
. by - y boundary type, one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`
- bz - z boundary type, one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`

  Level: intermediate

.seealso: `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMBoundaryType`
@*/
PetscErrorCode DMDASetBoundaryType(DM da, DMBoundaryType bx, DMBoundaryType by, DMBoundaryType bz)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  /* all ranks must pass the same boundary types, hence the collective checks
     (the man page previously said "Not Collective", contradicting these macros) */
  PetscValidLogicalCollectiveEnum(da, bx, 2);
  PetscValidLogicalCollectiveEnum(da, by, 3);
  PetscValidLogicalCollectiveEnum(da, bz, 4);
  PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
  dd->bx = bx;
  dd->by = by;
  dd->bz = bz;
  PetscFunctionReturn(PETSC_SUCCESS);
}
114 
115 /*@
116   DMDASetDof - Sets the number of degrees of freedom per vertex
117 
118   Not Collective
119 
120   Input Parameters:
121 + da  - The `DMDA`
122 - dof - Number of degrees of freedom
123 
124   Level: intermediate
125 
126 .seealso: `DM`, `DMDA`, `DMDAGetDof()`, `DMDACreate()`, `DMDestroy()`
127 @*/
128 PetscErrorCode DMDASetDof(DM da, PetscInt dof)
129 {
130   DM_DA *dd = (DM_DA *)da->data;
131 
132   PetscFunctionBegin;
133   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
134   PetscValidLogicalCollectiveInt(da, dof, 2);
135   PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
136   dd->w  = dof;
137   da->bs = dof;
138   PetscFunctionReturn(PETSC_SUCCESS);
139 }
140 
141 /*@
142   DMDAGetDof - Gets the number of degrees of freedom per vertex
143 
144   Not Collective
145 
146   Input Parameter:
147 . da - The `DMDA`
148 
149   Output Parameter:
150 . dof - Number of degrees of freedom
151 
152   Level: intermediate
153 
154 .seealso: `DM`, `DMDA`, `DMDASetDof()`, `DMDACreate()`, `DMDestroy()`
155 @*/
156 PetscErrorCode DMDAGetDof(DM da, PetscInt *dof)
157 {
158   DM_DA *dd = (DM_DA *)da->data;
159 
160   PetscFunctionBegin;
161   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
162   PetscAssertPointer(dof, 2);
163   *dof = dd->w;
164   PetscFunctionReturn(PETSC_SUCCESS);
165 }
166 
167 /*@
168   DMDAGetOverlap - Gets the size of the per-processor overlap.
169 
170   Not Collective
171 
172   Input Parameter:
173 . da - The `DMDA`
174 
175   Output Parameters:
176 + x - Overlap in the x direction
177 . y - Overlap in the y direction
178 - z - Overlap in the z direction
179 
180   Level: intermediate
181 
182 .seealso: `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDASetOverlap()`
183 @*/
184 PetscErrorCode DMDAGetOverlap(DM da, PetscInt *x, PetscInt *y, PetscInt *z)
185 {
186   DM_DA *dd = (DM_DA *)da->data;
187 
188   PetscFunctionBegin;
189   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
190   if (x) *x = dd->xol;
191   if (y) *y = dd->yol;
192   if (z) *z = dd->zol;
193   PetscFunctionReturn(PETSC_SUCCESS);
194 }
195 
196 /*@
197   DMDASetOverlap - Sets the size of the per-processor overlap.
198 
199   Not Collective
200 
201   Input Parameters:
202 + da - The `DMDA`
203 . x  - Overlap in the x direction
204 . y  - Overlap in the y direction
205 - z  - Overlap in the z direction
206 
207   Level: intermediate
208 
209 .seealso: `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDAGetOverlap()`
210 @*/
211 PetscErrorCode DMDASetOverlap(DM da, PetscInt x, PetscInt y, PetscInt z)
212 {
213   DM_DA *dd = (DM_DA *)da->data;
214 
215   PetscFunctionBegin;
216   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
217   PetscValidLogicalCollectiveInt(da, x, 2);
218   PetscValidLogicalCollectiveInt(da, y, 3);
219   PetscValidLogicalCollectiveInt(da, z, 4);
220   dd->xol = x;
221   dd->yol = y;
222   dd->zol = z;
223   PetscFunctionReturn(PETSC_SUCCESS);
224 }
225 
226 /*@
227   DMDAGetNumLocalSubDomains - Gets the number of local subdomains created upon decomposition.
228 
229   Not Collective
230 
231   Input Parameter:
232 . da - The `DMDA`
233 
234   Output Parameter:
235 . Nsub - Number of local subdomains created upon decomposition
236 
237   Level: intermediate
238 
239 .seealso: `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDASetNumLocalSubDomains()`
240 @*/
241 PetscErrorCode DMDAGetNumLocalSubDomains(DM da, PetscInt *Nsub)
242 {
243   DM_DA *dd = (DM_DA *)da->data;
244 
245   PetscFunctionBegin;
246   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
247   if (Nsub) *Nsub = dd->Nsub;
248   PetscFunctionReturn(PETSC_SUCCESS);
249 }
250 
251 /*@
252   DMDASetNumLocalSubDomains - Sets the number of local subdomains created upon decomposition.
253 
254   Not Collective
255 
256   Input Parameters:
257 + da   - The `DMDA`
258 - Nsub - The number of local subdomains requested
259 
260   Level: intermediate
261 
262 .seealso: `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDAGetNumLocalSubDomains()`
263 @*/
264 PetscErrorCode DMDASetNumLocalSubDomains(DM da, PetscInt Nsub)
265 {
266   DM_DA *dd = (DM_DA *)da->data;
267 
268   PetscFunctionBegin;
269   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
270   PetscValidLogicalCollectiveInt(da, Nsub, 2);
271   dd->Nsub = Nsub;
272   PetscFunctionReturn(PETSC_SUCCESS);
273 }
274 
/*@
  DMDASetOffset - Sets the index offset of the DA.

  Collective

  Input Parameters:
+ da - The `DMDA`
. xo - The offset in the x direction
. yo - The offset in the y direction
. zo - The offset in the z direction
. Mo - The problem offset in the x direction
. No - The problem offset in the y direction
- Po - The problem offset in the z direction

  Level: intermediate

  Note:
  This is used primarily to overlap a computation on a local `DMDA` with that on a global `DMDA` without
  changing boundary conditions or subdomain features that depend upon the global offsets.

.seealso: `DM`, `DMDA`, `DMDAGetOffset()`, `DMDAVecGetArray()`
@*/
PetscErrorCode DMDASetOffset(DM da, PetscInt xo, PetscInt yo, PetscInt zo, PetscInt Mo, PetscInt No, PetscInt Po)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidLogicalCollectiveInt(da, xo, 2);
  PetscValidLogicalCollectiveInt(da, yo, 3);
  PetscValidLogicalCollectiveInt(da, zo, 4);
  PetscValidLogicalCollectiveInt(da, Mo, 5);
  PetscValidLogicalCollectiveInt(da, No, 6);
  PetscValidLogicalCollectiveInt(da, Po, 7);
  dd->xo = xo;
  dd->yo = yo;
  dd->zo = zo;
  dd->Mo = Mo;
  dd->No = No;
  dd->Po = Po;

  /* keep the coordinate DM (if one has been created) consistent with the new offsets */
  if (da->coordinates[0].dm) PetscCall(DMDASetOffset(da->coordinates[0].dm, xo, yo, zo, Mo, No, Po));
  PetscFunctionReturn(PETSC_SUCCESS);
}
319 
/*@
  DMDAGetOffset - Gets the index offset of the `DMDA`.

  Not Collective

  Input Parameter:
. da - The `DMDA`

  Output Parameters:
+ xo - The offset in the x direction
. yo - The offset in the y direction
. zo - The offset in the z direction
. Mo - The problem offset in the x direction
. No - The problem offset in the y direction
- Po - The problem offset in the z direction

  Level: intermediate

.seealso: `DM`, `DMDA`, `DMDASetOffset()`, `DMDAVecGetArray()`
@*/
PetscErrorCode DMDAGetOffset(DM da, PetscInt *xo, PetscInt *yo, PetscInt *zo, PetscInt *Mo, PetscInt *No, PetscInt *Po)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  /* each output is optional and mirrors the corresponding value stored by DMDASetOffset() */
  if (xo) *xo = dd->xo;
  if (yo) *yo = dd->yo;
  if (zo) *zo = dd->zo;
  if (Mo) *Mo = dd->Mo;
  if (No) *No = dd->No;
  if (Po) *Po = dd->Po;
  PetscFunctionReturn(PETSC_SUCCESS);
}
354 
355 /*@
356   DMDAGetNonOverlappingRegion - Gets the indices of the nonoverlapping region of a subdomain `DM`.
357 
358   Not Collective
359 
360   Input Parameter:
361 . da - The `DMDA`
362 
363   Output Parameters:
364 + xs - The start of the region in x
365 . ys - The start of the region in y
366 . zs - The start of the region in z
367 . xm - The size of the region in x
368 . ym - The size of the region in y
369 - zm - The size of the region in z
370 
371   Level: intermediate
372 
373 .seealso: `DM`, `DMDA`, `DMDAGetOffset()`, `DMDAVecGetArray()`
374 @*/
375 PetscErrorCode DMDAGetNonOverlappingRegion(DM da, PetscInt *xs, PetscInt *ys, PetscInt *zs, PetscInt *xm, PetscInt *ym, PetscInt *zm)
376 {
377   DM_DA *dd = (DM_DA *)da->data;
378 
379   PetscFunctionBegin;
380   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
381   if (xs) *xs = dd->nonxs;
382   if (ys) *ys = dd->nonys;
383   if (zs) *zs = dd->nonzs;
384   if (xm) *xm = dd->nonxm;
385   if (ym) *ym = dd->nonym;
386   if (zm) *zm = dd->nonzm;
387   PetscFunctionReturn(PETSC_SUCCESS);
388 }
389 
390 /*@
391   DMDASetNonOverlappingRegion - Sets the indices of the nonoverlapping region of a subdomain `DM`.
392 
393   Collective
394 
395   Input Parameters:
396 + da - The `DMDA`
397 . xs - The start of the region in x
398 . ys - The start of the region in y
399 . zs - The start of the region in z
400 . xm - The size of the region in x
401 . ym - The size of the region in y
402 - zm - The size of the region in z
403 
404   Level: intermediate
405 
406 .seealso: `DM`, `DMDA`, `DMDAGetOffset()`, `DMDAVecGetArray()`
407 @*/
408 PetscErrorCode DMDASetNonOverlappingRegion(DM da, PetscInt xs, PetscInt ys, PetscInt zs, PetscInt xm, PetscInt ym, PetscInt zm)
409 {
410   DM_DA *dd = (DM_DA *)da->data;
411 
412   PetscFunctionBegin;
413   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
414   PetscValidLogicalCollectiveInt(da, xs, 2);
415   PetscValidLogicalCollectiveInt(da, ys, 3);
416   PetscValidLogicalCollectiveInt(da, zs, 4);
417   PetscValidLogicalCollectiveInt(da, xm, 5);
418   PetscValidLogicalCollectiveInt(da, ym, 6);
419   PetscValidLogicalCollectiveInt(da, zm, 7);
420   dd->nonxs = xs;
421   dd->nonys = ys;
422   dd->nonzs = zs;
423   dd->nonxm = xm;
424   dd->nonym = ym;
425   dd->nonzm = zm;
426 
427   PetscFunctionReturn(PETSC_SUCCESS);
428 }
429 
430 /*@
431   DMDASetStencilType - Sets the type of the communication stencil
432 
433   Logically Collective
434 
435   Input Parameters:
436 + da    - The `DMDA`
437 - stype - The stencil type, use either `DMDA_STENCIL_BOX` or `DMDA_STENCIL_STAR`.
438 
439   Level: intermediate
440 
441 .seealso: `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`
442 @*/
443 PetscErrorCode DMDASetStencilType(DM da, DMDAStencilType stype)
444 {
445   DM_DA *dd = (DM_DA *)da->data;
446 
447   PetscFunctionBegin;
448   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
449   PetscValidLogicalCollectiveEnum(da, stype, 2);
450   PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
451   dd->stencil_type = stype;
452   PetscFunctionReturn(PETSC_SUCCESS);
453 }
454 
455 /*@
456   DMDAGetStencilType - Gets the type of the communication stencil
457 
458   Not Collective
459 
460   Input Parameter:
461 . da - The `DMDA`
462 
463   Output Parameter:
464 . stype - The stencil type, use either `DMDA_STENCIL_BOX` or `DMDA_STENCIL_STAR`.
465 
466   Level: intermediate
467 
468 .seealso: `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`
469 @*/
470 PetscErrorCode DMDAGetStencilType(DM da, DMDAStencilType *stype)
471 {
472   DM_DA *dd = (DM_DA *)da->data;
473 
474   PetscFunctionBegin;
475   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
476   PetscAssertPointer(stype, 2);
477   *stype = dd->stencil_type;
478   PetscFunctionReturn(PETSC_SUCCESS);
479 }
480 
481 /*@
482   DMDASetStencilWidth - Sets the width of the communication stencil
483 
484   Logically Collective
485 
486   Input Parameters:
487 + da    - The `DMDA`
488 - width - The stencil width
489 
490   Level: intermediate
491 
492 .seealso: `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`
493 @*/
494 PetscErrorCode DMDASetStencilWidth(DM da, PetscInt width)
495 {
496   DM_DA *dd = (DM_DA *)da->data;
497 
498   PetscFunctionBegin;
499   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
500   PetscValidLogicalCollectiveInt(da, width, 2);
501   PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
502   dd->s = width;
503   PetscFunctionReturn(PETSC_SUCCESS);
504 }
505 
506 /*@
507   DMDAGetStencilWidth - Gets the width of the communication stencil
508 
509   Not Collective
510 
511   Input Parameter:
512 . da - The `DMDA`
513 
514   Output Parameter:
515 . width - The stencil width
516 
517   Level: intermediate
518 
519 .seealso: `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`
520 @*/
521 PetscErrorCode DMDAGetStencilWidth(DM da, PetscInt *width)
522 {
523   DM_DA *dd = (DM_DA *)da->data;
524 
525   PetscFunctionBegin;
526   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
527   PetscAssertPointer(width, 2);
528   *width = dd->s;
529   PetscFunctionReturn(PETSC_SUCCESS);
530 }
531 
532 static PetscErrorCode DMDACheckOwnershipRanges_Private(DM da, PetscInt M, PetscInt m, const PetscInt lx[])
533 {
534   PetscInt i, sum;
535 
536   PetscFunctionBegin;
537   PetscCheck(M >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Global dimension not set");
538   for (i = sum = 0; i < m; i++) sum += lx[i];
539   PetscCheck(sum == M, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_INCOMP, "Ownership ranges sum to %" PetscInt_FMT " but global dimension is %" PetscInt_FMT, sum, M);
540   PetscFunctionReturn(PETSC_SUCCESS);
541 }
542 
543 /*@
544   DMDASetOwnershipRanges - Sets the number of nodes in each direction on each process
545 
546   Logically Collective
547 
548   Input Parameters:
549 + da - The `DMDA`
550 . lx - array containing number of nodes in the X direction on each process, or NULL. If non-null, must be of length da->m
551 . ly - array containing number of nodes in the Y direction on each process, or NULL. If non-null, must be of length da->n
552 - lz - array containing number of nodes in the Z direction on each process, or NULL. If non-null, must be of length da->p.
553 
554   Level: intermediate
555 
556   Note:
557   These numbers are NOT multiplied by the number of dof per node.
558 
559 .seealso: `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`
560 @*/
561 PetscErrorCode DMDASetOwnershipRanges(DM da, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[])
562 {
563   DM_DA *dd = (DM_DA *)da->data;
564 
565   PetscFunctionBegin;
566   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
567   PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
568   if (lx) {
569     PetscCheck(dd->m >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of procs");
570     PetscCall(DMDACheckOwnershipRanges_Private(da, dd->M, dd->m, lx));
571     if (!dd->lx) PetscCall(PetscMalloc1(dd->m, &dd->lx));
572     PetscCall(PetscArraycpy(dd->lx, lx, dd->m));
573   }
574   if (ly) {
575     PetscCheck(dd->n >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of procs");
576     PetscCall(DMDACheckOwnershipRanges_Private(da, dd->N, dd->n, ly));
577     if (!dd->ly) PetscCall(PetscMalloc1(dd->n, &dd->ly));
578     PetscCall(PetscArraycpy(dd->ly, ly, dd->n));
579   }
580   if (lz) {
581     PetscCheck(dd->p >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of procs");
582     PetscCall(DMDACheckOwnershipRanges_Private(da, dd->P, dd->p, lz));
583     if (!dd->lz) PetscCall(PetscMalloc1(dd->p, &dd->lz));
584     PetscCall(PetscArraycpy(dd->lz, lz, dd->p));
585   }
586   PetscFunctionReturn(PETSC_SUCCESS);
587 }
588 
589 /*@
590   DMDASetInterpolationType - Sets the type of interpolation that will be
591   returned by `DMCreateInterpolation()`
592 
593   Logically Collective
594 
595   Input Parameters:
596 + da    - initial distributed array
597 - ctype - `DMDA_Q1` and `DMDA_Q0` are currently the only supported forms
598 
599   Level: intermediate
600 
601   Note:
602   You should call this on the coarser of the two `DMDA` you pass to `DMCreateInterpolation()`
603 
604 .seealso: `DM`, `DMDA`, `DMDACreate1d()`, `DMDACreate2d()`, `DMDACreate3d()`, `DMDestroy()`, `DMDAInterpolationType`
605 @*/
606 PetscErrorCode DMDASetInterpolationType(DM da, DMDAInterpolationType ctype)
607 {
608   DM_DA *dd = (DM_DA *)da->data;
609 
610   PetscFunctionBegin;
611   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
612   PetscValidLogicalCollectiveEnum(da, ctype, 2);
613   dd->interptype = ctype;
614   PetscFunctionReturn(PETSC_SUCCESS);
615 }
616 
617 /*@
618   DMDAGetInterpolationType - Gets the type of interpolation that will be
619   used by `DMCreateInterpolation()`
620 
621   Not Collective
622 
623   Input Parameter:
624 . da - distributed array
625 
626   Output Parameter:
627 . ctype - interpolation type (`DMDA_Q1` and `DMDA_Q0` are currently the only supported forms)
628 
629   Level: intermediate
630 
631 .seealso: `DM`, `DMDA`, `DMDAInterpolationType`, `DMDASetInterpolationType()`, `DMCreateInterpolation()`
632 @*/
633 PetscErrorCode DMDAGetInterpolationType(DM da, DMDAInterpolationType *ctype)
634 {
635   DM_DA *dd = (DM_DA *)da->data;
636 
637   PetscFunctionBegin;
638   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
639   PetscAssertPointer(ctype, 2);
640   *ctype = dd->interptype;
641   PetscFunctionReturn(PETSC_SUCCESS);
642 }
643 
644 /*@C
645   DMDAGetNeighbors - Gets an array containing the MPI rank of all the current
646   processes neighbors.
647 
648   Not Collective
649 
650   Input Parameter:
651 . da - the `DMDA` object
652 
653   Output Parameter:
654 . ranks - the neighbors ranks, stored with the x index increasing most rapidly.
655               this process itself is in the list
656 
657   Level: intermediate
658 
659   Notes:
660   In 2d the array is of length 9, in 3d of length 27
661 
662   Not supported in 1d
663 
664   Do not free the array, it is freed when the `DMDA` is destroyed.
665 
666   Fortran Notes:
667   In Fortran you must pass in an array of the appropriate length.
668 
669 .seealso: `DMDA`, `DM`
670 @*/
671 PetscErrorCode DMDAGetNeighbors(DM da, const PetscMPIInt *ranks[])
672 {
673   DM_DA *dd = (DM_DA *)da->data;
674 
675   PetscFunctionBegin;
676   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
677   *ranks = dd->neighbors;
678   PetscFunctionReturn(PETSC_SUCCESS);
679 }
680 
681 /*@C
682   DMDAGetOwnershipRanges - Gets the ranges of indices in the x, y and z direction that are owned by each process
683 
684   Not Collective
685 
686   Input Parameter:
687 . da - the `DMDA` object
688 
689   Output Parameters:
690 + lx - ownership along x direction (optional)
691 . ly - ownership along y direction (optional)
692 - lz - ownership along z direction (optional)
693 
694   Level: intermediate
695 
696   Note:
697   These correspond to the optional final arguments passed to `DMDACreate()`, `DMDACreate2d()`, `DMDACreate3d()`
698 
699   In C you should not free these arrays, nor change the values in them. They will only have valid values while the
700   `DMDA` they came from still exists (has not been destroyed).
701 
702   These numbers are NOT multiplied by the number of dof per node.
703 
704   Fortran Notes:
705   In Fortran one must pass in arrays `lx`, `ly`, and `lz` that are long enough to hold the values; the sixth, seventh and
706   eighth arguments from `DMDAGetInfo()`
707 
708 .seealso: `DM`, `DMDA`, `DMDAGetCorners()`, `DMDAGetGhostCorners()`, `DMDACreate()`, `DMDACreate1d()`, `DMDACreate2d()`, `DMDACreate3d()`, `VecGetOwnershipRanges()`
709 @*/
710 PetscErrorCode DMDAGetOwnershipRanges(DM da, const PetscInt *lx[], const PetscInt *ly[], const PetscInt *lz[])
711 {
712   DM_DA *dd = (DM_DA *)da->data;
713 
714   PetscFunctionBegin;
715   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
716   if (lx) *lx = dd->lx;
717   if (ly) *ly = dd->ly;
718   if (lz) *lz = dd->lz;
719   PetscFunctionReturn(PETSC_SUCCESS);
720 }
721 
/*@
  DMDASetRefinementFactor - Set the ratios that the `DMDA` grid is refined

  Logically Collective

  Input Parameters:
+ da       - the `DMDA` object
. refine_x - ratio of fine grid to coarse in x direction (2 by default)
. refine_y - ratio of fine grid to coarse in y direction (2 by default)
- refine_z - ratio of fine grid to coarse in z direction (2 by default)

  Options Database Keys:
+ -da_refine_x refine_x - refinement ratio in x direction
. -da_refine_y refine_y - refinement ratio in y direction
. -da_refine_z refine_z - refinement ratio in z direction
- -da_refine <n>        - refine the DMDA object n times when it is created.

  Level: intermediate

  Note:
  Pass `PETSC_IGNORE` to leave a value unchanged

.seealso: `DM`, `DMDA`, `DMRefine()`, `DMDAGetRefinementFactor()`
@*/
PetscErrorCode DMDASetRefinementFactor(DM da, PetscInt refine_x, PetscInt refine_y, PetscInt refine_z)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidLogicalCollectiveInt(da, refine_x, 2);
  PetscValidLogicalCollectiveInt(da, refine_y, 3);
  PetscValidLogicalCollectiveInt(da, refine_z, 4);

  /* non-positive values (e.g. PETSC_IGNORE) leave the corresponding ratio unchanged */
  if (refine_x > 0) dd->refine_x = refine_x;
  if (refine_y > 0) dd->refine_y = refine_y;
  if (refine_z > 0) dd->refine_z = refine_z;
  PetscFunctionReturn(PETSC_SUCCESS);
}
761 
762 /*@C
763   DMDAGetRefinementFactor - Gets the ratios that the `DMDA` grid is refined
764 
765   Not Collective
766 
767   Input Parameter:
768 . da - the `DMDA` object
769 
770   Output Parameters:
771 + refine_x - ratio of fine grid to coarse in x direction (2 by default)
772 . refine_y - ratio of fine grid to coarse in y direction (2 by default)
773 - refine_z - ratio of fine grid to coarse in z direction (2 by default)
774 
775   Level: intermediate
776 
777   Note:
778   Pass `NULL` for values you do not need
779 
780 .seealso: `DM`, `DMDA`, `DMRefine()`, `DMDASetRefinementFactor()`
781 @*/
782 PetscErrorCode DMDAGetRefinementFactor(DM da, PetscInt *refine_x, PetscInt *refine_y, PetscInt *refine_z)
783 {
784   DM_DA *dd = (DM_DA *)da->data;
785 
786   PetscFunctionBegin;
787   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
788   if (refine_x) *refine_x = dd->refine_x;
789   if (refine_y) *refine_y = dd->refine_y;
790   if (refine_z) *refine_z = dd->refine_z;
791   PetscFunctionReturn(PETSC_SUCCESS);
792 }
793 
794 /*@C
795   DMDASetGetMatrix - Sets the routine used by the `DMDA` to allocate a matrix.
796 
797   Logically Collective; No Fortran Support
798 
799   Input Parameters:
800 + da - the `DMDA` object
801 - f  - the function that allocates the matrix for that specific DMDA
802 
803   Level: developer
804 
805   Note:
806   See `DMDASetBlockFills()` that provides a simple way to provide the nonzero structure for
807   the diagonal and off-diagonal blocks of the matrix
808 
809 .seealso: `DM`, `DMDA`, `DMCreateMatrix()`, `DMDASetBlockFills()`
810 @*/
811 PetscErrorCode DMDASetGetMatrix(DM da, PetscErrorCode (*f)(DM, Mat *))
812 {
813   PetscFunctionBegin;
814   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
815   da->ops->creatematrix = f;
816   PetscFunctionReturn(PETSC_SUCCESS);
817 }
818 
/*@
  DMDAMapMatStencilToGlobal - Map a list of `MatStencil` on a grid to global indices.

  Not Collective

  Input Parameters:
+ da   - the `DMDA` object
. m    - number of MatStencils
- idxm - grid points (and component number when dof > 1)

  Output Parameter:
. gidxm - global row indices

  Level: intermediate

.seealso: `DM`, `DMDA`, `MatStencil`
@*/
PetscErrorCode DMDAMapMatStencilToGlobal(DM da, PetscInt m, const MatStencil idxm[], PetscInt gidxm[])
{
  const DM_DA           *dd  = (const DM_DA *)da->data;
  const PetscInt        *dxm = (const PetscInt *)idxm; /* view each MatStencil as 4 consecutive PetscInt (k,j,i,c) */
  PetscInt               i, j, sdim, tmp, dim;
  PetscInt               dims[4], starts[4], dims2[3], starts2[3], dof = dd->w;
  ISLocalToGlobalMapping ltog;

  PetscFunctionBegin;
  if (m <= 0) PetscFunctionReturn(PETSC_SUCCESS);

  /* Code adapted from DMDAGetGhostCorners(): starts2[]/dims2[] are the ghosted
     local region in natural (x,y,z) order; Xs/Xe include the dof factor, so x is scaled by dof */
  starts2[0] = dd->Xs / dof + dd->xo;
  starts2[1] = dd->Ys + dd->yo;
  starts2[2] = dd->Zs + dd->zo;
  dims2[0]   = (dd->Xe - dd->Xs) / dof;
  dims2[1]   = (dd->Ye - dd->Ys);
  dims2[2]   = (dd->Ze - dd->Zs);

  /* As if we do MatSetStencil() to get dims[]/starts[] of mat->stencil */
  dim  = da->dim;                   /* DA dim: 1 to 3 */
  sdim = dim + (dof > 1 ? 1 : 0);   /* Dimensions in MatStencil's (k,j,i,c) view */
  for (i = 0; i < dim; i++) {       /* Reverse the order and also skip the unused dimensions */
    dims[i]   = dims2[dim - i - 1]; /* ex. dims/starts[] are in order of {i} for 1D, {j,i} for 2D and {k,j,i} for 3D */
    starts[i] = starts2[dim - i - 1];
  }
  starts[dim] = 0; /* Append the extra dim for dof (won't be used below if dof=1) */
  dims[dim]   = dof;

  /* Map stencils to local indices (code adapted from MatSetValuesStencil()) */
  for (i = 0; i < m; i++) {
    dxm += 3 - dim; /* Input is {k,j,i,c}; move the pointer to the first used index, e.g., j in 2D */
    tmp = 0;
    /* Horner-style accumulation of the row-major local index over the used dimensions */
    for (j = 0; j < sdim; j++) {                                                      /* Iter over, ex. j,i or j,i,c in 2D */
      if (tmp < 0 || dxm[j] < starts[j] || dxm[j] >= (starts[j] + dims[j])) tmp = -1; /* Beyond the ghost region, therefore ignored with negative indices */
      else tmp = tmp * dims[j] + (dxm[j] - starts[j]);
    }
    gidxm[i] = tmp;
    /* Move to the next MatStencil point */
    if (dof > 1) dxm += sdim; /* c is already counted in sdim */
    else dxm += sdim + 1;     /* skip the unused c */
  }

  /* Map local indices to global indices; negative (ignored) entries pass through the mapping */
  PetscCall(DMGetLocalToGlobalMapping(da, &ltog));
  PetscCall(ISLocalToGlobalMappingApply(ltog, m, gidxm, gidxm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
884 
/*
  Creates "balanced" ownership ranges after refinement, constrained by the need for the
  fine grid boundaries to fall within one stencil width of the coarse partition.

  Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.

  lc[] holds the m coarse ownership counts; the m refined counts are written to lf[].
*/
static PetscErrorCode DMDARefineOwnershipRanges(DM da, PetscBool periodic, PetscInt stencil_width, PetscInt ratio, PetscInt m, const PetscInt lc[], PetscInt lf[])
{
  PetscInt i, totalc = 0, remaining, startc = 0, startf = 0;

  PetscFunctionBegin;
  PetscCheck(ratio >= 1, PetscObjectComm((PetscObject)da), PETSC_ERR_USER, "Requested refinement ratio %" PetscInt_FMT " must be at least 1", ratio);
  if (ratio == 1) {
    /* no refinement: the fine partition is the coarse partition */
    PetscCall(PetscArraycpy(lf, lc, m));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  for (i = 0; i < m; i++) totalc += lc[i];
  /* total fine points: a non-periodic grid keeps one shared endpoint un-replicated */
  remaining = (!periodic) + ratio * (totalc - (!periodic));
  for (i = 0; i < m; i++) {
    /* greedy target: an even split (rounded up) of the points not yet assigned */
    PetscInt want = remaining / (m - i) + !!(remaining % (m - i));
    if (i == m - 1) lf[i] = want;
    else {
      const PetscInt nextc = startc + lc[i];
      /* Move the first fine node of the next subdomain to the right until the coarse node on its left is within one
       * coarse stencil width of the first coarse node in the next subdomain. */
      while ((startf + want) / ratio < nextc - stencil_width) want++;
      /* Move the last fine node in the current subdomain to the left until the coarse node on its right is within one
       * coarse stencil width of the last coarse node in the current subdomain. */
      while ((startf + want - 1 + ratio - 1) / ratio > nextc - 1 + stencil_width) want--;
      /* Make sure all constraints are satisfied */
      if (want < 0 || want > remaining || ((startf + want) / ratio < nextc - stencil_width) || ((startf + want - 1 + ratio - 1) / ratio > nextc - 1 + stencil_width))
        SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Could not find a compatible refined ownership range");
    }
    lf[i] = want;
    startc += lc[i];
    startf += lf[i];
    remaining -= lf[i];
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
925 
926 /*
927   Creates "balanced" ownership ranges after coarsening, constrained by the need for the
928   fine grid boundaries to fall within one stencil width of the coarse partition.
929 
930   Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.
931 */
932 static PetscErrorCode DMDACoarsenOwnershipRanges(DM da, PetscBool periodic, PetscInt stencil_width, PetscInt ratio, PetscInt m, const PetscInt lf[], PetscInt lc[])
933 {
934   PetscInt i, totalf, remaining, startc, startf;
935 
936   PetscFunctionBegin;
937   PetscCheck(ratio >= 1, PetscObjectComm((PetscObject)da), PETSC_ERR_USER, "Requested refinement ratio %" PetscInt_FMT " must be at least 1", ratio);
938   if (ratio == 1) {
939     PetscCall(PetscArraycpy(lc, lf, m));
940     PetscFunctionReturn(PETSC_SUCCESS);
941   }
942   for (i = 0, totalf = 0; i < m; i++) totalf += lf[i];
943   remaining = (!periodic) + (totalf - (!periodic)) / ratio;
944   for (i = 0, startc = 0, startf = 0; i < m; i++) {
945     PetscInt want = remaining / (m - i) + !!(remaining % (m - i));
946     if (i == m - 1) lc[i] = want;
947     else {
948       const PetscInt nextf = startf + lf[i];
949       /* Slide first coarse node of next subdomain to the left until the coarse node to the left of the first fine
950        * node is within one stencil width. */
951       while (nextf / ratio < startc + want - stencil_width) want--;
952       /* Slide the last coarse node of the current subdomain to the right until the coarse node to the right of the last
953        * fine node is within one stencil width. */
954       while ((nextf - 1 + ratio - 1) / ratio > startc + want - 1 + stencil_width) want++;
955       if (want < 0 || want > remaining || (nextf / ratio < startc + want - stencil_width) || ((nextf - 1 + ratio - 1) / ratio > startc + want - 1 + stencil_width))
956         SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Could not find a compatible coarsened ownership range");
957     }
958     lc[i] = want;
959     startc += lc[i];
960     startf += lf[i];
961     remaining -= lc[i];
962   }
963   PetscFunctionReturn(PETSC_SUCCESS);
964 }
965 
/*
  DMRefine_DA - Implements DMRefine() for DMDA: builds a refined DMDA that mirrors the
  input's dimension, process layout, boundary types, dof, and stencil, with global sizes
  scaled by the per-direction refinement factors, and with coordinates interpolated from
  the coarse grid when the coarse grid has them.

  Note: the comm argument is unused; the refined DM is created on the communicator of da.
*/
PetscErrorCode DMRefine_DA(DM da, MPI_Comm comm, DM *daref)
{
  PetscInt M, N, P, i, dim;
  Vec      coordsc, coordsf;
  DM       da2;
  DM_DA   *dd = (DM_DA *)da->data, *dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscAssertPointer(daref, 3);

  /* Refined global sizes: for periodic directions or piecewise-constant (Q0) interpolation
     every point is refined, otherwise the last point is shared and only the M-1 intervals scale */
  PetscCall(DMGetDimension(da, &dim));
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->refine_x * dd->M;
  } else {
    M = 1 + dd->refine_x * (dd->M - 1);
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->refine_y * dd->N;
    } else {
      N = 1;
    }
  } else {
    N = 1 + dd->refine_y * (dd->N - 1);
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->refine_z * dd->P;
    } else {
      P = 1;
    }
  } else {
    P = 1 + dd->refine_z * (dd->P - 1);
  }
  /* Create the fine DMDA with the same layout parameters as the coarse one */
  PetscCall(DMDACreate(PetscObjectComm((PetscObject)da), &da2));
  PetscCall(DMSetOptionsPrefix(da2, ((PetscObject)da)->prefix));
  PetscCall(DMSetDimension(da2, dim));
  PetscCall(DMDASetSizes(da2, M, N, P));
  PetscCall(DMDASetNumProcs(da2, dd->m, dd->n, dd->p));
  PetscCall(DMDASetBoundaryType(da2, dd->bx, dd->by, dd->bz));
  PetscCall(DMDASetDof(da2, dd->w));
  PetscCall(DMDASetStencilType(da2, dd->stencil_type));
  PetscCall(DMDASetStencilWidth(da2, dd->s));
  /* Fine ownership ranges chosen to stay compatible with the coarse partition */
  if (dim == 3) {
    PetscInt *lx, *ly, *lz;
    PetscCall(PetscMalloc3(dd->m, &lx, dd->n, &ly, dd->p, &lz));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_y, dd->n, dd->ly, ly));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_z, dd->p, dd->lz, lz));
    PetscCall(DMDASetOwnershipRanges(da2, lx, ly, lz));
    PetscCall(PetscFree3(lx, ly, lz));
  } else if (dim == 2) {
    PetscInt *lx, *ly;
    PetscCall(PetscMalloc2(dd->m, &lx, dd->n, &ly));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_y, dd->n, dd->ly, ly));
    PetscCall(DMDASetOwnershipRanges(da2, lx, ly, NULL));
    PetscCall(PetscFree2(lx, ly));
  } else if (dim == 1) {
    PetscInt *lx;
    PetscCall(PetscMalloc1(dd->m, &lx));
    PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx));
    PetscCall(DMDASetOwnershipRanges(da2, lx, NULL, NULL));
    PetscCall(PetscFree(lx));
  }
  dd2 = (DM_DA *)da2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones */
  da2->ops->creatematrix = da->ops->creatematrix;
  /* da2->ops->createinterpolation = da->ops->createinterpolation; this causes problem with SNESVI */
  da2->ops->getcoloring = da->ops->getcoloring;
  dd2->interptype       = dd->interptype;

  /* copy fill information if given; the allocation size implies a CSR-like layout of w+1 offsets followed by entries */
  if (dd->dfill) {
    PetscCall(PetscMalloc1(dd->dfill[dd->w] + dd->w + 1, &dd2->dfill));
    PetscCall(PetscArraycpy(dd2->dfill, dd->dfill, dd->dfill[dd->w] + dd->w + 1));
  }
  if (dd->ofill) {
    PetscCall(PetscMalloc1(dd->ofill[dd->w] + dd->w + 1, &dd2->ofill));
    PetscCall(PetscArraycpy(dd2->ofill, dd->ofill, dd->ofill[dd->w] + dd->w + 1));
  }
  /* copy the refine information */
  dd2->coarsen_x = dd2->refine_x = dd->refine_x;
  dd2->coarsen_y = dd2->refine_y = dd->refine_y;
  dd2->coarsen_z = dd2->refine_z = dd->refine_z;

  /* If per-level factor lists are present, override: the fine DM's refine factor comes from
     the next level (index levelup - leveldown + 1) and its coarsen factor from the current
     level (index levelup - leveldown), when those indices are in range */
  if (dd->refine_z_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_z_hier_n) dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown + 1];
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_z_hier_n) dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown];
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_z_hier_n, &dd2->refine_z_hier));
    PetscCall(PetscArraycpy(dd2->refine_z_hier, dd->refine_z_hier, dd2->refine_z_hier_n));
  }
  if (dd->refine_y_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_y_hier_n) dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown + 1];
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_y_hier_n) dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown];
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_y_hier_n, &dd2->refine_y_hier));
    PetscCall(PetscArraycpy(dd2->refine_y_hier, dd->refine_y_hier, dd2->refine_y_hier_n));
  }
  if (dd->refine_x_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_x_hier_n) dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown + 1];
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_x_hier_n) dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown];
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_x_hier_n, &dd2->refine_x_hier));
    PetscCall(PetscArraycpy(dd2->refine_x_hier, dd->refine_x_hier, dd2->refine_x_hier_n));
  }

  /* copy vector type information */
  PetscCall(DMSetVecType(da2, da->vectype));

  /* NOTE(review): lf/lj are copied by handle; presumably callback pointers with no ownership transfer -- confirm */
  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  /* the refined DM sits one refinement level above this one */
  da2->leveldown = da->leveldown;
  da2->levelup   = da->levelup + 1;

  PetscCall(DMSetUp(da2));

  /* interpolate coordinates if they are set on the coarse grid */
  PetscCall(DMGetCoordinates(da, &coordsc));
  if (coordsc) {
    DM  cdaf, cdac;
    Mat II;

    PetscCall(DMGetCoordinateDM(da, &cdac));
    PetscCall(DMGetCoordinateDM(da2, &cdaf));
    /* force creation of the coordinate vector */
    PetscCall(DMDASetUniformCoordinates(da2, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
    PetscCall(DMGetCoordinates(da2, &coordsf));
    PetscCall(DMCreateInterpolation(cdac, cdaf, &II, NULL));
    PetscCall(MatInterpolate(II, coordsc, coordsf));
    PetscCall(MatDestroy(&II));
  }

  /* propagate per-component field names to the refined DM */
  for (i = 0; i < da->bs; i++) {
    const char *fieldname;
    PetscCall(DMDAGetFieldName(da, i, &fieldname));
    PetscCall(DMDASetFieldName(da2, i, fieldname));
  }

  *daref = da2;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1112 
/*
  DMCoarsen_DA - Implements DMCoarsen() for DMDA: builds a coarsened DMDA that mirrors the
  input's dimension, process layout, boundary types, dof, and stencil, with global sizes
  divided by the per-direction coarsening factors, and with coordinates injected from the
  fine grid when the fine grid has them.

  Note: the comm argument is unused; the coarse DM is created on the communicator of dmf.
*/
PetscErrorCode DMCoarsen_DA(DM dmf, MPI_Comm comm, DM *dmc)
{
  PetscInt M, N, P, i, dim;
  Vec      coordsc, coordsf;
  DM       dmc2;
  DM_DA   *dd = (DM_DA *)dmf->data, *dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(dmf, DM_CLASSID, 1, DMDA);
  PetscAssertPointer(dmc, 3);

  /* Coarse global sizes: for periodic directions or piecewise-constant (Q0) interpolation
     every point coarsens, otherwise the last point is shared and only the M-1 intervals scale */
  PetscCall(DMGetDimension(dmf, &dim));
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->M / dd->coarsen_x;
  } else {
    M = 1 + (dd->M - 1) / dd->coarsen_x;
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->N / dd->coarsen_y;
    } else {
      N = 1;
    }
  } else {
    N = 1 + (dd->N - 1) / dd->coarsen_y;
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->P / dd->coarsen_z;
    } else {
      P = 1;
    }
  } else {
    P = 1 + (dd->P - 1) / dd->coarsen_z;
  }
  /* Create the coarse DMDA with the same layout parameters as the fine one */
  PetscCall(DMDACreate(PetscObjectComm((PetscObject)dmf), &dmc2));
  PetscCall(DMSetOptionsPrefix(dmc2, ((PetscObject)dmf)->prefix));
  PetscCall(DMSetDimension(dmc2, dim));
  PetscCall(DMDASetSizes(dmc2, M, N, P));
  PetscCall(DMDASetNumProcs(dmc2, dd->m, dd->n, dd->p));
  PetscCall(DMDASetBoundaryType(dmc2, dd->bx, dd->by, dd->bz));
  PetscCall(DMDASetDof(dmc2, dd->w));
  PetscCall(DMDASetStencilType(dmc2, dd->stencil_type));
  PetscCall(DMDASetStencilWidth(dmc2, dd->s));
  /* Coarse ownership ranges chosen to stay compatible with the fine partition */
  if (dim == 3) {
    PetscInt *lx, *ly, *lz;
    PetscCall(PetscMalloc3(dd->m, &lx, dd->n, &ly, dd->p, &lz));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_y, dd->n, dd->ly, ly));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_z, dd->p, dd->lz, lz));
    PetscCall(DMDASetOwnershipRanges(dmc2, lx, ly, lz));
    PetscCall(PetscFree3(lx, ly, lz));
  } else if (dim == 2) {
    PetscInt *lx, *ly;
    PetscCall(PetscMalloc2(dd->m, &lx, dd->n, &ly));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_y, dd->n, dd->ly, ly));
    PetscCall(DMDASetOwnershipRanges(dmc2, lx, ly, NULL));
    PetscCall(PetscFree2(lx, ly));
  } else if (dim == 1) {
    PetscInt *lx;
    PetscCall(PetscMalloc1(dd->m, &lx));
    PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx));
    PetscCall(DMDASetOwnershipRanges(dmc2, lx, NULL, NULL));
    PetscCall(PetscFree(lx));
  }
  dd2 = (DM_DA *)dmc2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones; why are only some inherited and not all? */
  /* dmc2->ops->createinterpolation = dmf->ops->createinterpolation; copying this one causes trouble for DMSetVI */
  dmc2->ops->creatematrix = dmf->ops->creatematrix;
  dmc2->ops->getcoloring  = dmf->ops->getcoloring;
  dd2->interptype         = dd->interptype;

  /* copy fill information if given; the allocation size implies a CSR-like layout of w+1 offsets followed by entries */
  if (dd->dfill) {
    PetscCall(PetscMalloc1(dd->dfill[dd->w] + dd->w + 1, &dd2->dfill));
    PetscCall(PetscArraycpy(dd2->dfill, dd->dfill, dd->dfill[dd->w] + dd->w + 1));
  }
  if (dd->ofill) {
    PetscCall(PetscMalloc1(dd->ofill[dd->w] + dd->w + 1, &dd2->ofill));
    PetscCall(PetscArraycpy(dd2->ofill, dd->ofill, dd->ofill[dd->w] + dd->w + 1));
  }
  /* copy the coarsen/refine factor information (both set from the coarsening factors) */
  dd2->coarsen_x = dd2->refine_x = dd->coarsen_x;
  dd2->coarsen_y = dd2->refine_y = dd->coarsen_y;
  dd2->coarsen_z = dd2->refine_z = dd->coarsen_z;

  /* If per-level factor lists are present, override: the coarse DM's refine factor comes
     from index levelup - leveldown - 1 and its coarsen factor from index
     levelup - leveldown - 2, when those indices are in range */
  if (dd->refine_z_hier) {
    if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_z_hier_n) dd2->refine_z = dd->refine_z_hier[dmf->levelup - dmf->leveldown - 1];
    if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_z_hier_n) dd2->coarsen_z = dd->refine_z_hier[dmf->levelup - dmf->leveldown - 2];
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_z_hier_n, &dd2->refine_z_hier));
    PetscCall(PetscArraycpy(dd2->refine_z_hier, dd->refine_z_hier, dd2->refine_z_hier_n));
  }
  if (dd->refine_y_hier) {
    if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_y_hier_n) dd2->refine_y = dd->refine_y_hier[dmf->levelup - dmf->leveldown - 1];
    if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_y_hier_n) dd2->coarsen_y = dd->refine_y_hier[dmf->levelup - dmf->leveldown - 2];
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_y_hier_n, &dd2->refine_y_hier));
    PetscCall(PetscArraycpy(dd2->refine_y_hier, dd->refine_y_hier, dd2->refine_y_hier_n));
  }
  if (dd->refine_x_hier) {
    if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_x_hier_n) dd2->refine_x = dd->refine_x_hier[dmf->levelup - dmf->leveldown - 1];
    if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_x_hier_n) dd2->coarsen_x = dd->refine_x_hier[dmf->levelup - dmf->leveldown - 2];
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_x_hier_n, &dd2->refine_x_hier));
    PetscCall(PetscArraycpy(dd2->refine_x_hier, dd->refine_x_hier, dd2->refine_x_hier_n));
  }

  /* copy vector type information */
  PetscCall(DMSetVecType(dmc2, dmf->vectype));

  /* NOTE(review): lf/lj are copied by handle; presumably callback pointers with no ownership transfer -- confirm */
  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  /* the coarse DM sits one coarsening level below this one */
  dmc2->leveldown = dmf->leveldown + 1;
  dmc2->levelup   = dmf->levelup;

  PetscCall(DMSetUp(dmc2));

  /* inject coordinates if they are set on the fine grid */
  PetscCall(DMGetCoordinates(dmf, &coordsf));
  if (coordsf) {
    DM         cdaf, cdac;
    Mat        inject;
    VecScatter vscat;

    PetscCall(DMGetCoordinateDM(dmf, &cdaf));
    PetscCall(DMGetCoordinateDM(dmc2, &cdac));
    /* force creation of the coordinate vector */
    PetscCall(DMDASetUniformCoordinates(dmc2, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
    PetscCall(DMGetCoordinates(dmc2, &coordsc));

    PetscCall(DMCreateInjection(cdac, cdaf, &inject));
    PetscCall(MatScatterGetVecScatter(inject, &vscat));
    PetscCall(VecScatterBegin(vscat, coordsf, coordsc, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(vscat, coordsf, coordsc, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(MatDestroy(&inject));
  }

  /* propagate per-component field names to the coarse DM */
  for (i = 0; i < dmf->bs; i++) {
    const char *fieldname;
    PetscCall(DMDAGetFieldName(dmf, i, &fieldname));
    PetscCall(DMDASetFieldName(dmc2, i, fieldname));
  }

  *dmc = dmc2;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1263 
1264 PetscErrorCode DMRefineHierarchy_DA(DM da, PetscInt nlevels, DM daf[])
1265 {
1266   PetscInt i, n, *refx, *refy, *refz;
1267 
1268   PetscFunctionBegin;
1269   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
1270   PetscCheck(nlevels >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "nlevels cannot be negative");
1271   if (nlevels == 0) PetscFunctionReturn(PETSC_SUCCESS);
1272   PetscAssertPointer(daf, 3);
1273 
1274   /* Get refinement factors, defaults taken from the coarse DMDA */
1275   PetscCall(PetscMalloc3(nlevels, &refx, nlevels, &refy, nlevels, &refz));
1276   for (i = 0; i < nlevels; i++) PetscCall(DMDAGetRefinementFactor(da, &refx[i], &refy[i], &refz[i]));
1277   n = nlevels;
1278   PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_x", refx, &n, NULL));
1279   n = nlevels;
1280   PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_y", refy, &n, NULL));
1281   n = nlevels;
1282   PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_z", refz, &n, NULL));
1283 
1284   PetscCall(DMDASetRefinementFactor(da, refx[0], refy[0], refz[0]));
1285   PetscCall(DMRefine(da, PetscObjectComm((PetscObject)da), &daf[0]));
1286   for (i = 1; i < nlevels; i++) {
1287     PetscCall(DMDASetRefinementFactor(daf[i - 1], refx[i], refy[i], refz[i]));
1288     PetscCall(DMRefine(daf[i - 1], PetscObjectComm((PetscObject)da), &daf[i]));
1289   }
1290   PetscCall(PetscFree3(refx, refy, refz));
1291   PetscFunctionReturn(PETSC_SUCCESS);
1292 }
1293 
1294 PetscErrorCode DMCoarsenHierarchy_DA(DM da, PetscInt nlevels, DM dac[])
1295 {
1296   PetscInt i;
1297 
1298   PetscFunctionBegin;
1299   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
1300   PetscCheck(nlevels >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "nlevels cannot be negative");
1301   if (nlevels == 0) PetscFunctionReturn(PETSC_SUCCESS);
1302   PetscAssertPointer(dac, 3);
1303   PetscCall(DMCoarsen(da, PetscObjectComm((PetscObject)da), &dac[0]));
1304   for (i = 1; i < nlevels; i++) PetscCall(DMCoarsen(dac[i - 1], PetscObjectComm((PetscObject)da), &dac[i]));
1305   PetscFunctionReturn(PETSC_SUCCESS);
1306 }
1307 
1308 static PetscErrorCode DMDASetGLLCoordinates_1d(DM dm, PetscInt n, PetscReal *nodes)
1309 {
1310   PetscInt     i, j, xs, xn, q;
1311   PetscScalar *xx;
1312   PetscReal    h;
1313   Vec          x;
1314   DM_DA       *da = (DM_DA *)dm->data;
1315 
1316   PetscFunctionBegin;
1317   if (da->bx != DM_BOUNDARY_PERIODIC) {
1318     PetscCall(DMDAGetInfo(dm, NULL, &q, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
1319     q = (q - 1) / (n - 1); /* number of spectral elements */
1320     h = 2.0 / q;
1321     PetscCall(DMDAGetCorners(dm, &xs, NULL, NULL, &xn, NULL, NULL));
1322     xs = xs / (n - 1);
1323     xn = xn / (n - 1);
1324     PetscCall(DMDASetUniformCoordinates(dm, -1., 1., 0., 0., 0., 0.));
1325     PetscCall(DMGetCoordinates(dm, &x));
1326     PetscCall(DMDAVecGetArray(dm, x, &xx));
1327 
1328     /* loop over local spectral elements */
1329     for (j = xs; j < xs + xn; j++) {
1330       /*
1331        Except for the first process, each process starts on the second GLL point of the first element on that process
1332        */
1333       for (i = (j == xs && xs > 0) ? 1 : 0; i < n; i++) xx[j * (n - 1) + i] = -1.0 + h * j + h * (nodes[i] + 1.0) / 2.;
1334     }
1335     PetscCall(DMDAVecRestoreArray(dm, x, &xx));
1336   } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented for periodic");
1337   PetscFunctionReturn(PETSC_SUCCESS);
1338 }
1339 
1340 /*@
1341 
1342   DMDASetGLLCoordinates - Sets the global coordinates from -1 to 1 to the GLL points of as many GLL elements that fit the number of grid points
1343 
1344   Collective
1345 
1346   Input Parameters:
1347 + da    - the `DMDA` object
1348 . n     - the number of GLL nodes
1349 - nodes - the GLL nodes
1350 
1351   Level: advanced
1352 
1353   Note:
1354   The parallel decomposition of grid points must correspond to the degree of the GLL. That is, the number of grid points
1355   on each process much be divisible by the number of GLL elements needed per process. This depends on whether the `DM` is
1356   periodic or not.
1357 
1358 .seealso: `DM`, `DMDA`, `DMDACreate()`, `PetscDTGaussLobattoLegendreQuadrature()`, `DMGetCoordinates()`
1359 @*/
1360 PetscErrorCode DMDASetGLLCoordinates(DM da, PetscInt n, PetscReal *nodes)
1361 {
1362   PetscFunctionBegin;
1363   if (da->dim == 1) {
1364     PetscCall(DMDASetGLLCoordinates_1d(da, n, nodes));
1365   } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented for 2 or 3d");
1366   PetscFunctionReturn(PETSC_SUCCESS);
1367 }
1368 
/*
  DMGetCompatibility_DA - Implements DMGetCompatibility() for DMDA. Two set-up DMDAs are
  deemed compatible when they agree in dimension, stencil width, and -- for each active
  dimension -- global size, number of ranks, boundary type, and per-rank ownership ranges.
  Quantities not examined here (e.g. dof or stencil type) do not affect the result.
  If dm2 is not a DMDA the determination is declined by setting *set to PETSC_FALSE,
  and *compatible is left unchanged.
*/
PetscErrorCode DMGetCompatibility_DA(DM da1, DM dm2, PetscBool *compatible, PetscBool *set)
{
  DM_DA    *dd1 = (DM_DA *)da1->data, *dd2;
  DM        da2;
  DMType    dmtype2;
  PetscBool isda, compatibleLocal;
  PetscInt  i;

  PetscFunctionBegin;
  PetscCheck(da1->setupcalled, PetscObjectComm((PetscObject)da1), PETSC_ERR_ARG_WRONGSTATE, "DMSetUp() must be called on first DM before DMGetCompatibility()");
  PetscCall(DMGetType(dm2, &dmtype2));
  PetscCall(PetscStrcmp(dmtype2, DMDA, &isda));
  if (isda) {
    da2 = dm2;
    dd2 = (DM_DA *)da2->data;
    PetscCheck(da2->setupcalled, PetscObjectComm((PetscObject)da2), PETSC_ERR_ARG_WRONGSTATE, "DMSetUp() must be called on second DM before DMGetCompatibility()");
    /* each check below is short-circuited: once incompatible, stays incompatible */
    compatibleLocal = (PetscBool)(da1->dim == da2->dim);
    if (compatibleLocal) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->s == dd2->s)); /* Stencil width */
    /*                                                                           Global size              ranks               Boundary type */
    if (compatibleLocal) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->M == dd2->M) && (dd1->m == dd2->m) && (dd1->bx == dd2->bx));
    if (compatibleLocal && da1->dim > 1) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->N == dd2->N) && (dd1->n == dd2->n) && (dd1->by == dd2->by));
    if (compatibleLocal && da1->dim > 2) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->P == dd2->P) && (dd1->p == dd2->p) && (dd1->bz == dd2->bz));
    /* ownership-range comparisons are safe here: the size checks above guarantee equal array lengths */
    if (compatibleLocal) {
      for (i = 0; i < dd1->m; ++i) { compatibleLocal = (PetscBool)(compatibleLocal && (dd1->lx[i] == dd2->lx[i])); /* Local size     */ }
    }
    if (compatibleLocal && da1->dim > 1) {
      for (i = 0; i < dd1->n; ++i) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->ly[i] == dd2->ly[i]));
    }
    if (compatibleLocal && da1->dim > 2) {
      for (i = 0; i < dd1->p; ++i) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->lz[i] == dd2->lz[i]));
    }
    *compatible = compatibleLocal;
    *set        = PETSC_TRUE;
  } else {
    /* Decline to determine compatibility with other DM types */
    *set = PETSC_FALSE;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1408