xref: /petsc/src/dm/impls/da/da.c (revision 6e5f5dad97d5d0d453d1db100f9dbfcf5747de2b)
1 #include <petsc/private/dmdaimpl.h>    /*I   "petscdmda.h"   I*/
2 
3 /*@
4   DMDASetSizes - Sets the number of grid points in the three dimensional directions
5 
6   Logically Collective on DMDA
7 
8   Input Parameters:
9 + da - the DMDA
10 . M - the global X size
11 . N - the global Y size
12 - P - the global Z size
13 
14   Level: intermediate
15 
16   Developer Notes: Since the dimension may not yet have been set the code cannot error check for non-positive Y and Z number of grid points
17 
18 .seealso: DMDAGetSize(), PetscSplitOwnership()
19 @*/
20 PetscErrorCode  DMDASetSizes(DM da, PetscInt M, PetscInt N, PetscInt P)
21 {
22   DM_DA *dd = (DM_DA*)da->data;
23 
24   PetscFunctionBegin;
25   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
26   PetscValidLogicalCollectiveInt(da,M,2);
27   PetscValidLogicalCollectiveInt(da,N,3);
28   PetscValidLogicalCollectiveInt(da,P,4);
29   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
30   if (M < 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Number of grid points in X direction must be positive");
31   if (N < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Number of grid points in Y direction must be positive");
32   if (P < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Number of grid points in Z direction must be positive");
33 
34   dd->M = M;
35   dd->N = N;
36   dd->P = P;
37   PetscFunctionReturn(0);
38 }
39 
40 /*@
41   DMDASetNumProcs - Sets the number of processes in each dimension
42 
43   Logically Collective on DMDA
44 
45   Input Parameters:
46 + da - the DMDA
47 . m - the number of X procs (or PETSC_DECIDE)
48 . n - the number of Y procs (or PETSC_DECIDE)
49 - p - the number of Z procs (or PETSC_DECIDE)
50 
51   Level: intermediate
52 
53 .seealso: DMDASetSizes(), DMDAGetSize(), PetscSplitOwnership()
54 @*/
55 PetscErrorCode  DMDASetNumProcs(DM da, PetscInt m, PetscInt n, PetscInt p)
56 {
57   DM_DA          *dd = (DM_DA*)da->data;
58   PetscErrorCode ierr;
59 
60   PetscFunctionBegin;
61   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
62   PetscValidLogicalCollectiveInt(da,m,2);
63   PetscValidLogicalCollectiveInt(da,n,3);
64   PetscValidLogicalCollectiveInt(da,p,4);
65   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
66   dd->m = m;
67   dd->n = n;
68   dd->p = p;
69   if (da->dim == 2) {
70     PetscMPIInt size;
71     ierr = MPI_Comm_size(PetscObjectComm((PetscObject)da),&size);CHKERRQ(ierr);
72     if ((dd->m > 0) && (dd->n < 0)) {
73       dd->n = size/dd->m;
74       if (dd->n*dd->m != size) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%D processes in X direction not divisible into comm size %d",m,size);
75     }
76     if ((dd->n > 0) && (dd->m < 0)) {
77       dd->m = size/dd->n;
78       if (dd->n*dd->m != size) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%D processes in Y direction not divisible into comm size %d",n,size);
79     }
80   }
81   PetscFunctionReturn(0);
82 }
83 
84 /*@
85   DMDASetBoundaryType - Sets the type of ghost nodes on domain boundaries.
86 
87   Not collective
88 
89   Input Parameter:
90 + da    - The DMDA
91 - bx,by,bz - One of DM_BOUNDARY_NONE, DM_BOUNDARY_GHOSTED, DM_BOUNDARY_PERIODIC
92 
93   Level: intermediate
94 
95 .keywords:  distributed array, periodicity
96 .seealso: DMDACreate(), DMDestroy(), DMDA, DMBoundaryType
97 @*/
98 PetscErrorCode  DMDASetBoundaryType(DM da,DMBoundaryType bx,DMBoundaryType by,DMBoundaryType bz)
99 {
100   DM_DA *dd = (DM_DA*)da->data;
101 
102   PetscFunctionBegin;
103   PetscValidHeaderSpecific(da,DM_CLASSID,1);
104   PetscValidLogicalCollectiveEnum(da,bx,2);
105   PetscValidLogicalCollectiveEnum(da,by,3);
106   PetscValidLogicalCollectiveEnum(da,bz,4);
107   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
108   dd->bx = bx;
109   dd->by = by;
110   dd->bz = bz;
111   PetscFunctionReturn(0);
112 }
113 
114 /*@
115   DMDASetDof - Sets the number of degrees of freedom per vertex
116 
117   Not collective
118 
119   Input Parameters:
120 + da  - The DMDA
121 - dof - Number of degrees of freedom
122 
123   Level: intermediate
124 
125 .keywords:  distributed array, degrees of freedom
126 .seealso: DMDAGetDof(), DMDACreate(), DMDestroy(), DMDA
127 @*/
128 PetscErrorCode  DMDASetDof(DM da, PetscInt dof)
129 {
130   DM_DA *dd = (DM_DA*)da->data;
131 
132   PetscFunctionBegin;
133   PetscValidHeaderSpecific(da,DM_CLASSID,1);
134   PetscValidLogicalCollectiveInt(da,dof,2);
135   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
136   dd->w  = dof;
137   da->bs = dof;
138   PetscFunctionReturn(0);
139 }
140 
141 /*@
142   DMDAGetDof - Gets the number of degrees of freedom per vertex
143 
144   Not collective
145 
146   Input Parameter:
147 . da  - The DMDA
148 
149   Output Parameter:
150 . dof - Number of degrees of freedom
151 
152   Level: intermediate
153 
154 .keywords:  distributed array, degrees of freedom
155 .seealso: DMDASetDof(), DMDACreate(), DMDestroy(), DMDA
156 @*/
157 PetscErrorCode DMDAGetDof(DM da, PetscInt *dof)
158 {
159   DM_DA *dd = (DM_DA *) da->data;
160 
161   PetscFunctionBegin;
162   PetscValidHeaderSpecific(da,DM_CLASSID,1);
163   PetscValidPointer(dof,2);
164   *dof = dd->w;
165   PetscFunctionReturn(0);
166 }
167 
168 /*@
169   DMDAGetOverlap - Gets the size of the per-processor overlap.
170 
171   Not collective
172 
173   Input Parameters:
174 . da  - The DMDA
175 
176   Output Parameters:
177 + x   - Overlap in the x direction
178 . y   - Overlap in the y direction
179 - z   - Overlap in the z direction
180 
181   Level: intermediate
182 
183 .keywords:  distributed array, overlap, domain decomposition
184 .seealso: DMDACreateDomainDecomposition(), DMDASetOverlap(), DMDA
185 @*/
186 PetscErrorCode  DMDAGetOverlap(DM da,PetscInt *x,PetscInt *y,PetscInt *z)
187 {
188   DM_DA *dd = (DM_DA*)da->data;
189 
190   PetscFunctionBegin;
191   PetscValidHeaderSpecific(da,DM_CLASSID,1);
192   if (x) *x = dd->xol;
193   if (y) *y = dd->yol;
194   if (z) *z = dd->zol;
195   PetscFunctionReturn(0);
196 }
197 
198 /*@
199   DMDASetOverlap - Sets the size of the per-processor overlap.
200 
201   Not collective
202 
203   Input Parameters:
204 + da  - The DMDA
205 . x   - Overlap in the x direction
206 . y   - Overlap in the y direction
207 - z   - Overlap in the z direction
208 
209   Level: intermediate
210 
211 .keywords:  distributed array, overlap, domain decomposition
212 .seealso: DMDACreateDomainDecomposition(), DMDAGetOverlap(), DMDA
213 @*/
214 PetscErrorCode  DMDASetOverlap(DM da,PetscInt x,PetscInt y,PetscInt z)
215 {
216   DM_DA *dd = (DM_DA*)da->data;
217 
218   PetscFunctionBegin;
219   PetscValidHeaderSpecific(da,DM_CLASSID,1);
220   PetscValidLogicalCollectiveInt(da,x,2);
221   PetscValidLogicalCollectiveInt(da,y,3);
222   PetscValidLogicalCollectiveInt(da,z,4);
223   dd->xol = x;
224   dd->yol = y;
225   dd->zol = z;
226   PetscFunctionReturn(0);
227 }
228 
229 
230 /*@
231   DMDAGetNumLocalSubDomains - Gets the number of local subdomains created upon decomposition.
232 
233   Not collective
234 
235   Input Parameters:
236 . da  - The DMDA
237 
238   Output Parameters:
239 + Nsub   - Number of local subdomains created upon decomposition
240 
241   Level: intermediate
242 
243 .keywords:  distributed array, domain decomposition
244 .seealso: DMDACreateDomainDecomposition(), DMDASetNumLocalSubDomains(), DMDA
245 @*/
246 PetscErrorCode  DMDAGetNumLocalSubDomains(DM da,PetscInt *Nsub)
247 {
248   DM_DA *dd = (DM_DA*)da->data;
249 
250   PetscFunctionBegin;
251   PetscValidHeaderSpecific(da,DM_CLASSID,1);
252   if (Nsub) *Nsub = dd->Nsub;
253   PetscFunctionReturn(0);
254 }
255 
256 /*@
257   DMDASetNumLocalSubDomains - Sets the number of local subdomains created upon decomposition.
258 
259   Not collective
260 
261   Input Parameters:
262 + da  - The DMDA
263 - Nsub - The number of local subdomains requested
264 
265   Level: intermediate
266 
267 .keywords:  distributed array, domain decomposition
268 .seealso: DMDACreateDomainDecomposition(), DMDAGetNumLocalSubDomains(), DMDA
269 @*/
270 PetscErrorCode  DMDASetNumLocalSubDomains(DM da,PetscInt Nsub)
271 {
272   DM_DA *dd = (DM_DA*)da->data;
273 
274   PetscFunctionBegin;
275   PetscValidHeaderSpecific(da,DM_CLASSID,1);
276   PetscValidLogicalCollectiveInt(da,Nsub,2);
277   dd->Nsub = Nsub;
278   PetscFunctionReturn(0);
279 }
280 
281 /*@
282   DMDASetOffset - Sets the index offset of the DA.
283 
284   Collective on DA
285 
286   Input Parameter:
287 + da  - The DMDA
288 . xo  - The offset in the x direction
289 . yo  - The offset in the y direction
290 - zo  - The offset in the z direction
291 
292   Level: intermediate
293 
294   Notes: This is used primarily to overlap a computation on a local DA with that on a global DA without
295   changing boundary conditions or subdomain features that depend upon the global offsets.
296 
297 .keywords:  distributed array, degrees of freedom
298 .seealso: DMDAGetOffset(), DMDAVecGetArray()
299 @*/
300 PetscErrorCode  DMDASetOffset(DM da, PetscInt xo, PetscInt yo, PetscInt zo, PetscInt Mo, PetscInt No, PetscInt Po)
301 {
302   PetscErrorCode ierr;
303   DM_DA          *dd = (DM_DA*)da->data;
304 
305   PetscFunctionBegin;
306   PetscValidHeaderSpecific(da,DM_CLASSID,1);
307   PetscValidLogicalCollectiveInt(da,xo,2);
308   PetscValidLogicalCollectiveInt(da,yo,3);
309   PetscValidLogicalCollectiveInt(da,zo,4);
310   PetscValidLogicalCollectiveInt(da,Mo,5);
311   PetscValidLogicalCollectiveInt(da,No,6);
312   PetscValidLogicalCollectiveInt(da,Po,7);
313   dd->xo = xo;
314   dd->yo = yo;
315   dd->zo = zo;
316   dd->Mo = Mo;
317   dd->No = No;
318   dd->Po = Po;
319 
320   if (da->coordinateDM) {
321     ierr = DMDASetOffset(da->coordinateDM,xo,yo,zo,Mo,No,Po);CHKERRQ(ierr);
322   }
323   PetscFunctionReturn(0);
324 }
325 
326 /*@
327   DMDAGetOffset - Gets the index offset of the DA.
328 
329   Not collective
330 
331   Input Parameter:
332 . da  - The DMDA
333 
334   Output Parameters:
335 + xo  - The offset in the x direction
336 . yo  - The offset in the y direction
337 . zo  - The offset in the z direction
338 . Mo  - The global size in the x direction
339 . No  - The global size in the y direction
340 - Po  - The global size in the z direction
341 
342   Level: intermediate
343 
344 .keywords:  distributed array, degrees of freedom
345 .seealso: DMDASetOffset(), DMDAVecGetArray()
346 @*/
347 PetscErrorCode  DMDAGetOffset(DM da,PetscInt *xo,PetscInt *yo,PetscInt *zo,PetscInt *Mo,PetscInt *No,PetscInt *Po)
348 {
349   DM_DA *dd = (DM_DA*)da->data;
350 
351   PetscFunctionBegin;
352   PetscValidHeaderSpecific(da,DM_CLASSID,1);
353   if (xo) *xo = dd->xo;
354   if (yo) *yo = dd->yo;
355   if (zo) *zo = dd->zo;
356   if (Mo) *Mo = dd->Mo;
357   if (No) *No = dd->No;
358   if (Po) *Po = dd->Po;
359   PetscFunctionReturn(0);
360 }
361 
362 /*@
363   DMDAGetNonOverlappingRegion - Gets the indices of the nonoverlapping region of a subdomain DM.
364 
365   Not collective
366 
367   Input Parameter:
368 . da  - The DMDA
369 
370   Output Parameters:
371 + xs  - The start of the region in x
372 . ys  - The start of the region in y
373 . zs  - The start of the region in z
374 . xs  - The size of the region in x
375 . ys  - The size of the region in y
376 . zs  - The size of the region in z
377 
378   Level: intermediate
379 
380 .keywords:  distributed array, degrees of freedom
381 .seealso: DMDAGetOffset(), DMDAVecGetArray()
382 @*/
383 PetscErrorCode  DMDAGetNonOverlappingRegion(DM da, PetscInt *xs, PetscInt *ys, PetscInt *zs, PetscInt *xm, PetscInt *ym, PetscInt *zm)
384 {
385   DM_DA          *dd = (DM_DA*)da->data;
386 
387   PetscFunctionBegin;
388   PetscValidHeaderSpecific(da,DM_CLASSID,1);
389   if (xs) *xs = dd->nonxs;
390   if (ys) *ys = dd->nonys;
391   if (zs) *zs = dd->nonzs;
392   if (xm) *xm = dd->nonxm;
393   if (ym) *ym = dd->nonym;
394   if (zm) *zm = dd->nonzm;
395   PetscFunctionReturn(0);
396 }
397 
398 
399 /*@
400   DMDASetNonOverlappingRegion - Sets the indices of the nonoverlapping region of a subdomain DM.
401 
402   Collective on DA
403 
404   Input Parameter:
405 + da  - The DMDA
406 . xs  - The start of the region in x
407 . ys  - The start of the region in y
408 . zs  - The start of the region in z
409 . xs  - The size of the region in x
410 . ys  - The size of the region in y
411 . zs  - The size of the region in z
412 
413   Level: intermediate
414 
415 .keywords:  distributed array, degrees of freedom
416 .seealso: DMDAGetOffset(), DMDAVecGetArray()
417 @*/
418 PetscErrorCode  DMDASetNonOverlappingRegion(DM da, PetscInt xs, PetscInt ys, PetscInt zs, PetscInt xm, PetscInt ym, PetscInt zm)
419 {
420   DM_DA          *dd = (DM_DA*)da->data;
421 
422   PetscFunctionBegin;
423   PetscValidHeaderSpecific(da,DM_CLASSID,1);
424   PetscValidLogicalCollectiveInt(da,xs,2);
425   PetscValidLogicalCollectiveInt(da,ys,3);
426   PetscValidLogicalCollectiveInt(da,zs,4);
427   PetscValidLogicalCollectiveInt(da,xm,5);
428   PetscValidLogicalCollectiveInt(da,ym,6);
429   PetscValidLogicalCollectiveInt(da,zm,7);
430   dd->nonxs = xs;
431   dd->nonys = ys;
432   dd->nonzs = zs;
433   dd->nonxm = xm;
434   dd->nonym = ym;
435   dd->nonzm = zm;
436 
437   PetscFunctionReturn(0);
438 }
439 
440 /*@
441   DMDASetStencilType - Sets the type of the communication stencil
442 
443   Logically Collective on DMDA
444 
445   Input Parameter:
446 + da    - The DMDA
447 - stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR.
448 
449   Level: intermediate
450 
451 .keywords:  distributed array, stencil
452 .seealso: DMDACreate(), DMDestroy(), DMDA
453 @*/
454 PetscErrorCode  DMDASetStencilType(DM da, DMDAStencilType stype)
455 {
456   DM_DA *dd = (DM_DA*)da->data;
457 
458   PetscFunctionBegin;
459   PetscValidHeaderSpecific(da,DM_CLASSID,1);
460   PetscValidLogicalCollectiveEnum(da,stype,2);
461   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
462   dd->stencil_type = stype;
463   PetscFunctionReturn(0);
464 }
465 
466 /*@
467   DMDAGetStencilType - Gets the type of the communication stencil
468 
469   Not collective
470 
471   Input Parameter:
472 . da    - The DMDA
473 
474   Output Parameter:
475 . stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR.
476 
477   Level: intermediate
478 
479 .keywords:  distributed array, stencil
480 .seealso: DMDACreate(), DMDestroy(), DMDA
481 @*/
482 PetscErrorCode DMDAGetStencilType(DM da, DMDAStencilType *stype)
483 {
484   DM_DA *dd = (DM_DA*)da->data;
485 
486   PetscFunctionBegin;
487   PetscValidHeaderSpecific(da,DM_CLASSID,1);
488   PetscValidPointer(stype,2);
489   *stype = dd->stencil_type;
490   PetscFunctionReturn(0);
491 }
492 
493 /*@
494   DMDASetStencilWidth - Sets the width of the communication stencil
495 
496   Logically Collective on DMDA
497 
498   Input Parameter:
499 + da    - The DMDA
500 - width - The stencil width
501 
502   Level: intermediate
503 
504 .keywords:  distributed array, stencil
505 .seealso: DMDACreate(), DMDestroy(), DMDA
506 @*/
507 PetscErrorCode  DMDASetStencilWidth(DM da, PetscInt width)
508 {
509   DM_DA *dd = (DM_DA*)da->data;
510 
511   PetscFunctionBegin;
512   PetscValidHeaderSpecific(da,DM_CLASSID,1);
513   PetscValidLogicalCollectiveInt(da,width,2);
514   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
515   dd->s = width;
516   PetscFunctionReturn(0);
517 }
518 
519 /*@
520   DMDAGetStencilWidth - Gets the width of the communication stencil
521 
522   Not collective
523 
524   Input Parameter:
525 . da    - The DMDA
526 
527   Output Parameter:
528 . width - The stencil width
529 
530   Level: intermediate
531 
532 .keywords:  distributed array, stencil
533 .seealso: DMDACreate(), DMDestroy(), DMDA
534 @*/
535 PetscErrorCode DMDAGetStencilWidth(DM da, PetscInt *width)
536 {
537   DM_DA *dd = (DM_DA *) da->data;
538 
539   PetscFunctionBegin;
540   PetscValidHeaderSpecific(da,DM_CLASSID,1);
541   PetscValidPointer(width,2);
542   *width = dd->s;
543   PetscFunctionReturn(0);
544 }
545 
546 static PetscErrorCode DMDACheckOwnershipRanges_Private(DM da,PetscInt M,PetscInt m,const PetscInt lx[])
547 {
548   PetscInt i,sum;
549 
550   PetscFunctionBegin;
551   if (M < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Global dimension not set");
552   for (i=sum=0; i<m; i++) sum += lx[i];
553   if (sum != M) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_INCOMP,"Ownership ranges sum to %D but global dimension is %D",sum,M);
554   PetscFunctionReturn(0);
555 }
556 
557 /*@
558   DMDASetOwnershipRanges - Sets the number of nodes in each direction on each process
559 
560   Logically Collective on DMDA
561 
562   Input Parameter:
563 + da - The DMDA
564 . lx - array containing number of nodes in the X direction on each process, or NULL. If non-null, must be of length da->m
565 . ly - array containing number of nodes in the Y direction on each process, or NULL. If non-null, must be of length da->n
566 - lz - array containing number of nodes in the Z direction on each process, or NULL. If non-null, must be of length da->p.
567 
568   Level: intermediate
569 
570   Note: these numbers are NOT multiplied by the number of dof per node.
571 
572 .keywords:  distributed array
573 .seealso: DMDACreate(), DMDestroy(), DMDA
574 @*/
575 PetscErrorCode  DMDASetOwnershipRanges(DM da, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[])
576 {
577   PetscErrorCode ierr;
578   DM_DA          *dd = (DM_DA*)da->data;
579 
580   PetscFunctionBegin;
581   PetscValidHeaderSpecific(da,DM_CLASSID,1);
582   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
583   if (lx) {
584     if (dd->m < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
585     ierr = DMDACheckOwnershipRanges_Private(da,dd->M,dd->m,lx);CHKERRQ(ierr);
586     if (!dd->lx) {
587       ierr = PetscMalloc1(dd->m, &dd->lx);CHKERRQ(ierr);
588     }
589     ierr = PetscMemcpy(dd->lx, lx, dd->m*sizeof(PetscInt));CHKERRQ(ierr);
590   }
591   if (ly) {
592     if (dd->n < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
593     ierr = DMDACheckOwnershipRanges_Private(da,dd->N,dd->n,ly);CHKERRQ(ierr);
594     if (!dd->ly) {
595       ierr = PetscMalloc1(dd->n, &dd->ly);CHKERRQ(ierr);
596     }
597     ierr = PetscMemcpy(dd->ly, ly, dd->n*sizeof(PetscInt));CHKERRQ(ierr);
598   }
599   if (lz) {
600     if (dd->p < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
601     ierr = DMDACheckOwnershipRanges_Private(da,dd->P,dd->p,lz);CHKERRQ(ierr);
602     if (!dd->lz) {
603       ierr = PetscMalloc1(dd->p, &dd->lz);CHKERRQ(ierr);
604     }
605     ierr = PetscMemcpy(dd->lz, lz, dd->p*sizeof(PetscInt));CHKERRQ(ierr);
606   }
607   PetscFunctionReturn(0);
608 }
609 
610 /*@
611        DMDASetInterpolationType - Sets the type of interpolation that will be
612           returned by DMCreateInterpolation()
613 
614    Logically Collective on DMDA
615 
616    Input Parameter:
617 +  da - initial distributed array
618 .  ctype - DMDA_Q1 and DMDA_Q0 are currently the only supported forms
619 
620    Level: intermediate
621 
622    Notes: you should call this on the coarser of the two DMDAs you pass to DMCreateInterpolation()
623 
624 .keywords:  distributed array, interpolation
625 
626 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType
627 @*/
628 PetscErrorCode  DMDASetInterpolationType(DM da,DMDAInterpolationType ctype)
629 {
630   DM_DA *dd = (DM_DA*)da->data;
631 
632   PetscFunctionBegin;
633   PetscValidHeaderSpecific(da,DM_CLASSID,1);
634   PetscValidLogicalCollectiveEnum(da,ctype,2);
635   dd->interptype = ctype;
636   PetscFunctionReturn(0);
637 }
638 
639 /*@
640        DMDAGetInterpolationType - Gets the type of interpolation that will be
641           used by DMCreateInterpolation()
642 
643    Not Collective
644 
645    Input Parameter:
646 .  da - distributed array
647 
648    Output Parameter:
649 .  ctype - interpolation type (DMDA_Q1 and DMDA_Q0 are currently the only supported forms)
650 
651    Level: intermediate
652 
653 .keywords:  distributed array, interpolation
654 
655 .seealso: DMDA, DMDAInterpolationType, DMDASetInterpolationType(), DMCreateInterpolation()
656 @*/
657 PetscErrorCode  DMDAGetInterpolationType(DM da,DMDAInterpolationType *ctype)
658 {
659   DM_DA *dd = (DM_DA*)da->data;
660 
661   PetscFunctionBegin;
662   PetscValidHeaderSpecific(da,DM_CLASSID,1);
663   PetscValidPointer(ctype,2);
664   *ctype = dd->interptype;
665   PetscFunctionReturn(0);
666 }
667 
668 /*@C
669       DMDAGetNeighbors - Gets an array containing the MPI rank of all the current
670         processes neighbors.
671 
672     Not Collective
673 
674    Input Parameter:
675 .     da - the DMDA object
676 
677    Output Parameters:
678 .     ranks - the neighbors ranks, stored with the x index increasing most rapidly.
679               this process itself is in the list
680 
681    Notes: In 2d the array is of length 9, in 3d of length 27
682           Not supported in 1d
683           Do not free the array, it is freed when the DMDA is destroyed.
684 
685    Fortran Notes: In fortran you must pass in an array of the appropriate length.
686 
687    Level: intermediate
688 
689 @*/
690 PetscErrorCode  DMDAGetNeighbors(DM da,const PetscMPIInt *ranks[])
691 {
692   DM_DA *dd = (DM_DA*)da->data;
693 
694   PetscFunctionBegin;
695   PetscValidHeaderSpecific(da,DM_CLASSID,1);
696   *ranks = dd->neighbors;
697   PetscFunctionReturn(0);
698 }
699 
700 /*@C
701       DMDAGetOwnershipRanges - Gets the ranges of indices in the x, y and z direction that are owned by each process
702 
703     Not Collective
704 
705    Input Parameter:
706 .     da - the DMDA object
707 
708    Output Parameter:
709 +     lx - ownership along x direction (optional)
710 .     ly - ownership along y direction (optional)
711 -     lz - ownership along z direction (optional)
712 
713    Level: intermediate
714 
715     Note: these correspond to the optional final arguments passed to DMDACreate(), DMDACreate2d(), DMDACreate3d()
716 
717     In Fortran one must pass in arrays lx, ly, and lz that are long enough to hold the values; the sixth, seventh and
718     eighth arguments from DMDAGetInfo()
719 
720      In C you should not free these arrays, nor change the values in them. They will only have valid values while the
721     DMDA they came from still exists (has not been destroyed).
722 
723     These numbers are NOT multiplied by the number of dof per node.
724 
725      Not available from Fortran
726 
727 .seealso: DMDAGetCorners(), DMDAGetGhostCorners(), DMDACreate(), DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), VecGetOwnershipRanges()
728 @*/
729 PetscErrorCode  DMDAGetOwnershipRanges(DM da,const PetscInt *lx[],const PetscInt *ly[],const PetscInt *lz[])
730 {
731   DM_DA *dd = (DM_DA*)da->data;
732 
733   PetscFunctionBegin;
734   PetscValidHeaderSpecific(da,DM_CLASSID,1);
735   if (lx) *lx = dd->lx;
736   if (ly) *ly = dd->ly;
737   if (lz) *lz = dd->lz;
738   PetscFunctionReturn(0);
739 }
740 
741 /*@
742      DMDASetRefinementFactor - Set the ratios that the DMDA grid is refined
743 
744     Logically Collective on DMDA
745 
746   Input Parameters:
747 +    da - the DMDA object
748 .    refine_x - ratio of fine grid to coarse in x direction (2 by default)
749 .    refine_y - ratio of fine grid to coarse in y direction (2 by default)
750 -    refine_z - ratio of fine grid to coarse in z direction (2 by default)
751 
752   Options Database:
753 +  -da_refine_x - refinement ratio in x direction
754 .  -da_refine_y - refinement ratio in y direction
755 -  -da_refine_z - refinement ratio in z direction
756 
757   Level: intermediate
758 
759     Notes: Pass PETSC_IGNORE to leave a value unchanged
760 
761 .seealso: DMRefine(), DMDAGetRefinementFactor()
762 @*/
763 PetscErrorCode  DMDASetRefinementFactor(DM da, PetscInt refine_x, PetscInt refine_y,PetscInt refine_z)
764 {
765   DM_DA *dd = (DM_DA*)da->data;
766 
767   PetscFunctionBegin;
768   PetscValidHeaderSpecific(da,DM_CLASSID,1);
769   PetscValidLogicalCollectiveInt(da,refine_x,2);
770   PetscValidLogicalCollectiveInt(da,refine_y,3);
771   PetscValidLogicalCollectiveInt(da,refine_z,4);
772 
773   if (refine_x > 0) dd->refine_x = refine_x;
774   if (refine_y > 0) dd->refine_y = refine_y;
775   if (refine_z > 0) dd->refine_z = refine_z;
776   PetscFunctionReturn(0);
777 }
778 
779 /*@C
780      DMDAGetRefinementFactor - Gets the ratios that the DMDA grid is refined
781 
782     Not Collective
783 
784   Input Parameter:
785 .    da - the DMDA object
786 
787   Output Parameters:
788 +    refine_x - ratio of fine grid to coarse in x direction (2 by default)
789 .    refine_y - ratio of fine grid to coarse in y direction (2 by default)
790 -    refine_z - ratio of fine grid to coarse in z direction (2 by default)
791 
792   Level: intermediate
793 
794     Notes: Pass NULL for values you do not need
795 
796 .seealso: DMRefine(), DMDASetRefinementFactor()
797 @*/
798 PetscErrorCode  DMDAGetRefinementFactor(DM da, PetscInt *refine_x, PetscInt *refine_y,PetscInt *refine_z)
799 {
800   DM_DA *dd = (DM_DA*)da->data;
801 
802   PetscFunctionBegin;
803   PetscValidHeaderSpecific(da,DM_CLASSID,1);
804   if (refine_x) *refine_x = dd->refine_x;
805   if (refine_y) *refine_y = dd->refine_y;
806   if (refine_z) *refine_z = dd->refine_z;
807   PetscFunctionReturn(0);
808 }
809 
/*@C
     DMDASetGetMatrix - Sets the routine used by the DMDA to allocate a matrix.

    Logically Collective on DMDA

  Input Parameters:
+    da - the DMDA object
-    f - the function that allocates the matrix for that specific DMDA

  Level: developer

   Notes: See DMDASetBlockFills() that provides a simple way to provide the nonzero structure for
       the diagonal and off-diagonal blocks of the matrix

   Not supported from Fortran

.seealso: DMCreateMatrix(), DMDASetBlockFills()
@*/
PetscErrorCode  DMDASetGetMatrix(DM da,PetscErrorCode (*f)(DM, Mat*))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* override the DM's matrix-creation method; DMCreateMatrix() will dispatch to f */
  da->ops->creatematrix = f;
  PetscFunctionReturn(0);
}
835 
836 /*
837   Creates "balanced" ownership ranges after refinement, constrained by the need for the
838   fine grid boundaries to fall within one stencil width of the coarse partition.
839 
840   Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.
841 */
842 static PetscErrorCode DMDARefineOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lc[],PetscInt lf[])
843 {
844   PetscInt       i,totalc = 0,remaining,startc = 0,startf = 0;
845   PetscErrorCode ierr;
846 
847   PetscFunctionBegin;
848   if (ratio < 1) SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %D must be at least 1",ratio);
849   if (ratio == 1) {
850     ierr = PetscMemcpy(lf,lc,m*sizeof(lc[0]));CHKERRQ(ierr);
851     PetscFunctionReturn(0);
852   }
853   for (i=0; i<m; i++) totalc += lc[i];
854   remaining = (!periodic) + ratio * (totalc - (!periodic));
855   for (i=0; i<m; i++) {
856     PetscInt want = remaining/(m-i) + !!(remaining%(m-i));
857     if (i == m-1) lf[i] = want;
858     else {
859       const PetscInt nextc = startc + lc[i];
860       /* Move the first fine node of the next subdomain to the right until the coarse node on its left is within one
861        * coarse stencil width of the first coarse node in the next subdomain. */
862       while ((startf+want)/ratio < nextc - stencil_width) want++;
863       /* Move the last fine node in the current subdomain to the left until the coarse node on its right is within one
864        * coarse stencil width of the last coarse node in the current subdomain. */
865       while ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width) want--;
866       /* Make sure all constraints are satisfied */
867       if (want < 0 || want > remaining || ((startf+want)/ratio < nextc - stencil_width)
868           || ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible refined ownership range");
869     }
870     lf[i]      = want;
871     startc    += lc[i];
872     startf    += lf[i];
873     remaining -= lf[i];
874   }
875   PetscFunctionReturn(0);
876 }
877 
878 /*
879   Creates "balanced" ownership ranges after coarsening, constrained by the need for the
880   fine grid boundaries to fall within one stencil width of the coarse partition.
881 
882   Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.
883 */
884 static PetscErrorCode DMDACoarsenOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lf[],PetscInt lc[])
885 {
886   PetscInt       i,totalf,remaining,startc,startf;
887   PetscErrorCode ierr;
888 
889   PetscFunctionBegin;
890   if (ratio < 1) SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %D must be at least 1",ratio);
891   if (ratio == 1) {
892     ierr = PetscMemcpy(lc,lf,m*sizeof(lf[0]));CHKERRQ(ierr);
893     PetscFunctionReturn(0);
894   }
895   for (i=0,totalf=0; i<m; i++) totalf += lf[i];
896   remaining = (!periodic) + (totalf - (!periodic)) / ratio;
897   for (i=0,startc=0,startf=0; i<m; i++) {
898     PetscInt want = remaining/(m-i) + !!(remaining%(m-i));
899     if (i == m-1) lc[i] = want;
900     else {
901       const PetscInt nextf = startf+lf[i];
902       /* Slide first coarse node of next subdomain to the left until the coarse node to the left of the first fine
903        * node is within one stencil width. */
904       while (nextf/ratio < startc+want-stencil_width) want--;
905       /* Slide the last coarse node of the current subdomain to the right until the coarse node to the right of the last
906        * fine node is within one stencil width. */
907       while ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width) want++;
908       if (want < 0 || want > remaining
909           || (nextf/ratio < startc+want-stencil_width) || ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible coarsened ownership range");
910     }
911     lc[i]      = want;
912     startc    += lc[i];
913     startf    += lf[i];
914     remaining -= lc[i];
915   }
916   PetscFunctionReturn(0);
917 }
918 
/*
  DMRefine_DA - Creates a refined DMDA from an existing one, inheriting its
  process grid, boundary types, dof, stencil, fill information, refinement
  factors, overloaded operations, field names, and (interpolated) coordinates.

  Note: the comm argument is not used; the new DM is created on the
  communicator of da.
*/
PetscErrorCode  DMRefine_DA(DM da,MPI_Comm comm,DM *daref)
{
  PetscErrorCode ierr;
  PetscInt       M,N,P,i,dim;
  DM             da2;
  DM_DA          *dd = (DM_DA*)da->data,*dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(daref,3);

  ierr = DMGetDimension(da, &dim);CHKERRQ(ierr);
  /* Global sizes of the refined grid: for periodic boundaries or piecewise
     constant (Q0) interpolation every point is refined, so sizes scale by the
     full factor; otherwise the endpoints are shared, giving 1 + r*(size-1).
     Directions beyond dim collapse to size 1. */
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->refine_x*dd->M;
  } else {
    M = 1 + dd->refine_x*(dd->M - 1);
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->refine_y*dd->N;
    } else {
      N = 1;
    }
  } else {
    N = 1 + dd->refine_y*(dd->N - 1);
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->refine_z*dd->P;
    } else {
      P = 1;
    }
  } else {
    P = 1 + dd->refine_z*(dd->P - 1);
  }
  /* Create the fine DMDA with the same process grid, boundary types, dof,
     and stencil as the coarse one */
  ierr = DMDACreate(PetscObjectComm((PetscObject)da),&da2);CHKERRQ(ierr);
  ierr = DMSetOptionsPrefix(da2,((PetscObject)da)->prefix);CHKERRQ(ierr);
  ierr = DMSetDimension(da2,dim);CHKERRQ(ierr);
  ierr = DMDASetSizes(da2,M,N,P);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(da2,dd->m,dd->n,dd->p);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz);CHKERRQ(ierr);
  ierr = DMDASetDof(da2,dd->w);CHKERRQ(ierr);
  ierr = DMDASetStencilType(da2,dd->stencil_type);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(da2,dd->s);CHKERRQ(ierr);
  /* Derive per-process ownership ranges on the fine grid that stay
     compatible (within a stencil width) with the coarse decomposition */
  if (dim == 3) {
    PetscInt *lx,*ly,*lz;
    ierr = PetscMalloc3(dd->m,&lx,dd->n,&ly,dd->p,&lz);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_z,dd->p,dd->lz,lz);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,lz);CHKERRQ(ierr);
    ierr = PetscFree3(lx,ly,lz);CHKERRQ(ierr);
  } else if (dim == 2) {
    PetscInt *lx,*ly;
    ierr = PetscMalloc2(dd->m,&lx,dd->n,&ly);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,NULL);CHKERRQ(ierr);
    ierr = PetscFree2(lx,ly);CHKERRQ(ierr);
  } else if (dim == 1) {
    PetscInt *lx;
    ierr = PetscMalloc1(dd->m,&lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,NULL,NULL);CHKERRQ(ierr);
    ierr = PetscFree(lx);CHKERRQ(ierr);
  }
  dd2 = (DM_DA*)da2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones */
  da2->ops->creatematrix = da->ops->creatematrix;
  /* da2->ops->createinterpolation = da->ops->createinterpolation; this causes problem with SNESVI */
  da2->ops->getcoloring = da->ops->getcoloring;
  dd2->interptype       = dd->interptype;

  /* copy fill information if given (dfill/ofill arrays of length fill[w]+w+1) */
  if (dd->dfill) {
    ierr = PetscMalloc1(dd->dfill[dd->w]+dd->w+1,&dd2->dfill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->dfill,dd->dfill,(dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->ofill) {
    ierr = PetscMalloc1(dd->ofill[dd->w]+dd->w+1,&dd2->ofill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->ofill,dd->ofill,(dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  /* copy the refine information */
  dd2->coarsen_x = dd2->refine_x = dd->refine_x;
  dd2->coarsen_y = dd2->refine_y = dd->refine_y;
  dd2->coarsen_z = dd2->refine_z = dd->refine_z;

  /* If per-level refinement factors (refine_*_hier) were provided, look up the
     factor for the next finer level (levelup-leveldown+1) and the coarsening
     factor back to this level (levelup-leveldown); otherwise the copies above
     remain in effect. The whole hierarchy array is deep-copied to the clone. */
  if (dd->refine_z_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_z_hier_n) {
      dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_z_hier_n) {
      dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown];
    }
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    ierr = PetscMalloc1(dd2->refine_z_hier_n,&dd2->refine_z_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_z_hier,dd->refine_z_hier,dd2->refine_z_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_y_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_y_hier_n) {
      dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_y_hier_n) {
      dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown];
    }
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    ierr = PetscMalloc1(dd2->refine_y_hier_n,&dd2->refine_y_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_y_hier,dd->refine_y_hier,dd2->refine_y_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_x_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_x_hier_n) {
      dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_x_hier_n) {
      dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown];
    }
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    ierr = PetscMalloc1(dd2->refine_x_hier_n,&dd2->refine_x_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_x_hier,dd->refine_x_hier,dd2->refine_x_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }


  /* copy vector type information */
  ierr = DMSetVecType(da2,da->vectype);CHKERRQ(ierr);

  /* NOTE(review): shallow copies — lf/lj appear to be callback pointers shared
     with the coarse DM, not owned arrays; confirm against dmdaimpl.h */
  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  /* the clone sits one refinement level above da */
  da2->leveldown = da->leveldown;
  da2->levelup   = da->levelup + 1;

  ierr = DMSetUp(da2);CHKERRQ(ierr);

  /* interpolate coordinates if they are set on the coarse grid */
  if (da->coordinates) {
    DM  cdaf,cdac;
    Vec coordsc,coordsf;
    Mat II;

    ierr = DMGetCoordinateDM(da,&cdac);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da,&coordsc);CHKERRQ(ierr);
    ierr = DMGetCoordinateDM(da2,&cdaf);CHKERRQ(ierr);
    /* force creation of the coordinate vector */
    ierr = DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da2,&coordsf);CHKERRQ(ierr);
    ierr = DMCreateInterpolation(cdac,cdaf,&II,NULL);CHKERRQ(ierr);
    ierr = MatInterpolate(II,coordsc,coordsf);CHKERRQ(ierr);
    ierr = MatDestroy(&II);CHKERRQ(ierr);
  }

  /* carry the per-field names over to the refined DM */
  for (i=0; i<da->bs; i++) {
    const char *fieldname;
    ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr);
    ierr = DMDASetFieldName(da2,i,fieldname);CHKERRQ(ierr);
  }

  *daref = da2;
  PetscFunctionReturn(0);
}
1079 
1080 
/*
  DMCoarsen_DA - Creates a coarsened DMDA from an existing one, inheriting its
  process grid, boundary types, dof, stencil, fill information, coarsening
  factors, overloaded operations, field names, and (injected) coordinates.

  Note: the comm argument is not used; the new DM is created on the
  communicator of da.
*/
PetscErrorCode  DMCoarsen_DA(DM da, MPI_Comm comm,DM *daref)
{
  PetscErrorCode ierr;
  PetscInt       M,N,P,i,dim;
  DM             da2;
  DM_DA          *dd = (DM_DA*)da->data,*dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(daref,3);

  ierr = DMGetDimension(da, &dim);CHKERRQ(ierr);
  /* Global sizes of the coarsened grid: inverse of the refinement formulas —
     periodic (or Q0) grids divide by the full factor, otherwise the shared
     endpoints give 1 + (size-1)/c. Directions beyond dim collapse to size 1. */
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->M / dd->coarsen_x;
  } else {
    M = 1 + (dd->M - 1) / dd->coarsen_x;
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->N / dd->coarsen_y;
    } else {
      N = 1;
    }
  } else {
    N = 1 + (dd->N - 1) / dd->coarsen_y;
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->P / dd->coarsen_z;
    } else {
      P = 1;
    }
  } else {
    P = 1 + (dd->P - 1) / dd->coarsen_z;
  }
  /* Create the coarse DMDA with the same process grid, boundary types, dof,
     and stencil as the fine one */
  ierr = DMDACreate(PetscObjectComm((PetscObject)da),&da2);CHKERRQ(ierr);
  ierr = DMSetOptionsPrefix(da2,((PetscObject)da)->prefix);CHKERRQ(ierr);
  ierr = DMSetDimension(da2,dim);CHKERRQ(ierr);
  ierr = DMDASetSizes(da2,M,N,P);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(da2,dd->m,dd->n,dd->p);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz);CHKERRQ(ierr);
  ierr = DMDASetDof(da2,dd->w);CHKERRQ(ierr);
  ierr = DMDASetStencilType(da2,dd->stencil_type);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(da2,dd->s);CHKERRQ(ierr);
  /* Derive per-process ownership ranges on the coarse grid that stay
     compatible (within a stencil width) with the fine decomposition */
  if (dim == 3) {
    PetscInt *lx,*ly,*lz;
    ierr = PetscMalloc3(dd->m,&lx,dd->n,&ly,dd->p,&lz);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_z,dd->p,dd->lz,lz);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,lz);CHKERRQ(ierr);
    ierr = PetscFree3(lx,ly,lz);CHKERRQ(ierr);
  } else if (dim == 2) {
    PetscInt *lx,*ly;
    ierr = PetscMalloc2(dd->m,&lx,dd->n,&ly);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,NULL);CHKERRQ(ierr);
    ierr = PetscFree2(lx,ly);CHKERRQ(ierr);
  } else if (dim == 1) {
    PetscInt *lx;
    ierr = PetscMalloc1(dd->m,&lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,NULL,NULL);CHKERRQ(ierr);
    ierr = PetscFree(lx);CHKERRQ(ierr);
  }
  dd2 = (DM_DA*)da2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones; why are only some inherited and not all? */
  /* da2->ops->createinterpolation = da->ops->createinterpolation; copying this one causes trouble for DMSetVI */
  da2->ops->creatematrix = da->ops->creatematrix;
  da2->ops->getcoloring  = da->ops->getcoloring;
  dd2->interptype        = dd->interptype;

  /* copy fill information if given (dfill/ofill arrays of length fill[w]+w+1) */
  if (dd->dfill) {
    ierr = PetscMalloc1(dd->dfill[dd->w]+dd->w+1,&dd2->dfill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->dfill,dd->dfill,(dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->ofill) {
    ierr = PetscMalloc1(dd->ofill[dd->w]+dd->w+1,&dd2->ofill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->ofill,dd->ofill,(dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  /* copy the refine information */
  dd2->coarsen_x = dd2->refine_x = dd->coarsen_x;
  dd2->coarsen_y = dd2->refine_y = dd->coarsen_y;
  dd2->coarsen_z = dd2->refine_z = dd->coarsen_z;

  /* If per-level refinement factors (refine_*_hier) were provided, look up the
     factor that refines the clone back to this level (levelup-leveldown-1) and
     the factor for coarsening it further (levelup-leveldown-2); otherwise the
     copies above remain in effect. The hierarchy array is deep-copied. */
  if (dd->refine_z_hier) {
    if (da->levelup - da->leveldown -1 > -1 && da->levelup - da->leveldown - 1< dd->refine_z_hier_n) {
      dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown - 1];
    }
    if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_z_hier_n) {
      dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown - 2];
    }
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    ierr = PetscMalloc1(dd2->refine_z_hier_n,&dd2->refine_z_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_z_hier,dd->refine_z_hier,dd2->refine_z_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_y_hier) {
    if (da->levelup - da->leveldown - 1 > -1 && da->levelup - da->leveldown - 1< dd->refine_y_hier_n) {
      dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown - 1];
    }
    if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_y_hier_n) {
      dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown - 2];
    }
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    ierr = PetscMalloc1(dd2->refine_y_hier_n,&dd2->refine_y_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_y_hier,dd->refine_y_hier,dd2->refine_y_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_x_hier) {
    if (da->levelup - da->leveldown - 1 > -1 && da->levelup - da->leveldown - 1 < dd->refine_x_hier_n) {
      dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown - 1];
    }
    if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_x_hier_n) {
      dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown - 2];
    }
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    ierr = PetscMalloc1(dd2->refine_x_hier_n,&dd2->refine_x_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_x_hier,dd->refine_x_hier,dd2->refine_x_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }

  /* copy vector type information */
  ierr = DMSetVecType(da2,da->vectype);CHKERRQ(ierr);

  /* NOTE(review): shallow copies — lf/lj appear to be callback pointers shared
     with the fine DM, not owned arrays; confirm against dmdaimpl.h */
  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  /* the clone sits one coarsening level below da */
  da2->leveldown = da->leveldown + 1;
  da2->levelup   = da->levelup;

  ierr = DMSetUp(da2);CHKERRQ(ierr);

  /* inject coordinates if they are set on the fine grid */
  if (da->coordinates) {
    DM         cdaf,cdac;
    Vec        coordsc,coordsf;
    Mat        inject;
    VecScatter vscat;

    ierr = DMGetCoordinateDM(da,&cdaf);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da,&coordsf);CHKERRQ(ierr);
    ierr = DMGetCoordinateDM(da2,&cdac);CHKERRQ(ierr);
    /* force creation of the coordinate vector */
    ierr = DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da2,&coordsc);CHKERRQ(ierr);

    ierr = DMCreateInjection(cdac,cdaf,&inject);CHKERRQ(ierr);
    ierr = MatScatterGetVecScatter(inject,&vscat);CHKERRQ(ierr);
    ierr = VecScatterBegin(vscat,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(vscat,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = MatDestroy(&inject);CHKERRQ(ierr);
  }

  /* carry the per-field names over to the coarsened DM */
  for (i=0; i<da->bs; i++) {
    const char *fieldname;
    ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr);
    ierr = DMDASetFieldName(da2,i,fieldname);CHKERRQ(ierr);
  }

  *daref = da2;
  PetscFunctionReturn(0);
}
1244 
1245 PetscErrorCode  DMRefineHierarchy_DA(DM da,PetscInt nlevels,DM daf[])
1246 {
1247   PetscErrorCode ierr;
1248   PetscInt       i,n,*refx,*refy,*refz;
1249 
1250   PetscFunctionBegin;
1251   PetscValidHeaderSpecific(da,DM_CLASSID,1);
1252   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
1253   if (nlevels == 0) PetscFunctionReturn(0);
1254   PetscValidPointer(daf,3);
1255 
1256   /* Get refinement factors, defaults taken from the coarse DMDA */
1257   ierr = PetscMalloc3(nlevels,&refx,nlevels,&refy,nlevels,&refz);CHKERRQ(ierr);
1258   for (i=0; i<nlevels; i++) {
1259     ierr = DMDAGetRefinementFactor(da,&refx[i],&refy[i],&refz[i]);CHKERRQ(ierr);
1260   }
1261   n    = nlevels;
1262   ierr = PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_x",refx,&n,NULL);CHKERRQ(ierr);
1263   n    = nlevels;
1264   ierr = PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_y",refy,&n,NULL);CHKERRQ(ierr);
1265   n    = nlevels;
1266   ierr = PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_z",refz,&n,NULL);CHKERRQ(ierr);
1267 
1268   ierr = DMDASetRefinementFactor(da,refx[0],refy[0],refz[0]);CHKERRQ(ierr);
1269   ierr = DMRefine(da,PetscObjectComm((PetscObject)da),&daf[0]);CHKERRQ(ierr);
1270   for (i=1; i<nlevels; i++) {
1271     ierr = DMDASetRefinementFactor(daf[i-1],refx[i],refy[i],refz[i]);CHKERRQ(ierr);
1272     ierr = DMRefine(daf[i-1],PetscObjectComm((PetscObject)da),&daf[i]);CHKERRQ(ierr);
1273   }
1274   ierr = PetscFree3(refx,refy,refz);CHKERRQ(ierr);
1275   PetscFunctionReturn(0);
1276 }
1277 
1278 PetscErrorCode  DMCoarsenHierarchy_DA(DM da,PetscInt nlevels,DM dac[])
1279 {
1280   PetscErrorCode ierr;
1281   PetscInt       i;
1282 
1283   PetscFunctionBegin;
1284   PetscValidHeaderSpecific(da,DM_CLASSID,1);
1285   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
1286   if (nlevels == 0) PetscFunctionReturn(0);
1287   PetscValidPointer(dac,3);
1288   ierr = DMCoarsen(da,PetscObjectComm((PetscObject)da),&dac[0]);CHKERRQ(ierr);
1289   for (i=1; i<nlevels; i++) {
1290     ierr = DMCoarsen(dac[i-1],PetscObjectComm((PetscObject)da),&dac[i]);CHKERRQ(ierr);
1291   }
1292   PetscFunctionReturn(0);
1293 }
1294 
1295 #include <petscgll.h>
1296 
1297 PetscErrorCode DMDASetGLLCoordinates_1d(DM dm,PetscGLL *gll)
1298 {
1299   PetscErrorCode ierr;
1300   PetscInt       i,j,n = gll->n,xs,xn,q;
1301   PetscScalar    *xx;
1302   PetscReal      h;
1303   Vec            x;
1304   DM_DA          *da = (DM_DA*)dm->data;
1305 
1306   PetscFunctionBegin;
1307   if (da->bx != DM_BOUNDARY_PERIODIC) {
1308     ierr = DMDAGetInfo(dm,NULL,&q,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);CHKERRQ(ierr);
1309     q    = (q-1)/(n-1);  /* number of spectral elements */
1310     h    = 2.0/q;
1311     ierr = DMDAGetCorners(dm,&xs,NULL,NULL,&xn,NULL,NULL);CHKERRQ(ierr);
1312     xs   = xs/(n-1);
1313     xn   = xn/(n-1);
1314     ierr = DMDASetUniformCoordinates(dm,-1.,1.,0.,0.,0.,0.);CHKERRQ(ierr);
1315     ierr = DMGetCoordinates(dm,&x);CHKERRQ(ierr);
1316     ierr = DMDAVecGetArray(dm,x,&xx);CHKERRQ(ierr);
1317 
1318     /* loop over local spectral elements */
1319     for (j=xs; j<xs+xn; j++) {
1320       /*
1321        Except for the first process, each process starts on the second GLL point of the first element on that process
1322        */
1323       for (i= (j == xs && xs > 0)? 1 : 0; i<n; i++) {
1324         xx[j*(n-1) + i] = -1.0 + h*j + h*(gll->nodes[i]+1.0)/2.;
1325       }
1326     }
1327     ierr = DMDAVecRestoreArray(dm,x,&xx);CHKERRQ(ierr);
1328   } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented for periodic");
1329   PetscFunctionReturn(0);
1330 }
1331 
1332 /*@
1333 
1334      DMDASetGLLCoordinates - Sets the global coordinates from -1 to 1 to the GLL points of as many GLL elements that fit the number of grid points
1335 
1336    Collective on DM
1337 
1338    Input Parameters:
1339 +   da - the DMDA object
1340 -   gll - the GLL object
1341 
1342    Notes: the parallel decomposition of grid points must correspond to the degree of the GLL. That is, the number of grid points
1343           on each process must be divisible by the number of GLL elements needed per process. This depends on whether the DM is
1344           periodic or not.
1345 
1346    Level: advanced
1347 
1348 .seealso:   DMDACreate(), PetscGLLCreate(), DMGetCoordinates()
1349 @*/
1350 PetscErrorCode DMDASetGLLCoordinates(DM da,PetscGLL *gll)
1351 {
1352   PetscErrorCode ierr;
1353 
1354   PetscFunctionBegin;
1355   if (da->dim == 1) {
1356     ierr = DMDASetGLLCoordinates_1d(da,gll);CHKERRQ(ierr);
1357   } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented for 2 or 3d");
1358   PetscFunctionReturn(0);
1359 }
1360