xref: /petsc/src/dm/impls/da/da.c (revision dc424cfae46ffd055acbb99fbc61fc85fc92f9ad)
1 #include <petsc/private/dmdaimpl.h>    /*I   "petscdmda.h"   I*/
2 
/*@
  DMDASetSizes - Sets the global sizes

  Logically Collective on DMDA

  Input Parameters:
+ da - the DMDA
. M - the global X size (or PETSC_DECIDE)
. N - the global Y size (or PETSC_DECIDE)
- P - the global Z size (or PETSC_DECIDE)

  Level: intermediate

  Notes: Must be called before DMSetUp(); calling it afterwards generates an error.

.seealso: DMDAGetSize(), PetscSplitOwnership()
@*/
PetscErrorCode  DMDASetSizes(DM da, PetscInt M, PetscInt N, PetscInt P)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(da,M,2);
  PetscValidLogicalCollectiveInt(da,N,3);
  PetscValidLogicalCollectiveInt(da,P,4);
  if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");

  dd->M = M;
  dd->N = N;
  dd->P = P;
  PetscFunctionReturn(0);
}
34 
35 /*@
36   DMDASetNumProcs - Sets the number of processes in each dimension
37 
38   Logically Collective on DMDA
39 
40   Input Parameters:
41 + da - the DMDA
42 . m - the number of X procs (or PETSC_DECIDE)
43 . n - the number of Y procs (or PETSC_DECIDE)
44 - p - the number of Z procs (or PETSC_DECIDE)
45 
46   Level: intermediate
47 
48 .seealso: DMDASetSizes(), DMDAGetSize(), PetscSplitOwnership()
49 @*/
50 PetscErrorCode  DMDASetNumProcs(DM da, PetscInt m, PetscInt n, PetscInt p)
51 {
52   DM_DA          *dd = (DM_DA*)da->data;
53   PetscErrorCode ierr;
54 
55   PetscFunctionBegin;
56   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
57   PetscValidLogicalCollectiveInt(da,m,2);
58   PetscValidLogicalCollectiveInt(da,n,3);
59   PetscValidLogicalCollectiveInt(da,p,4);
60   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
61   dd->m = m;
62   dd->n = n;
63   dd->p = p;
64   if (da->dim == 2) {
65     PetscMPIInt size;
66     ierr = MPI_Comm_size(PetscObjectComm((PetscObject)da),&size);CHKERRQ(ierr);
67     if ((dd->m > 0) && (dd->n < 0)) {
68       dd->n = size/dd->m;
69       if (dd->n*dd->m != size) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%D processes in X direction not divisible into comm size %d",m,size);
70     }
71     if ((dd->n > 0) && (dd->m < 0)) {
72       dd->m = size/dd->n;
73       if (dd->n*dd->m != size) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%D processes in Y direction not divisible into comm size %d",n,size);
74     }
75   }
76   PetscFunctionReturn(0);
77 }
78 
/*@
  DMDASetBoundaryType - Sets the type of ghost nodes on domain boundaries.

  Logically Collective on DMDA

  Input Parameters:
+ da - The DMDA
. bx - boundary type in the X direction; one of DM_BOUNDARY_NONE, DM_BOUNDARY_GHOSTED, DM_BOUNDARY_PERIODIC
. by - boundary type in the Y direction
- bz - boundary type in the Z direction

  Level: intermediate

  Notes: Must be called before DMSetUp().

.keywords:  distributed array, periodicity
.seealso: DMDACreate(), DMDestroy(), DMDA, DMBoundaryType
@*/
PetscErrorCode  DMDASetBoundaryType(DM da,DMBoundaryType bx,DMBoundaryType by,DMBoundaryType bz)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveEnum(da,bx,2);
  PetscValidLogicalCollectiveEnum(da,by,3);
  PetscValidLogicalCollectiveEnum(da,bz,4);
  if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
  dd->bx = bx;
  dd->by = by;
  dd->bz = bz;
  PetscFunctionReturn(0);
}
108 
109 /*@
110   DMDASetDof - Sets the number of degrees of freedom per vertex
111 
112   Not collective
113 
114   Input Parameters:
115 + da  - The DMDA
116 - dof - Number of degrees of freedom
117 
118   Level: intermediate
119 
120 .keywords:  distributed array, degrees of freedom
121 .seealso: DMDAGetDof(), DMDACreate(), DMDestroy(), DMDA
122 @*/
123 PetscErrorCode  DMDASetDof(DM da, PetscInt dof)
124 {
125   DM_DA *dd = (DM_DA*)da->data;
126 
127   PetscFunctionBegin;
128   PetscValidHeaderSpecific(da,DM_CLASSID,1);
129   PetscValidLogicalCollectiveInt(da,dof,2);
130   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
131   dd->w  = dof;
132   da->bs = dof;
133   PetscFunctionReturn(0);
134 }
135 
136 /*@
137   DMDAGetDof - Gets the number of degrees of freedom per vertex
138 
139   Not collective
140 
141   Input Parameter:
142 . da  - The DMDA
143 
144   Output Parameter:
145 . dof - Number of degrees of freedom
146 
147   Level: intermediate
148 
149 .keywords:  distributed array, degrees of freedom
150 .seealso: DMDASetDof(), DMDACreate(), DMDestroy(), DMDA
151 @*/
152 PetscErrorCode DMDAGetDof(DM da, PetscInt *dof)
153 {
154   DM_DA *dd = (DM_DA *) da->data;
155 
156   PetscFunctionBegin;
157   PetscValidHeaderSpecific(da,DM_CLASSID,1);
158   PetscValidPointer(dof,2);
159   *dof = dd->w;
160   PetscFunctionReturn(0);
161 }
162 
163 /*@
164   DMDAGetOverlap - Gets the size of the per-processor overlap.
165 
166   Not collective
167 
168   Input Parameters:
169 . da  - The DMDA
170 
171   Output Parameters:
172 + x   - Overlap in the x direction
173 . y   - Overlap in the y direction
174 - z   - Overlap in the z direction
175 
176   Level: intermediate
177 
178 .keywords:  distributed array, overlap, domain decomposition
179 .seealso: DMDACreateDomainDecomposition(), DMDASetOverlap(), DMDA
180 @*/
181 PetscErrorCode  DMDAGetOverlap(DM da,PetscInt *x,PetscInt *y,PetscInt *z)
182 {
183   DM_DA *dd = (DM_DA*)da->data;
184 
185   PetscFunctionBegin;
186   PetscValidHeaderSpecific(da,DM_CLASSID,1);
187   if (x) *x = dd->xol;
188   if (y) *y = dd->yol;
189   if (z) *z = dd->zol;
190   PetscFunctionReturn(0);
191 }
192 
/*@
  DMDASetOverlap - Sets the size of the per-processor overlap.

  Logically Collective on DMDA

  Input Parameters:
+ da  - The DMDA
. x   - Overlap in the x direction
. y   - Overlap in the y direction
- z   - Overlap in the z direction

  Level: intermediate

.keywords:  distributed array, overlap, domain decomposition
.seealso: DMDACreateDomainDecomposition(), DMDAGetOverlap(), DMDA
@*/
PetscErrorCode  DMDASetOverlap(DM da,PetscInt x,PetscInt y,PetscInt z)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveInt(da,x,2);
  PetscValidLogicalCollectiveInt(da,y,3);
  PetscValidLogicalCollectiveInt(da,z,4);
  dd->xol = x;
  dd->yol = y;
  dd->zol = z;
  PetscFunctionReturn(0);
}
223 
224 
/*@
  DMDAGetNumLocalSubDomains - Gets the number of local subdomains created upon decomposition.

  Not collective

  Input Parameter:
. da  - The DMDA

  Output Parameter:
. Nsub - Number of local subdomains created upon decomposition

  Level: intermediate

.keywords:  distributed array, domain decomposition
.seealso: DMDACreateDomainDecomposition(), DMDASetNumLocalSubDomains(), DMDA
@*/
PetscErrorCode  DMDAGetNumLocalSubDomains(DM da,PetscInt *Nsub)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  if (Nsub) *Nsub = dd->Nsub;
  PetscFunctionReturn(0);
}
250 
/*@
  DMDASetNumLocalSubDomains - Sets the number of local subdomains created upon decomposition.

  Logically Collective on DMDA

  Input Parameters:
+ da  - The DMDA
- Nsub - The number of local subdomains requested

  Level: intermediate

.keywords:  distributed array, domain decomposition
.seealso: DMDACreateDomainDecomposition(), DMDAGetNumLocalSubDomains(), DMDA
@*/
PetscErrorCode  DMDASetNumLocalSubDomains(DM da,PetscInt Nsub)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveInt(da,Nsub,2);
  dd->Nsub = Nsub;
  PetscFunctionReturn(0);
}
275 
/*@
  DMDASetOffset - Sets the index offset of the DA.

  Collective on DA

  Input Parameters:
+ da  - The DMDA
. xo  - The offset in the x direction
. yo  - The offset in the y direction
. zo  - The offset in the z direction
. Mo  - The global size in the x direction
. No  - The global size in the y direction
- Po  - The global size in the z direction

  Level: intermediate

  Notes: This is used primarily to overlap a computation on a local DA with that on a global DA without
  changing boundary conditions or subdomain features that depend upon the global offsets.

.keywords:  distributed array, degrees of freedom
.seealso: DMDAGetOffset(), DMDAVecGetArray()
@*/
PetscErrorCode  DMDASetOffset(DM da, PetscInt xo, PetscInt yo, PetscInt zo, PetscInt Mo, PetscInt No, PetscInt Po)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveInt(da,xo,2);
  PetscValidLogicalCollectiveInt(da,yo,3);
  PetscValidLogicalCollectiveInt(da,zo,4);
  PetscValidLogicalCollectiveInt(da,Mo,5);
  PetscValidLogicalCollectiveInt(da,No,6);
  PetscValidLogicalCollectiveInt(da,Po,7);
  dd->xo = xo;
  dd->yo = yo;
  dd->zo = zo;
  dd->Mo = Mo;
  dd->No = No;
  dd->Po = Po;

  /* propagate the same offset to the coordinate DM, if one exists */
  if (da->coordinateDM) {
    ierr = DMDASetOffset(da->coordinateDM,xo,yo,zo,Mo,No,Po);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
320 
321 /*@
322   DMDAGetOffset - Gets the index offset of the DA.
323 
324   Not collective
325 
326   Input Parameter:
327 . da  - The DMDA
328 
329   Output Parameters:
330 + xo  - The offset in the x direction
331 . yo  - The offset in the y direction
332 . zo  - The offset in the z direction
333 . Mo  - The global size in the x direction
334 . No  - The global size in the y direction
335 - Po  - The global size in the z direction
336 
337   Level: intermediate
338 
339 .keywords:  distributed array, degrees of freedom
340 .seealso: DMDASetOffset(), DMDAVecGetArray()
341 @*/
342 PetscErrorCode  DMDAGetOffset(DM da,PetscInt *xo,PetscInt *yo,PetscInt *zo,PetscInt *Mo,PetscInt *No,PetscInt *Po)
343 {
344   DM_DA *dd = (DM_DA*)da->data;
345 
346   PetscFunctionBegin;
347   PetscValidHeaderSpecific(da,DM_CLASSID,1);
348   if (xo) *xo = dd->xo;
349   if (yo) *yo = dd->yo;
350   if (zo) *zo = dd->zo;
351   if (Mo) *Mo = dd->Mo;
352   if (No) *No = dd->No;
353   if (Po) *Po = dd->Po;
354   PetscFunctionReturn(0);
355 }
356 
/*@
  DMDAGetNonOverlappingRegion - Gets the indices of the nonoverlapping region of a subdomain DM.

  Not collective

  Input Parameter:
. da  - The DMDA

  Output Parameters:
+ xs  - The start of the region in x
. ys  - The start of the region in y
. zs  - The start of the region in z
. xm  - The size of the region in x
. ym  - The size of the region in y
- zm  - The size of the region in z

  Level: intermediate

.keywords:  distributed array, degrees of freedom
.seealso: DMDAGetOffset(), DMDAVecGetArray()
@*/
PetscErrorCode  DMDAGetNonOverlappingRegion(DM da, PetscInt *xs, PetscInt *ys, PetscInt *zs, PetscInt *xm, PetscInt *ym, PetscInt *zm)
{
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  if (xs) *xs = dd->nonxs;
  if (ys) *ys = dd->nonys;
  if (zs) *zs = dd->nonzs;
  if (xm) *xm = dd->nonxm;
  if (ym) *ym = dd->nonym;
  if (zm) *zm = dd->nonzm;
  PetscFunctionReturn(0);
}
392 
393 
/*@
  DMDASetNonOverlappingRegion - Sets the indices of the nonoverlapping region of a subdomain DM.

  Logically Collective on DMDA

  Input Parameters:
+ da  - The DMDA
. xs  - The start of the region in x
. ys  - The start of the region in y
. zs  - The start of the region in z
. xm  - The size of the region in x
. ym  - The size of the region in y
- zm  - The size of the region in z

  Level: intermediate

.keywords:  distributed array, degrees of freedom
.seealso: DMDAGetOffset(), DMDAVecGetArray()
@*/
PetscErrorCode  DMDASetNonOverlappingRegion(DM da, PetscInt xs, PetscInt ys, PetscInt zs, PetscInt xm, PetscInt ym, PetscInt zm)
{
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveInt(da,xs,2);
  PetscValidLogicalCollectiveInt(da,ys,3);
  PetscValidLogicalCollectiveInt(da,zs,4);
  PetscValidLogicalCollectiveInt(da,xm,5);
  PetscValidLogicalCollectiveInt(da,ym,6);
  PetscValidLogicalCollectiveInt(da,zm,7);
  dd->nonxs = xs;
  dd->nonys = ys;
  dd->nonzs = zs;
  dd->nonxm = xm;
  dd->nonym = ym;
  dd->nonzm = zm;

  PetscFunctionReturn(0);
}
434 
/*@
  DMDASetStencilType - Sets the type of the communication stencil

  Logically Collective on DMDA

  Input Parameters:
+ da    - The DMDA
- stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR.

  Level: intermediate

  Notes: Must be called before DMSetUp().

.keywords:  distributed array, stencil
.seealso: DMDACreate(), DMDestroy(), DMDA
@*/
PetscErrorCode  DMDASetStencilType(DM da, DMDAStencilType stype)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveEnum(da,stype,2);
  if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
  dd->stencil_type = stype;
  PetscFunctionReturn(0);
}
460 
461 /*@
462   DMDAGetStencilType - Gets the type of the communication stencil
463 
464   Not collective
465 
466   Input Parameter:
467 . da    - The DMDA
468 
469   Output Parameter:
470 . stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR.
471 
472   Level: intermediate
473 
474 .keywords:  distributed array, stencil
475 .seealso: DMDACreate(), DMDestroy(), DMDA
476 @*/
477 PetscErrorCode DMDAGetStencilType(DM da, DMDAStencilType *stype)
478 {
479   DM_DA *dd = (DM_DA*)da->data;
480 
481   PetscFunctionBegin;
482   PetscValidHeaderSpecific(da,DM_CLASSID,1);
483   PetscValidPointer(stype,2);
484   *stype = dd->stencil_type;
485   PetscFunctionReturn(0);
486 }
487 
488 /*@
489   DMDASetStencilWidth - Sets the width of the communication stencil
490 
491   Logically Collective on DMDA
492 
493   Input Parameter:
494 + da    - The DMDA
495 - width - The stencil width
496 
497   Level: intermediate
498 
499 .keywords:  distributed array, stencil
500 .seealso: DMDACreate(), DMDestroy(), DMDA
501 @*/
502 PetscErrorCode  DMDASetStencilWidth(DM da, PetscInt width)
503 {
504   DM_DA *dd = (DM_DA*)da->data;
505 
506   PetscFunctionBegin;
507   PetscValidHeaderSpecific(da,DM_CLASSID,1);
508   PetscValidLogicalCollectiveInt(da,width,2);
509   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
510   dd->s = width;
511   PetscFunctionReturn(0);
512 }
513 
514 /*@
515   DMDAGetStencilWidth - Gets the width of the communication stencil
516 
517   Not collective
518 
519   Input Parameter:
520 . da    - The DMDA
521 
522   Output Parameter:
523 . width - The stencil width
524 
525   Level: intermediate
526 
527 .keywords:  distributed array, stencil
528 .seealso: DMDACreate(), DMDestroy(), DMDA
529 @*/
530 PetscErrorCode DMDAGetStencilWidth(DM da, PetscInt *width)
531 {
532   DM_DA *dd = (DM_DA *) da->data;
533 
534   PetscFunctionBegin;
535   PetscValidHeaderSpecific(da,DM_CLASSID,1);
536   PetscValidPointer(width,2);
537   *width = dd->s;
538   PetscFunctionReturn(0);
539 }
540 
/* Verify that the per-process ownership counts lx[] (length m) sum to the global size M */
static PetscErrorCode DMDACheckOwnershipRanges_Private(DM da,PetscInt M,PetscInt m,const PetscInt lx[])
{
  PetscInt i,sum;

  PetscFunctionBegin;
  /* M < 0 means the global dimension has not been set yet (e.g. still PETSC_DECIDE) */
  if (M < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Global dimension not set");
  for (i=sum=0; i<m; i++) sum += lx[i];
  if (sum != M) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_INCOMP,"Ownership ranges sum to %D but global dimension is %D",sum,M);
  PetscFunctionReturn(0);
}
551 
/*@
  DMDASetOwnershipRanges - Sets the number of nodes in each direction on each process

  Logically Collective on DMDA

  Input Parameters:
+ da - The DMDA
. lx - array containing number of nodes in the X direction on each process, or NULL. If non-null, must be of length da->m
. ly - array containing number of nodes in the Y direction on each process, or NULL. If non-null, must be of length da->n
- lz - array containing number of nodes in the Z direction on each process, or NULL. If non-null, must be of length da->p.

  Level: intermediate

  Note: these numbers are NOT multiplied by the number of dof per node.

.keywords:  distributed array
.seealso: DMDACreate(), DMDestroy(), DMDA
@*/
PetscErrorCode  DMDASetOwnershipRanges(DM da, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[])
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
  /* X direction: validate, allocate on first use, then copy */
  if (lx) {
    if (dd->m < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
    ierr = DMDACheckOwnershipRanges_Private(da,dd->M,dd->m,lx);CHKERRQ(ierr);
    if (!dd->lx) {
      ierr = PetscMalloc1(dd->m, &dd->lx);CHKERRQ(ierr);
    }
    ierr = PetscMemcpy(dd->lx, lx, dd->m*sizeof(PetscInt));CHKERRQ(ierr);
  }
  /* Y direction */
  if (ly) {
    if (dd->n < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
    ierr = DMDACheckOwnershipRanges_Private(da,dd->N,dd->n,ly);CHKERRQ(ierr);
    if (!dd->ly) {
      ierr = PetscMalloc1(dd->n, &dd->ly);CHKERRQ(ierr);
    }
    ierr = PetscMemcpy(dd->ly, ly, dd->n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  /* Z direction */
  if (lz) {
    if (dd->p < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
    ierr = DMDACheckOwnershipRanges_Private(da,dd->P,dd->p,lz);CHKERRQ(ierr);
    if (!dd->lz) {
      ierr = PetscMalloc1(dd->p, &dd->lz);CHKERRQ(ierr);
    }
    ierr = PetscMemcpy(dd->lz, lz, dd->p*sizeof(PetscInt));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
604 
/*@
       DMDASetInterpolationType - Sets the type of interpolation that will be
          returned by DMCreateInterpolation()

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  ctype - DMDA_Q1 and DMDA_Q0 are currently the only supported forms

   Level: intermediate

   Notes: you should call this on the coarser of the two DMDAs you pass to DMCreateInterpolation()

.keywords:  distributed array, interpolation

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType
@*/
PetscErrorCode  DMDASetInterpolationType(DM da,DMDAInterpolationType ctype)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveEnum(da,ctype,2);
  dd->interptype = ctype;
  PetscFunctionReturn(0);
}
633 
634 /*@
635        DMDAGetInterpolationType - Gets the type of interpolation that will be
636           used by DMCreateInterpolation()
637 
638    Not Collective
639 
640    Input Parameter:
641 .  da - distributed array
642 
643    Output Parameter:
644 .  ctype - interpolation type (DMDA_Q1 and DMDA_Q0 are currently the only supported forms)
645 
646    Level: intermediate
647 
648 .keywords:  distributed array, interpolation
649 
650 .seealso: DMDA, DMDAInterpolationType, DMDASetInterpolationType(), DMCreateInterpolation()
651 @*/
652 PetscErrorCode  DMDAGetInterpolationType(DM da,DMDAInterpolationType *ctype)
653 {
654   DM_DA *dd = (DM_DA*)da->data;
655 
656   PetscFunctionBegin;
657   PetscValidHeaderSpecific(da,DM_CLASSID,1);
658   PetscValidPointer(ctype,2);
659   *ctype = dd->interptype;
660   PetscFunctionReturn(0);
661 }
662 
663 /*@C
664       DMDAGetNeighbors - Gets an array containing the MPI rank of all the current
665         processes neighbors.
666 
667     Not Collective
668 
669    Input Parameter:
670 .     da - the DMDA object
671 
672    Output Parameters:
673 .     ranks - the neighbors ranks, stored with the x index increasing most rapidly.
674               this process itself is in the list
675 
676    Notes: In 2d the array is of length 9, in 3d of length 27
677           Not supported in 1d
678           Do not free the array, it is freed when the DMDA is destroyed.
679 
680    Fortran Notes: In fortran you must pass in an array of the appropriate length.
681 
682    Level: intermediate
683 
684 @*/
685 PetscErrorCode  DMDAGetNeighbors(DM da,const PetscMPIInt *ranks[])
686 {
687   DM_DA *dd = (DM_DA*)da->data;
688 
689   PetscFunctionBegin;
690   PetscValidHeaderSpecific(da,DM_CLASSID,1);
691   *ranks = dd->neighbors;
692   PetscFunctionReturn(0);
693 }
694 
695 /*@C
696       DMDAGetOwnershipRanges - Gets the ranges of indices in the x, y and z direction that are owned by each process
697 
698     Not Collective
699 
700    Input Parameter:
701 .     da - the DMDA object
702 
703    Output Parameter:
704 +     lx - ownership along x direction (optional)
705 .     ly - ownership along y direction (optional)
706 -     lz - ownership along z direction (optional)
707 
708    Level: intermediate
709 
710     Note: these correspond to the optional final arguments passed to DMDACreate(), DMDACreate2d(), DMDACreate3d()
711 
712     In Fortran one must pass in arrays lx, ly, and lz that are long enough to hold the values; the sixth, seventh and
713     eighth arguments from DMDAGetInfo()
714 
715      In C you should not free these arrays, nor change the values in them. They will only have valid values while the
716     DMDA they came from still exists (has not been destroyed).
717 
718     These numbers are NOT multiplied by the number of dof per node.
719 
720 .seealso: DMDAGetCorners(), DMDAGetGhostCorners(), DMDACreate(), DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), VecGetOwnershipRanges()
721 @*/
722 PetscErrorCode  DMDAGetOwnershipRanges(DM da,const PetscInt *lx[],const PetscInt *ly[],const PetscInt *lz[])
723 {
724   DM_DA *dd = (DM_DA*)da->data;
725 
726   PetscFunctionBegin;
727   PetscValidHeaderSpecific(da,DM_CLASSID,1);
728   if (lx) *lx = dd->lx;
729   if (ly) *ly = dd->ly;
730   if (lz) *lz = dd->lz;
731   PetscFunctionReturn(0);
732 }
733 
/*@
     DMDASetRefinementFactor - Set the ratios that the DMDA grid is refined

    Logically Collective on DMDA

  Input Parameters:
+    da - the DMDA object
.    refine_x - ratio of fine grid to coarse in x direction (2 by default)
.    refine_y - ratio of fine grid to coarse in y direction (2 by default)
-    refine_z - ratio of fine grid to coarse in z direction (2 by default)

  Options Database:
+  -da_refine_x - refinement ratio in x direction
.  -da_refine_y - refinement ratio in y direction
-  -da_refine_z - refinement ratio in z direction

  Level: intermediate

    Notes: Pass PETSC_IGNORE to leave a value unchanged; any non-positive value leaves
    the corresponding ratio unchanged.

.seealso: DMRefine(), DMDAGetRefinementFactor()
@*/
PetscErrorCode  DMDASetRefinementFactor(DM da, PetscInt refine_x, PetscInt refine_y,PetscInt refine_z)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveInt(da,refine_x,2);
  PetscValidLogicalCollectiveInt(da,refine_y,3);
  PetscValidLogicalCollectiveInt(da,refine_z,4);

  if (refine_x > 0) dd->refine_x = refine_x;
  if (refine_y > 0) dd->refine_y = refine_y;
  if (refine_z > 0) dd->refine_z = refine_z;
  PetscFunctionReturn(0);
}
771 
772 /*@C
773      DMDAGetRefinementFactor - Gets the ratios that the DMDA grid is refined
774 
775     Not Collective
776 
777   Input Parameter:
778 .    da - the DMDA object
779 
780   Output Parameters:
781 +    refine_x - ratio of fine grid to coarse in x direction (2 by default)
782 .    refine_y - ratio of fine grid to coarse in y direction (2 by default)
783 -    refine_z - ratio of fine grid to coarse in z direction (2 by default)
784 
785   Level: intermediate
786 
787     Notes: Pass NULL for values you do not need
788 
789 .seealso: DMRefine(), DMDASetRefinementFactor()
790 @*/
791 PetscErrorCode  DMDAGetRefinementFactor(DM da, PetscInt *refine_x, PetscInt *refine_y,PetscInt *refine_z)
792 {
793   DM_DA *dd = (DM_DA*)da->data;
794 
795   PetscFunctionBegin;
796   PetscValidHeaderSpecific(da,DM_CLASSID,1);
797   if (refine_x) *refine_x = dd->refine_x;
798   if (refine_y) *refine_y = dd->refine_y;
799   if (refine_z) *refine_z = dd->refine_z;
800   PetscFunctionReturn(0);
801 }
802 
/*@C
     DMDASetGetMatrix - Sets the routine used by the DMDA to allocate a matrix.

    Logically Collective on DMDA

  Input Parameters:
+    da - the DMDA object
-    f - the function that allocates the matrix for that specific DMDA

  Level: developer

   Notes: See DMDASetBlockFills() that provides a simple way to provide the nonzero structure for
       the diagonal and off-diagonal blocks of the matrix

.seealso: DMCreateMatrix(), DMDASetBlockFills()
@*/
PetscErrorCode  DMDASetGetMatrix(DM da,PetscErrorCode (*f)(DM, Mat*))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* install f as the matrix-creation routine invoked for this DM */
  da->ops->creatematrix = f;
  PetscFunctionReturn(0);
}
826 
/*
  Creates "balanced" ownership ranges after refinement, constrained by the need for the
  fine grid boundaries to fall within one stencil width of the coarse partition.

  Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.

  Input:
    periodic      - whether this dimension is periodic (affects the fine point count)
    stencil_width - coarse stencil width the fine partition must respect
    ratio         - refinement ratio, must be >= 1
    m             - number of processes in this dimension
    lc            - coarse ownership counts, one entry per process
  Output:
    lf            - fine ownership counts, one entry per process
*/
static PetscErrorCode DMDARefineOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lc[],PetscInt lf[])
{
  PetscInt       i,totalc = 0,remaining,startc = 0,startf = 0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (ratio < 1) SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %D must be at least 1",ratio);
  if (ratio == 1) {
    /* no refinement: the fine partition is identical to the coarse one */
    ierr = PetscMemcpy(lf,lc,m*sizeof(lc[0]));CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  for (i=0; i<m; i++) totalc += lc[i];
  /* total fine points: ratio*(totalc-1)+1 when non-periodic, ratio*totalc when periodic */
  remaining = (!periodic) + ratio * (totalc - (!periodic));
  for (i=0; i<m; i++) {
    /* ideal share: ceiling of an even split of what remains */
    PetscInt want = remaining/(m-i) + !!(remaining%(m-i));
    if (i == m-1) lf[i] = want;
    else {
      const PetscInt nextc = startc + lc[i];
      /* Move the first fine node of the next subdomain to the right until the coarse node on its left is within one
       * coarse stencil width of the first coarse node in the next subdomain. */
      while ((startf+want)/ratio < nextc - stencil_width) want++;
      /* Move the last fine node in the current subdomain to the left until the coarse node on its right is within one
       * coarse stencil width of the last coarse node in the current subdomain. */
      while ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width) want--;
      /* Make sure all constraints are satisfied */
      if (want < 0 || want > remaining || ((startf+want)/ratio < nextc - stencil_width)
          || ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible refined ownership range");
    }
    lf[i]      = want;
    startc    += lc[i];
    startf    += lf[i];
    remaining -= lf[i];
  }
  PetscFunctionReturn(0);
}
868 
/*
  Creates "balanced" ownership ranges after coarsening, constrained by the need for the
  fine grid boundaries to fall within one stencil width of the coarse partition.

  Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.

  Input:
    periodic      - whether this dimension is periodic (affects the coarse point count)
    stencil_width - stencil width the coarse partition must respect
    ratio         - coarsening ratio (fine/coarse), must be >= 1
    m             - number of processes in this dimension
    lf            - fine ownership counts, one entry per process
  Output:
    lc            - coarse ownership counts, one entry per process
*/
static PetscErrorCode DMDACoarsenOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lf[],PetscInt lc[])
{
  PetscInt       i,totalf,remaining,startc,startf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (ratio < 1) SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %D must be at least 1",ratio);
  if (ratio == 1) {
    /* no coarsening: the coarse partition is identical to the fine one */
    ierr = PetscMemcpy(lc,lf,m*sizeof(lf[0]));CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  for (i=0,totalf=0; i<m; i++) totalf += lf[i];
  /* total coarse points: (totalf-1)/ratio+1 when non-periodic, totalf/ratio when periodic */
  remaining = (!periodic) + (totalf - (!periodic)) / ratio;
  for (i=0,startc=0,startf=0; i<m; i++) {
    /* ideal share: ceiling of an even split of what remains */
    PetscInt want = remaining/(m-i) + !!(remaining%(m-i));
    if (i == m-1) lc[i] = want;
    else {
      const PetscInt nextf = startf+lf[i];
      /* Slide first coarse node of next subdomain to the left until the coarse node to the left of the first fine
       * node is within one stencil width. */
      while (nextf/ratio < startc+want-stencil_width) want--;
      /* Slide the last coarse node of the current subdomain to the right until the coarse node to the right of the last
       * fine node is within one stencil width. */
      while ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width) want++;
      if (want < 0 || want > remaining
          || (nextf/ratio < startc+want-stencil_width) || ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible coarsened ownership range");
    }
    lc[i]      = want;
    startc    += lc[i];
    startf    += lf[i];
    remaining -= lc[i];
  }
  PetscFunctionReturn(0);
}
909 
/*
   DMRefine_DA - Builds a refined DMDA from da: computes the refined global sizes,
   clones the layout (procs, boundary type, dof, stencil, per-process ownership ranges),
   copies fill and refinement-factor information, interpolates coordinates if present,
   and propagates field names.

   Input Parameters:
+  da   - the DMDA to refine
-  comm - unused; the refined DM is created on da's communicator

   Output Parameter:
.  daref - the refined DMDA
*/
PetscErrorCode  DMRefine_DA(DM da,MPI_Comm comm,DM *daref)
{
  PetscErrorCode ierr;
  PetscInt       M,N,P,i,dim;
  DM             da2;
  DM_DA          *dd = (DM_DA*)da->data,*dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(daref,3);

  ierr = DMGetDimension(da, &dim);CHKERRQ(ierr);
  /* Refined global sizes: periodic boundaries and Q0 (piecewise constant) interpolation
     scale the point count directly (M -> refine_x*M); otherwise the interval count is
     scaled, giving M -> 1 + refine_x*(M-1). Unused dimensions stay at 1. */
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->refine_x*dd->M;
  } else {
    M = 1 + dd->refine_x*(dd->M - 1);
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->refine_y*dd->N;
    } else {
      N = 1;
    }
  } else {
    N = 1 + dd->refine_y*(dd->N - 1);
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->refine_z*dd->P;
    } else {
      P = 1;
    }
  } else {
    P = 1 + dd->refine_z*(dd->P - 1);
  }
  ierr = DMDACreate(PetscObjectComm((PetscObject)da),&da2);CHKERRQ(ierr);
  ierr = DMSetOptionsPrefix(da2,((PetscObject)da)->prefix);CHKERRQ(ierr);
  ierr = DMSetDimension(da2,dim);CHKERRQ(ierr);
  ierr = DMDASetSizes(da2,M,N,P);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(da2,dd->m,dd->n,dd->p);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz);CHKERRQ(ierr);
  ierr = DMDASetDof(da2,dd->w);CHKERRQ(ierr);
  ierr = DMDASetStencilType(da2,dd->stencil_type);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(da2,dd->s);CHKERRQ(ierr);
  /* Derive the refined per-process ownership ranges from the coarse ones so the fine
     grid's parallel decomposition is compatible with da's (required for interpolation). */
  if (dim == 3) {
    PetscInt *lx,*ly,*lz;
    ierr = PetscMalloc3(dd->m,&lx,dd->n,&ly,dd->p,&lz);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_z,dd->p,dd->lz,lz);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,lz);CHKERRQ(ierr);
    ierr = PetscFree3(lx,ly,lz);CHKERRQ(ierr);
  } else if (dim == 2) {
    PetscInt *lx,*ly;
    ierr = PetscMalloc2(dd->m,&lx,dd->n,&ly);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,NULL);CHKERRQ(ierr);
    ierr = PetscFree2(lx,ly);CHKERRQ(ierr);
  } else if (dim == 1) {
    PetscInt *lx;
    ierr = PetscMalloc1(dd->m,&lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,NULL,NULL);CHKERRQ(ierr);
    ierr = PetscFree(lx);CHKERRQ(ierr);
  }
  dd2 = (DM_DA*)da2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones */
  da2->ops->creatematrix = da->ops->creatematrix;
  /* da2->ops->createinterpolation = da->ops->createinterpolation; this causes problem with SNESVI */
  da2->ops->getcoloring = da->ops->getcoloring;
  dd2->interptype       = dd->interptype;

  /* copy fill information if given; dfill/ofill are CSR-like arrays of length w+1+dfill[w] */
  if (dd->dfill) {
    ierr = PetscMalloc1(dd->dfill[dd->w]+dd->w+1,&dd2->dfill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->dfill,dd->dfill,(dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->ofill) {
    ierr = PetscMalloc1(dd->ofill[dd->w]+dd->w+1,&dd2->ofill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->ofill,dd->ofill,(dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  /* copy the refine information */
  dd2->coarsen_x = dd2->refine_x = dd->refine_x;
  dd2->coarsen_y = dd2->refine_y = dd->refine_y;
  dd2->coarsen_z = dd2->refine_z = dd->refine_z;

  /* When per-level refinement hierarchies were given, pick the factors for the new grid's
     level: levelup-leveldown+1 is da2's net level (da2->levelup = da->levelup+1 below),
     so refine_*_hier[level] refines da2 further and refine_*_hier[level-1] coarsens it back. */
  if (dd->refine_z_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_z_hier_n) {
      dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_z_hier_n) {
      dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown];
    }
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    ierr = PetscMalloc1(dd2->refine_z_hier_n,&dd2->refine_z_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_z_hier,dd->refine_z_hier,dd2->refine_z_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_y_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_y_hier_n) {
      dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_y_hier_n) {
      dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown];
    }
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    ierr = PetscMalloc1(dd2->refine_y_hier_n,&dd2->refine_y_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_y_hier,dd->refine_y_hier,dd2->refine_y_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_x_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_x_hier_n) {
      dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_x_hier_n) {
      dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown];
    }
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    ierr = PetscMalloc1(dd2->refine_x_hier_n,&dd2->refine_x_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_x_hier,dd->refine_x_hier,dd2->refine_x_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }


  /* copy vector type information */
  ierr = DMSetVecType(da2,da->vectype);CHKERRQ(ierr);

  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  da2->leveldown = da->leveldown;
  da2->levelup   = da->levelup + 1;

  ierr = DMSetUp(da2);CHKERRQ(ierr);

  /* interpolate coordinates if they are set on the coarse grid */
  if (da->coordinates) {
    DM  cdaf,cdac;
    Vec coordsc,coordsf;
    Mat II;

    ierr = DMGetCoordinateDM(da,&cdac);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da,&coordsc);CHKERRQ(ierr);
    ierr = DMGetCoordinateDM(da2,&cdaf);CHKERRQ(ierr);
    /* force creation of the coordinate vector */
    ierr = DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da2,&coordsf);CHKERRQ(ierr);
    ierr = DMCreateInterpolation(cdac,cdaf,&II,NULL);CHKERRQ(ierr);
    ierr = MatInterpolate(II,coordsc,coordsf);CHKERRQ(ierr);
    ierr = MatDestroy(&II);CHKERRQ(ierr);
  }

  /* propagate per-field names to the refined grid */
  for (i=0; i<da->bs; i++) {
    const char *fieldname;
    ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr);
    ierr = DMDASetFieldName(da2,i,fieldname);CHKERRQ(ierr);
  }

  *daref = da2;
  PetscFunctionReturn(0);
}
1070 
1071 
/*
   DMCoarsen_DA - Builds a coarsened DMDA from da: computes the coarse global sizes,
   clones the layout (procs, boundary type, dof, stencil, per-process ownership ranges),
   copies fill and coarsening-factor information, injects coordinates if present on the
   fine grid, and propagates field names.

   Input Parameters:
+  da   - the DMDA to coarsen
-  comm - unused; the coarse DM is created on da's communicator

   Output Parameter:
.  daref - the coarsened DMDA
*/
PetscErrorCode  DMCoarsen_DA(DM da, MPI_Comm comm,DM *daref)
{
  PetscErrorCode ierr;
  PetscInt       M,N,P,i,dim;
  DM             da2;
  DM_DA          *dd = (DM_DA*)da->data,*dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(daref,3);

  ierr = DMGetDimension(da, &dim);CHKERRQ(ierr);
  /* Coarse global sizes: inverse of the refinement rule -- periodic boundaries and Q0
     interpolation divide the point count (M -> M/coarsen_x); otherwise the interval
     count is divided, giving M -> 1 + (M-1)/coarsen_x. Unused dimensions stay at 1. */
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->M / dd->coarsen_x;
  } else {
    M = 1 + (dd->M - 1) / dd->coarsen_x;
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->N / dd->coarsen_y;
    } else {
      N = 1;
    }
  } else {
    N = 1 + (dd->N - 1) / dd->coarsen_y;
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->P / dd->coarsen_z;
    } else {
      P = 1;
    }
  } else {
    P = 1 + (dd->P - 1) / dd->coarsen_z;
  }
  ierr = DMDACreate(PetscObjectComm((PetscObject)da),&da2);CHKERRQ(ierr);
  ierr = DMSetOptionsPrefix(da2,((PetscObject)da)->prefix);CHKERRQ(ierr);
  ierr = DMSetDimension(da2,dim);CHKERRQ(ierr);
  ierr = DMDASetSizes(da2,M,N,P);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(da2,dd->m,dd->n,dd->p);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz);CHKERRQ(ierr);
  ierr = DMDASetDof(da2,dd->w);CHKERRQ(ierr);
  ierr = DMDASetStencilType(da2,dd->stencil_type);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(da2,dd->s);CHKERRQ(ierr);
  /* Derive the coarse per-process ownership ranges from the fine ones so the coarse
     grid's parallel decomposition is compatible with da's (required for restriction). */
  if (dim == 3) {
    PetscInt *lx,*ly,*lz;
    ierr = PetscMalloc3(dd->m,&lx,dd->n,&ly,dd->p,&lz);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_z,dd->p,dd->lz,lz);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,lz);CHKERRQ(ierr);
    ierr = PetscFree3(lx,ly,lz);CHKERRQ(ierr);
  } else if (dim == 2) {
    PetscInt *lx,*ly;
    ierr = PetscMalloc2(dd->m,&lx,dd->n,&ly);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,NULL);CHKERRQ(ierr);
    ierr = PetscFree2(lx,ly);CHKERRQ(ierr);
  } else if (dim == 1) {
    PetscInt *lx;
    ierr = PetscMalloc1(dd->m,&lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,NULL,NULL);CHKERRQ(ierr);
    ierr = PetscFree(lx);CHKERRQ(ierr);
  }
  dd2 = (DM_DA*)da2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones; why are only some inherited and not all? */
  /* da2->ops->createinterpolation = da->ops->createinterpolation; copying this one causes trouble for DMSetVI */
  da2->ops->creatematrix = da->ops->creatematrix;
  da2->ops->getcoloring  = da->ops->getcoloring;
  dd2->interptype        = dd->interptype;

  /* copy fill information if given; dfill/ofill are CSR-like arrays of length w+1+dfill[w] */
  if (dd->dfill) {
    ierr = PetscMalloc1(dd->dfill[dd->w]+dd->w+1,&dd2->dfill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->dfill,dd->dfill,(dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->ofill) {
    ierr = PetscMalloc1(dd->ofill[dd->w]+dd->w+1,&dd2->ofill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->ofill,dd->ofill,(dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  /* copy the refine information */
  dd2->coarsen_x = dd2->refine_x = dd->coarsen_x;
  dd2->coarsen_y = dd2->refine_y = dd->coarsen_y;
  dd2->coarsen_z = dd2->refine_z = dd->coarsen_z;

  /* When per-level refinement hierarchies were given, pick the factors for the new grid's
     level: da2 sits one level below da (da2->leveldown = da->leveldown+1 below), so
     refine_*_hier[level-1] refines da2 back up and refine_*_hier[level-2] coarsens it further. */
  if (dd->refine_z_hier) {
    if (da->levelup - da->leveldown -1 > -1 && da->levelup - da->leveldown - 1< dd->refine_z_hier_n) {
      dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown - 1];
    }
    if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_z_hier_n) {
      dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown - 2];
    }
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    ierr = PetscMalloc1(dd2->refine_z_hier_n,&dd2->refine_z_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_z_hier,dd->refine_z_hier,dd2->refine_z_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_y_hier) {
    if (da->levelup - da->leveldown - 1 > -1 && da->levelup - da->leveldown - 1< dd->refine_y_hier_n) {
      dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown - 1];
    }
    if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_y_hier_n) {
      dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown - 2];
    }
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    ierr = PetscMalloc1(dd2->refine_y_hier_n,&dd2->refine_y_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_y_hier,dd->refine_y_hier,dd2->refine_y_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_x_hier) {
    if (da->levelup - da->leveldown - 1 > -1 && da->levelup - da->leveldown - 1 < dd->refine_x_hier_n) {
      dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown - 1];
    }
    if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_x_hier_n) {
      dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown - 2];
    }
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    ierr = PetscMalloc1(dd2->refine_x_hier_n,&dd2->refine_x_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_x_hier,dd->refine_x_hier,dd2->refine_x_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }

  /* copy vector type information */
  ierr = DMSetVecType(da2,da->vectype);CHKERRQ(ierr);

  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  da2->leveldown = da->leveldown + 1;
  da2->levelup   = da->levelup;

  ierr = DMSetUp(da2);CHKERRQ(ierr);

  /* inject coordinates if they are set on the fine grid */
  if (da->coordinates) {
    DM         cdaf,cdac;
    Vec        coordsc,coordsf;
    Mat        inject;
    VecScatter vscat;

    ierr = DMGetCoordinateDM(da,&cdaf);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da,&coordsf);CHKERRQ(ierr);
    ierr = DMGetCoordinateDM(da2,&cdac);CHKERRQ(ierr);
    /* force creation of the coordinate vector */
    ierr = DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da2,&coordsc);CHKERRQ(ierr);

    ierr = DMCreateInjection(cdac,cdaf,&inject);CHKERRQ(ierr);
    ierr = MatScatterGetVecScatter(inject,&vscat);CHKERRQ(ierr);
    ierr = VecScatterBegin(vscat,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(vscat,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = MatDestroy(&inject);CHKERRQ(ierr);
  }

  /* propagate per-field names to the coarse grid */
  for (i=0; i<da->bs; i++) {
    const char *fieldname;
    ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr);
    ierr = DMDASetFieldName(da2,i,fieldname);CHKERRQ(ierr);
  }

  *daref = da2;
  PetscFunctionReturn(0);
}
1235 
1236 PetscErrorCode  DMRefineHierarchy_DA(DM da,PetscInt nlevels,DM daf[])
1237 {
1238   PetscErrorCode ierr;
1239   PetscInt       i,n,*refx,*refy,*refz;
1240 
1241   PetscFunctionBegin;
1242   PetscValidHeaderSpecific(da,DM_CLASSID,1);
1243   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
1244   if (nlevels == 0) PetscFunctionReturn(0);
1245   PetscValidPointer(daf,3);
1246 
1247   /* Get refinement factors, defaults taken from the coarse DMDA */
1248   ierr = PetscMalloc3(nlevels,&refx,nlevels,&refy,nlevels,&refz);CHKERRQ(ierr);
1249   for (i=0; i<nlevels; i++) {
1250     ierr = DMDAGetRefinementFactor(da,&refx[i],&refy[i],&refz[i]);CHKERRQ(ierr);
1251   }
1252   n    = nlevels;
1253   ierr = PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_x",refx,&n,NULL);CHKERRQ(ierr);
1254   n    = nlevels;
1255   ierr = PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_y",refy,&n,NULL);CHKERRQ(ierr);
1256   n    = nlevels;
1257   ierr = PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_z",refz,&n,NULL);CHKERRQ(ierr);
1258 
1259   ierr = DMDASetRefinementFactor(da,refx[0],refy[0],refz[0]);CHKERRQ(ierr);
1260   ierr = DMRefine(da,PetscObjectComm((PetscObject)da),&daf[0]);CHKERRQ(ierr);
1261   for (i=1; i<nlevels; i++) {
1262     ierr = DMDASetRefinementFactor(daf[i-1],refx[i],refy[i],refz[i]);CHKERRQ(ierr);
1263     ierr = DMRefine(daf[i-1],PetscObjectComm((PetscObject)da),&daf[i]);CHKERRQ(ierr);
1264   }
1265   ierr = PetscFree3(refx,refy,refz);CHKERRQ(ierr);
1266   PetscFunctionReturn(0);
1267 }
1268 
1269 PetscErrorCode  DMCoarsenHierarchy_DA(DM da,PetscInt nlevels,DM dac[])
1270 {
1271   PetscErrorCode ierr;
1272   PetscInt       i;
1273 
1274   PetscFunctionBegin;
1275   PetscValidHeaderSpecific(da,DM_CLASSID,1);
1276   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
1277   if (nlevels == 0) PetscFunctionReturn(0);
1278   PetscValidPointer(dac,3);
1279   ierr = DMCoarsen(da,PetscObjectComm((PetscObject)da),&dac[0]);CHKERRQ(ierr);
1280   for (i=1; i<nlevels; i++) {
1281     ierr = DMCoarsen(dac[i-1],PetscObjectComm((PetscObject)da),&dac[i]);CHKERRQ(ierr);
1282   }
1283   PetscFunctionReturn(0);
1284 }
1285 
1286 #include <petscgll.h>
1287 
1288 PetscErrorCode DMDASetGLLCoordinates_1d(DM dm,PetscGLL *gll)
1289 {
1290   PetscErrorCode ierr;
1291   PetscInt       i,j,n = gll->n,xs,xn,q;
1292   PetscScalar    *xx;
1293   PetscReal      h;
1294   Vec            x;
1295   DM_DA          *da = (DM_DA*)dm->data;
1296 
1297   PetscFunctionBegin;
1298   if (da->bx != DM_BOUNDARY_PERIODIC) {
1299     ierr = DMDAGetInfo(dm,NULL,&q,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);CHKERRQ(ierr);
1300     q    = (q-1)/(n-1);  /* number of spectral elements */
1301     h    = 2.0/q;
1302     ierr = DMDAGetCorners(dm,&xs,NULL,NULL,&xn,NULL,NULL);CHKERRQ(ierr);
1303     xs   = xs/(n-1);
1304     xn   = xn/(n-1);
1305     ierr = DMDASetUniformCoordinates(dm,-1.,1.,0.,0.,0.,0.);CHKERRQ(ierr);
1306     ierr = DMGetCoordinates(dm,&x);CHKERRQ(ierr);
1307     ierr = DMDAVecGetArray(dm,x,&xx);CHKERRQ(ierr);
1308 
1309     /* loop over local spectral elements */
1310     for (j=xs; j<xs+xn; j++) {
1311       /*
1312        Except for the first process, each process starts on the second GLL point of the first element on that process
1313        */
1314       for (i= (j == xs && xs > 0)? 1 : 0; i<n; i++) {
1315         xx[j*(n-1) + i] = -1.0 + h*j + h*(gll->nodes[i]+1.0)/2.;
1316       }
1317     }
1318     ierr = DMDAVecRestoreArray(dm,x,&xx);CHKERRQ(ierr);
1319   } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented for periodic");
1320   PetscFunctionReturn(0);
1321 }
1322 
1323 /*@C
1324 
1325      DMDASetGLLCoordinates - Sets the global coordinates from -1 to 1 to the GLL points of as many GLL elements that fit the number of grid points
1326 
1327    Collective on DM
1328 
1329    Input Parameters:
1330 +   da - the DMDA object
1331 -   gll - the GLL object
1332 
1333    Notes: the parallel decomposition of grid points must correspond to the degree of the GLL. That is, the number of grid points
          on each process must be divisible by the number of GLL elements needed per process. This depends on whether the DM is
1335           periodic or not.
1336 
1337    Level: advanced
1338 
1339 .seealso:   DMDACreate(), PetscGLLCreate(), DMGetCoordinates()
1340 @*/
1341 PetscErrorCode DMDASetGLLCoordinates(DM da,PetscGLL *gll)
1342 {
1343   PetscErrorCode ierr;
1344 
1345   PetscFunctionBegin;
1346   if (da->dim == 1) {
1347     ierr = DMDASetGLLCoordinates_1d(da,gll);CHKERRQ(ierr);
1348   } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented for 2 or 3d");
1349   PetscFunctionReturn(0);
1350 }
1351