xref: /petsc/src/dm/impls/da/da.c (revision 1d32ca44d8d66c218fceca8bfb9a49d6d6aa72f4)
1 #include <petsc/private/dmdaimpl.h>    /*I   "petscdmda.h"   I*/
2 
3 /*@
4   DMDASetSizes - Sets the global sizes
5 
6   Logically Collective on DMDA
7 
8   Input Parameters:
9 + da - the DMDA
10 . M - the global X size (or PETSC_DECIDE)
11 . N - the global Y size (or PETSC_DECIDE)
12 - P - the global Z size (or PETSC_DECIDE)
13 
14   Level: intermediate
15 
16 .seealso: DMDAGetSize(), PetscSplitOwnership()
17 @*/
18 PetscErrorCode  DMDASetSizes(DM da, PetscInt M, PetscInt N, PetscInt P)
19 {
20   DM_DA *dd = (DM_DA*)da->data;
21 
22   PetscFunctionBegin;
23   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
24   PetscValidLogicalCollectiveInt(da,M,2);
25   PetscValidLogicalCollectiveInt(da,N,3);
26   PetscValidLogicalCollectiveInt(da,P,4);
27   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
28 
29   dd->M = M;
30   dd->N = N;
31   dd->P = P;
32   PetscFunctionReturn(0);
33 }
34 
35 /*@
36   DMDASetNumProcs - Sets the number of processes in each dimension
37 
38   Logically Collective on DMDA
39 
40   Input Parameters:
41 + da - the DMDA
42 . m - the number of X procs (or PETSC_DECIDE)
43 . n - the number of Y procs (or PETSC_DECIDE)
44 - p - the number of Z procs (or PETSC_DECIDE)
45 
46   Level: intermediate
47 
48 .seealso: DMDASetSizes(), DMDAGetSize(), PetscSplitOwnership()
49 @*/
50 PetscErrorCode  DMDASetNumProcs(DM da, PetscInt m, PetscInt n, PetscInt p)
51 {
52   DM_DA          *dd = (DM_DA*)da->data;
53   PetscErrorCode ierr;
54 
55   PetscFunctionBegin;
56   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
57   PetscValidLogicalCollectiveInt(da,m,2);
58   PetscValidLogicalCollectiveInt(da,n,3);
59   PetscValidLogicalCollectiveInt(da,p,4);
60   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
61   dd->m = m;
62   dd->n = n;
63   dd->p = p;
64   if (da->dim == 2) {
65     PetscMPIInt size;
66     ierr = MPI_Comm_size(PetscObjectComm((PetscObject)da),&size);CHKERRQ(ierr);
67     if ((dd->m > 0) && (dd->n < 0)) {
68       dd->n = size/dd->m;
69       if (dd->n*dd->m != size) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%D processes in X direction not divisible into comm size %d",m,size);
70     }
71     if ((dd->n > 0) && (dd->m < 0)) {
72       dd->m = size/dd->n;
73       if (dd->n*dd->m != size) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%D processes in Y direction not divisible into comm size %d",n,size);
74     }
75   }
76   PetscFunctionReturn(0);
77 }
78 
/*@
  DMDASetBoundaryType - Sets the type of ghost nodes on domain boundaries.

  Logically Collective on DMDA

  Input Parameters:
+ da    - The DMDA
- bx,by,bz - One of DM_BOUNDARY_NONE, DM_BOUNDARY_GHOSTED, DM_BOUNDARY_PERIODIC

  Level: intermediate

  Notes: Must be called before DMSetUp().

.keywords:  distributed array, periodicity
.seealso: DMDACreate(), DMDestroy(), DMDA, DMBoundaryType
@*/
PetscErrorCode  DMDASetBoundaryType(DM da,DMBoundaryType bx,DMBoundaryType by,DMBoundaryType bz)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveEnum(da,bx,2);
  PetscValidLogicalCollectiveEnum(da,by,3);
  PetscValidLogicalCollectiveEnum(da,bz,4);
  if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
  dd->bx = bx;
  dd->by = by;
  dd->bz = bz;
  PetscFunctionReturn(0);
}
108 
109 /*@
110   DMDASetDof - Sets the number of degrees of freedom per vertex
111 
112   Not collective
113 
114   Input Parameters:
115 + da  - The DMDA
116 - dof - Number of degrees of freedom
117 
118   Level: intermediate
119 
120 .keywords:  distributed array, degrees of freedom
121 .seealso: DMDAGetDof(), DMDACreate(), DMDestroy(), DMDA
122 @*/
123 PetscErrorCode  DMDASetDof(DM da, PetscInt dof)
124 {
125   DM_DA *dd = (DM_DA*)da->data;
126 
127   PetscFunctionBegin;
128   PetscValidHeaderSpecific(da,DM_CLASSID,1);
129   PetscValidLogicalCollectiveInt(da,dof,2);
130   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
131   dd->w  = dof;
132   da->bs = dof;
133   PetscFunctionReturn(0);
134 }
135 
136 /*@
137   DMDAGetDof - Gets the number of degrees of freedom per vertex
138 
139   Not collective
140 
141   Input Parameter:
142 . da  - The DMDA
143 
144   Output Parameter:
145 . dof - Number of degrees of freedom
146 
147   Level: intermediate
148 
149 .keywords:  distributed array, degrees of freedom
150 .seealso: DMDASetDof(), DMDACreate(), DMDestroy(), DMDA
151 @*/
152 PetscErrorCode DMDAGetDof(DM da, PetscInt *dof)
153 {
154   DM_DA *dd = (DM_DA *) da->data;
155 
156   PetscFunctionBegin;
157   PetscValidHeaderSpecific(da,DM_CLASSID,1);
158   PetscValidPointer(dof,2);
159   *dof = dd->w;
160   PetscFunctionReturn(0);
161 }
162 
163 /*@
164   DMDAGetOverlap - Gets the size of the per-processor overlap.
165 
166   Not collective
167 
168   Input Parameters:
169 . da  - The DMDA
170 
171   Output Parameters:
172 + x   - Overlap in the x direction
173 . y   - Overlap in the y direction
174 - z   - Overlap in the z direction
175 
176   Level: intermediate
177 
178 .keywords:  distributed array, overlap, domain decomposition
179 .seealso: DMDACreateDomainDecomposition(), DMDASetOverlap(), DMDA
180 @*/
181 PetscErrorCode  DMDAGetOverlap(DM da,PetscInt *x,PetscInt *y,PetscInt *z)
182 {
183   DM_DA *dd = (DM_DA*)da->data;
184 
185   PetscFunctionBegin;
186   PetscValidHeaderSpecific(da,DM_CLASSID,1);
187   if (x) *x = dd->xol;
188   if (y) *y = dd->yol;
189   if (z) *z = dd->zol;
190   PetscFunctionReturn(0);
191 }
192 
/*@
  DMDASetOverlap - Sets the size of the per-processor overlap.

  Logically Collective on DMDA

  Input Parameters:
+ da  - The DMDA
. x   - Overlap in the x direction
. y   - Overlap in the y direction
- z   - Overlap in the z direction

  Level: intermediate

.keywords:  distributed array, overlap, domain decomposition
.seealso: DMDACreateDomainDecomposition(), DMDAGetOverlap(), DMDA
@*/
PetscErrorCode  DMDASetOverlap(DM da,PetscInt x,PetscInt y,PetscInt z)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveInt(da,x,2);
  PetscValidLogicalCollectiveInt(da,y,3);
  PetscValidLogicalCollectiveInt(da,z,4);
  dd->xol = x;
  dd->yol = y;
  dd->zol = z;
  PetscFunctionReturn(0);
}
223 
224 
225 /*@
226   DMDAGetNumLocalSubDomains - Gets the number of local subdomains created upon decomposition.
227 
228   Not collective
229 
230   Input Parameters:
231 . da  - The DMDA
232 
233   Output Parameters:
234 + Nsub   - Number of local subdomains created upon decomposition
235 
236   Level: intermediate
237 
238 .keywords:  distributed array, domain decomposition
239 .seealso: DMDACreateDomainDecomposition(), DMDASetNumLocalSubDomains(), DMDA
240 @*/
241 PetscErrorCode  DMDAGetNumLocalSubDomains(DM da,PetscInt *Nsub)
242 {
243   DM_DA *dd = (DM_DA*)da->data;
244 
245   PetscFunctionBegin;
246   PetscValidHeaderSpecific(da,DM_CLASSID,1);
247   if (Nsub) *Nsub = dd->Nsub;
248   PetscFunctionReturn(0);
249 }
250 
251 /*@
252   DMDASetNumLocalSubDomains - Sets the number of local subdomains created upon decomposition.
253 
254   Not collective
255 
256   Input Parameters:
257 + da  - The DMDA
258 - Nsub - The number of local subdomains requested
259 
260   Level: intermediate
261 
262 .keywords:  distributed array, domain decomposition
263 .seealso: DMDACreateDomainDecomposition(), DMDAGetNumLocalSubDomains(), DMDA
264 @*/
265 PetscErrorCode  DMDASetNumLocalSubDomains(DM da,PetscInt Nsub)
266 {
267   DM_DA *dd = (DM_DA*)da->data;
268 
269   PetscFunctionBegin;
270   PetscValidHeaderSpecific(da,DM_CLASSID,1);
271   PetscValidLogicalCollectiveInt(da,Nsub,2);
272   dd->Nsub = Nsub;
273   PetscFunctionReturn(0);
274 }
275 
/*@
  DMDASetOffset - Sets the index offset of the DA.

  Logically Collective on DMDA

  Input Parameters:
+ da  - The DMDA
. xo  - The offset in the x direction
. yo  - The offset in the y direction
. zo  - The offset in the z direction
. Mo  - The global size in the x direction
. No  - The global size in the y direction
- Po  - The global size in the z direction

  Level: intermediate

  Notes: This is used primarily to overlap a computation on a local DA with that on a global DA without
  changing boundary conditions or subdomain features that depend upon the global offsets.

.keywords:  distributed array, degrees of freedom
.seealso: DMDAGetOffset(), DMDAVecGetArray()
@*/
PetscErrorCode  DMDASetOffset(DM da, PetscInt xo, PetscInt yo, PetscInt zo, PetscInt Mo, PetscInt No, PetscInt Po)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveInt(da,xo,2);
  PetscValidLogicalCollectiveInt(da,yo,3);
  PetscValidLogicalCollectiveInt(da,zo,4);
  PetscValidLogicalCollectiveInt(da,Mo,5);
  PetscValidLogicalCollectiveInt(da,No,6);
  PetscValidLogicalCollectiveInt(da,Po,7);
  dd->xo = xo;
  dd->yo = yo;
  dd->zo = zo;
  dd->Mo = Mo;
  dd->No = No;
  dd->Po = Po;

  if (da->coordinateDM) {
    /* keep the coordinate DM's offsets consistent with this DMDA */
    ierr = DMDASetOffset(da->coordinateDM,xo,yo,zo,Mo,No,Po);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
320 
321 /*@
322   DMDAGetOffset - Gets the index offset of the DA.
323 
324   Not collective
325 
326   Input Parameter:
327 . da  - The DMDA
328 
329   Output Parameters:
330 + xo  - The offset in the x direction
331 . yo  - The offset in the y direction
332 . zo  - The offset in the z direction
333 . Mo  - The global size in the x direction
334 . No  - The global size in the y direction
335 - Po  - The global size in the z direction
336 
337   Level: intermediate
338 
339 .keywords:  distributed array, degrees of freedom
340 .seealso: DMDASetOffset(), DMDAVecGetArray()
341 @*/
342 PetscErrorCode  DMDAGetOffset(DM da,PetscInt *xo,PetscInt *yo,PetscInt *zo,PetscInt *Mo,PetscInt *No,PetscInt *Po)
343 {
344   DM_DA *dd = (DM_DA*)da->data;
345 
346   PetscFunctionBegin;
347   PetscValidHeaderSpecific(da,DM_CLASSID,1);
348   if (xo) *xo = dd->xo;
349   if (yo) *yo = dd->yo;
350   if (zo) *zo = dd->zo;
351   if (Mo) *Mo = dd->Mo;
352   if (No) *No = dd->No;
353   if (Po) *Po = dd->Po;
354   PetscFunctionReturn(0);
355 }
356 
/*@
  DMDAGetNonOverlappingRegion - Gets the indices of the nonoverlapping region of a subdomain DM.

  Not collective

  Input Parameter:
. da  - The DMDA

  Output Parameters:
+ xs  - The start of the region in x
. ys  - The start of the region in y
. zs  - The start of the region in z
. xm  - The size of the region in x
. ym  - The size of the region in y
- zm  - The size of the region in z

  Level: intermediate

.keywords:  distributed array, degrees of freedom
.seealso: DMDAGetOffset(), DMDAVecGetArray()
@*/
PetscErrorCode  DMDAGetNonOverlappingRegion(DM da, PetscInt *xs, PetscInt *ys, PetscInt *zs, PetscInt *xm, PetscInt *ym, PetscInt *zm)
{
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* all outputs are optional; NULL pointers are skipped */
  if (xs) *xs = dd->nonxs;
  if (ys) *ys = dd->nonys;
  if (zs) *zs = dd->nonzs;
  if (xm) *xm = dd->nonxm;
  if (ym) *ym = dd->nonym;
  if (zm) *zm = dd->nonzm;
  PetscFunctionReturn(0);
}
392 
393 
/*@
  DMDASetNonOverlappingRegion - Sets the indices of the nonoverlapping region of a subdomain DM.

  Logically Collective on DMDA

  Input Parameters:
+ da  - The DMDA
. xs  - The start of the region in x
. ys  - The start of the region in y
. zs  - The start of the region in z
. xm  - The size of the region in x
. ym  - The size of the region in y
- zm  - The size of the region in z

  Level: intermediate

.keywords:  distributed array, degrees of freedom
.seealso: DMDAGetOffset(), DMDAVecGetArray()
@*/
PetscErrorCode  DMDASetNonOverlappingRegion(DM da, PetscInt xs, PetscInt ys, PetscInt zs, PetscInt xm, PetscInt ym, PetscInt zm)
{
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveInt(da,xs,2);
  PetscValidLogicalCollectiveInt(da,ys,3);
  PetscValidLogicalCollectiveInt(da,zs,4);
  PetscValidLogicalCollectiveInt(da,xm,5);
  PetscValidLogicalCollectiveInt(da,ym,6);
  PetscValidLogicalCollectiveInt(da,zm,7);
  dd->nonxs = xs;
  dd->nonys = ys;
  dd->nonzs = zs;
  dd->nonxm = xm;
  dd->nonym = ym;
  dd->nonzm = zm;

  PetscFunctionReturn(0);
}
434 
435 /*@
436   DMDASetStencilType - Sets the type of the communication stencil
437 
438   Logically Collective on DMDA
439 
440   Input Parameter:
441 + da    - The DMDA
442 - stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR.
443 
444   Level: intermediate
445 
446 .keywords:  distributed array, stencil
447 .seealso: DMDACreate(), DMDestroy(), DMDA
448 @*/
449 PetscErrorCode  DMDASetStencilType(DM da, DMDAStencilType stype)
450 {
451   DM_DA *dd = (DM_DA*)da->data;
452 
453   PetscFunctionBegin;
454   PetscValidHeaderSpecific(da,DM_CLASSID,1);
455   PetscValidLogicalCollectiveEnum(da,stype,2);
456   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
457   dd->stencil_type = stype;
458   PetscFunctionReturn(0);
459 }
460 
461 /*@
462   DMDAGetStencilType - Gets the type of the communication stencil
463 
464   Not collective
465 
466   Input Parameter:
467 . da    - The DMDA
468 
469   Output Parameter:
470 . stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR.
471 
472   Level: intermediate
473 
474 .keywords:  distributed array, stencil
475 .seealso: DMDACreate(), DMDestroy(), DMDA
476 @*/
477 PetscErrorCode DMDAGetStencilType(DM da, DMDAStencilType *stype)
478 {
479   DM_DA *dd = (DM_DA*)da->data;
480 
481   PetscFunctionBegin;
482   PetscValidHeaderSpecific(da,DM_CLASSID,1);
483   PetscValidPointer(stype,2);
484   *stype = dd->stencil_type;
485   PetscFunctionReturn(0);
486 }
487 
488 /*@
489   DMDASetStencilWidth - Sets the width of the communication stencil
490 
491   Logically Collective on DMDA
492 
493   Input Parameter:
494 + da    - The DMDA
495 - width - The stencil width
496 
497   Level: intermediate
498 
499 .keywords:  distributed array, stencil
500 .seealso: DMDACreate(), DMDestroy(), DMDA
501 @*/
502 PetscErrorCode  DMDASetStencilWidth(DM da, PetscInt width)
503 {
504   DM_DA *dd = (DM_DA*)da->data;
505 
506   PetscFunctionBegin;
507   PetscValidHeaderSpecific(da,DM_CLASSID,1);
508   PetscValidLogicalCollectiveInt(da,width,2);
509   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
510   dd->s = width;
511   PetscFunctionReturn(0);
512 }
513 
514 /*@
515   DMDAGetStencilWidth - Gets the width of the communication stencil
516 
517   Not collective
518 
519   Input Parameter:
520 . da    - The DMDA
521 
522   Output Parameter:
523 . width - The stencil width
524 
525   Level: intermediate
526 
527 .keywords:  distributed array, stencil
528 .seealso: DMDACreate(), DMDestroy(), DMDA
529 @*/
530 PetscErrorCode DMDAGetStencilWidth(DM da, PetscInt *width)
531 {
532   DM_DA *dd = (DM_DA *) da->data;
533 
534   PetscFunctionBegin;
535   PetscValidHeaderSpecific(da,DM_CLASSID,1);
536   PetscValidPointer(width,2);
537   *width = dd->s;
538   PetscFunctionReturn(0);
539 }
540 
541 static PetscErrorCode DMDACheckOwnershipRanges_Private(DM da,PetscInt M,PetscInt m,const PetscInt lx[])
542 {
543   PetscInt i,sum;
544 
545   PetscFunctionBegin;
546   if (M < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Global dimension not set");
547   for (i=sum=0; i<m; i++) sum += lx[i];
548   if (sum != M) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_INCOMP,"Ownership ranges sum to %D but global dimension is %D",sum,M);
549   PetscFunctionReturn(0);
550 }
551 
/*@
  DMDASetOwnershipRanges - Sets the number of nodes in each direction on each process

  Logically Collective on DMDA

  Input Parameters:
+ da - The DMDA
. lx - array containing number of nodes in the X direction on each process, or NULL. If non-null, must be of length da->m
. ly - array containing number of nodes in the Y direction on each process, or NULL. If non-null, must be of length da->n
- lz - array containing number of nodes in the Z direction on each process, or NULL. If non-null, must be of length da->p.

  Level: intermediate

  Note: these numbers are NOT multiplied by the number of dof per node.

.keywords:  distributed array
.seealso: DMDACreate(), DMDestroy(), DMDA
@*/
PetscErrorCode  DMDASetOwnershipRanges(DM da, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[])
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
  /* For each direction: the process count must already be known (it gives the array
     length), the entries must sum to the global size, and the values are copied into
     a lazily allocated internal array that the DMDA owns. */
  if (lx) {
    if (dd->m < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
    ierr = DMDACheckOwnershipRanges_Private(da,dd->M,dd->m,lx);CHKERRQ(ierr);
    if (!dd->lx) {
      ierr = PetscMalloc1(dd->m, &dd->lx);CHKERRQ(ierr);
    }
    ierr = PetscMemcpy(dd->lx, lx, dd->m*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (ly) {
    if (dd->n < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
    ierr = DMDACheckOwnershipRanges_Private(da,dd->N,dd->n,ly);CHKERRQ(ierr);
    if (!dd->ly) {
      ierr = PetscMalloc1(dd->n, &dd->ly);CHKERRQ(ierr);
    }
    ierr = PetscMemcpy(dd->ly, ly, dd->n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (lz) {
    if (dd->p < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
    ierr = DMDACheckOwnershipRanges_Private(da,dd->P,dd->p,lz);CHKERRQ(ierr);
    if (!dd->lz) {
      ierr = PetscMalloc1(dd->p, &dd->lz);CHKERRQ(ierr);
    }
    ierr = PetscMemcpy(dd->lz, lz, dd->p*sizeof(PetscInt));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
604 
/*@
       DMDASetInterpolationType - Sets the type of interpolation that will be
          returned by DMCreateInterpolation()

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  ctype - DMDA_Q1 and DMDA_Q0 are currently the only supported forms

   Level: intermediate

   Notes: you should call this on the coarser of the two DMDAs you pass to DMCreateInterpolation()

.keywords:  distributed array, interpolation

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType
@*/
PetscErrorCode  DMDASetInterpolationType(DM da,DMDAInterpolationType ctype)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveEnum(da,ctype,2);
  dd->interptype = ctype;
  /* Injection not coded for Q0 yet, so disable the injection callback for that case. */
  if (ctype == DMDA_Q0) da->ops->getinjection = NULL;
  PetscFunctionReturn(0);
}
635 
636 /*@
637        DMDAGetInterpolationType - Gets the type of interpolation that will be
638           used by DMCreateInterpolation()
639 
640    Not Collective
641 
642    Input Parameter:
643 .  da - distributed array
644 
645    Output Parameter:
646 .  ctype - interpolation type (DMDA_Q1 and DMDA_Q0 are currently the only supported forms)
647 
648    Level: intermediate
649 
650 .keywords:  distributed array, interpolation
651 
652 .seealso: DMDA, DMDAInterpolationType, DMDASetInterpolationType(), DMCreateInterpolation()
653 @*/
654 PetscErrorCode  DMDAGetInterpolationType(DM da,DMDAInterpolationType *ctype)
655 {
656   DM_DA *dd = (DM_DA*)da->data;
657 
658   PetscFunctionBegin;
659   PetscValidHeaderSpecific(da,DM_CLASSID,1);
660   PetscValidPointer(ctype,2);
661   *ctype = dd->interptype;
662   PetscFunctionReturn(0);
663 }
664 
665 /*@C
666       DMDAGetNeighbors - Gets an array containing the MPI rank of all the current
667         processes neighbors.
668 
669     Not Collective
670 
671    Input Parameter:
672 .     da - the DMDA object
673 
674    Output Parameters:
675 .     ranks - the neighbors ranks, stored with the x index increasing most rapidly.
676               this process itself is in the list
677 
678    Notes: In 2d the array is of length 9, in 3d of length 27
679           Not supported in 1d
680           Do not free the array, it is freed when the DMDA is destroyed.
681 
682    Fortran Notes: In fortran you must pass in an array of the appropriate length.
683 
684    Level: intermediate
685 
686 @*/
687 PetscErrorCode  DMDAGetNeighbors(DM da,const PetscMPIInt *ranks[])
688 {
689   DM_DA *dd = (DM_DA*)da->data;
690 
691   PetscFunctionBegin;
692   PetscValidHeaderSpecific(da,DM_CLASSID,1);
693   *ranks = dd->neighbors;
694   PetscFunctionReturn(0);
695 }
696 
697 /*@C
698       DMDAGetOwnershipRanges - Gets the ranges of indices in the x, y and z direction that are owned by each process
699 
700     Not Collective
701 
702    Input Parameter:
703 .     da - the DMDA object
704 
705    Output Parameter:
706 +     lx - ownership along x direction (optional)
707 .     ly - ownership along y direction (optional)
708 -     lz - ownership along z direction (optional)
709 
710    Level: intermediate
711 
712     Note: these correspond to the optional final arguments passed to DMDACreate(), DMDACreate2d(), DMDACreate3d()
713 
714     In Fortran one must pass in arrays lx, ly, and lz that are long enough to hold the values; the sixth, seventh and
715     eighth arguments from DMDAGetInfo()
716 
717      In C you should not free these arrays, nor change the values in them. They will only have valid values while the
718     DMDA they came from still exists (has not been destroyed).
719 
720     These numbers are NOT multiplied by the number of dof per node.
721 
722      Not available from Fortran
723 
724 .seealso: DMDAGetCorners(), DMDAGetGhostCorners(), DMDACreate(), DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), VecGetOwnershipRanges()
725 @*/
726 PetscErrorCode  DMDAGetOwnershipRanges(DM da,const PetscInt *lx[],const PetscInt *ly[],const PetscInt *lz[])
727 {
728   DM_DA *dd = (DM_DA*)da->data;
729 
730   PetscFunctionBegin;
731   PetscValidHeaderSpecific(da,DM_CLASSID,1);
732   if (lx) *lx = dd->lx;
733   if (ly) *ly = dd->ly;
734   if (lz) *lz = dd->lz;
735   PetscFunctionReturn(0);
736 }
737 
738 /*@
739      DMDASetRefinementFactor - Set the ratios that the DMDA grid is refined
740 
741     Logically Collective on DMDA
742 
743   Input Parameters:
744 +    da - the DMDA object
745 .    refine_x - ratio of fine grid to coarse in x direction (2 by default)
746 .    refine_y - ratio of fine grid to coarse in y direction (2 by default)
747 -    refine_z - ratio of fine grid to coarse in z direction (2 by default)
748 
749   Options Database:
750 +  -da_refine_x - refinement ratio in x direction
751 .  -da_refine_y - refinement ratio in y direction
752 -  -da_refine_z - refinement ratio in z direction
753 
754   Level: intermediate
755 
756     Notes: Pass PETSC_IGNORE to leave a value unchanged
757 
758 .seealso: DMRefine(), DMDAGetRefinementFactor()
759 @*/
760 PetscErrorCode  DMDASetRefinementFactor(DM da, PetscInt refine_x, PetscInt refine_y,PetscInt refine_z)
761 {
762   DM_DA *dd = (DM_DA*)da->data;
763 
764   PetscFunctionBegin;
765   PetscValidHeaderSpecific(da,DM_CLASSID,1);
766   PetscValidLogicalCollectiveInt(da,refine_x,2);
767   PetscValidLogicalCollectiveInt(da,refine_y,3);
768   PetscValidLogicalCollectiveInt(da,refine_z,4);
769 
770   if (refine_x > 0) dd->refine_x = refine_x;
771   if (refine_y > 0) dd->refine_y = refine_y;
772   if (refine_z > 0) dd->refine_z = refine_z;
773   PetscFunctionReturn(0);
774 }
775 
776 /*@C
777      DMDAGetRefinementFactor - Gets the ratios that the DMDA grid is refined
778 
779     Not Collective
780 
781   Input Parameter:
782 .    da - the DMDA object
783 
784   Output Parameters:
785 +    refine_x - ratio of fine grid to coarse in x direction (2 by default)
786 .    refine_y - ratio of fine grid to coarse in y direction (2 by default)
787 -    refine_z - ratio of fine grid to coarse in z direction (2 by default)
788 
789   Level: intermediate
790 
791     Notes: Pass NULL for values you do not need
792 
793 .seealso: DMRefine(), DMDASetRefinementFactor()
794 @*/
795 PetscErrorCode  DMDAGetRefinementFactor(DM da, PetscInt *refine_x, PetscInt *refine_y,PetscInt *refine_z)
796 {
797   DM_DA *dd = (DM_DA*)da->data;
798 
799   PetscFunctionBegin;
800   PetscValidHeaderSpecific(da,DM_CLASSID,1);
801   if (refine_x) *refine_x = dd->refine_x;
802   if (refine_y) *refine_y = dd->refine_y;
803   if (refine_z) *refine_z = dd->refine_z;
804   PetscFunctionReturn(0);
805 }
806 
/*@C
     DMDASetGetMatrix - Sets the routine used by the DMDA to allocate a matrix.

    Logically Collective on DMDA

  Input Parameters:
+    da - the DMDA object
-    f - the function that allocates the matrix for that specific DMDA

  Level: developer

   Notes: See DMDASetBlockFills() that provides a simple way to provide the nonzero structure for
       the diagonal and off-diagonal blocks of the matrix

   Not supported from Fortran

.seealso: DMCreateMatrix(), DMDASetBlockFills()
@*/
PetscErrorCode  DMDASetGetMatrix(DM da,PetscErrorCode (*f)(DM, Mat*))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* install the callback that DMCreateMatrix() will dispatch to for this DM */
  da->ops->creatematrix = f;
  PetscFunctionReturn(0);
}
832 
/*
  Creates "balanced" ownership ranges after refinement, constrained by the need for the
  fine grid boundaries to fall within one stencil width of the coarse partition.

  Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.

  Inputs: lc[] holds the coarse ownership counts over m processes; the refined counts are
  written into lf[].  ratio is the refinement factor; periodic selects whether the refined
  global size is ratio*Mc (periodic) or 1 + ratio*(Mc-1) (non-periodic).
*/
static PetscErrorCode DMDARefineOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lc[],PetscInt lf[])
{
  PetscInt       i,totalc = 0,remaining,startc = 0,startf = 0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (ratio < 1) SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %D must be at least 1",ratio);
  if (ratio == 1) {
    /* no refinement: the fine partition is identical to the coarse one */
    ierr = PetscMemcpy(lf,lc,m*sizeof(lc[0]));CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  for (i=0; i<m; i++) totalc += lc[i];
  /* total fine points: ratio*totalc when periodic, 1 + ratio*(totalc-1) otherwise */
  remaining = (!periodic) + ratio * (totalc - (!periodic));
  for (i=0; i<m; i++) {
    PetscInt want = remaining/(m-i) + !!(remaining%(m-i)); /* ceil(remaining/(m-i)): ideal balanced share */
    if (i == m-1) lf[i] = want;
    else {
      const PetscInt nextc = startc + lc[i];
      /* Move the first fine node of the next subdomain to the right until the coarse node on its left is within one
       * coarse stencil width of the first coarse node in the next subdomain. */
      while ((startf+want)/ratio < nextc - stencil_width) want++;
      /* Move the last fine node in the current subdomain to the left until the coarse node on its right is within one
       * coarse stencil width of the last coarse node in the current subdomain. */
      while ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width) want--;
      /* Make sure all constraints are satisfied */
      if (want < 0 || want > remaining || ((startf+want)/ratio < nextc - stencil_width)
          || ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible refined ownership range");
    }
    lf[i]      = want;
    startc    += lc[i];
    startf    += lf[i];
    remaining -= lf[i];
  }
  PetscFunctionReturn(0);
}
874 
/*
  Creates "balanced" ownership ranges after coarsening, constrained by the need for the
  fine grid boundaries to fall within one stencil width of the coarse partition.

  Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.

  Inputs: lf[] holds the fine ownership counts over m processes; the coarsened counts are
  written into lc[].  ratio is the coarsening factor; periodic selects whether the coarse
  global size is totalf/ratio (periodic) or 1 + (totalf-1)/ratio (non-periodic).
*/
static PetscErrorCode DMDACoarsenOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lf[],PetscInt lc[])
{
  PetscInt       i,totalf,remaining,startc,startf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (ratio < 1) SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %D must be at least 1",ratio);
  if (ratio == 1) {
    /* no coarsening: the coarse partition is identical to the fine one */
    ierr = PetscMemcpy(lc,lf,m*sizeof(lf[0]));CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  for (i=0,totalf=0; i<m; i++) totalf += lf[i];
  /* total coarse points: totalf/ratio when periodic, 1 + (totalf-1)/ratio otherwise */
  remaining = (!periodic) + (totalf - (!periodic)) / ratio;
  for (i=0,startc=0,startf=0; i<m; i++) {
    PetscInt want = remaining/(m-i) + !!(remaining%(m-i)); /* ceil(remaining/(m-i)): ideal balanced share */
    if (i == m-1) lc[i] = want;
    else {
      const PetscInt nextf = startf+lf[i];
      /* Slide first coarse node of next subdomain to the left until the coarse node to the left of the first fine
       * node is within one stencil width. */
      while (nextf/ratio < startc+want-stencil_width) want--;
      /* Slide the last coarse node of the current subdomain to the right until the coarse node to the right of the last
       * fine node is within one stencil width. */
      while ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width) want++;
      /* Make sure all constraints are satisfied */
      if (want < 0 || want > remaining
          || (nextf/ratio < startc+want-stencil_width) || ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible coarsened ownership range");
    }
    lc[i]      = want;
    startc    += lc[i];
    startf    += lf[i];
    remaining -= lc[i];
  }
  PetscFunctionReturn(0);
}
915 
/*
   DMRefine_DA - Builds a refined DMDA (da2) from da: global sizes are scaled by the
   refinement factors refine_x/y/z, while the dimension, boundary types, dof, stencil,
   interpolation type, and processor decomposition are copied unchanged.  Fill patterns,
   per-level refinement hierarchies, vector type, and field names are also propagated,
   and coordinates (if set on da) are interpolated onto the new grid.

   Note: the comm argument is unused; da2 is created on da's own communicator.
   Caller receives ownership of *daref.
*/
PetscErrorCode  DMRefine_DA(DM da,MPI_Comm comm,DM *daref)
{
  PetscErrorCode ierr;
  PetscInt       M,N,P,i,dim;
  DM             da2;
  DM_DA          *dd = (DM_DA*)da->data,*dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(daref,3);

  ierr = DMGetDimension(da, &dim);CHKERRQ(ierr);
  /* Fine global sizes: with periodic boundaries or piecewise-constant (Q0) interpolation
     every cell is subdivided, giving refine*M points; otherwise the grid is vertex-centered
     with shared endpoints, giving 1 + refine*(M-1).  Unused dimensions stay at 1. */
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->refine_x*dd->M;
  } else {
    M = 1 + dd->refine_x*(dd->M - 1);
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->refine_y*dd->N;
    } else {
      N = 1;
    }
  } else {
    N = 1 + dd->refine_y*(dd->N - 1);
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->refine_z*dd->P;
    } else {
      P = 1;
    }
  } else {
    P = 1 + dd->refine_z*(dd->P - 1);
  }
  /* Create the fine DMDA mirroring da's layout parameters */
  ierr = DMDACreate(PetscObjectComm((PetscObject)da),&da2);CHKERRQ(ierr);
  ierr = DMSetOptionsPrefix(da2,((PetscObject)da)->prefix);CHKERRQ(ierr);
  ierr = DMSetDimension(da2,dim);CHKERRQ(ierr);
  ierr = DMDASetSizes(da2,M,N,P);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(da2,dd->m,dd->n,dd->p);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz);CHKERRQ(ierr);
  ierr = DMDASetDof(da2,dd->w);CHKERRQ(ierr);
  ierr = DMDASetStencilType(da2,dd->stencil_type);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(da2,dd->s);CHKERRQ(ierr);
  ierr = DMDASetInterpolationType(da2,dd->interptype);CHKERRQ(ierr);
  /* Derive per-process ownership ranges for the fine grid from da's ranges, one call per dimension */
  if (dim == 3) {
    PetscInt *lx,*ly,*lz;
    ierr = PetscMalloc3(dd->m,&lx,dd->n,&ly,dd->p,&lz);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_z,dd->p,dd->lz,lz);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,lz);CHKERRQ(ierr);
    ierr = PetscFree3(lx,ly,lz);CHKERRQ(ierr);
  } else if (dim == 2) {
    PetscInt *lx,*ly;
    ierr = PetscMalloc2(dd->m,&lx,dd->n,&ly);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,NULL);CHKERRQ(ierr);
    ierr = PetscFree2(lx,ly);CHKERRQ(ierr);
  } else if (dim == 1) {
    PetscInt *lx;
    ierr = PetscMalloc1(dd->m,&lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,NULL,NULL);CHKERRQ(ierr);
    ierr = PetscFree(lx);CHKERRQ(ierr);
  }
  dd2 = (DM_DA*)da2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones */
  da2->ops->creatematrix = da->ops->creatematrix;
  /* da2->ops->createinterpolation = da->ops->createinterpolation; this causes problem with SNESVI */
  da2->ops->getcoloring = da->ops->getcoloring;

  /* copy fill information if given; each fill array holds w+1 row offsets followed by
     dfill[w] (resp. ofill[w]) column entries, hence the dfill[w]+w+1 length */
  if (dd->dfill) {
    ierr = PetscMalloc1(dd->dfill[dd->w]+dd->w+1,&dd2->dfill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->dfill,dd->dfill,(dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->ofill) {
    ierr = PetscMalloc1(dd->ofill[dd->w]+dd->w+1,&dd2->ofill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->ofill,dd->ofill,(dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  /* copy the refine information */
  dd2->coarsen_x = dd2->refine_x = dd->refine_x;
  dd2->coarsen_y = dd2->refine_y = dd->refine_y;
  dd2->coarsen_z = dd2->refine_z = dd->refine_z;

  /* If per-level refinement hierarchies were provided, override da2's factors.
     levelup - leveldown appears to be da's position in the hierarchy, so +1 indexes
     the factor used to refine da2 further, and the unshifted index the factor used
     to coarsen back to da -- NOTE(review): confirm this indexing convention */
  if (dd->refine_z_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_z_hier_n) {
      dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_z_hier_n) {
      dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown];
    }
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    ierr = PetscMalloc1(dd2->refine_z_hier_n,&dd2->refine_z_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_z_hier,dd->refine_z_hier,dd2->refine_z_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_y_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_y_hier_n) {
      dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_y_hier_n) {
      dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown];
    }
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    ierr = PetscMalloc1(dd2->refine_y_hier_n,&dd2->refine_y_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_y_hier,dd->refine_y_hier,dd2->refine_y_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_x_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_x_hier_n) {
      dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_x_hier_n) {
      dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown];
    }
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    ierr = PetscMalloc1(dd2->refine_x_hier_n,&dd2->refine_x_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_x_hier,dd->refine_x_hier,dd2->refine_x_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }


  /* copy vector type information */
  ierr = DMSetVecType(da2,da->vectype);CHKERRQ(ierr);

  /* shallow (pointer) copies of the local function/Jacobian callbacks */
  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  /* refining raises levelup by one; leveldown is unchanged */
  da2->leveldown = da->leveldown;
  da2->levelup   = da->levelup + 1;

  ierr = DMSetUp(da2);CHKERRQ(ierr);

  /* interpolate coordinates if they are set on the coarse grid */
  if (da->coordinates) {
    DM  cdaf,cdac;
    Vec coordsc,coordsf;
    Mat II;

    ierr = DMGetCoordinateDM(da,&cdac);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da,&coordsc);CHKERRQ(ierr);
    ierr = DMGetCoordinateDM(da2,&cdaf);CHKERRQ(ierr);
    /* force creation of the coordinate vector */
    ierr = DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da2,&coordsf);CHKERRQ(ierr);
    ierr = DMCreateInterpolation(cdac,cdaf,&II,NULL);CHKERRQ(ierr);
    ierr = MatInterpolate(II,coordsc,coordsf);CHKERRQ(ierr);
    ierr = MatDestroy(&II);CHKERRQ(ierr);
  }

  /* copy the per-component field names to the fine grid */
  for (i=0; i<da->bs; i++) {
    const char *fieldname;
    ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr);
    ierr = DMDASetFieldName(da2,i,fieldname);CHKERRQ(ierr);
  }

  *daref = da2;
  PetscFunctionReturn(0);
}
1076 
1077 
/*
   DMCoarsen_DA - Builds a coarsened DMDA (da2) from da: global sizes are divided by the
   coarsening factors coarsen_x/y/z, while the dimension, boundary types, dof, stencil,
   interpolation type, and processor decomposition are copied unchanged.  Fill patterns,
   per-level refinement hierarchies, vector type, and field names are also propagated,
   and coordinates (if set on da) are injected onto the coarse grid.

   Note: the comm argument is unused; da2 is created on da's own communicator.
   Caller receives ownership of *daref.
*/
PetscErrorCode  DMCoarsen_DA(DM da, MPI_Comm comm,DM *daref)
{
  PetscErrorCode ierr;
  PetscInt       M,N,P,i,dim;
  DM             da2;
  DM_DA          *dd = (DM_DA*)da->data,*dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(daref,3);

  ierr = DMGetDimension(da, &dim);CHKERRQ(ierr);
  /* Coarse global sizes: inverse of the rule in DMRefine_DA -- for periodic or
     piecewise-constant (Q0) interpolation every coarsen-factor cells collapse to one
     (M/coarsen); otherwise the grid is vertex-centered, giving 1 + (M-1)/coarsen.
     Unused dimensions stay at 1. */
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->M / dd->coarsen_x;
  } else {
    M = 1 + (dd->M - 1) / dd->coarsen_x;
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->N / dd->coarsen_y;
    } else {
      N = 1;
    }
  } else {
    N = 1 + (dd->N - 1) / dd->coarsen_y;
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->P / dd->coarsen_z;
    } else {
      P = 1;
    }
  } else {
    P = 1 + (dd->P - 1) / dd->coarsen_z;
  }
  /* Create the coarse DMDA mirroring da's layout parameters */
  ierr = DMDACreate(PetscObjectComm((PetscObject)da),&da2);CHKERRQ(ierr);
  ierr = DMSetOptionsPrefix(da2,((PetscObject)da)->prefix);CHKERRQ(ierr);
  ierr = DMSetDimension(da2,dim);CHKERRQ(ierr);
  ierr = DMDASetSizes(da2,M,N,P);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(da2,dd->m,dd->n,dd->p);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz);CHKERRQ(ierr);
  ierr = DMDASetDof(da2,dd->w);CHKERRQ(ierr);
  ierr = DMDASetStencilType(da2,dd->stencil_type);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(da2,dd->s);CHKERRQ(ierr);
  ierr = DMDASetInterpolationType(da2,dd->interptype);CHKERRQ(ierr);
  /* Derive per-process ownership ranges for the coarse grid from da's ranges, one call per dimension */
  if (dim == 3) {
    PetscInt *lx,*ly,*lz;
    ierr = PetscMalloc3(dd->m,&lx,dd->n,&ly,dd->p,&lz);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_z,dd->p,dd->lz,lz);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,lz);CHKERRQ(ierr);
    ierr = PetscFree3(lx,ly,lz);CHKERRQ(ierr);
  } else if (dim == 2) {
    PetscInt *lx,*ly;
    ierr = PetscMalloc2(dd->m,&lx,dd->n,&ly);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,NULL);CHKERRQ(ierr);
    ierr = PetscFree2(lx,ly);CHKERRQ(ierr);
  } else if (dim == 1) {
    PetscInt *lx;
    ierr = PetscMalloc1(dd->m,&lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,NULL,NULL);CHKERRQ(ierr);
    ierr = PetscFree(lx);CHKERRQ(ierr);
  }
  dd2 = (DM_DA*)da2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones; why are only some inherited and not all? */
  /* da2->ops->createinterpolation = da->ops->createinterpolation; copying this one causes trouble for DMSetVI */
  da2->ops->creatematrix = da->ops->creatematrix;
  da2->ops->getcoloring  = da->ops->getcoloring;

  /* copy fill information if given; each fill array holds w+1 row offsets followed by
     dfill[w] (resp. ofill[w]) column entries, hence the dfill[w]+w+1 length */
  if (dd->dfill) {
    ierr = PetscMalloc1(dd->dfill[dd->w]+dd->w+1,&dd2->dfill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->dfill,dd->dfill,(dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->ofill) {
    ierr = PetscMalloc1(dd->ofill[dd->w]+dd->w+1,&dd2->ofill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->ofill,dd->ofill,(dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  /* copy the refine information */
  dd2->coarsen_x = dd2->refine_x = dd->coarsen_x;
  dd2->coarsen_y = dd2->refine_y = dd->coarsen_y;
  dd2->coarsen_z = dd2->refine_z = dd->coarsen_z;

  /* If per-level refinement hierarchies were provided, override da2's factors.
     Indices are shifted by -1/-2 relative to DMRefine_DA because da2 sits one
     level below da -- NOTE(review): confirm this indexing convention */
  if (dd->refine_z_hier) {
    if (da->levelup - da->leveldown -1 > -1 && da->levelup - da->leveldown - 1< dd->refine_z_hier_n) {
      dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown - 1];
    }
    if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_z_hier_n) {
      dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown - 2];
    }
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    ierr = PetscMalloc1(dd2->refine_z_hier_n,&dd2->refine_z_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_z_hier,dd->refine_z_hier,dd2->refine_z_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_y_hier) {
    if (da->levelup - da->leveldown - 1 > -1 && da->levelup - da->leveldown - 1< dd->refine_y_hier_n) {
      dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown - 1];
    }
    if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_y_hier_n) {
      dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown - 2];
    }
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    ierr = PetscMalloc1(dd2->refine_y_hier_n,&dd2->refine_y_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_y_hier,dd->refine_y_hier,dd2->refine_y_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_x_hier) {
    if (da->levelup - da->leveldown - 1 > -1 && da->levelup - da->leveldown - 1 < dd->refine_x_hier_n) {
      dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown - 1];
    }
    if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_x_hier_n) {
      dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown - 2];
    }
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    ierr = PetscMalloc1(dd2->refine_x_hier_n,&dd2->refine_x_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_x_hier,dd->refine_x_hier,dd2->refine_x_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }

  /* copy vector type information */
  ierr = DMSetVecType(da2,da->vectype);CHKERRQ(ierr);

  /* shallow (pointer) copies of the local function/Jacobian callbacks */
  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  /* coarsening raises leveldown by one; levelup is unchanged */
  da2->leveldown = da->leveldown + 1;
  da2->levelup   = da->levelup;

  ierr = DMSetUp(da2);CHKERRQ(ierr);

  /* inject coordinates if they are set on the fine grid */
  if (da->coordinates) {
    DM         cdaf,cdac;
    Vec        coordsc,coordsf;
    Mat        inject;
    VecScatter vscat;

    ierr = DMGetCoordinateDM(da,&cdaf);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da,&coordsf);CHKERRQ(ierr);
    ierr = DMGetCoordinateDM(da2,&cdac);CHKERRQ(ierr);
    /* force creation of the coordinate vector */
    ierr = DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da2,&coordsc);CHKERRQ(ierr);

    /* the injection matrix wraps a scatter that restricts fine coordinates onto coarse points */
    ierr = DMCreateInjection(cdac,cdaf,&inject);CHKERRQ(ierr);
    ierr = MatScatterGetVecScatter(inject,&vscat);CHKERRQ(ierr);
    ierr = VecScatterBegin(vscat,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(vscat,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = MatDestroy(&inject);CHKERRQ(ierr);
  }

  /* copy the per-component field names to the coarse grid */
  for (i=0; i<da->bs; i++) {
    const char *fieldname;
    ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr);
    ierr = DMDASetFieldName(da2,i,fieldname);CHKERRQ(ierr);
  }

  *daref = da2;
  PetscFunctionReturn(0);
}
1241 
1242 PetscErrorCode  DMRefineHierarchy_DA(DM da,PetscInt nlevels,DM daf[])
1243 {
1244   PetscErrorCode ierr;
1245   PetscInt       i,n,*refx,*refy,*refz;
1246 
1247   PetscFunctionBegin;
1248   PetscValidHeaderSpecific(da,DM_CLASSID,1);
1249   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
1250   if (nlevels == 0) PetscFunctionReturn(0);
1251   PetscValidPointer(daf,3);
1252 
1253   /* Get refinement factors, defaults taken from the coarse DMDA */
1254   ierr = PetscMalloc3(nlevels,&refx,nlevels,&refy,nlevels,&refz);CHKERRQ(ierr);
1255   for (i=0; i<nlevels; i++) {
1256     ierr = DMDAGetRefinementFactor(da,&refx[i],&refy[i],&refz[i]);CHKERRQ(ierr);
1257   }
1258   n    = nlevels;
1259   ierr = PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_x",refx,&n,NULL);CHKERRQ(ierr);
1260   n    = nlevels;
1261   ierr = PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_y",refy,&n,NULL);CHKERRQ(ierr);
1262   n    = nlevels;
1263   ierr = PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_z",refz,&n,NULL);CHKERRQ(ierr);
1264 
1265   ierr = DMDASetRefinementFactor(da,refx[0],refy[0],refz[0]);CHKERRQ(ierr);
1266   ierr = DMRefine(da,PetscObjectComm((PetscObject)da),&daf[0]);CHKERRQ(ierr);
1267   for (i=1; i<nlevels; i++) {
1268     ierr = DMDASetRefinementFactor(daf[i-1],refx[i],refy[i],refz[i]);CHKERRQ(ierr);
1269     ierr = DMRefine(daf[i-1],PetscObjectComm((PetscObject)da),&daf[i]);CHKERRQ(ierr);
1270   }
1271   ierr = PetscFree3(refx,refy,refz);CHKERRQ(ierr);
1272   PetscFunctionReturn(0);
1273 }
1274 
1275 PetscErrorCode  DMCoarsenHierarchy_DA(DM da,PetscInt nlevels,DM dac[])
1276 {
1277   PetscErrorCode ierr;
1278   PetscInt       i;
1279 
1280   PetscFunctionBegin;
1281   PetscValidHeaderSpecific(da,DM_CLASSID,1);
1282   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
1283   if (nlevels == 0) PetscFunctionReturn(0);
1284   PetscValidPointer(dac,3);
1285   ierr = DMCoarsen(da,PetscObjectComm((PetscObject)da),&dac[0]);CHKERRQ(ierr);
1286   for (i=1; i<nlevels; i++) {
1287     ierr = DMCoarsen(dac[i-1],PetscObjectComm((PetscObject)da),&dac[i]);CHKERRQ(ierr);
1288   }
1289   PetscFunctionReturn(0);
1290 }
1291 
1292 #include <petscgll.h>
1293 
1294 PetscErrorCode DMDASetGLLCoordinates_1d(DM dm,PetscGLL *gll)
1295 {
1296   PetscErrorCode ierr;
1297   PetscInt       i,j,n = gll->n,xs,xn,q;
1298   PetscScalar    *xx;
1299   PetscReal      h;
1300   Vec            x;
1301   DM_DA          *da = (DM_DA*)dm->data;
1302 
1303   PetscFunctionBegin;
1304   if (da->bx != DM_BOUNDARY_PERIODIC) {
1305     ierr = DMDAGetInfo(dm,NULL,&q,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);CHKERRQ(ierr);
1306     q    = (q-1)/(n-1);  /* number of spectral elements */
1307     h    = 2.0/q;
1308     ierr = DMDAGetCorners(dm,&xs,NULL,NULL,&xn,NULL,NULL);CHKERRQ(ierr);
1309     xs   = xs/(n-1);
1310     xn   = xn/(n-1);
1311     ierr = DMDASetUniformCoordinates(dm,-1.,1.,0.,0.,0.,0.);CHKERRQ(ierr);
1312     ierr = DMGetCoordinates(dm,&x);CHKERRQ(ierr);
1313     ierr = DMDAVecGetArray(dm,x,&xx);CHKERRQ(ierr);
1314 
1315     /* loop over local spectral elements */
1316     for (j=xs; j<xs+xn; j++) {
1317       /*
1318        Except for the first process, each process starts on the second GLL point of the first element on that process
1319        */
1320       for (i= (j == xs && xs > 0)? 1 : 0; i<n; i++) {
1321         xx[j*(n-1) + i] = -1.0 + h*j + h*(gll->nodes[i]+1.0)/2.;
1322       }
1323     }
1324     ierr = DMDAVecRestoreArray(dm,x,&xx);CHKERRQ(ierr);
1325   } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented for periodic");
1326   PetscFunctionReturn(0);
1327 }
1328 
1329 /*@
1330 
1331      DMDASetGLLCoordinates - Sets the global coordinates from -1 to 1 to the GLL points of as many GLL elements that fit the number of grid points
1332 
1333    Collective on DM
1334 
1335    Input Parameters:
1336 +   da - the DMDA object
1337 -   gll - the GLL object
1338 
1339    Notes: the parallel decomposition of grid points must correspond to the degree of the GLL. That is, the number of grid points
          on each process must be divisible by the number of GLL elements needed per process. This depends on whether the DM is
1341           periodic or not.
1342 
1343    Level: advanced
1344 
1345 .seealso:   DMDACreate(), PetscGLLCreate(), DMGetCoordinates()
1346 @*/
1347 PetscErrorCode DMDASetGLLCoordinates(DM da,PetscGLL *gll)
1348 {
1349   PetscErrorCode ierr;
1350 
1351   PetscFunctionBegin;
1352   if (da->dim == 1) {
1353     ierr = DMDASetGLLCoordinates_1d(da,gll);CHKERRQ(ierr);
1354   } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented for 2 or 3d");
1355   PetscFunctionReturn(0);
1356 }
1357