xref: /petsc/src/dm/impls/da/da.c (revision a3afe2d1ed14aa25e6e8bcdd861505b3816b69e5)
1 #include <petsc-private/dmdaimpl.h>    /*I   "petscdmda.h"   I*/
2 
3 
4 #undef __FUNCT__
5 #define __FUNCT__ "DMDASetDim"
6 /*@
7   DMDASetDim - Sets the dimension
8 
9   Collective on DMDA
10 
11   Input Parameters:
12 + da - the DMDA
13 - dim - the dimension (or PETSC_DECIDE)
14 
15   Level: intermediate
16 
17 .seealso: DaGetDim(), DMDASetSizes()
18 @*/
19 PetscErrorCode  DMDASetDim(DM da, PetscInt dim)
20 {
21   DM_DA *dd = (DM_DA*)da->data;
22 
23   PetscFunctionBegin;
24   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
25   if (dd->dim > 0 && dim != dd->dim) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot change DMDA dim from %D after it was set to %D",dd->dim,dim);
26   dd->dim = dim;
27   PetscFunctionReturn(0);
28 }
29 
30 #undef __FUNCT__
31 #define __FUNCT__ "DMDASetSizes"
32 /*@
33   DMDASetSizes - Sets the global sizes
34 
35   Logically Collective on DMDA
36 
37   Input Parameters:
38 + da - the DMDA
39 . M - the global X size (or PETSC_DECIDE)
40 . N - the global Y size (or PETSC_DECIDE)
41 - P - the global Z size (or PETSC_DECIDE)
42 
43   Level: intermediate
44 
45 .seealso: DMDAGetSize(), PetscSplitOwnership()
46 @*/
47 PetscErrorCode  DMDASetSizes(DM da, PetscInt M, PetscInt N, PetscInt P)
48 {
49   DM_DA *dd = (DM_DA*)da->data;
50 
51   PetscFunctionBegin;
52   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
53   PetscValidLogicalCollectiveInt(da,M,2);
54   PetscValidLogicalCollectiveInt(da,N,3);
55   PetscValidLogicalCollectiveInt(da,P,4);
56   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
57 
58   dd->M = M;
59   dd->N = N;
60   dd->P = P;
61   PetscFunctionReturn(0);
62 }
63 
64 #undef __FUNCT__
65 #define __FUNCT__ "DMDASetNumProcs"
66 /*@
67   DMDASetNumProcs - Sets the number of processes in each dimension
68 
69   Logically Collective on DMDA
70 
71   Input Parameters:
72 + da - the DMDA
73 . m - the number of X procs (or PETSC_DECIDE)
74 . n - the number of Y procs (or PETSC_DECIDE)
75 - p - the number of Z procs (or PETSC_DECIDE)
76 
77   Level: intermediate
78 
79 .seealso: DMDASetSizes(), DMDAGetSize(), PetscSplitOwnership()
80 @*/
81 PetscErrorCode  DMDASetNumProcs(DM da, PetscInt m, PetscInt n, PetscInt p)
82 {
83   DM_DA          *dd = (DM_DA*)da->data;
84   PetscErrorCode ierr;
85 
86   PetscFunctionBegin;
87   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
88   PetscValidLogicalCollectiveInt(da,m,2);
89   PetscValidLogicalCollectiveInt(da,n,3);
90   PetscValidLogicalCollectiveInt(da,p,4);
91   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
92   dd->m = m;
93   dd->n = n;
94   dd->p = p;
95   if (dd->dim == 2) {
96     PetscMPIInt size;
97     ierr = MPI_Comm_size(PetscObjectComm((PetscObject)da),&size);CHKERRQ(ierr);
98     if ((dd->m > 0) && (dd->n < 0)) {
99       dd->n = size/dd->m;
100       if (dd->n*dd->m != size) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%D processes in X direction not divisible into comm size %d",m,size);
101     }
102     if ((dd->n > 0) && (dd->m < 0)) {
103       dd->m = size/dd->n;
104       if (dd->n*dd->m != size) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%D processes in Y direction not divisible into comm size %d",n,size);
105     }
106   }
107   PetscFunctionReturn(0);
108 }
109 
110 #undef __FUNCT__
111 #define __FUNCT__ "DMDASetBoundaryType"
112 /*@
113   DMDASetBoundaryType - Sets the type of ghost nodes on domain boundaries.
114 
115   Not collective
116 
117   Input Parameter:
118 + da    - The DMDA
119 - bx,by,bz - One of DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_GHOSTED, DMDA_BOUNDARY_PERIODIC
120 
121   Level: intermediate
122 
123 .keywords:  distributed array, periodicity
124 .seealso: DMDACreate(), DMDestroy(), DMDA, DMDABoundaryType
125 @*/
126 PetscErrorCode  DMDASetBoundaryType(DM da,DMDABoundaryType bx,DMDABoundaryType by,DMDABoundaryType bz)
127 {
128   DM_DA *dd = (DM_DA*)da->data;
129 
130   PetscFunctionBegin;
131   PetscValidHeaderSpecific(da,DM_CLASSID,1);
132   PetscValidLogicalCollectiveEnum(da,bx,2);
133   PetscValidLogicalCollectiveEnum(da,by,3);
134   PetscValidLogicalCollectiveEnum(da,bz,4);
135   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
136   dd->bx = bx;
137   dd->by = by;
138   dd->bz = bz;
139   PetscFunctionReturn(0);
140 }
141 
142 #undef __FUNCT__
143 #define __FUNCT__ "DMDASetDof"
144 /*@
145   DMDASetDof - Sets the number of degrees of freedom per vertex
146 
147   Not collective
148 
149   Input Parameter:
150 + da  - The DMDA
151 - dof - Number of degrees of freedom
152 
153   Level: intermediate
154 
155 .keywords:  distributed array, degrees of freedom
156 .seealso: DMDACreate(), DMDestroy(), DMDA
157 @*/
158 PetscErrorCode  DMDASetDof(DM da, PetscInt dof)
159 {
160   DM_DA *dd = (DM_DA*)da->data;
161 
162   PetscFunctionBegin;
163   PetscValidHeaderSpecific(da,DM_CLASSID,1);
164   PetscValidLogicalCollectiveInt(da,dof,2);
165   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
166   dd->w  = dof;
167   da->bs = dof;
168   PetscFunctionReturn(0);
169 }
170 
171 #undef __FUNCT__
172 #define __FUNCT__ "DMDAGetOverlap"
173 /*@
174   DMDAGetOverlap - Gets the size of the per-processor overlap.
175 
176   Not collective
177 
178   Input Parameters:
179 . da  - The DMDA
180 
181   Output Parameters:
182 + x   - Overlap in the x direction
183 . y   - Overlap in the y direction
184 - z   - Overlap in the z direction
185 
186   Level: intermediate
187 
188 .keywords:  distributed array, overlap, domain decomposition
189 .seealso: DMDACreateDomainDecomposition(), DMDASetOverlap(), DMDA
190 @*/
191 PetscErrorCode  DMDAGetOverlap(DM da,PetscInt *x,PetscInt *y,PetscInt *z)
192 {
193   DM_DA *dd = (DM_DA*)da->data;
194 
195   PetscFunctionBegin;
196   PetscValidHeaderSpecific(da,DM_CLASSID,1);
197   if (x) *x = dd->xol;
198   if (y) *y = dd->yol;
199   if (z) *z = dd->zol;
200   PetscFunctionReturn(0);
201 }
202 
203 #undef __FUNCT__
204 #define __FUNCT__ "DMDASetOverlap"
205 /*@
206   DMDASetOverlap - Sets the size of the per-processor overlap.
207 
208   Not collective
209 
210   Input Parameters:
211 + da  - The DMDA
212 . x   - Overlap in the x direction
213 . y   - Overlap in the y direction
214 - z   - Overlap in the z direction
215 
216   Level: intermediate
217 
218 .keywords:  distributed array, overlap, domain decomposition
219 .seealso: DMDACreateDomainDecomposition(), DMDAGetOverlap(), DMDA
220 @*/
221 PetscErrorCode  DMDASetOverlap(DM da,PetscInt x,PetscInt y,PetscInt z)
222 {
223   DM_DA *dd = (DM_DA*)da->data;
224 
225   PetscFunctionBegin;
226   PetscValidHeaderSpecific(da,DM_CLASSID,1);
227   PetscValidLogicalCollectiveInt(da,x,2);
228   PetscValidLogicalCollectiveInt(da,y,3);
229   PetscValidLogicalCollectiveInt(da,z,4);
230   dd->xol = x;
231   dd->yol = y;
232   dd->zol = z;
233   PetscFunctionReturn(0);
234 }
235 
236 
237 #undef __FUNCT__
238 #define __FUNCT__ "DMDAGetNumLocalSubDomains"
239 /*@
240   DMDAGetNumLocalSubDomains - Gets the number of local subdomains created upon decomposition.
241 
242   Not collective
243 
244   Input Parameters:
245 . da  - The DMDA
246 
247   Output Parameters:
248 + Nsub   - Number of local subdomains created upon decomposition
249 
250   Level: intermediate
251 
252 .keywords:  distributed array, domain decomposition
253 .seealso: DMDACreateDomainDecomposition(), DMDASetNumLocalSubDomains(), DMDA
254 @*/
255 PetscErrorCode  DMDAGetNumLocalSubDomains(DM da,PetscInt *Nsub)
256 {
257   DM_DA *dd = (DM_DA*)da->data;
258 
259   PetscFunctionBegin;
260   PetscValidHeaderSpecific(da,DM_CLASSID,1);
261   if (Nsub) *Nsub = dd->Nsub;
262   PetscFunctionReturn(0);
263 }
264 
265 #undef __FUNCT__
266 #define __FUNCT__ "DMDASetNumLocalSubDomains"
267 /*@
268   DMDASetNumLocalSubDomains - Sets the number of local subdomains created upon decomposition.
269 
270   Not collective
271 
272   Input Parameters:
273 + da  - The DMDA
274 - Nsub - The number of local subdomains requested
275 
276   Level: intermediate
277 
278 .keywords:  distributed array, domain decomposition
279 .seealso: DMDACreateDomainDecomposition(), DMDAGetNumLocalSubDomains(), DMDA
280 @*/
281 PetscErrorCode  DMDASetNumLocalSubDomains(DM da,PetscInt Nsub)
282 {
283   DM_DA *dd = (DM_DA*)da->data;
284 
285   PetscFunctionBegin;
286   PetscValidHeaderSpecific(da,DM_CLASSID,1);
287   PetscValidLogicalCollectiveInt(da,Nsub,2);
288   dd->Nsub = Nsub;
289   PetscFunctionReturn(0);
290 }
291 
292 #undef __FUNCT__
293 #define __FUNCT__ "DMDASetOffset"
294 /*@
295   DMDASetOffset - Sets the index offset of the DA.
296 
297   Collective on DA
298 
299   Input Parameter:
300 + da  - The DMDA
301 . xo  - The offset in the x direction
302 . yo  - The offset in the y direction
303 - zo  - The offset in the z direction
304 
305   Level: intermediate
306 
307   Notes: This is used primarily to overlap a computation on a local DA with that on a global DA without
308   changing boundary conditions or subdomain features that depend upon the global offsets.
309 
310 .keywords:  distributed array, degrees of freedom
311 .seealso: DMDAGetOffset(), DMDAVecGetArray()
312 @*/
313 PetscErrorCode  DMDASetOffset(DM da, PetscInt xo, PetscInt yo, PetscInt zo, PetscInt Mo, PetscInt No, PetscInt Po)
314 {
315   PetscErrorCode ierr;
316   DM_DA          *dd = (DM_DA*)da->data;
317 
318   PetscFunctionBegin;
319   PetscValidHeaderSpecific(da,DM_CLASSID,1);
320   PetscValidLogicalCollectiveInt(da,xo,2);
321   PetscValidLogicalCollectiveInt(da,yo,3);
322   PetscValidLogicalCollectiveInt(da,zo,4);
323   PetscValidLogicalCollectiveInt(da,Mo,5);
324   PetscValidLogicalCollectiveInt(da,No,6);
325   PetscValidLogicalCollectiveInt(da,Po,7);
326   dd->xo = xo;
327   dd->yo = yo;
328   dd->zo = zo;
329   dd->Mo = Mo;
330   dd->No = No;
331   dd->Po = Po;
332 
333   if (da->coordinateDM) {
334     ierr = DMDASetOffset(da->coordinateDM,xo,yo,zo,Mo,No,Po);CHKERRQ(ierr);
335   }
336   PetscFunctionReturn(0);
337 }
338 
339 #undef __FUNCT__
340 #define __FUNCT__ "DMDAGetOffset"
341 /*@
342   DMDAGetOffset - Gets the index offset of the DA.
343 
344   Not collective
345 
346   Input Parameter:
347 . da  - The DMDA
348 
349   Output Parameters:
350 + xo  - The offset in the x direction
351 . yo  - The offset in the y direction
352 . zo  - The offset in the z direction
353 . Mo  - The global size in the x direction
354 . No  - The global size in the y direction
355 - Po  - The global size in the z direction
356 
357   Level: intermediate
358 
359 .keywords:  distributed array, degrees of freedom
360 .seealso: DMDASetOffset(), DMDAVecGetArray()
361 @*/
362 PetscErrorCode  DMDAGetOffset(DM da,PetscInt *xo,PetscInt *yo,PetscInt *zo,PetscInt *Mo,PetscInt *No,PetscInt *Po)
363 {
364   DM_DA *dd = (DM_DA*)da->data;
365 
366   PetscFunctionBegin;
367   PetscValidHeaderSpecific(da,DM_CLASSID,1);
368   if (xo) *xo = dd->xo;
369   if (yo) *yo = dd->yo;
370   if (zo) *zo = dd->zo;
371   if (Mo) *Mo = dd->Mo;
372   if (No) *No = dd->No;
373   if (Po) *Po = dd->Po;
374   PetscFunctionReturn(0);
375 }
376 
377 #undef __FUNCT__
378 #define __FUNCT__ "DMDAGetNonOverlappingRegion"
379 /*@
380   DMDAGetNonOverlappingRegion - Gets the indices of the nonoverlapping region of a subdomain DM.
381 
382   Not collective
383 
384   Input Parameter:
385 . da  - The DMDA
386 
387   Output Parameters:
388 + xs  - The start of the region in x
389 . ys  - The start of the region in y
390 . zs  - The start of the region in z
391 . xs  - The size of the region in x
392 . ys  - The size of the region in y
393 . zs  - The size of the region in z
394 
395   Level: intermediate
396 
397 .keywords:  distributed array, degrees of freedom
398 .seealso: DMDAGetOffset(), DMDAVecGetArray()
399 @*/
400 PetscErrorCode  DMDAGetNonOverlappingRegion(DM da, PetscInt *xs, PetscInt *ys, PetscInt *zs, PetscInt *xm, PetscInt *ym, PetscInt *zm)
401 {
402   DM_DA          *dd = (DM_DA*)da->data;
403 
404   PetscFunctionBegin;
405   PetscValidHeaderSpecific(da,DM_CLASSID,1);
406   if (xs) *xs = dd->nonxs;
407   if (ys) *ys = dd->nonys;
408   if (zs) *zs = dd->nonzs;
409   if (xm) *xm = dd->nonxm;
410   if (ym) *ym = dd->nonym;
411   if (zm) *zm = dd->nonzm;
412   PetscFunctionReturn(0);
413 }
414 
415 
416 #undef __FUNCT__
417 #define __FUNCT__ "DMDASetNonOverlappingRegion"
418 /*@
419   DMDASetNonOverlappingRegion - Sets the indices of the nonoverlapping region of a subdomain DM.
420 
421   Collective on DA
422 
423   Input Parameter:
424 + da  - The DMDA
425 . xs  - The start of the region in x
426 . ys  - The start of the region in y
427 . zs  - The start of the region in z
428 . xs  - The size of the region in x
429 . ys  - The size of the region in y
430 . zs  - The size of the region in z
431 
432   Level: intermediate
433 
434 .keywords:  distributed array, degrees of freedom
435 .seealso: DMDAGetOffset(), DMDAVecGetArray()
436 @*/
437 PetscErrorCode  DMDASetNonOverlappingRegion(DM da, PetscInt xs, PetscInt ys, PetscInt zs, PetscInt xm, PetscInt ym, PetscInt zm)
438 {
439   DM_DA          *dd = (DM_DA*)da->data;
440 
441   PetscFunctionBegin;
442   PetscValidHeaderSpecific(da,DM_CLASSID,1);
443   PetscValidLogicalCollectiveInt(da,xs,2);
444   PetscValidLogicalCollectiveInt(da,ys,3);
445   PetscValidLogicalCollectiveInt(da,zs,4);
446   PetscValidLogicalCollectiveInt(da,xm,5);
447   PetscValidLogicalCollectiveInt(da,ym,6);
448   PetscValidLogicalCollectiveInt(da,zm,7);
449   dd->nonxs = xs;
450   dd->nonys = ys;
451   dd->nonzs = zs;
452   dd->nonxm = xm;
453   dd->nonym = ym;
454   dd->nonzm = zm;
455 
456   PetscFunctionReturn(0);
457 }
458 
459 #undef __FUNCT__
460 #define __FUNCT__ "DMDASetStencilType"
461 /*@
462   DMDASetStencilType - Sets the type of the communication stencil
463 
464   Logically Collective on DMDA
465 
466   Input Parameter:
467 + da    - The DMDA
468 - stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR.
469 
470   Level: intermediate
471 
472 .keywords:  distributed array, stencil
473 .seealso: DMDACreate(), DMDestroy(), DMDA
474 @*/
475 PetscErrorCode  DMDASetStencilType(DM da, DMDAStencilType stype)
476 {
477   DM_DA *dd = (DM_DA*)da->data;
478 
479   PetscFunctionBegin;
480   PetscValidHeaderSpecific(da,DM_CLASSID,1);
481   PetscValidLogicalCollectiveEnum(da,stype,2);
482   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
483   dd->stencil_type = stype;
484   PetscFunctionReturn(0);
485 }
486 
487 #undef __FUNCT__
488 #define __FUNCT__ "DMDASetStencilWidth"
489 /*@
490   DMDASetStencilWidth - Sets the width of the communication stencil
491 
492   Logically Collective on DMDA
493 
494   Input Parameter:
495 + da    - The DMDA
496 - width - The stencil width
497 
498   Level: intermediate
499 
500 .keywords:  distributed array, stencil
501 .seealso: DMDACreate(), DMDestroy(), DMDA
502 @*/
503 PetscErrorCode  DMDASetStencilWidth(DM da, PetscInt width)
504 {
505   DM_DA *dd = (DM_DA*)da->data;
506 
507   PetscFunctionBegin;
508   PetscValidHeaderSpecific(da,DM_CLASSID,1);
509   PetscValidLogicalCollectiveInt(da,width,2);
510   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
511   dd->s = width;
512   PetscFunctionReturn(0);
513 }
514 
515 #undef __FUNCT__
516 #define __FUNCT__ "DMDACheckOwnershipRanges_Private"
517 static PetscErrorCode DMDACheckOwnershipRanges_Private(DM da,PetscInt M,PetscInt m,const PetscInt lx[])
518 {
519   PetscInt i,sum;
520 
521   PetscFunctionBegin;
522   if (M < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Global dimension not set");
523   for (i=sum=0; i<m; i++) sum += lx[i];
524   if (sum != M) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_INCOMP,"Ownership ranges sum to %D but global dimension is %D",sum,M);
525   PetscFunctionReturn(0);
526 }
527 
#undef __FUNCT__
#define __FUNCT__ "DMDASetOwnershipRanges"
/*@
  DMDASetOwnershipRanges - Sets the number of nodes in each direction on each process

  Logically Collective on DMDA

  Input Parameter:
+ da - The DMDA
. lx - array containing number of nodes in the X direction on each process, or NULL. If non-null, must be of length da->m
. ly - array containing number of nodes in the Y direction on each process, or NULL. If non-null, must be of length da->n
- lz - array containing number of nodes in the Z direction on each process, or NULL. If non-null, must be of length da->p.

  Level: intermediate

  Note: these numbers are NOT multiplied by the number of dof per node.

.keywords:  distributed array
.seealso: DMDACreate(), DMDestroy(), DMDA
@*/
PetscErrorCode  DMDASetOwnershipRanges(DM da, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[])
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
  /* Each direction is handled independently; a NULL array leaves that direction unchanged.
     In every branch: the process count must be known first (it gives the array length),
     the entries must sum to the global size, and only then is storage (lazily) allocated
     and the caller's data copied in. */
  if (lx) {
    if (dd->m < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
    ierr = DMDACheckOwnershipRanges_Private(da,dd->M,dd->m,lx);CHKERRQ(ierr);
    if (!dd->lx) {
      /* First call: allocate; subsequent calls reuse the existing buffer */
      ierr = PetscMalloc(dd->m*sizeof(PetscInt), &dd->lx);CHKERRQ(ierr);
    }
    ierr = PetscMemcpy(dd->lx, lx, dd->m*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (ly) {
    if (dd->n < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
    ierr = DMDACheckOwnershipRanges_Private(da,dd->N,dd->n,ly);CHKERRQ(ierr);
    if (!dd->ly) {
      ierr = PetscMalloc(dd->n*sizeof(PetscInt), &dd->ly);CHKERRQ(ierr);
    }
    ierr = PetscMemcpy(dd->ly, ly, dd->n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (lz) {
    if (dd->p < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
    ierr = DMDACheckOwnershipRanges_Private(da,dd->P,dd->p,lz);CHKERRQ(ierr);
    if (!dd->lz) {
      ierr = PetscMalloc(dd->p*sizeof(PetscInt), &dd->lz);CHKERRQ(ierr);
    }
    ierr = PetscMemcpy(dd->lz, lz, dd->p*sizeof(PetscInt));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
582 
583 #undef __FUNCT__
584 #define __FUNCT__ "DMDASetInterpolationType"
585 /*@
586        DMDASetInterpolationType - Sets the type of interpolation that will be
587           returned by DMCreateInterpolation()
588 
589    Logically Collective on DMDA
590 
591    Input Parameter:
592 +  da - initial distributed array
593 .  ctype - DMDA_Q1 and DMDA_Q0 are currently the only supported forms
594 
595    Level: intermediate
596 
597    Notes: you should call this on the coarser of the two DMDAs you pass to DMCreateInterpolation()
598 
599 .keywords:  distributed array, interpolation
600 
601 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType
602 @*/
603 PetscErrorCode  DMDASetInterpolationType(DM da,DMDAInterpolationType ctype)
604 {
605   DM_DA *dd = (DM_DA*)da->data;
606 
607   PetscFunctionBegin;
608   PetscValidHeaderSpecific(da,DM_CLASSID,1);
609   PetscValidLogicalCollectiveEnum(da,ctype,2);
610   dd->interptype = ctype;
611   PetscFunctionReturn(0);
612 }
613 
614 #undef __FUNCT__
615 #define __FUNCT__ "DMDAGetInterpolationType"
616 /*@
617        DMDAGetInterpolationType - Gets the type of interpolation that will be
618           used by DMCreateInterpolation()
619 
620    Not Collective
621 
622    Input Parameter:
623 .  da - distributed array
624 
625    Output Parameter:
626 .  ctype - interpolation type (DMDA_Q1 and DMDA_Q0 are currently the only supported forms)
627 
628    Level: intermediate
629 
630 .keywords:  distributed array, interpolation
631 
632 .seealso: DMDA, DMDAInterpolationType, DMDASetInterpolationType(), DMCreateInterpolation()
633 @*/
634 PetscErrorCode  DMDAGetInterpolationType(DM da,DMDAInterpolationType *ctype)
635 {
636   DM_DA *dd = (DM_DA*)da->data;
637 
638   PetscFunctionBegin;
639   PetscValidHeaderSpecific(da,DM_CLASSID,1);
640   PetscValidPointer(ctype,2);
641   *ctype = dd->interptype;
642   PetscFunctionReturn(0);
643 }
644 
645 #undef __FUNCT__
646 #define __FUNCT__ "DMDAGetNeighbors"
647 /*@C
648       DMDAGetNeighbors - Gets an array containing the MPI rank of all the current
649         processes neighbors.
650 
651     Not Collective
652 
653    Input Parameter:
654 .     da - the DMDA object
655 
656    Output Parameters:
657 .     ranks - the neighbors ranks, stored with the x index increasing most rapidly.
658               this process itself is in the list
659 
660    Notes: In 2d the array is of length 9, in 3d of length 27
661           Not supported in 1d
662           Do not free the array, it is freed when the DMDA is destroyed.
663 
664    Fortran Notes: In fortran you must pass in an array of the appropriate length.
665 
666    Level: intermediate
667 
668 @*/
669 PetscErrorCode  DMDAGetNeighbors(DM da,const PetscMPIInt *ranks[])
670 {
671   DM_DA *dd = (DM_DA*)da->data;
672 
673   PetscFunctionBegin;
674   PetscValidHeaderSpecific(da,DM_CLASSID,1);
675   *ranks = dd->neighbors;
676   PetscFunctionReturn(0);
677 }
678 
679 #undef __FUNCT__
680 #define __FUNCT__ "DMDAGetOwnershipRanges"
681 /*@C
682       DMDAGetOwnershipRanges - Gets the ranges of indices in the x, y and z direction that are owned by each process
683 
684     Not Collective
685 
686    Input Parameter:
687 .     da - the DMDA object
688 
689    Output Parameter:
690 +     lx - ownership along x direction (optional)
691 .     ly - ownership along y direction (optional)
692 -     lz - ownership along z direction (optional)
693 
694    Level: intermediate
695 
696     Note: these correspond to the optional final arguments passed to DMDACreate(), DMDACreate2d(), DMDACreate3d()
697 
698     In Fortran one must pass in arrays lx, ly, and lz that are long enough to hold the values; the sixth, seventh and
699     eighth arguments from DMDAGetInfo()
700 
701      In C you should not free these arrays, nor change the values in them. They will only have valid values while the
702     DMDA they came from still exists (has not been destroyed).
703 
704     These numbers are NOT multiplied by the number of dof per node.
705 
706 .seealso: DMDAGetCorners(), DMDAGetGhostCorners(), DMDACreate(), DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), VecGetOwnershipRanges()
707 @*/
708 PetscErrorCode  DMDAGetOwnershipRanges(DM da,const PetscInt *lx[],const PetscInt *ly[],const PetscInt *lz[])
709 {
710   DM_DA *dd = (DM_DA*)da->data;
711 
712   PetscFunctionBegin;
713   PetscValidHeaderSpecific(da,DM_CLASSID,1);
714   if (lx) *lx = dd->lx;
715   if (ly) *ly = dd->ly;
716   if (lz) *lz = dd->lz;
717   PetscFunctionReturn(0);
718 }
719 
720 #undef __FUNCT__
721 #define __FUNCT__ "DMDASetRefinementFactor"
722 /*@
723      DMDASetRefinementFactor - Set the ratios that the DMDA grid is refined
724 
725     Logically Collective on DMDA
726 
727   Input Parameters:
728 +    da - the DMDA object
729 .    refine_x - ratio of fine grid to coarse in x direction (2 by default)
730 .    refine_y - ratio of fine grid to coarse in y direction (2 by default)
731 -    refine_z - ratio of fine grid to coarse in z direction (2 by default)
732 
733   Options Database:
734 +  -da_refine_x - refinement ratio in x direction
735 .  -da_refine_y - refinement ratio in y direction
736 -  -da_refine_z - refinement ratio in z direction
737 
738   Level: intermediate
739 
740     Notes: Pass PETSC_IGNORE to leave a value unchanged
741 
742 .seealso: DMRefine(), DMDAGetRefinementFactor()
743 @*/
744 PetscErrorCode  DMDASetRefinementFactor(DM da, PetscInt refine_x, PetscInt refine_y,PetscInt refine_z)
745 {
746   DM_DA *dd = (DM_DA*)da->data;
747 
748   PetscFunctionBegin;
749   PetscValidHeaderSpecific(da,DM_CLASSID,1);
750   PetscValidLogicalCollectiveInt(da,refine_x,2);
751   PetscValidLogicalCollectiveInt(da,refine_y,3);
752   PetscValidLogicalCollectiveInt(da,refine_z,4);
753 
754   if (refine_x > 0) dd->refine_x = refine_x;
755   if (refine_y > 0) dd->refine_y = refine_y;
756   if (refine_z > 0) dd->refine_z = refine_z;
757   PetscFunctionReturn(0);
758 }
759 
760 #undef __FUNCT__
761 #define __FUNCT__ "DMDAGetRefinementFactor"
762 /*@C
763      DMDAGetRefinementFactor - Gets the ratios that the DMDA grid is refined
764 
765     Not Collective
766 
767   Input Parameter:
768 .    da - the DMDA object
769 
770   Output Parameters:
771 +    refine_x - ratio of fine grid to coarse in x direction (2 by default)
772 .    refine_y - ratio of fine grid to coarse in y direction (2 by default)
773 -    refine_z - ratio of fine grid to coarse in z direction (2 by default)
774 
775   Level: intermediate
776 
777     Notes: Pass NULL for values you do not need
778 
779 .seealso: DMRefine(), DMDASetRefinementFactor()
780 @*/
781 PetscErrorCode  DMDAGetRefinementFactor(DM da, PetscInt *refine_x, PetscInt *refine_y,PetscInt *refine_z)
782 {
783   DM_DA *dd = (DM_DA*)da->data;
784 
785   PetscFunctionBegin;
786   PetscValidHeaderSpecific(da,DM_CLASSID,1);
787   if (refine_x) *refine_x = dd->refine_x;
788   if (refine_y) *refine_y = dd->refine_y;
789   if (refine_z) *refine_z = dd->refine_z;
790   PetscFunctionReturn(0);
791 }
792 
793 #undef __FUNCT__
794 #define __FUNCT__ "DMDASetGetMatrix"
795 /*@C
796      DMDASetGetMatrix - Sets the routine used by the DMDA to allocate a matrix.
797 
798     Logically Collective on DMDA
799 
800   Input Parameters:
801 +    da - the DMDA object
802 -    f - the function that allocates the matrix for that specific DMDA
803 
804   Level: developer
805 
806    Notes: See DMDASetBlockFills() that provides a simple way to provide the nonzero structure for
807        the diagonal and off-diagonal blocks of the matrix
808 
809 .seealso: DMCreateMatrix(), DMDASetBlockFills()
810 @*/
811 PetscErrorCode  DMDASetGetMatrix(DM da,PetscErrorCode (*f)(DM, MatType,Mat*))
812 {
813   PetscFunctionBegin;
814   PetscValidHeaderSpecific(da,DM_CLASSID,1);
815   da->ops->creatematrix = f;
816   PetscFunctionReturn(0);
817 }
818 
#undef __FUNCT__
#define __FUNCT__ "DMDARefineOwnershipRanges"
/*
  Creates "balanced" ownership ranges after refinement, constrained by the need for the
  fine grid boundaries to fall within one stencil width of the coarse partition.

  Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.

  Input:
    periodic      - whether this direction is periodic (affects the total fine size)
    stencil_width - coarse stencil width that fine subdomain boundaries must respect
    ratio         - refinement ratio (must be >= 1)
    m             - number of processes in this direction
    lc            - coarse ownership counts, length m
  Output:
    lf            - fine ownership counts, length m
*/
static PetscErrorCode DMDARefineOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lc[],PetscInt lf[])
{
  PetscInt       i,totalc = 0,remaining,startc = 0,startf = 0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (ratio < 1) SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %D must be at least 1",ratio);
  if (ratio == 1) {
    /* No refinement: the fine partition equals the coarse one */
    ierr = PetscMemcpy(lf,lc,m*sizeof(lc[0]));CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  for (i=0; i<m; i++) totalc += lc[i];
  /* Total fine nodes: periodic directions refine N -> ratio*N, others N -> 1 + ratio*(N-1) */
  remaining = (!periodic) + ratio * (totalc - (!periodic));
  for (i=0; i<m; i++) {
    /* Start from an even split of what is left, rounded up */
    PetscInt want = remaining/(m-i) + !!(remaining%(m-i));
    if (i == m-1) lf[i] = want;
    else {
      const PetscInt nextc = startc + lc[i];
      /* Move the first fine node of the next subdomain to the right until the coarse node on its left is within one
       * coarse stencil width of the first coarse node in the next subdomain. */
      while ((startf+want)/ratio < nextc - stencil_width) want++;
      /* Move the last fine node in the current subdomain to the left until the coarse node on its right is within one
       * coarse stencil width of the last coarse node in the current subdomain. */
      while ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width) want--;
      /* Make sure all constraints are satisfied */
      if (want < 0 || want > remaining || ((startf+want)/ratio < nextc - stencil_width)
          || ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible refined ownership range");
    }
    lf[i]      = want;
    startc    += lc[i];
    startf    += lf[i];
    remaining -= lf[i];
  }
  PetscFunctionReturn(0);
}
862 
#undef __FUNCT__
#define __FUNCT__ "DMDACoarsenOwnershipRanges"
/*
  Creates "balanced" ownership ranges after coarsening, constrained by the need for the
  fine grid boundaries to fall within one stencil width of the coarse partition.

  Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.

  Input:
    periodic      - whether this direction is periodic (affects the total coarse size)
    stencil_width - stencil width that subdomain boundaries must respect
    ratio         - coarsening ratio (must be >= 1)
    m             - number of processes in this direction
    lf            - fine ownership counts, length m
  Output:
    lc            - coarse ownership counts, length m
*/
static PetscErrorCode DMDACoarsenOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lf[],PetscInt lc[])
{
  PetscInt       i,totalf,remaining,startc,startf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (ratio < 1) SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %D must be at least 1",ratio);
  if (ratio == 1) {
    /* No coarsening: the coarse partition equals the fine one */
    ierr = PetscMemcpy(lc,lf,m*sizeof(lf[0]));CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  for (i=0,totalf=0; i<m; i++) totalf += lf[i];
  /* Total coarse nodes: periodic directions coarsen N -> N/ratio, others N -> 1 + (N-1)/ratio */
  remaining = (!periodic) + (totalf - (!periodic)) / ratio;
  for (i=0,startc=0,startf=0; i<m; i++) {
    /* Start from an even split of what is left, rounded up */
    PetscInt want = remaining/(m-i) + !!(remaining%(m-i));
    if (i == m-1) lc[i] = want;
    else {
      const PetscInt nextf = startf+lf[i];
      /* Slide first coarse node of next subdomain to the left until the coarse node to the left of the first fine
       * node is within one stencil width. */
      while (nextf/ratio < startc+want-stencil_width) want--;
      /* Slide the last coarse node of the current subdomain to the right until the coarse node to the right of the last
       * fine node is within one stencil width. */
      while ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width) want++;
      if (want < 0 || want > remaining
          || (nextf/ratio < startc+want-stencil_width) || ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible coarsened ownership range");
    }
    lc[i]      = want;
    startc    += lc[i];
    startf    += lf[i];
    remaining -= lc[i];
  }
  PetscFunctionReturn(0);
}
905 
#undef __FUNCT__
#define __FUNCT__ "DMRefine_DA"
/*
  DMRefine_DA - DMRefine() implementation for DMDA.

  Builds a new DMDA with refined global sizes, mirrors the coarse DMDA's
  layout (process grid, boundary types, dof, stencil), inherits selected
  overloaded operations and fill/refinement metadata, and interpolates
  coordinates from the coarse grid when they are present.

  Input Parameters:
+ da   - the coarse DMDA
- comm - communicator (not used here; the refined DM is created on da's own
         communicator via PetscObjectComm())

  Output Parameter:
. daref - the refined DMDA
*/
PetscErrorCode  DMRefine_DA(DM da,MPI_Comm comm,DM *daref)
{
  PetscErrorCode ierr;
  PetscInt       M,N,P,i;
  DM             da2;
  DM_DA          *dd = (DM_DA*)da->data,*dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(daref,3);

  /* Refined global sizes: with periodic boundaries or piecewise-constant (Q0)
     interpolation every cell is refined (refine_x*M points); otherwise the end
     points are shared between coarse and fine grids, giving 1 + refine_x*(M-1). */
  if (dd->bx == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->refine_x*dd->M;
  } else {
    M = 1 + dd->refine_x*(dd->M - 1);
  }
  if (dd->by == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dd->dim > 1) {
      N = dd->refine_y*dd->N;
    } else {
      N = 1;   /* unused dimension stays at a single point */
    }
  } else {
    N = 1 + dd->refine_y*(dd->N - 1);
  }
  if (dd->bz == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dd->dim > 2) {
      P = dd->refine_z*dd->P;
    } else {
      P = 1;   /* unused dimension stays at a single point */
    }
  } else {
    P = 1 + dd->refine_z*(dd->P - 1);
  }
  /* Create the fine DMDA with the same prefix, dimension, process grid,
     boundary types, dof count and stencil as the coarse one. */
  ierr = DMDACreate(PetscObjectComm((PetscObject)da),&da2);CHKERRQ(ierr);
  ierr = DMSetOptionsPrefix(da2,((PetscObject)da)->prefix);CHKERRQ(ierr);
  ierr = DMDASetDim(da2,dd->dim);CHKERRQ(ierr);
  ierr = DMDASetSizes(da2,M,N,P);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(da2,dd->m,dd->n,dd->p);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz);CHKERRQ(ierr);
  ierr = DMDASetDof(da2,dd->w);CHKERRQ(ierr);
  ierr = DMDASetStencilType(da2,dd->stencil_type);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(da2,dd->s);CHKERRQ(ierr);
  /* Choose fine-grid per-process ownership ranges compatible with the coarse
     ownership so that interpolation only needs ghost points within one
     stencil width (see DMDARefineOwnershipRanges). */
  if (dd->dim == 3) {
    PetscInt *lx,*ly,*lz;
    ierr = PetscMalloc3(dd->m,PetscInt,&lx,dd->n,PetscInt,&ly,dd->p,PetscInt,&lz);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bz == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_z,dd->p,dd->lz,lz);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,lz);CHKERRQ(ierr);
    ierr = PetscFree3(lx,ly,lz);CHKERRQ(ierr);
  } else if (dd->dim == 2) {
    PetscInt *lx,*ly;
    ierr = PetscMalloc2(dd->m,PetscInt,&lx,dd->n,PetscInt,&ly);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,NULL);CHKERRQ(ierr);
    ierr = PetscFree2(lx,ly);CHKERRQ(ierr);
  } else if (dd->dim == 1) {
    PetscInt *lx;
    ierr = PetscMalloc(dd->m*sizeof(PetscInt),&lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,NULL,NULL);CHKERRQ(ierr);
    ierr = PetscFree(lx);CHKERRQ(ierr);
  }
  dd2 = (DM_DA*)da2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones */
  da2->ops->creatematrix = da->ops->creatematrix;
  /* da2->ops->createinterpolation = da->ops->createinterpolation; this causes problem with SNESVI */
  da2->ops->getcoloring = da->ops->getcoloring;
  dd2->interptype       = dd->interptype;

  /* copy fill information if given; the fill arrays hold w+1 row offsets followed
     by dfill[w] column entries, hence the dfill[w]+w+1 sizing */
  if (dd->dfill) {
    ierr = PetscMalloc((dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt),&dd2->dfill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->dfill,dd->dfill,(dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->ofill) {
    ierr = PetscMalloc((dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt),&dd2->ofill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->ofill,dd->ofill,(dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  /* copy the refine information; the fine DM's coarsen factors are set to the
     refine factors so coarsening it reproduces this grid */
  dd2->coarsen_x = dd2->refine_x = dd->refine_x;
  dd2->coarsen_y = dd2->refine_y = dd->refine_y;
  dd2->coarsen_z = dd2->refine_z = dd->refine_z;

  /* copy vector type information */
  ierr = PetscFree(da2->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(da->vectype,(char**)&da2->vectype);CHKERRQ(ierr);

  /* shared, not duplicated -- presumably the local function/Jacobian callback
     pointers; both DMs reference the same callbacks (TODO confirm ownership) */
  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  da2->leveldown = da->leveldown;
  da2->levelup   = da->levelup + 1;

  ierr = DMSetFromOptions(da2);CHKERRQ(ierr);
  ierr = DMSetUp(da2);CHKERRQ(ierr);
  ierr = DMViewFromOptions(da2,NULL,"-dm_view");CHKERRQ(ierr);

  /* interpolate coordinates if they are set on the coarse grid */
  if (da->coordinates) {
    DM  cdaf,cdac;
    Vec coordsc,coordsf;
    Mat II;

    ierr = DMGetCoordinateDM(da,&cdac);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da,&coordsc);CHKERRQ(ierr);
    ierr = DMGetCoordinateDM(da2,&cdaf);CHKERRQ(ierr);
    /* force creation of the coordinate vector */
    ierr = DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da2,&coordsf);CHKERRQ(ierr);
    ierr = DMCreateInterpolation(cdac,cdaf,&II,NULL);CHKERRQ(ierr);
    ierr = MatInterpolate(II,coordsc,coordsf);CHKERRQ(ierr);
    ierr = MatDestroy(&II);CHKERRQ(ierr);
  }

  /* propagate per-field names to the refined DM */
  for (i=0; i<da->bs; i++) {
    const char *fieldname;
    ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr);
    ierr = DMDASetFieldName(da2,i,fieldname);CHKERRQ(ierr);
  }

  *daref = da2;
  PetscFunctionReturn(0);
}
1035 
1036 
#undef __FUNCT__
#define __FUNCT__ "DMCoarsen_DA"
/*
  DMCoarsen_DA - DMCoarsen() implementation for DMDA.

  Builds a new DMDA with coarsened global sizes (the inverse of DMRefine_DA's
  size formulas), mirrors the fine DMDA's layout (process grid, boundary types,
  dof, stencil), inherits selected overloaded operations and fill/coarsening
  metadata, and injects coordinates from the fine grid when they are present.

  Input Parameters:
+ da   - the fine DMDA
- comm - communicator (not used here; the coarse DM is created on da's own
         communicator via PetscObjectComm())

  Output Parameter:
. daref - the coarsened DMDA
*/
PetscErrorCode  DMCoarsen_DA(DM da, MPI_Comm comm,DM *daref)
{
  PetscErrorCode ierr;
  PetscInt       M,N,P,i;
  DM             da2;
  DM_DA          *dd = (DM_DA*)da->data,*dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(daref,3);

  /* Coarsened global sizes: with periodic boundaries or piecewise-constant (Q0)
     interpolation every coarsen_x-th cell survives (M/coarsen_x points);
     otherwise the end points are shared, giving 1 + (M-1)/coarsen_x. */
  if (dd->bx == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->M / dd->coarsen_x;
  } else {
    M = 1 + (dd->M - 1) / dd->coarsen_x;
  }
  if (dd->by == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dd->dim > 1) {
      N = dd->N / dd->coarsen_y;
    } else {
      N = 1;   /* unused dimension stays at a single point */
    }
  } else {
    N = 1 + (dd->N - 1) / dd->coarsen_y;
  }
  if (dd->bz == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dd->dim > 2) {
      P = dd->P / dd->coarsen_z;
    } else {
      P = 1;   /* unused dimension stays at a single point */
    }
  } else {
    P = 1 + (dd->P - 1) / dd->coarsen_z;
  }
  /* Create the coarse DMDA with the same prefix, dimension, process grid,
     boundary types, dof count and stencil as the fine one. */
  ierr = DMDACreate(PetscObjectComm((PetscObject)da),&da2);CHKERRQ(ierr);
  ierr = DMSetOptionsPrefix(da2,((PetscObject)da)->prefix);CHKERRQ(ierr);
  ierr = DMDASetDim(da2,dd->dim);CHKERRQ(ierr);
  ierr = DMDASetSizes(da2,M,N,P);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(da2,dd->m,dd->n,dd->p);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz);CHKERRQ(ierr);
  ierr = DMDASetDof(da2,dd->w);CHKERRQ(ierr);
  ierr = DMDASetStencilType(da2,dd->stencil_type);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(da2,dd->s);CHKERRQ(ierr);
  /* Choose coarse-grid per-process ownership ranges compatible with the fine
     ownership so that interpolation only needs ghost points within one
     stencil width (see DMDACoarsenOwnershipRanges). */
  if (dd->dim == 3) {
    PetscInt *lx,*ly,*lz;
    ierr = PetscMalloc3(dd->m,PetscInt,&lx,dd->n,PetscInt,&ly,dd->p,PetscInt,&lz);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->by == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bz == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_z,dd->p,dd->lz,lz);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,lz);CHKERRQ(ierr);
    ierr = PetscFree3(lx,ly,lz);CHKERRQ(ierr);
  } else if (dd->dim == 2) {
    PetscInt *lx,*ly;
    ierr = PetscMalloc2(dd->m,PetscInt,&lx,dd->n,PetscInt,&ly);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->by == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,NULL);CHKERRQ(ierr);
    ierr = PetscFree2(lx,ly);CHKERRQ(ierr);
  } else if (dd->dim == 1) {
    PetscInt *lx;
    ierr = PetscMalloc(dd->m*sizeof(PetscInt),&lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DMDA_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,NULL,NULL);CHKERRQ(ierr);
    ierr = PetscFree(lx);CHKERRQ(ierr);
  }
  dd2 = (DM_DA*)da2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones; why are only some inherited and not all? */
  /* da2->ops->createinterpolation = da->ops->createinterpolation; copying this one causes trouble for DMSetVI */
  da2->ops->creatematrix = da->ops->creatematrix;
  da2->ops->getcoloring  = da->ops->getcoloring;
  dd2->interptype        = dd->interptype;

  /* copy fill information if given; the fill arrays hold w+1 row offsets followed
     by dfill[w] column entries, hence the dfill[w]+w+1 sizing */
  if (dd->dfill) {
    ierr = PetscMalloc((dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt),&dd2->dfill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->dfill,dd->dfill,(dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->ofill) {
    ierr = PetscMalloc((dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt),&dd2->ofill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->ofill,dd->ofill,(dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  /* copy the refine information; the coarse DM's refine factors are set to the
     coarsen factors so refining it reproduces this grid */
  dd2->coarsen_x = dd2->refine_x = dd->coarsen_x;
  dd2->coarsen_y = dd2->refine_y = dd->coarsen_y;
  dd2->coarsen_z = dd2->refine_z = dd->coarsen_z;

  /* copy vector type information */
  ierr = PetscFree(da2->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(da->vectype,(char**)&da2->vectype);CHKERRQ(ierr);

  /* shared, not duplicated -- presumably the local function/Jacobian callback
     pointers; both DMs reference the same callbacks (TODO confirm ownership) */
  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  da2->leveldown = da->leveldown + 1;
  da2->levelup   = da->levelup;

  ierr = DMSetFromOptions(da2);CHKERRQ(ierr);
  ierr = DMSetUp(da2);CHKERRQ(ierr);
  ierr = DMViewFromOptions(da2,NULL,"-dm_view");CHKERRQ(ierr);

  /* inject coordinates if they are set on the fine grid */
  if (da->coordinates) {
    DM         cdaf,cdac;
    Vec        coordsc,coordsf;
    VecScatter inject;

    ierr = DMGetCoordinateDM(da,&cdaf);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da,&coordsf);CHKERRQ(ierr);
    ierr = DMGetCoordinateDM(da2,&cdac);CHKERRQ(ierr);
    /* force creation of the coordinate vector */
    ierr = DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da2,&coordsc);CHKERRQ(ierr);

    ierr = DMCreateInjection(cdac,cdaf,&inject);CHKERRQ(ierr);
    ierr = VecScatterBegin(inject,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(inject,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&inject);CHKERRQ(ierr);
  }

  /* propagate per-field names to the coarsened DM */
  for (i=0; i<da->bs; i++) {
    const char *fieldname;
    ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr);
    ierr = DMDASetFieldName(da2,i,fieldname);CHKERRQ(ierr);
  }

  *daref = da2;
  PetscFunctionReturn(0);
}
1168 
1169 #undef __FUNCT__
1170 #define __FUNCT__ "DMRefineHierarchy_DA"
1171 PetscErrorCode  DMRefineHierarchy_DA(DM da,PetscInt nlevels,DM daf[])
1172 {
1173   PetscErrorCode ierr;
1174   PetscInt       i,n,*refx,*refy,*refz;
1175 
1176   PetscFunctionBegin;
1177   PetscValidHeaderSpecific(da,DM_CLASSID,1);
1178   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
1179   if (nlevels == 0) PetscFunctionReturn(0);
1180   PetscValidPointer(daf,3);
1181 
1182   /* Get refinement factors, defaults taken from the coarse DMDA */
1183   ierr = PetscMalloc3(nlevels,PetscInt,&refx,nlevels,PetscInt,&refy,nlevels,PetscInt,&refz);CHKERRQ(ierr);
1184   for (i=0; i<nlevels; i++) {
1185     ierr = DMDAGetRefinementFactor(da,&refx[i],&refy[i],&refz[i]);CHKERRQ(ierr);
1186   }
1187   n    = nlevels;
1188   ierr = PetscOptionsGetIntArray(((PetscObject)da)->prefix,"-da_refine_hierarchy_x",refx,&n,NULL);CHKERRQ(ierr);
1189   n    = nlevels;
1190   ierr = PetscOptionsGetIntArray(((PetscObject)da)->prefix,"-da_refine_hierarchy_y",refy,&n,NULL);CHKERRQ(ierr);
1191   n    = nlevels;
1192   ierr = PetscOptionsGetIntArray(((PetscObject)da)->prefix,"-da_refine_hierarchy_z",refz,&n,NULL);CHKERRQ(ierr);
1193 
1194   ierr = DMDASetRefinementFactor(da,refx[0],refy[0],refz[0]);CHKERRQ(ierr);
1195   ierr = DMRefine(da,PetscObjectComm((PetscObject)da),&daf[0]);CHKERRQ(ierr);
1196   for (i=1; i<nlevels; i++) {
1197     ierr = DMDASetRefinementFactor(daf[i-1],refx[i],refy[i],refz[i]);CHKERRQ(ierr);
1198     ierr = DMRefine(daf[i-1],PetscObjectComm((PetscObject)da),&daf[i]);CHKERRQ(ierr);
1199   }
1200   ierr = PetscFree3(refx,refy,refz);CHKERRQ(ierr);
1201   PetscFunctionReturn(0);
1202 }
1203 
1204 #undef __FUNCT__
1205 #define __FUNCT__ "DMCoarsenHierarchy_DA"
1206 PetscErrorCode  DMCoarsenHierarchy_DA(DM da,PetscInt nlevels,DM dac[])
1207 {
1208   PetscErrorCode ierr;
1209   PetscInt       i;
1210 
1211   PetscFunctionBegin;
1212   PetscValidHeaderSpecific(da,DM_CLASSID,1);
1213   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
1214   if (nlevels == 0) PetscFunctionReturn(0);
1215   PetscValidPointer(dac,3);
1216   ierr = DMCoarsen(da,PetscObjectComm((PetscObject)da),&dac[0]);CHKERRQ(ierr);
1217   for (i=1; i<nlevels; i++) {
1218     ierr = DMCoarsen(dac[i-1],PetscObjectComm((PetscObject)da),&dac[i]);CHKERRQ(ierr);
1219   }
1220   PetscFunctionReturn(0);
1221 }
1222