xref: /petsc/src/dm/impls/da/da.c (revision a6404fbfb1cfbf30d2bac9856cef3bf7411483d5)
1 #include <petsc/private/dmdaimpl.h>    /*I   "petscdmda.h"   I*/
2 
3 #undef __FUNCT__
4 #define __FUNCT__ "DMDASetSizes"
5 /*@
6   DMDASetSizes - Sets the global sizes
7 
8   Logically Collective on DMDA
9 
10   Input Parameters:
11 + da - the DMDA
12 . M - the global X size (or PETSC_DECIDE)
13 . N - the global Y size (or PETSC_DECIDE)
14 - P - the global Z size (or PETSC_DECIDE)
15 
16   Level: intermediate
17 
18 .seealso: DMDAGetSize(), PetscSplitOwnership()
19 @*/
20 PetscErrorCode  DMDASetSizes(DM da, PetscInt M, PetscInt N, PetscInt P)
21 {
22   DM_DA *dd = (DM_DA*)da->data;
23 
24   PetscFunctionBegin;
25   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
26   PetscValidLogicalCollectiveInt(da,M,2);
27   PetscValidLogicalCollectiveInt(da,N,3);
28   PetscValidLogicalCollectiveInt(da,P,4);
29   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
30 
31   dd->M = M;
32   dd->N = N;
33   dd->P = P;
34   PetscFunctionReturn(0);
35 }
36 
37 #undef __FUNCT__
38 #define __FUNCT__ "DMDASetNumProcs"
39 /*@
40   DMDASetNumProcs - Sets the number of processes in each dimension
41 
42   Logically Collective on DMDA
43 
44   Input Parameters:
45 + da - the DMDA
46 . m - the number of X procs (or PETSC_DECIDE)
47 . n - the number of Y procs (or PETSC_DECIDE)
48 - p - the number of Z procs (or PETSC_DECIDE)
49 
50   Level: intermediate
51 
52 .seealso: DMDASetSizes(), DMDAGetSize(), PetscSplitOwnership()
53 @*/
54 PetscErrorCode  DMDASetNumProcs(DM da, PetscInt m, PetscInt n, PetscInt p)
55 {
56   DM_DA          *dd = (DM_DA*)da->data;
57   PetscErrorCode ierr;
58 
59   PetscFunctionBegin;
60   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
61   PetscValidLogicalCollectiveInt(da,m,2);
62   PetscValidLogicalCollectiveInt(da,n,3);
63   PetscValidLogicalCollectiveInt(da,p,4);
64   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
65   dd->m = m;
66   dd->n = n;
67   dd->p = p;
68   if (da->dim == 2) {
69     PetscMPIInt size;
70     ierr = MPI_Comm_size(PetscObjectComm((PetscObject)da),&size);CHKERRQ(ierr);
71     if ((dd->m > 0) && (dd->n < 0)) {
72       dd->n = size/dd->m;
73       if (dd->n*dd->m != size) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%D processes in X direction not divisible into comm size %d",m,size);
74     }
75     if ((dd->n > 0) && (dd->m < 0)) {
76       dd->m = size/dd->n;
77       if (dd->n*dd->m != size) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%D processes in Y direction not divisible into comm size %d",n,size);
78     }
79   }
80   PetscFunctionReturn(0);
81 }
82 
83 #undef __FUNCT__
84 #define __FUNCT__ "DMDASetBoundaryType"
85 /*@
86   DMDASetBoundaryType - Sets the type of ghost nodes on domain boundaries.
87 
88   Not collective
89 
90   Input Parameter:
91 + da    - The DMDA
92 - bx,by,bz - One of DM_BOUNDARY_NONE, DM_BOUNDARY_GHOSTED, DM_BOUNDARY_PERIODIC
93 
94   Level: intermediate
95 
96 .keywords:  distributed array, periodicity
97 .seealso: DMDACreate(), DMDestroy(), DMDA, DMBoundaryType
98 @*/
99 PetscErrorCode  DMDASetBoundaryType(DM da,DMBoundaryType bx,DMBoundaryType by,DMBoundaryType bz)
100 {
101   DM_DA *dd = (DM_DA*)da->data;
102 
103   PetscFunctionBegin;
104   PetscValidHeaderSpecific(da,DM_CLASSID,1);
105   PetscValidLogicalCollectiveEnum(da,bx,2);
106   PetscValidLogicalCollectiveEnum(da,by,3);
107   PetscValidLogicalCollectiveEnum(da,bz,4);
108   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
109   dd->bx = bx;
110   dd->by = by;
111   dd->bz = bz;
112   PetscFunctionReturn(0);
113 }
114 
115 #undef __FUNCT__
116 #define __FUNCT__ "DMDASetDof"
117 /*@
118   DMDASetDof - Sets the number of degrees of freedom per vertex
119 
120   Not collective
121 
122   Input Parameter:
123 + da  - The DMDA
124 - dof - Number of degrees of freedom
125 
126   Level: intermediate
127 
128 .keywords:  distributed array, degrees of freedom
129 .seealso: DMDACreate(), DMDestroy(), DMDA
130 @*/
131 PetscErrorCode  DMDASetDof(DM da, PetscInt dof)
132 {
133   DM_DA *dd = (DM_DA*)da->data;
134 
135   PetscFunctionBegin;
136   PetscValidHeaderSpecific(da,DM_CLASSID,1);
137   PetscValidLogicalCollectiveInt(da,dof,2);
138   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
139   dd->w  = dof;
140   da->bs = dof;
141   PetscFunctionReturn(0);
142 }
143 
144 #undef __FUNCT__
145 #define __FUNCT__ "DMDAGetOverlap"
146 /*@
147   DMDAGetOverlap - Gets the size of the per-processor overlap.
148 
149   Not collective
150 
151   Input Parameters:
152 . da  - The DMDA
153 
154   Output Parameters:
155 + x   - Overlap in the x direction
156 . y   - Overlap in the y direction
157 - z   - Overlap in the z direction
158 
159   Level: intermediate
160 
161 .keywords:  distributed array, overlap, domain decomposition
162 .seealso: DMDACreateDomainDecomposition(), DMDASetOverlap(), DMDA
163 @*/
164 PetscErrorCode  DMDAGetOverlap(DM da,PetscInt *x,PetscInt *y,PetscInt *z)
165 {
166   DM_DA *dd = (DM_DA*)da->data;
167 
168   PetscFunctionBegin;
169   PetscValidHeaderSpecific(da,DM_CLASSID,1);
170   if (x) *x = dd->xol;
171   if (y) *y = dd->yol;
172   if (z) *z = dd->zol;
173   PetscFunctionReturn(0);
174 }
175 
176 #undef __FUNCT__
177 #define __FUNCT__ "DMDASetOverlap"
178 /*@
179   DMDASetOverlap - Sets the size of the per-processor overlap.
180 
181   Not collective
182 
183   Input Parameters:
184 + da  - The DMDA
185 . x   - Overlap in the x direction
186 . y   - Overlap in the y direction
187 - z   - Overlap in the z direction
188 
189   Level: intermediate
190 
191 .keywords:  distributed array, overlap, domain decomposition
192 .seealso: DMDACreateDomainDecomposition(), DMDAGetOverlap(), DMDA
193 @*/
194 PetscErrorCode  DMDASetOverlap(DM da,PetscInt x,PetscInt y,PetscInt z)
195 {
196   DM_DA *dd = (DM_DA*)da->data;
197 
198   PetscFunctionBegin;
199   PetscValidHeaderSpecific(da,DM_CLASSID,1);
200   PetscValidLogicalCollectiveInt(da,x,2);
201   PetscValidLogicalCollectiveInt(da,y,3);
202   PetscValidLogicalCollectiveInt(da,z,4);
203   dd->xol = x;
204   dd->yol = y;
205   dd->zol = z;
206   PetscFunctionReturn(0);
207 }
208 
209 
210 #undef __FUNCT__
211 #define __FUNCT__ "DMDAGetNumLocalSubDomains"
212 /*@
213   DMDAGetNumLocalSubDomains - Gets the number of local subdomains created upon decomposition.
214 
215   Not collective
216 
217   Input Parameters:
218 . da  - The DMDA
219 
220   Output Parameters:
221 + Nsub   - Number of local subdomains created upon decomposition
222 
223   Level: intermediate
224 
225 .keywords:  distributed array, domain decomposition
226 .seealso: DMDACreateDomainDecomposition(), DMDASetNumLocalSubDomains(), DMDA
227 @*/
228 PetscErrorCode  DMDAGetNumLocalSubDomains(DM da,PetscInt *Nsub)
229 {
230   DM_DA *dd = (DM_DA*)da->data;
231 
232   PetscFunctionBegin;
233   PetscValidHeaderSpecific(da,DM_CLASSID,1);
234   if (Nsub) *Nsub = dd->Nsub;
235   PetscFunctionReturn(0);
236 }
237 
238 #undef __FUNCT__
239 #define __FUNCT__ "DMDASetNumLocalSubDomains"
240 /*@
241   DMDASetNumLocalSubDomains - Sets the number of local subdomains created upon decomposition.
242 
243   Not collective
244 
245   Input Parameters:
246 + da  - The DMDA
247 - Nsub - The number of local subdomains requested
248 
249   Level: intermediate
250 
251 .keywords:  distributed array, domain decomposition
252 .seealso: DMDACreateDomainDecomposition(), DMDAGetNumLocalSubDomains(), DMDA
253 @*/
254 PetscErrorCode  DMDASetNumLocalSubDomains(DM da,PetscInt Nsub)
255 {
256   DM_DA *dd = (DM_DA*)da->data;
257 
258   PetscFunctionBegin;
259   PetscValidHeaderSpecific(da,DM_CLASSID,1);
260   PetscValidLogicalCollectiveInt(da,Nsub,2);
261   dd->Nsub = Nsub;
262   PetscFunctionReturn(0);
263 }
264 
265 #undef __FUNCT__
266 #define __FUNCT__ "DMDASetOffset"
267 /*@
268   DMDASetOffset - Sets the index offset of the DA.
269 
270   Collective on DA
271 
272   Input Parameter:
273 + da  - The DMDA
274 . xo  - The offset in the x direction
275 . yo  - The offset in the y direction
276 - zo  - The offset in the z direction
277 
278   Level: intermediate
279 
280   Notes: This is used primarily to overlap a computation on a local DA with that on a global DA without
281   changing boundary conditions or subdomain features that depend upon the global offsets.
282 
283 .keywords:  distributed array, degrees of freedom
284 .seealso: DMDAGetOffset(), DMDAVecGetArray()
285 @*/
286 PetscErrorCode  DMDASetOffset(DM da, PetscInt xo, PetscInt yo, PetscInt zo, PetscInt Mo, PetscInt No, PetscInt Po)
287 {
288   PetscErrorCode ierr;
289   DM_DA          *dd = (DM_DA*)da->data;
290 
291   PetscFunctionBegin;
292   PetscValidHeaderSpecific(da,DM_CLASSID,1);
293   PetscValidLogicalCollectiveInt(da,xo,2);
294   PetscValidLogicalCollectiveInt(da,yo,3);
295   PetscValidLogicalCollectiveInt(da,zo,4);
296   PetscValidLogicalCollectiveInt(da,Mo,5);
297   PetscValidLogicalCollectiveInt(da,No,6);
298   PetscValidLogicalCollectiveInt(da,Po,7);
299   dd->xo = xo;
300   dd->yo = yo;
301   dd->zo = zo;
302   dd->Mo = Mo;
303   dd->No = No;
304   dd->Po = Po;
305 
306   if (da->coordinateDM) {
307     ierr = DMDASetOffset(da->coordinateDM,xo,yo,zo,Mo,No,Po);CHKERRQ(ierr);
308   }
309   PetscFunctionReturn(0);
310 }
311 
312 #undef __FUNCT__
313 #define __FUNCT__ "DMDAGetOffset"
314 /*@
315   DMDAGetOffset - Gets the index offset of the DA.
316 
317   Not collective
318 
319   Input Parameter:
320 . da  - The DMDA
321 
322   Output Parameters:
323 + xo  - The offset in the x direction
324 . yo  - The offset in the y direction
325 . zo  - The offset in the z direction
326 . Mo  - The global size in the x direction
327 . No  - The global size in the y direction
328 - Po  - The global size in the z direction
329 
330   Level: intermediate
331 
332 .keywords:  distributed array, degrees of freedom
333 .seealso: DMDASetOffset(), DMDAVecGetArray()
334 @*/
335 PetscErrorCode  DMDAGetOffset(DM da,PetscInt *xo,PetscInt *yo,PetscInt *zo,PetscInt *Mo,PetscInt *No,PetscInt *Po)
336 {
337   DM_DA *dd = (DM_DA*)da->data;
338 
339   PetscFunctionBegin;
340   PetscValidHeaderSpecific(da,DM_CLASSID,1);
341   if (xo) *xo = dd->xo;
342   if (yo) *yo = dd->yo;
343   if (zo) *zo = dd->zo;
344   if (Mo) *Mo = dd->Mo;
345   if (No) *No = dd->No;
346   if (Po) *Po = dd->Po;
347   PetscFunctionReturn(0);
348 }
349 
350 #undef __FUNCT__
351 #define __FUNCT__ "DMDAGetNonOverlappingRegion"
352 /*@
353   DMDAGetNonOverlappingRegion - Gets the indices of the nonoverlapping region of a subdomain DM.
354 
355   Not collective
356 
357   Input Parameter:
358 . da  - The DMDA
359 
360   Output Parameters:
361 + xs  - The start of the region in x
362 . ys  - The start of the region in y
363 . zs  - The start of the region in z
364 . xs  - The size of the region in x
365 . ys  - The size of the region in y
366 . zs  - The size of the region in z
367 
368   Level: intermediate
369 
370 .keywords:  distributed array, degrees of freedom
371 .seealso: DMDAGetOffset(), DMDAVecGetArray()
372 @*/
373 PetscErrorCode  DMDAGetNonOverlappingRegion(DM da, PetscInt *xs, PetscInt *ys, PetscInt *zs, PetscInt *xm, PetscInt *ym, PetscInt *zm)
374 {
375   DM_DA          *dd = (DM_DA*)da->data;
376 
377   PetscFunctionBegin;
378   PetscValidHeaderSpecific(da,DM_CLASSID,1);
379   if (xs) *xs = dd->nonxs;
380   if (ys) *ys = dd->nonys;
381   if (zs) *zs = dd->nonzs;
382   if (xm) *xm = dd->nonxm;
383   if (ym) *ym = dd->nonym;
384   if (zm) *zm = dd->nonzm;
385   PetscFunctionReturn(0);
386 }
387 
388 
389 #undef __FUNCT__
390 #define __FUNCT__ "DMDASetNonOverlappingRegion"
391 /*@
392   DMDASetNonOverlappingRegion - Sets the indices of the nonoverlapping region of a subdomain DM.
393 
394   Collective on DA
395 
396   Input Parameter:
397 + da  - The DMDA
398 . xs  - The start of the region in x
399 . ys  - The start of the region in y
400 . zs  - The start of the region in z
401 . xs  - The size of the region in x
402 . ys  - The size of the region in y
403 . zs  - The size of the region in z
404 
405   Level: intermediate
406 
407 .keywords:  distributed array, degrees of freedom
408 .seealso: DMDAGetOffset(), DMDAVecGetArray()
409 @*/
410 PetscErrorCode  DMDASetNonOverlappingRegion(DM da, PetscInt xs, PetscInt ys, PetscInt zs, PetscInt xm, PetscInt ym, PetscInt zm)
411 {
412   DM_DA          *dd = (DM_DA*)da->data;
413 
414   PetscFunctionBegin;
415   PetscValidHeaderSpecific(da,DM_CLASSID,1);
416   PetscValidLogicalCollectiveInt(da,xs,2);
417   PetscValidLogicalCollectiveInt(da,ys,3);
418   PetscValidLogicalCollectiveInt(da,zs,4);
419   PetscValidLogicalCollectiveInt(da,xm,5);
420   PetscValidLogicalCollectiveInt(da,ym,6);
421   PetscValidLogicalCollectiveInt(da,zm,7);
422   dd->nonxs = xs;
423   dd->nonys = ys;
424   dd->nonzs = zs;
425   dd->nonxm = xm;
426   dd->nonym = ym;
427   dd->nonzm = zm;
428 
429   PetscFunctionReturn(0);
430 }
431 
432 #undef __FUNCT__
433 #define __FUNCT__ "DMDASetStencilType"
434 /*@
435   DMDASetStencilType - Sets the type of the communication stencil
436 
437   Logically Collective on DMDA
438 
439   Input Parameter:
440 + da    - The DMDA
441 - stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR.
442 
443   Level: intermediate
444 
445 .keywords:  distributed array, stencil
446 .seealso: DMDACreate(), DMDestroy(), DMDA
447 @*/
448 PetscErrorCode  DMDASetStencilType(DM da, DMDAStencilType stype)
449 {
450   DM_DA *dd = (DM_DA*)da->data;
451 
452   PetscFunctionBegin;
453   PetscValidHeaderSpecific(da,DM_CLASSID,1);
454   PetscValidLogicalCollectiveEnum(da,stype,2);
455   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
456   dd->stencil_type = stype;
457   PetscFunctionReturn(0);
458 }
459 
460 #undef __FUNCT__
461 #define __FUNCT__ "DMDASetStencilWidth"
462 /*@
463   DMDASetStencilWidth - Sets the width of the communication stencil
464 
465   Logically Collective on DMDA
466 
467   Input Parameter:
468 + da    - The DMDA
469 - width - The stencil width
470 
471   Level: intermediate
472 
473 .keywords:  distributed array, stencil
474 .seealso: DMDACreate(), DMDestroy(), DMDA
475 @*/
476 PetscErrorCode  DMDASetStencilWidth(DM da, PetscInt width)
477 {
478   DM_DA *dd = (DM_DA*)da->data;
479 
480   PetscFunctionBegin;
481   PetscValidHeaderSpecific(da,DM_CLASSID,1);
482   PetscValidLogicalCollectiveInt(da,width,2);
483   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
484   dd->s = width;
485   PetscFunctionReturn(0);
486 }
487 
488 #undef __FUNCT__
489 #define __FUNCT__ "DMDACheckOwnershipRanges_Private"
490 static PetscErrorCode DMDACheckOwnershipRanges_Private(DM da,PetscInt M,PetscInt m,const PetscInt lx[])
491 {
492   PetscInt i,sum;
493 
494   PetscFunctionBegin;
495   if (M < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Global dimension not set");
496   for (i=sum=0; i<m; i++) sum += lx[i];
497   if (sum != M) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_INCOMP,"Ownership ranges sum to %D but global dimension is %D",sum,M);
498   PetscFunctionReturn(0);
499 }
500 
501 #undef __FUNCT__
502 #define __FUNCT__ "DMDASetOwnershipRanges"
503 /*@
504   DMDASetOwnershipRanges - Sets the number of nodes in each direction on each process
505 
506   Logically Collective on DMDA
507 
508   Input Parameter:
509 + da - The DMDA
510 . lx - array containing number of nodes in the X direction on each process, or NULL. If non-null, must be of length da->m
511 . ly - array containing number of nodes in the Y direction on each process, or NULL. If non-null, must be of length da->n
512 - lz - array containing number of nodes in the Z direction on each process, or NULL. If non-null, must be of length da->p.
513 
514   Level: intermediate
515 
516   Note: these numbers are NOT multiplied by the number of dof per node.
517 
518 .keywords:  distributed array
519 .seealso: DMDACreate(), DMDestroy(), DMDA
520 @*/
521 PetscErrorCode  DMDASetOwnershipRanges(DM da, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[])
522 {
523   PetscErrorCode ierr;
524   DM_DA          *dd = (DM_DA*)da->data;
525 
526   PetscFunctionBegin;
527   PetscValidHeaderSpecific(da,DM_CLASSID,1);
528   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
529   if (lx) {
530     if (dd->m < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
531     ierr = DMDACheckOwnershipRanges_Private(da,dd->M,dd->m,lx);CHKERRQ(ierr);
532     if (!dd->lx) {
533       ierr = PetscMalloc1(dd->m, &dd->lx);CHKERRQ(ierr);
534     }
535     ierr = PetscMemcpy(dd->lx, lx, dd->m*sizeof(PetscInt));CHKERRQ(ierr);
536   }
537   if (ly) {
538     if (dd->n < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
539     ierr = DMDACheckOwnershipRanges_Private(da,dd->N,dd->n,ly);CHKERRQ(ierr);
540     if (!dd->ly) {
541       ierr = PetscMalloc1(dd->n, &dd->ly);CHKERRQ(ierr);
542     }
543     ierr = PetscMemcpy(dd->ly, ly, dd->n*sizeof(PetscInt));CHKERRQ(ierr);
544   }
545   if (lz) {
546     if (dd->p < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
547     ierr = DMDACheckOwnershipRanges_Private(da,dd->P,dd->p,lz);CHKERRQ(ierr);
548     if (!dd->lz) {
549       ierr = PetscMalloc1(dd->p, &dd->lz);CHKERRQ(ierr);
550     }
551     ierr = PetscMemcpy(dd->lz, lz, dd->p*sizeof(PetscInt));CHKERRQ(ierr);
552   }
553   PetscFunctionReturn(0);
554 }
555 
556 #undef __FUNCT__
557 #define __FUNCT__ "DMDASetInterpolationType"
558 /*@
559        DMDASetInterpolationType - Sets the type of interpolation that will be
560           returned by DMCreateInterpolation()
561 
562    Logically Collective on DMDA
563 
564    Input Parameter:
565 +  da - initial distributed array
566 .  ctype - DMDA_Q1 and DMDA_Q0 are currently the only supported forms
567 
568    Level: intermediate
569 
570    Notes: you should call this on the coarser of the two DMDAs you pass to DMCreateInterpolation()
571 
572 .keywords:  distributed array, interpolation
573 
574 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType
575 @*/
576 PetscErrorCode  DMDASetInterpolationType(DM da,DMDAInterpolationType ctype)
577 {
578   DM_DA *dd = (DM_DA*)da->data;
579 
580   PetscFunctionBegin;
581   PetscValidHeaderSpecific(da,DM_CLASSID,1);
582   PetscValidLogicalCollectiveEnum(da,ctype,2);
583   dd->interptype = ctype;
584   PetscFunctionReturn(0);
585 }
586 
587 #undef __FUNCT__
588 #define __FUNCT__ "DMDAGetInterpolationType"
589 /*@
590        DMDAGetInterpolationType - Gets the type of interpolation that will be
591           used by DMCreateInterpolation()
592 
593    Not Collective
594 
595    Input Parameter:
596 .  da - distributed array
597 
598    Output Parameter:
599 .  ctype - interpolation type (DMDA_Q1 and DMDA_Q0 are currently the only supported forms)
600 
601    Level: intermediate
602 
603 .keywords:  distributed array, interpolation
604 
605 .seealso: DMDA, DMDAInterpolationType, DMDASetInterpolationType(), DMCreateInterpolation()
606 @*/
607 PetscErrorCode  DMDAGetInterpolationType(DM da,DMDAInterpolationType *ctype)
608 {
609   DM_DA *dd = (DM_DA*)da->data;
610 
611   PetscFunctionBegin;
612   PetscValidHeaderSpecific(da,DM_CLASSID,1);
613   PetscValidPointer(ctype,2);
614   *ctype = dd->interptype;
615   PetscFunctionReturn(0);
616 }
617 
618 #undef __FUNCT__
619 #define __FUNCT__ "DMDAGetNeighbors"
620 /*@C
621       DMDAGetNeighbors - Gets an array containing the MPI rank of all the current
622         processes neighbors.
623 
624     Not Collective
625 
626    Input Parameter:
627 .     da - the DMDA object
628 
629    Output Parameters:
630 .     ranks - the neighbors ranks, stored with the x index increasing most rapidly.
631               this process itself is in the list
632 
633    Notes: In 2d the array is of length 9, in 3d of length 27
634           Not supported in 1d
635           Do not free the array, it is freed when the DMDA is destroyed.
636 
637    Fortran Notes: In fortran you must pass in an array of the appropriate length.
638 
639    Level: intermediate
640 
641 @*/
642 PetscErrorCode  DMDAGetNeighbors(DM da,const PetscMPIInt *ranks[])
643 {
644   DM_DA *dd = (DM_DA*)da->data;
645 
646   PetscFunctionBegin;
647   PetscValidHeaderSpecific(da,DM_CLASSID,1);
648   *ranks = dd->neighbors;
649   PetscFunctionReturn(0);
650 }
651 
652 #undef __FUNCT__
653 #define __FUNCT__ "DMDAGetOwnershipRanges"
654 /*@C
655       DMDAGetOwnershipRanges - Gets the ranges of indices in the x, y and z direction that are owned by each process
656 
657     Not Collective
658 
659    Input Parameter:
660 .     da - the DMDA object
661 
662    Output Parameter:
663 +     lx - ownership along x direction (optional)
664 .     ly - ownership along y direction (optional)
665 -     lz - ownership along z direction (optional)
666 
667    Level: intermediate
668 
669     Note: these correspond to the optional final arguments passed to DMDACreate(), DMDACreate2d(), DMDACreate3d()
670 
671     In Fortran one must pass in arrays lx, ly, and lz that are long enough to hold the values; the sixth, seventh and
672     eighth arguments from DMDAGetInfo()
673 
674      In C you should not free these arrays, nor change the values in them. They will only have valid values while the
675     DMDA they came from still exists (has not been destroyed).
676 
677     These numbers are NOT multiplied by the number of dof per node.
678 
679 .seealso: DMDAGetCorners(), DMDAGetGhostCorners(), DMDACreate(), DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), VecGetOwnershipRanges()
680 @*/
681 PetscErrorCode  DMDAGetOwnershipRanges(DM da,const PetscInt *lx[],const PetscInt *ly[],const PetscInt *lz[])
682 {
683   DM_DA *dd = (DM_DA*)da->data;
684 
685   PetscFunctionBegin;
686   PetscValidHeaderSpecific(da,DM_CLASSID,1);
687   if (lx) *lx = dd->lx;
688   if (ly) *ly = dd->ly;
689   if (lz) *lz = dd->lz;
690   PetscFunctionReturn(0);
691 }
692 
693 #undef __FUNCT__
694 #define __FUNCT__ "DMDASetRefinementFactor"
695 /*@
696      DMDASetRefinementFactor - Set the ratios that the DMDA grid is refined
697 
698     Logically Collective on DMDA
699 
700   Input Parameters:
701 +    da - the DMDA object
702 .    refine_x - ratio of fine grid to coarse in x direction (2 by default)
703 .    refine_y - ratio of fine grid to coarse in y direction (2 by default)
704 -    refine_z - ratio of fine grid to coarse in z direction (2 by default)
705 
706   Options Database:
707 +  -da_refine_x - refinement ratio in x direction
708 .  -da_refine_y - refinement ratio in y direction
709 -  -da_refine_z - refinement ratio in z direction
710 
711   Level: intermediate
712 
713     Notes: Pass PETSC_IGNORE to leave a value unchanged
714 
715 .seealso: DMRefine(), DMDAGetRefinementFactor()
716 @*/
717 PetscErrorCode  DMDASetRefinementFactor(DM da, PetscInt refine_x, PetscInt refine_y,PetscInt refine_z)
718 {
719   DM_DA *dd = (DM_DA*)da->data;
720 
721   PetscFunctionBegin;
722   PetscValidHeaderSpecific(da,DM_CLASSID,1);
723   PetscValidLogicalCollectiveInt(da,refine_x,2);
724   PetscValidLogicalCollectiveInt(da,refine_y,3);
725   PetscValidLogicalCollectiveInt(da,refine_z,4);
726 
727   if (refine_x > 0) dd->refine_x = refine_x;
728   if (refine_y > 0) dd->refine_y = refine_y;
729   if (refine_z > 0) dd->refine_z = refine_z;
730   PetscFunctionReturn(0);
731 }
732 
733 #undef __FUNCT__
734 #define __FUNCT__ "DMDAGetRefinementFactor"
735 /*@C
736      DMDAGetRefinementFactor - Gets the ratios that the DMDA grid is refined
737 
738     Not Collective
739 
740   Input Parameter:
741 .    da - the DMDA object
742 
743   Output Parameters:
744 +    refine_x - ratio of fine grid to coarse in x direction (2 by default)
745 .    refine_y - ratio of fine grid to coarse in y direction (2 by default)
746 -    refine_z - ratio of fine grid to coarse in z direction (2 by default)
747 
748   Level: intermediate
749 
750     Notes: Pass NULL for values you do not need
751 
752 .seealso: DMRefine(), DMDASetRefinementFactor()
753 @*/
754 PetscErrorCode  DMDAGetRefinementFactor(DM da, PetscInt *refine_x, PetscInt *refine_y,PetscInt *refine_z)
755 {
756   DM_DA *dd = (DM_DA*)da->data;
757 
758   PetscFunctionBegin;
759   PetscValidHeaderSpecific(da,DM_CLASSID,1);
760   if (refine_x) *refine_x = dd->refine_x;
761   if (refine_y) *refine_y = dd->refine_y;
762   if (refine_z) *refine_z = dd->refine_z;
763   PetscFunctionReturn(0);
764 }
765 
766 #undef __FUNCT__
767 #define __FUNCT__ "DMDASetGetMatrix"
768 /*@C
769      DMDASetGetMatrix - Sets the routine used by the DMDA to allocate a matrix.
770 
771     Logically Collective on DMDA
772 
773   Input Parameters:
774 +    da - the DMDA object
775 -    f - the function that allocates the matrix for that specific DMDA
776 
777   Level: developer
778 
779    Notes: See DMDASetBlockFills() that provides a simple way to provide the nonzero structure for
780        the diagonal and off-diagonal blocks of the matrix
781 
782 .seealso: DMCreateMatrix(), DMDASetBlockFills()
783 @*/
784 PetscErrorCode  DMDASetGetMatrix(DM da,PetscErrorCode (*f)(DM, Mat*))
785 {
786   PetscFunctionBegin;
787   PetscValidHeaderSpecific(da,DM_CLASSID,1);
788   da->ops->creatematrix = f;
789   PetscFunctionReturn(0);
790 }
791 
#undef __FUNCT__
#define __FUNCT__ "DMDARefineOwnershipRanges"
/*
  Creates "balanced" ownership ranges after refinement, constrained by the need for the
  fine grid boundaries to fall within one stencil width of the coarse partition.

  Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.

  Input:
    da            - used only for error reporting (its communicator)
    periodic      - whether this direction is periodic (affects the total fine-point count)
    stencil_width - coarse stencil width constraining where fine boundaries may fall
    ratio         - refinement ratio (>= 1)
    m             - number of subdomains in this direction
    lc[m]         - coarse ownership counts per subdomain
  Output:
    lf[m]         - fine ownership counts per subdomain (sums to the refined global size)
*/
static PetscErrorCode DMDARefineOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lc[],PetscInt lf[])
{
  PetscInt       i,totalc = 0,remaining,startc = 0,startf = 0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (ratio < 1) SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %D must be at least 1",ratio);
  /* ratio == 1: the fine partition is identical to the coarse one */
  if (ratio == 1) {
    ierr = PetscMemcpy(lf,lc,m*sizeof(lc[0]));CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  for (i=0; i<m; i++) totalc += lc[i];
  /* Total fine points: interior points scale by ratio; a non-periodic direction keeps the
   * shared endpoint, matching M_fine = 1 + ratio*(M_coarse - 1) used in DMRefine_DA */
  remaining = (!periodic) + ratio * (totalc - (!periodic));
  for (i=0; i<m; i++) {
    /* Ideal share: ceiling of remaining fine points over remaining subdomains */
    PetscInt want = remaining/(m-i) + !!(remaining%(m-i));
    if (i == m-1) lf[i] = want;  /* last subdomain absorbs whatever is left */
    else {
      const PetscInt nextc = startc + lc[i];
      /* Move the first fine node of the next subdomain to the right until the coarse node on its left is within one
       * coarse stencil width of the first coarse node in the next subdomain. */
      while ((startf+want)/ratio < nextc - stencil_width) want++;
      /* Move the last fine node in the current subdomain to the left until the coarse node on its right is within one
       * coarse stencil width of the last coarse node in the current subdomain. */
      while ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width) want--;
      /* Make sure all constraints are satisfied */
      if (want < 0 || want > remaining || ((startf+want)/ratio < nextc - stencil_width)
          || ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible refined ownership range");
    }
    lf[i]      = want;
    startc    += lc[i];
    startf    += lf[i];
    remaining -= lf[i];
  }
  PetscFunctionReturn(0);
}
835 
#undef __FUNCT__
#define __FUNCT__ "DMDACoarsenOwnershipRanges"
/*
  Creates "balanced" ownership ranges after coarsening, constrained by the need for the
  fine grid boundaries to fall within one stencil width of the coarse partition.

  Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.

  Input:
    da            - used only for error reporting (its communicator)
    periodic      - whether this direction is periodic (affects the total coarse-point count)
    stencil_width - stencil width constraining where coarse boundaries may fall
    ratio         - coarsening ratio (>= 1)
    m             - number of subdomains in this direction
    lf[m]         - fine ownership counts per subdomain
  Output:
    lc[m]         - coarse ownership counts per subdomain (sums to the coarsened global size)
*/
static PetscErrorCode DMDACoarsenOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lf[],PetscInt lc[])
{
  PetscInt       i,totalf,remaining,startc,startf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (ratio < 1) SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %D must be at least 1",ratio);
  /* ratio == 1: the coarse partition is identical to the fine one */
  if (ratio == 1) {
    ierr = PetscMemcpy(lc,lf,m*sizeof(lf[0]));CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  for (i=0,totalf=0; i<m; i++) totalf += lf[i];
  /* Total coarse points: inverse of the refinement formula; a non-periodic direction keeps
   * the shared endpoint, i.e. M_coarse = 1 + (M_fine - 1)/ratio */
  remaining = (!periodic) + (totalf - (!periodic)) / ratio;
  for (i=0,startc=0,startf=0; i<m; i++) {
    /* Ideal share: ceiling of remaining coarse points over remaining subdomains */
    PetscInt want = remaining/(m-i) + !!(remaining%(m-i));
    if (i == m-1) lc[i] = want;  /* last subdomain absorbs whatever is left */
    else {
      const PetscInt nextf = startf+lf[i];
      /* Slide first coarse node of next subdomain to the left until the coarse node to the left of the first fine
       * node is within one stencil width. */
      while (nextf/ratio < startc+want-stencil_width) want--;
      /* Slide the last coarse node of the current subdomain to the right until the coarse node to the right of the last
       * fine node is within one stencil width. */
      while ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width) want++;
      /* Make sure all constraints are satisfied */
      if (want < 0 || want > remaining
          || (nextf/ratio < startc+want-stencil_width) || ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible coarsened ownership range");
    }
    lc[i]      = want;
    startc    += lc[i];
    startf    += lf[i];
    remaining -= lc[i];
  }
  PetscFunctionReturn(0);
}
878 
#undef __FUNCT__
#define __FUNCT__ "DMRefine_DA"
/*
   DMRefine_DA - Creates a refined DMDA from an existing one, scaling each
   global dimension by the refinement factor stored on the coarse DMDA.

   Note: the comm argument is not used; the refined DM is created on the
   communicator of da itself.
*/
PetscErrorCode  DMRefine_DA(DM da,MPI_Comm comm,DM *daref)
{
  PetscErrorCode ierr;
  PetscInt       M,N,P,i,dim;
  DM             da2;
  DM_DA          *dd = (DM_DA*)da->data,*dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(daref,3);

  /* Compute refined global sizes per dimension: with periodic boundaries or
     Q0 (piecewise constant) interpolation the size scales exactly by the
     refinement factor; otherwise (vertex-centered grid) the number of
     intervals, M-1, scales and one endpoint is added back.
     Dimensions beyond dim collapse to 1. */
  ierr = DMGetDimension(da, &dim);CHKERRQ(ierr);
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->refine_x*dd->M;
  } else {
    M = 1 + dd->refine_x*(dd->M - 1);
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->refine_y*dd->N;
    } else {
      N = 1;
    }
  } else {
    N = 1 + dd->refine_y*(dd->N - 1);
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->refine_z*dd->P;
    } else {
      P = 1;
    }
  } else {
    P = 1 + dd->refine_z*(dd->P - 1);
  }
  /* Create the fine DMDA with the same layout parameters as the coarse one */
  ierr = DMDACreate(PetscObjectComm((PetscObject)da),&da2);CHKERRQ(ierr);
  ierr = DMSetOptionsPrefix(da2,((PetscObject)da)->prefix);CHKERRQ(ierr);
  ierr = DMSetDimension(da2,dim);CHKERRQ(ierr);
  ierr = DMDASetSizes(da2,M,N,P);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(da2,dd->m,dd->n,dd->p);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz);CHKERRQ(ierr);
  ierr = DMDASetDof(da2,dd->w);CHKERRQ(ierr);
  ierr = DMDASetStencilType(da2,dd->stencil_type);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(da2,dd->s);CHKERRQ(ierr);
  /* Derive the fine per-process ownership ranges from the coarse ones so that
     the fine decomposition is compatible with the coarse decomposition
     (one range array per active dimension) */
  if (dim == 3) {
    PetscInt *lx,*ly,*lz;
    ierr = PetscMalloc3(dd->m,&lx,dd->n,&ly,dd->p,&lz);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_z,dd->p,dd->lz,lz);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,lz);CHKERRQ(ierr);
    ierr = PetscFree3(lx,ly,lz);CHKERRQ(ierr);
  } else if (dim == 2) {
    PetscInt *lx,*ly;
    ierr = PetscMalloc2(dd->m,&lx,dd->n,&ly);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,NULL);CHKERRQ(ierr);
    ierr = PetscFree2(lx,ly);CHKERRQ(ierr);
  } else if (dim == 1) {
    PetscInt *lx;
    ierr = PetscMalloc1(dd->m,&lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,NULL,NULL);CHKERRQ(ierr);
    ierr = PetscFree(lx);CHKERRQ(ierr);
  }
  dd2 = (DM_DA*)da2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones */
  da2->ops->creatematrix = da->ops->creatematrix;
  /* da2->ops->createinterpolation = da->ops->createinterpolation; this causes problem with SNESVI */
  da2->ops->getcoloring = da->ops->getcoloring;
  dd2->interptype       = dd->interptype;

  /* copy fill information if given; the arrays are dd->dfill[dd->w]+dd->w+1
     entries long (CSR-style: dd->w+1 row pointers followed by column indices) */
  if (dd->dfill) {
    ierr = PetscMalloc1(dd->dfill[dd->w]+dd->w+1,&dd2->dfill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->dfill,dd->dfill,(dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->ofill) {
    ierr = PetscMalloc1(dd->ofill[dd->w]+dd->w+1,&dd2->ofill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->ofill,dd->ofill,(dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  /* copy the refine information; the coarsen factors of the fine grid are set
     to the refine factors so that coarsening undoes this refinement */
  dd2->coarsen_x = dd2->refine_x = dd->refine_x;
  dd2->coarsen_y = dd2->refine_y = dd->refine_y;
  dd2->coarsen_z = dd2->refine_z = dd->refine_z;

  /* copy vector type information */
  ierr = DMSetVecType(da2,da->vectype);CHKERRQ(ierr);

  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  /* the refined grid is one level "up" from da */
  da2->leveldown = da->leveldown;
  da2->levelup   = da->levelup + 1;

  ierr = DMSetFromOptions(da2);CHKERRQ(ierr);
  ierr = DMSetUp(da2);CHKERRQ(ierr);

  /* interpolate coordinates if they are set on the coarse grid */
  if (da->coordinates) {
    DM  cdaf,cdac;
    Vec coordsc,coordsf;
    Mat II;

    ierr = DMGetCoordinateDM(da,&cdac);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da,&coordsc);CHKERRQ(ierr);
    ierr = DMGetCoordinateDM(da2,&cdaf);CHKERRQ(ierr);
    /* force creation of the coordinate vector */
    ierr = DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da2,&coordsf);CHKERRQ(ierr);
    /* overwrite the placeholder uniform coordinates with coarse coordinates
       mapped through the coordinate-DM interpolation operator */
    ierr = DMCreateInterpolation(cdac,cdaf,&II,NULL);CHKERRQ(ierr);
    ierr = MatInterpolate(II,coordsc,coordsf);CHKERRQ(ierr);
    ierr = MatDestroy(&II);CHKERRQ(ierr);
  }

  /* propagate per-field names to the refined grid */
  for (i=0; i<da->bs; i++) {
    const char *fieldname;
    ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr);
    ierr = DMDASetFieldName(da2,i,fieldname);CHKERRQ(ierr);
  }

  *daref = da2;
  PetscFunctionReturn(0);
}
1007 
1008 
#undef __FUNCT__
#define __FUNCT__ "DMCoarsen_DA"
/*
   DMCoarsen_DA - Creates a coarsened DMDA from an existing one, dividing each
   global dimension by the coarsening factor stored on the fine DMDA.

   Note: the comm argument is not used; the coarse DM is created on the
   communicator of da itself.
*/
PetscErrorCode  DMCoarsen_DA(DM da, MPI_Comm comm,DM *daref)
{
  PetscErrorCode ierr;
  PetscInt       M,N,P,i,dim;
  DM             da2;
  DM_DA          *dd = (DM_DA*)da->data,*dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(daref,3);

  /* Compute coarse global sizes per dimension, mirroring the size formulas in
     DMRefine_DA: with periodic boundaries or Q0 interpolation the size divides
     exactly; otherwise the number of intervals, M-1, divides and one endpoint
     is added back. Dimensions beyond dim collapse to 1. */
  ierr = DMGetDimension(da, &dim);CHKERRQ(ierr);
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->M / dd->coarsen_x;
  } else {
    M = 1 + (dd->M - 1) / dd->coarsen_x;
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->N / dd->coarsen_y;
    } else {
      N = 1;
    }
  } else {
    N = 1 + (dd->N - 1) / dd->coarsen_y;
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->P / dd->coarsen_z;
    } else {
      P = 1;
    }
  } else {
    P = 1 + (dd->P - 1) / dd->coarsen_z;
  }
  /* Create the coarse DMDA with the same layout parameters as the fine one */
  ierr = DMDACreate(PetscObjectComm((PetscObject)da),&da2);CHKERRQ(ierr);
  ierr = DMSetOptionsPrefix(da2,((PetscObject)da)->prefix);CHKERRQ(ierr);
  ierr = DMSetDimension(da2,dim);CHKERRQ(ierr);
  ierr = DMDASetSizes(da2,M,N,P);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(da2,dd->m,dd->n,dd->p);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz);CHKERRQ(ierr);
  ierr = DMDASetDof(da2,dd->w);CHKERRQ(ierr);
  ierr = DMDASetStencilType(da2,dd->stencil_type);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(da2,dd->s);CHKERRQ(ierr);
  /* Derive the coarse per-process ownership ranges from the fine ones so that
     the decompositions are compatible (one range array per active dimension) */
  if (dim == 3) {
    PetscInt *lx,*ly,*lz;
    ierr = PetscMalloc3(dd->m,&lx,dd->n,&ly,dd->p,&lz);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_z,dd->p,dd->lz,lz);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,lz);CHKERRQ(ierr);
    ierr = PetscFree3(lx,ly,lz);CHKERRQ(ierr);
  } else if (dim == 2) {
    PetscInt *lx,*ly;
    ierr = PetscMalloc2(dd->m,&lx,dd->n,&ly);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,NULL);CHKERRQ(ierr);
    ierr = PetscFree2(lx,ly);CHKERRQ(ierr);
  } else if (dim == 1) {
    PetscInt *lx;
    ierr = PetscMalloc1(dd->m,&lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,NULL,NULL);CHKERRQ(ierr);
    ierr = PetscFree(lx);CHKERRQ(ierr);
  }
  dd2 = (DM_DA*)da2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones; why are only some inherited and not all? */
  /* da2->ops->createinterpolation = da->ops->createinterpolation; copying this one causes trouble for DMSetVI */
  da2->ops->creatematrix = da->ops->creatematrix;
  da2->ops->getcoloring  = da->ops->getcoloring;
  dd2->interptype        = dd->interptype;

  /* copy fill information if given; the arrays are dd->dfill[dd->w]+dd->w+1
     entries long (CSR-style: dd->w+1 row pointers followed by column indices) */
  if (dd->dfill) {
    ierr = PetscMalloc1(dd->dfill[dd->w]+dd->w+1,&dd2->dfill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->dfill,dd->dfill,(dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->ofill) {
    ierr = PetscMalloc1(dd->ofill[dd->w]+dd->w+1,&dd2->ofill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->ofill,dd->ofill,(dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  /* copy the refine information; the refine factors of the coarse grid are set
     to the coarsen factors so that refinement undoes this coarsening */
  dd2->coarsen_x = dd2->refine_x = dd->coarsen_x;
  dd2->coarsen_y = dd2->refine_y = dd->coarsen_y;
  dd2->coarsen_z = dd2->refine_z = dd->coarsen_z;

  /* copy vector type information */
  ierr = DMSetVecType(da2,da->vectype);CHKERRQ(ierr);

  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  /* the coarsened grid is one level "down" from da */
  da2->leveldown = da->leveldown + 1;
  da2->levelup   = da->levelup;

  ierr = DMSetFromOptions(da2);CHKERRQ(ierr);
  ierr = DMSetUp(da2);CHKERRQ(ierr);

  /* inject coordinates if they are set on the fine grid */
  if (da->coordinates) {
    DM         cdaf,cdac;
    Vec        coordsc,coordsf;
    Mat        inject;
    VecScatter vscat;

    ierr = DMGetCoordinateDM(da,&cdaf);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da,&coordsf);CHKERRQ(ierr);
    ierr = DMGetCoordinateDM(da2,&cdac);CHKERRQ(ierr);
    /* force creation of the coordinate vector */
    ierr = DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da2,&coordsc);CHKERRQ(ierr);

    /* overwrite the placeholder uniform coordinates by scattering the fine
       coordinates onto the coarse grid through the injection operator */
    ierr = DMCreateInjection(cdac,cdaf,&inject);CHKERRQ(ierr);
    ierr = MatScatterGetVecScatter(inject,&vscat);CHKERRQ(ierr);
    ierr = VecScatterBegin(vscat,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(vscat,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = MatDestroy(&inject);CHKERRQ(ierr);
  }

  /* propagate per-field names to the coarse grid */
  for (i=0; i<da->bs; i++) {
    const char *fieldname;
    ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr);
    ierr = DMDASetFieldName(da2,i,fieldname);CHKERRQ(ierr);
  }

  *daref = da2;
  PetscFunctionReturn(0);
}
1141 
1142 #undef __FUNCT__
1143 #define __FUNCT__ "DMRefineHierarchy_DA"
1144 PetscErrorCode  DMRefineHierarchy_DA(DM da,PetscInt nlevels,DM daf[])
1145 {
1146   PetscErrorCode ierr;
1147   PetscInt       i,n,*refx,*refy,*refz;
1148 
1149   PetscFunctionBegin;
1150   PetscValidHeaderSpecific(da,DM_CLASSID,1);
1151   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
1152   if (nlevels == 0) PetscFunctionReturn(0);
1153   PetscValidPointer(daf,3);
1154 
1155   /* Get refinement factors, defaults taken from the coarse DMDA */
1156   ierr = PetscMalloc3(nlevels,&refx,nlevels,&refy,nlevels,&refz);CHKERRQ(ierr);
1157   for (i=0; i<nlevels; i++) {
1158     ierr = DMDAGetRefinementFactor(da,&refx[i],&refy[i],&refz[i]);CHKERRQ(ierr);
1159   }
1160   n    = nlevels;
1161   ierr = PetscOptionsGetIntArray(((PetscObject)da)->prefix,"-da_refine_hierarchy_x",refx,&n,NULL);CHKERRQ(ierr);
1162   n    = nlevels;
1163   ierr = PetscOptionsGetIntArray(((PetscObject)da)->prefix,"-da_refine_hierarchy_y",refy,&n,NULL);CHKERRQ(ierr);
1164   n    = nlevels;
1165   ierr = PetscOptionsGetIntArray(((PetscObject)da)->prefix,"-da_refine_hierarchy_z",refz,&n,NULL);CHKERRQ(ierr);
1166 
1167   ierr = DMDASetRefinementFactor(da,refx[0],refy[0],refz[0]);CHKERRQ(ierr);
1168   ierr = DMRefine(da,PetscObjectComm((PetscObject)da),&daf[0]);CHKERRQ(ierr);
1169   for (i=1; i<nlevels; i++) {
1170     ierr = DMDASetRefinementFactor(daf[i-1],refx[i],refy[i],refz[i]);CHKERRQ(ierr);
1171     ierr = DMRefine(daf[i-1],PetscObjectComm((PetscObject)da),&daf[i]);CHKERRQ(ierr);
1172   }
1173   ierr = PetscFree3(refx,refy,refz);CHKERRQ(ierr);
1174   PetscFunctionReturn(0);
1175 }
1176 
1177 #undef __FUNCT__
1178 #define __FUNCT__ "DMCoarsenHierarchy_DA"
1179 PetscErrorCode  DMCoarsenHierarchy_DA(DM da,PetscInt nlevels,DM dac[])
1180 {
1181   PetscErrorCode ierr;
1182   PetscInt       i;
1183 
1184   PetscFunctionBegin;
1185   PetscValidHeaderSpecific(da,DM_CLASSID,1);
1186   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
1187   if (nlevels == 0) PetscFunctionReturn(0);
1188   PetscValidPointer(dac,3);
1189   ierr = DMCoarsen(da,PetscObjectComm((PetscObject)da),&dac[0]);CHKERRQ(ierr);
1190   for (i=1; i<nlevels; i++) {
1191     ierr = DMCoarsen(dac[i-1],PetscObjectComm((PetscObject)da),&dac[i]);CHKERRQ(ierr);
1192   }
1193   PetscFunctionReturn(0);
1194 }
1195