xref: /petsc/src/dm/impls/da/da.c (revision 6b3eee04a67a44bd9a4e65b41b1dd96ffcd06ca3)
1 #include <petsc/private/dmdaimpl.h>    /*I   "petscdmda.h"   I*/
2 
3 #undef __FUNCT__
4 #define __FUNCT__ "DMDASetSizes"
5 /*@
6   DMDASetSizes - Sets the global sizes
7 
8   Logically Collective on DMDA
9 
10   Input Parameters:
11 + da - the DMDA
12 . M - the global X size (or PETSC_DECIDE)
13 . N - the global Y size (or PETSC_DECIDE)
14 - P - the global Z size (or PETSC_DECIDE)
15 
16   Level: intermediate
17 
18 .seealso: DMDAGetSize(), PetscSplitOwnership()
19 @*/
20 PetscErrorCode  DMDASetSizes(DM da, PetscInt M, PetscInt N, PetscInt P)
21 {
22   DM_DA *dd = (DM_DA*)da->data;
23 
24   PetscFunctionBegin;
25   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
26   PetscValidLogicalCollectiveInt(da,M,2);
27   PetscValidLogicalCollectiveInt(da,N,3);
28   PetscValidLogicalCollectiveInt(da,P,4);
29   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
30 
31   dd->M = M;
32   dd->N = N;
33   dd->P = P;
34   PetscFunctionReturn(0);
35 }
36 
37 #undef __FUNCT__
38 #define __FUNCT__ "DMDASetNumProcs"
39 /*@
40   DMDASetNumProcs - Sets the number of processes in each dimension
41 
42   Logically Collective on DMDA
43 
44   Input Parameters:
45 + da - the DMDA
46 . m - the number of X procs (or PETSC_DECIDE)
47 . n - the number of Y procs (or PETSC_DECIDE)
48 - p - the number of Z procs (or PETSC_DECIDE)
49 
50   Level: intermediate
51 
52 .seealso: DMDASetSizes(), DMDAGetSize(), PetscSplitOwnership()
53 @*/
54 PetscErrorCode  DMDASetNumProcs(DM da, PetscInt m, PetscInt n, PetscInt p)
55 {
56   DM_DA          *dd = (DM_DA*)da->data;
57   PetscErrorCode ierr;
58 
59   PetscFunctionBegin;
60   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
61   PetscValidLogicalCollectiveInt(da,m,2);
62   PetscValidLogicalCollectiveInt(da,n,3);
63   PetscValidLogicalCollectiveInt(da,p,4);
64   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
65   dd->m = m;
66   dd->n = n;
67   dd->p = p;
68   if (da->dim == 2) {
69     PetscMPIInt size;
70     ierr = MPI_Comm_size(PetscObjectComm((PetscObject)da),&size);CHKERRQ(ierr);
71     if ((dd->m > 0) && (dd->n < 0)) {
72       dd->n = size/dd->m;
73       if (dd->n*dd->m != size) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%D processes in X direction not divisible into comm size %d",m,size);
74     }
75     if ((dd->n > 0) && (dd->m < 0)) {
76       dd->m = size/dd->n;
77       if (dd->n*dd->m != size) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%D processes in Y direction not divisible into comm size %d",n,size);
78     }
79   }
80   PetscFunctionReturn(0);
81 }
82 
#undef __FUNCT__
#define __FUNCT__ "DMDASetBoundaryType"
/*@
  DMDASetBoundaryType - Sets the type of ghost nodes on domain boundaries.

  Logically Collective on DMDA

  Input Parameters:
+ da - The DMDA
. bx - boundary type in the x direction; one of DM_BOUNDARY_NONE, DM_BOUNDARY_GHOSTED, DM_BOUNDARY_PERIODIC
. by - boundary type in the y direction
- bz - boundary type in the z direction

  Notes: must be called before DMSetUp()

  Level: intermediate

.keywords:  distributed array, periodicity
.seealso: DMDACreate(), DMDestroy(), DMDA, DMBoundaryType
@*/
PetscErrorCode  DMDASetBoundaryType(DM da,DMBoundaryType bx,DMBoundaryType by,DMBoundaryType bz)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* the values must match on every rank, hence logically collective */
  PetscValidLogicalCollectiveEnum(da,bx,2);
  PetscValidLogicalCollectiveEnum(da,by,3);
  PetscValidLogicalCollectiveEnum(da,bz,4);
  if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
  dd->bx = bx;
  dd->by = by;
  dd->bz = bz;
  PetscFunctionReturn(0);
}
114 
115 #undef __FUNCT__
116 #define __FUNCT__ "DMDASetDof"
117 /*@
118   DMDASetDof - Sets the number of degrees of freedom per vertex
119 
120   Not collective
121 
122   Input Parameters:
123 + da  - The DMDA
124 - dof - Number of degrees of freedom
125 
126   Level: intermediate
127 
128 .keywords:  distributed array, degrees of freedom
129 .seealso: DMDAGetDof(), DMDACreate(), DMDestroy(), DMDA
130 @*/
131 PetscErrorCode  DMDASetDof(DM da, PetscInt dof)
132 {
133   DM_DA *dd = (DM_DA*)da->data;
134 
135   PetscFunctionBegin;
136   PetscValidHeaderSpecific(da,DM_CLASSID,1);
137   PetscValidLogicalCollectiveInt(da,dof,2);
138   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
139   dd->w  = dof;
140   da->bs = dof;
141   PetscFunctionReturn(0);
142 }
143 
144 #undef __FUNCT__
145 #define __FUNCT__ "DMDAGetDof"
146 /*@
147   DMDAGetDof - Gets the number of degrees of freedom per vertex
148 
149   Not collective
150 
151   Input Parameter:
152 . da  - The DMDA
153 
154   Output Parameter:
155 . dof - Number of degrees of freedom
156 
157   Level: intermediate
158 
159 .keywords:  distributed array, degrees of freedom
160 .seealso: DMDASetDof(), DMDACreate(), DMDestroy(), DMDA
161 @*/
162 PetscErrorCode DMDAGetDof(DM da, PetscInt *dof)
163 {
164   DM_DA *dd = (DM_DA *) da->data;
165 
166   PetscFunctionBegin;
167   PetscValidHeaderSpecific(da,DM_CLASSID,1);
168   PetscValidPointer(dof,2);
169   *dof = dd->w;
170   PetscFunctionReturn(0);
171 }
172 
173 #undef __FUNCT__
174 #define __FUNCT__ "DMDAGetOverlap"
175 /*@
176   DMDAGetOverlap - Gets the size of the per-processor overlap.
177 
178   Not collective
179 
180   Input Parameters:
181 . da  - The DMDA
182 
183   Output Parameters:
184 + x   - Overlap in the x direction
185 . y   - Overlap in the y direction
186 - z   - Overlap in the z direction
187 
188   Level: intermediate
189 
190 .keywords:  distributed array, overlap, domain decomposition
191 .seealso: DMDACreateDomainDecomposition(), DMDASetOverlap(), DMDA
192 @*/
193 PetscErrorCode  DMDAGetOverlap(DM da,PetscInt *x,PetscInt *y,PetscInt *z)
194 {
195   DM_DA *dd = (DM_DA*)da->data;
196 
197   PetscFunctionBegin;
198   PetscValidHeaderSpecific(da,DM_CLASSID,1);
199   if (x) *x = dd->xol;
200   if (y) *y = dd->yol;
201   if (z) *z = dd->zol;
202   PetscFunctionReturn(0);
203 }
204 
#undef __FUNCT__
#define __FUNCT__ "DMDASetOverlap"
/*@
  DMDASetOverlap - Sets the size of the per-processor overlap.

  Logically Collective on DMDA

  Input Parameters:
+ da  - The DMDA
. x   - Overlap in the x direction
. y   - Overlap in the y direction
- z   - Overlap in the z direction

  Level: intermediate

.keywords:  distributed array, overlap, domain decomposition
.seealso: DMDACreateDomainDecomposition(), DMDAGetOverlap(), DMDA
@*/
PetscErrorCode  DMDASetOverlap(DM da,PetscInt x,PetscInt y,PetscInt z)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* values must agree across ranks, hence logically collective */
  PetscValidLogicalCollectiveInt(da,x,2);
  PetscValidLogicalCollectiveInt(da,y,3);
  PetscValidLogicalCollectiveInt(da,z,4);
  dd->xol = x;
  dd->yol = y;
  dd->zol = z;
  PetscFunctionReturn(0);
}
237 
238 
239 #undef __FUNCT__
240 #define __FUNCT__ "DMDAGetNumLocalSubDomains"
241 /*@
242   DMDAGetNumLocalSubDomains - Gets the number of local subdomains created upon decomposition.
243 
244   Not collective
245 
246   Input Parameters:
247 . da  - The DMDA
248 
249   Output Parameters:
250 + Nsub   - Number of local subdomains created upon decomposition
251 
252   Level: intermediate
253 
254 .keywords:  distributed array, domain decomposition
255 .seealso: DMDACreateDomainDecomposition(), DMDASetNumLocalSubDomains(), DMDA
256 @*/
257 PetscErrorCode  DMDAGetNumLocalSubDomains(DM da,PetscInt *Nsub)
258 {
259   DM_DA *dd = (DM_DA*)da->data;
260 
261   PetscFunctionBegin;
262   PetscValidHeaderSpecific(da,DM_CLASSID,1);
263   if (Nsub) *Nsub = dd->Nsub;
264   PetscFunctionReturn(0);
265 }
266 
#undef __FUNCT__
#define __FUNCT__ "DMDASetNumLocalSubDomains"
/*@
  DMDASetNumLocalSubDomains - Sets the number of local subdomains created upon decomposition.

  Logically Collective on DMDA

  Input Parameters:
+ da  - The DMDA
- Nsub - The number of local subdomains requested

  Level: intermediate

.keywords:  distributed array, domain decomposition
.seealso: DMDACreateDomainDecomposition(), DMDAGetNumLocalSubDomains(), DMDA
@*/
PetscErrorCode  DMDASetNumLocalSubDomains(DM da,PetscInt Nsub)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* value must agree across ranks, hence logically collective */
  PetscValidLogicalCollectiveInt(da,Nsub,2);
  dd->Nsub = Nsub;
  PetscFunctionReturn(0);
}
293 
#undef __FUNCT__
#define __FUNCT__ "DMDASetOffset"
/*@
  DMDASetOffset - Sets the index offset of the DA.

  Collective on DA

  Input Parameters:
+ da  - The DMDA
. xo  - The offset in the x direction
. yo  - The offset in the y direction
. zo  - The offset in the z direction
. Mo  - The global size in the x direction
. No  - The global size in the y direction
- Po  - The global size in the z direction

  Level: intermediate

  Notes: This is used primarily to overlap a computation on a local DA with that on a global DA without
  changing boundary conditions or subdomain features that depend upon the global offsets.

.keywords:  distributed array, degrees of freedom
.seealso: DMDAGetOffset(), DMDAVecGetArray()
@*/
PetscErrorCode  DMDASetOffset(DM da, PetscInt xo, PetscInt yo, PetscInt zo, PetscInt Mo, PetscInt No, PetscInt Po)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveInt(da,xo,2);
  PetscValidLogicalCollectiveInt(da,yo,3);
  PetscValidLogicalCollectiveInt(da,zo,4);
  PetscValidLogicalCollectiveInt(da,Mo,5);
  PetscValidLogicalCollectiveInt(da,No,6);
  PetscValidLogicalCollectiveInt(da,Po,7);
  dd->xo = xo;
  dd->yo = yo;
  dd->zo = zo;
  dd->Mo = Mo;
  dd->No = No;
  dd->Po = Po;

  /* keep the coordinate DM's offsets in sync with ours */
  if (da->coordinateDM) {
    ierr = DMDASetOffset(da->coordinateDM,xo,yo,zo,Mo,No,Po);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
340 
341 #undef __FUNCT__
342 #define __FUNCT__ "DMDAGetOffset"
343 /*@
344   DMDAGetOffset - Gets the index offset of the DA.
345 
346   Not collective
347 
348   Input Parameter:
349 . da  - The DMDA
350 
351   Output Parameters:
352 + xo  - The offset in the x direction
353 . yo  - The offset in the y direction
354 . zo  - The offset in the z direction
355 . Mo  - The global size in the x direction
356 . No  - The global size in the y direction
357 - Po  - The global size in the z direction
358 
359   Level: intermediate
360 
361 .keywords:  distributed array, degrees of freedom
362 .seealso: DMDASetOffset(), DMDAVecGetArray()
363 @*/
364 PetscErrorCode  DMDAGetOffset(DM da,PetscInt *xo,PetscInt *yo,PetscInt *zo,PetscInt *Mo,PetscInt *No,PetscInt *Po)
365 {
366   DM_DA *dd = (DM_DA*)da->data;
367 
368   PetscFunctionBegin;
369   PetscValidHeaderSpecific(da,DM_CLASSID,1);
370   if (xo) *xo = dd->xo;
371   if (yo) *yo = dd->yo;
372   if (zo) *zo = dd->zo;
373   if (Mo) *Mo = dd->Mo;
374   if (No) *No = dd->No;
375   if (Po) *Po = dd->Po;
376   PetscFunctionReturn(0);
377 }
378 
#undef __FUNCT__
#define __FUNCT__ "DMDAGetNonOverlappingRegion"
/*@
  DMDAGetNonOverlappingRegion - Gets the indices of the nonoverlapping region of a subdomain DM.

  Not collective

  Input Parameter:
. da  - The DMDA

  Output Parameters:
+ xs  - The start of the region in x
. ys  - The start of the region in y
. zs  - The start of the region in z
. xm  - The size of the region in x
. ym  - The size of the region in y
- zm  - The size of the region in z

  Notes: any output pointer may be NULL if that value is not wanted

  Level: intermediate

.keywords:  distributed array, degrees of freedom
.seealso: DMDAGetOffset(), DMDAVecGetArray()
@*/
PetscErrorCode  DMDAGetNonOverlappingRegion(DM da, PetscInt *xs, PetscInt *ys, PetscInt *zs, PetscInt *xm, PetscInt *ym, PetscInt *zm)
{
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  if (xs) *xs = dd->nonxs;
  if (ys) *ys = dd->nonys;
  if (zs) *zs = dd->nonzs;
  if (xm) *xm = dd->nonxm;
  if (ym) *ym = dd->nonym;
  if (zm) *zm = dd->nonzm;
  PetscFunctionReturn(0);
}
416 
417 
#undef __FUNCT__
#define __FUNCT__ "DMDASetNonOverlappingRegion"
/*@
  DMDASetNonOverlappingRegion - Sets the indices of the nonoverlapping region of a subdomain DM.

  Logically Collective on DMDA

  Input Parameters:
+ da  - The DMDA
. xs  - The start of the region in x
. ys  - The start of the region in y
. zs  - The start of the region in z
. xm  - The size of the region in x
. ym  - The size of the region in y
- zm  - The size of the region in z

  Level: intermediate

.keywords:  distributed array, degrees of freedom
.seealso: DMDAGetOffset(), DMDAVecGetArray()
@*/
PetscErrorCode  DMDASetNonOverlappingRegion(DM da, PetscInt xs, PetscInt ys, PetscInt zs, PetscInt xm, PetscInt ym, PetscInt zm)
{
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* values must agree across ranks, hence logically collective */
  PetscValidLogicalCollectiveInt(da,xs,2);
  PetscValidLogicalCollectiveInt(da,ys,3);
  PetscValidLogicalCollectiveInt(da,zs,4);
  PetscValidLogicalCollectiveInt(da,xm,5);
  PetscValidLogicalCollectiveInt(da,ym,6);
  PetscValidLogicalCollectiveInt(da,zm,7);
  dd->nonxs = xs;
  dd->nonys = ys;
  dd->nonzs = zs;
  dd->nonxm = xm;
  dd->nonym = ym;
  dd->nonzm = zm;

  PetscFunctionReturn(0);
}
460 
461 #undef __FUNCT__
462 #define __FUNCT__ "DMDASetStencilType"
463 /*@
464   DMDASetStencilType - Sets the type of the communication stencil
465 
466   Logically Collective on DMDA
467 
468   Input Parameter:
469 + da    - The DMDA
470 - stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR.
471 
472   Level: intermediate
473 
474 .keywords:  distributed array, stencil
475 .seealso: DMDACreate(), DMDestroy(), DMDA
476 @*/
477 PetscErrorCode  DMDASetStencilType(DM da, DMDAStencilType stype)
478 {
479   DM_DA *dd = (DM_DA*)da->data;
480 
481   PetscFunctionBegin;
482   PetscValidHeaderSpecific(da,DM_CLASSID,1);
483   PetscValidLogicalCollectiveEnum(da,stype,2);
484   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
485   dd->stencil_type = stype;
486   PetscFunctionReturn(0);
487 }
488 
489 #undef __FUNCT__
490 #define __FUNCT__ "DMDAGetStencilType"
491 /*@
492   DMDAGetStencilType - Gets the type of the communication stencil
493 
494   Not collective
495 
496   Input Parameter:
497 . da    - The DMDA
498 
499   Output Parameter:
500 . stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR.
501 
502   Level: intermediate
503 
504 .keywords:  distributed array, stencil
505 .seealso: DMDACreate(), DMDestroy(), DMDA
506 @*/
507 PetscErrorCode DMDAGetStencilType(DM da, DMDAStencilType *stype)
508 {
509   DM_DA *dd = (DM_DA*)da->data;
510 
511   PetscFunctionBegin;
512   PetscValidHeaderSpecific(da,DM_CLASSID,1);
513   PetscValidPointer(stype,2);
514   *stype = dd->stencil_type;
515   PetscFunctionReturn(0);
516 }
517 
518 #undef __FUNCT__
519 #define __FUNCT__ "DMDASetStencilWidth"
520 /*@
521   DMDASetStencilWidth - Sets the width of the communication stencil
522 
523   Logically Collective on DMDA
524 
525   Input Parameter:
526 + da    - The DMDA
527 - width - The stencil width
528 
529   Level: intermediate
530 
531 .keywords:  distributed array, stencil
532 .seealso: DMDACreate(), DMDestroy(), DMDA
533 @*/
534 PetscErrorCode  DMDASetStencilWidth(DM da, PetscInt width)
535 {
536   DM_DA *dd = (DM_DA*)da->data;
537 
538   PetscFunctionBegin;
539   PetscValidHeaderSpecific(da,DM_CLASSID,1);
540   PetscValidLogicalCollectiveInt(da,width,2);
541   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
542   dd->s = width;
543   PetscFunctionReturn(0);
544 }
545 
546 #undef __FUNCT__
547 #define __FUNCT__ "DMDAGetStencilWidth"
548 /*@
549   DMDAGetStencilWidth - Gets the width of the communication stencil
550 
551   Not collective
552 
553   Input Parameter:
554 . da    - The DMDA
555 
556   Output Parameter:
557 . width - The stencil width
558 
559   Level: intermediate
560 
561 .keywords:  distributed array, stencil
562 .seealso: DMDACreate(), DMDestroy(), DMDA
563 @*/
564 PetscErrorCode DMDAGetStencilWidth(DM da, PetscInt *width)
565 {
566   DM_DA *dd = (DM_DA *) da->data;
567 
568   PetscFunctionBegin;
569   PetscValidHeaderSpecific(da,DM_CLASSID,1);
570   PetscValidPointer(width,2);
571   *width = dd->s;
572   PetscFunctionReturn(0);
573 }
574 
575 #undef __FUNCT__
576 #define __FUNCT__ "DMDACheckOwnershipRanges_Private"
577 static PetscErrorCode DMDACheckOwnershipRanges_Private(DM da,PetscInt M,PetscInt m,const PetscInt lx[])
578 {
579   PetscInt i,sum;
580 
581   PetscFunctionBegin;
582   if (M < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Global dimension not set");
583   for (i=sum=0; i<m; i++) sum += lx[i];
584   if (sum != M) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_INCOMP,"Ownership ranges sum to %D but global dimension is %D",sum,M);
585   PetscFunctionReturn(0);
586 }
587 
588 #undef __FUNCT__
589 #define __FUNCT__ "DMDASetOwnershipRanges"
590 /*@
591   DMDASetOwnershipRanges - Sets the number of nodes in each direction on each process
592 
593   Logically Collective on DMDA
594 
595   Input Parameter:
596 + da - The DMDA
597 . lx - array containing number of nodes in the X direction on each process, or NULL. If non-null, must be of length da->m
598 . ly - array containing number of nodes in the Y direction on each process, or NULL. If non-null, must be of length da->n
599 - lz - array containing number of nodes in the Z direction on each process, or NULL. If non-null, must be of length da->p.
600 
601   Level: intermediate
602 
603   Note: these numbers are NOT multiplied by the number of dof per node.
604 
605 .keywords:  distributed array
606 .seealso: DMDACreate(), DMDestroy(), DMDA
607 @*/
608 PetscErrorCode  DMDASetOwnershipRanges(DM da, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[])
609 {
610   PetscErrorCode ierr;
611   DM_DA          *dd = (DM_DA*)da->data;
612 
613   PetscFunctionBegin;
614   PetscValidHeaderSpecific(da,DM_CLASSID,1);
615   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
616   if (lx) {
617     if (dd->m < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
618     ierr = DMDACheckOwnershipRanges_Private(da,dd->M,dd->m,lx);CHKERRQ(ierr);
619     if (!dd->lx) {
620       ierr = PetscMalloc1(dd->m, &dd->lx);CHKERRQ(ierr);
621     }
622     ierr = PetscMemcpy(dd->lx, lx, dd->m*sizeof(PetscInt));CHKERRQ(ierr);
623   }
624   if (ly) {
625     if (dd->n < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
626     ierr = DMDACheckOwnershipRanges_Private(da,dd->N,dd->n,ly);CHKERRQ(ierr);
627     if (!dd->ly) {
628       ierr = PetscMalloc1(dd->n, &dd->ly);CHKERRQ(ierr);
629     }
630     ierr = PetscMemcpy(dd->ly, ly, dd->n*sizeof(PetscInt));CHKERRQ(ierr);
631   }
632   if (lz) {
633     if (dd->p < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
634     ierr = DMDACheckOwnershipRanges_Private(da,dd->P,dd->p,lz);CHKERRQ(ierr);
635     if (!dd->lz) {
636       ierr = PetscMalloc1(dd->p, &dd->lz);CHKERRQ(ierr);
637     }
638     ierr = PetscMemcpy(dd->lz, lz, dd->p*sizeof(PetscInt));CHKERRQ(ierr);
639   }
640   PetscFunctionReturn(0);
641 }
642 
643 #undef __FUNCT__
644 #define __FUNCT__ "DMDASetInterpolationType"
645 /*@
646        DMDASetInterpolationType - Sets the type of interpolation that will be
647           returned by DMCreateInterpolation()
648 
649    Logically Collective on DMDA
650 
651    Input Parameter:
652 +  da - initial distributed array
653 .  ctype - DMDA_Q1 and DMDA_Q0 are currently the only supported forms
654 
655    Level: intermediate
656 
657    Notes: you should call this on the coarser of the two DMDAs you pass to DMCreateInterpolation()
658 
659 .keywords:  distributed array, interpolation
660 
661 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType
662 @*/
663 PetscErrorCode  DMDASetInterpolationType(DM da,DMDAInterpolationType ctype)
664 {
665   DM_DA *dd = (DM_DA*)da->data;
666 
667   PetscFunctionBegin;
668   PetscValidHeaderSpecific(da,DM_CLASSID,1);
669   PetscValidLogicalCollectiveEnum(da,ctype,2);
670   dd->interptype = ctype;
671   PetscFunctionReturn(0);
672 }
673 
674 #undef __FUNCT__
675 #define __FUNCT__ "DMDAGetInterpolationType"
676 /*@
677        DMDAGetInterpolationType - Gets the type of interpolation that will be
678           used by DMCreateInterpolation()
679 
680    Not Collective
681 
682    Input Parameter:
683 .  da - distributed array
684 
685    Output Parameter:
686 .  ctype - interpolation type (DMDA_Q1 and DMDA_Q0 are currently the only supported forms)
687 
688    Level: intermediate
689 
690 .keywords:  distributed array, interpolation
691 
692 .seealso: DMDA, DMDAInterpolationType, DMDASetInterpolationType(), DMCreateInterpolation()
693 @*/
694 PetscErrorCode  DMDAGetInterpolationType(DM da,DMDAInterpolationType *ctype)
695 {
696   DM_DA *dd = (DM_DA*)da->data;
697 
698   PetscFunctionBegin;
699   PetscValidHeaderSpecific(da,DM_CLASSID,1);
700   PetscValidPointer(ctype,2);
701   *ctype = dd->interptype;
702   PetscFunctionReturn(0);
703 }
704 
705 #undef __FUNCT__
706 #define __FUNCT__ "DMDAGetNeighbors"
707 /*@C
708       DMDAGetNeighbors - Gets an array containing the MPI rank of all the current
709         processes neighbors.
710 
711     Not Collective
712 
713    Input Parameter:
714 .     da - the DMDA object
715 
716    Output Parameters:
717 .     ranks - the neighbors ranks, stored with the x index increasing most rapidly.
718               this process itself is in the list
719 
720    Notes: In 2d the array is of length 9, in 3d of length 27
721           Not supported in 1d
722           Do not free the array, it is freed when the DMDA is destroyed.
723 
724    Fortran Notes: In fortran you must pass in an array of the appropriate length.
725 
726    Level: intermediate
727 
728 @*/
729 PetscErrorCode  DMDAGetNeighbors(DM da,const PetscMPIInt *ranks[])
730 {
731   DM_DA *dd = (DM_DA*)da->data;
732 
733   PetscFunctionBegin;
734   PetscValidHeaderSpecific(da,DM_CLASSID,1);
735   *ranks = dd->neighbors;
736   PetscFunctionReturn(0);
737 }
738 
739 #undef __FUNCT__
740 #define __FUNCT__ "DMDAGetOwnershipRanges"
741 /*@C
742       DMDAGetOwnershipRanges - Gets the ranges of indices in the x, y and z direction that are owned by each process
743 
744     Not Collective
745 
746    Input Parameter:
747 .     da - the DMDA object
748 
749    Output Parameter:
750 +     lx - ownership along x direction (optional)
751 .     ly - ownership along y direction (optional)
752 -     lz - ownership along z direction (optional)
753 
754    Level: intermediate
755 
756     Note: these correspond to the optional final arguments passed to DMDACreate(), DMDACreate2d(), DMDACreate3d()
757 
758     In Fortran one must pass in arrays lx, ly, and lz that are long enough to hold the values; the sixth, seventh and
759     eighth arguments from DMDAGetInfo()
760 
761      In C you should not free these arrays, nor change the values in them. They will only have valid values while the
762     DMDA they came from still exists (has not been destroyed).
763 
764     These numbers are NOT multiplied by the number of dof per node.
765 
766 .seealso: DMDAGetCorners(), DMDAGetGhostCorners(), DMDACreate(), DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), VecGetOwnershipRanges()
767 @*/
768 PetscErrorCode  DMDAGetOwnershipRanges(DM da,const PetscInt *lx[],const PetscInt *ly[],const PetscInt *lz[])
769 {
770   DM_DA *dd = (DM_DA*)da->data;
771 
772   PetscFunctionBegin;
773   PetscValidHeaderSpecific(da,DM_CLASSID,1);
774   if (lx) *lx = dd->lx;
775   if (ly) *ly = dd->ly;
776   if (lz) *lz = dd->lz;
777   PetscFunctionReturn(0);
778 }
779 
780 #undef __FUNCT__
781 #define __FUNCT__ "DMDASetRefinementFactor"
782 /*@
783      DMDASetRefinementFactor - Set the ratios that the DMDA grid is refined
784 
785     Logically Collective on DMDA
786 
787   Input Parameters:
788 +    da - the DMDA object
789 .    refine_x - ratio of fine grid to coarse in x direction (2 by default)
790 .    refine_y - ratio of fine grid to coarse in y direction (2 by default)
791 -    refine_z - ratio of fine grid to coarse in z direction (2 by default)
792 
793   Options Database:
794 +  -da_refine_x - refinement ratio in x direction
795 .  -da_refine_y - refinement ratio in y direction
796 -  -da_refine_z - refinement ratio in z direction
797 
798   Level: intermediate
799 
800     Notes: Pass PETSC_IGNORE to leave a value unchanged
801 
802 .seealso: DMRefine(), DMDAGetRefinementFactor()
803 @*/
804 PetscErrorCode  DMDASetRefinementFactor(DM da, PetscInt refine_x, PetscInt refine_y,PetscInt refine_z)
805 {
806   DM_DA *dd = (DM_DA*)da->data;
807 
808   PetscFunctionBegin;
809   PetscValidHeaderSpecific(da,DM_CLASSID,1);
810   PetscValidLogicalCollectiveInt(da,refine_x,2);
811   PetscValidLogicalCollectiveInt(da,refine_y,3);
812   PetscValidLogicalCollectiveInt(da,refine_z,4);
813 
814   if (refine_x > 0) dd->refine_x = refine_x;
815   if (refine_y > 0) dd->refine_y = refine_y;
816   if (refine_z > 0) dd->refine_z = refine_z;
817   PetscFunctionReturn(0);
818 }
819 
820 #undef __FUNCT__
821 #define __FUNCT__ "DMDAGetRefinementFactor"
822 /*@C
823      DMDAGetRefinementFactor - Gets the ratios that the DMDA grid is refined
824 
825     Not Collective
826 
827   Input Parameter:
828 .    da - the DMDA object
829 
830   Output Parameters:
831 +    refine_x - ratio of fine grid to coarse in x direction (2 by default)
832 .    refine_y - ratio of fine grid to coarse in y direction (2 by default)
833 -    refine_z - ratio of fine grid to coarse in z direction (2 by default)
834 
835   Level: intermediate
836 
837     Notes: Pass NULL for values you do not need
838 
839 .seealso: DMRefine(), DMDASetRefinementFactor()
840 @*/
841 PetscErrorCode  DMDAGetRefinementFactor(DM da, PetscInt *refine_x, PetscInt *refine_y,PetscInt *refine_z)
842 {
843   DM_DA *dd = (DM_DA*)da->data;
844 
845   PetscFunctionBegin;
846   PetscValidHeaderSpecific(da,DM_CLASSID,1);
847   if (refine_x) *refine_x = dd->refine_x;
848   if (refine_y) *refine_y = dd->refine_y;
849   if (refine_z) *refine_z = dd->refine_z;
850   PetscFunctionReturn(0);
851 }
852 
#undef __FUNCT__
#define __FUNCT__ "DMDASetGetMatrix"
/*@C
     DMDASetGetMatrix - Sets the routine used by the DMDA to allocate a matrix.

    Logically Collective on DMDA

  Input Parameters:
+    da - the DMDA object
-    f - the function that allocates the matrix for that specific DMDA; it is called
         by DMCreateMatrix() with the DM as first argument and the new matrix as output

  Level: developer

   Notes: See DMDASetBlockFills() that provides a simple way to provide the nonzero structure for
       the diagonal and off-diagonal blocks of the matrix

.seealso: DMCreateMatrix(), DMDASetBlockFills()
@*/
PetscErrorCode  DMDASetGetMatrix(DM da,PetscErrorCode (*f)(DM, Mat*))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* install the callback in the DM's operations table; DMCreateMatrix() dispatches to it */
  da->ops->creatematrix = f;
  PetscFunctionReturn(0);
}
878 
#undef __FUNCT__
#define __FUNCT__ "DMDARefineOwnershipRanges"
/*
  Creates "balanced" ownership ranges after refinement, constrained by the need for the
  fine grid boundaries to fall within one stencil width of the coarse partition.

  Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.

  Inputs:
    periodic      - whether this direction is periodic; a periodic direction refines
                    totalc -> ratio*totalc, a non-periodic one totalc -> 1 + ratio*(totalc-1)
    stencil_width - coarse stencil width the fine partition must respect
    ratio         - refinement ratio, must be >= 1
    m             - number of processes in this direction
    lc            - coarse ownership counts per process (length m)
  Output:
    lf            - fine ownership counts per process (length m)
*/
static PetscErrorCode DMDARefineOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lc[],PetscInt lf[])
{
  PetscInt       i,totalc = 0,remaining,startc = 0,startf = 0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (ratio < 1) SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %D must be at least 1",ratio);
  if (ratio == 1) {
    /* no refinement: the fine partition is the coarse partition */
    ierr = PetscMemcpy(lf,lc,m*sizeof(lc[0]));CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  for (i=0; i<m; i++) totalc += lc[i];
  /* total number of fine nodes still to be distributed (non-periodic keeps the shared endpoint) */
  remaining = (!periodic) + ratio * (totalc - (!periodic));
  for (i=0; i<m; i++) {
    /* ideal balanced share of what is left, rounded up */
    PetscInt want = remaining/(m-i) + !!(remaining%(m-i));
    if (i == m-1) lf[i] = want;
    else {
      const PetscInt nextc = startc + lc[i];
      /* Move the first fine node of the next subdomain to the right until the coarse node on its left is within one
       * coarse stencil width of the first coarse node in the next subdomain. */
      while ((startf+want)/ratio < nextc - stencil_width) want++;
      /* Move the last fine node in the current subdomain to the left until the coarse node on its right is within one
       * coarse stencil width of the last coarse node in the current subdomain. */
      while ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width) want--;
      /* Make sure all constraints are satisfied */
      if (want < 0 || want > remaining || ((startf+want)/ratio < nextc - stencil_width)
          || ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible refined ownership range");
    }
    lf[i]      = want;
    startc    += lc[i];
    startf    += lf[i];
    remaining -= lf[i];
  }
  PetscFunctionReturn(0);
}
922 
#undef __FUNCT__
#define __FUNCT__ "DMDACoarsenOwnershipRanges"
/*
  Creates "balanced" ownership ranges after coarsening, constrained by the need for the
  fine grid boundaries to fall within one stencil width of the coarse partition.

  Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.

  Inputs:
    periodic      - whether this direction is periodic; a periodic direction coarsens
                    totalf -> totalf/ratio, a non-periodic one keeps the shared endpoint
    stencil_width - stencil width the coarse partition must respect
    ratio         - coarsening ratio, must be >= 1
    m             - number of processes in this direction
    lf            - fine ownership counts per process (length m)
  Output:
    lc            - coarse ownership counts per process (length m)
*/
static PetscErrorCode DMDACoarsenOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lf[],PetscInt lc[])
{
  PetscInt       i,totalf,remaining,startc,startf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (ratio < 1) SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %D must be at least 1",ratio);
  if (ratio == 1) {
    /* no coarsening: the coarse partition is the fine partition */
    ierr = PetscMemcpy(lc,lf,m*sizeof(lf[0]));CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  for (i=0,totalf=0; i<m; i++) totalf += lf[i];
  /* total number of coarse nodes still to be distributed */
  remaining = (!periodic) + (totalf - (!periodic)) / ratio;
  for (i=0,startc=0,startf=0; i<m; i++) {
    /* ideal balanced share of what is left, rounded up */
    PetscInt want = remaining/(m-i) + !!(remaining%(m-i));
    if (i == m-1) lc[i] = want;
    else {
      const PetscInt nextf = startf+lf[i];
      /* Slide first coarse node of next subdomain to the left until the coarse node to the left of the first fine
       * node is within one stencil width. */
      while (nextf/ratio < startc+want-stencil_width) want--;
      /* Slide the last coarse node of the current subdomain to the right until the coarse node to the right of the last
       * fine node is within one stencil width. */
      while ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width) want++;
      if (want < 0 || want > remaining
          || (nextf/ratio < startc+want-stencil_width) || ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible coarsened ownership range");
    }
    lc[i]      = want;
    startc    += lc[i];
    startf    += lf[i];
    remaining -= lc[i];
  }
  PetscFunctionReturn(0);
}
965 
#undef __FUNCT__
#define __FUNCT__ "DMRefine_DA"
/*
  DMRefine_DA - DMDA implementation of DMRefine(): creates a refined DMDA with the
  same process grid, boundary types, dof count, and stencil as the input.

  Note: the comm argument is not used; the refined DM is created on the
  communicator of da.
*/
PetscErrorCode  DMRefine_DA(DM da,MPI_Comm comm,DM *daref)
{
  PetscErrorCode ierr;
  PetscInt       M,N,P,i,dim;
  DM             da2;
  DM_DA          *dd = (DM_DA*)da->data,*dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(daref,3);

  ierr = DMGetDimension(da, &dim);CHKERRQ(ierr);
  /* Refined global sizes: with periodic boundaries or piecewise-constant (Q0)
     interpolation refinement is cell-based (size*refine); otherwise it is
     vertex-based (1 + refine*(size-1)). Dimensions beyond dim collapse to 1. */
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->refine_x*dd->M;
  } else {
    M = 1 + dd->refine_x*(dd->M - 1);
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->refine_y*dd->N;
    } else {
      N = 1;
    }
  } else {
    N = 1 + dd->refine_y*(dd->N - 1);
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->refine_z*dd->P;
    } else {
      P = 1;
    }
  } else {
    P = 1 + dd->refine_z*(dd->P - 1);
  }
  /* Create the refined DMDA mirroring da's layout parameters */
  ierr = DMDACreate(PetscObjectComm((PetscObject)da),&da2);CHKERRQ(ierr);
  ierr = DMSetOptionsPrefix(da2,((PetscObject)da)->prefix);CHKERRQ(ierr);
  ierr = DMSetDimension(da2,dim);CHKERRQ(ierr);
  ierr = DMDASetSizes(da2,M,N,P);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(da2,dd->m,dd->n,dd->p);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz);CHKERRQ(ierr);
  ierr = DMDASetDof(da2,dd->w);CHKERRQ(ierr);
  ierr = DMDASetStencilType(da2,dd->stencil_type);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(da2,dd->s);CHKERRQ(ierr);
  /* Compute per-process ownership ranges on the fine grid that are compatible
     with da's coarse partition (one call per active dimension) */
  if (dim == 3) {
    PetscInt *lx,*ly,*lz;
    ierr = PetscMalloc3(dd->m,&lx,dd->n,&ly,dd->p,&lz);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_z,dd->p,dd->lz,lz);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,lz);CHKERRQ(ierr);
    ierr = PetscFree3(lx,ly,lz);CHKERRQ(ierr);
  } else if (dim == 2) {
    PetscInt *lx,*ly;
    ierr = PetscMalloc2(dd->m,&lx,dd->n,&ly);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,NULL);CHKERRQ(ierr);
    ierr = PetscFree2(lx,ly);CHKERRQ(ierr);
  } else if (dim == 1) {
    PetscInt *lx;
    ierr = PetscMalloc1(dd->m,&lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,NULL,NULL);CHKERRQ(ierr);
    ierr = PetscFree(lx);CHKERRQ(ierr);
  }
  dd2 = (DM_DA*)da2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones */
  da2->ops->creatematrix = da->ops->creatematrix;
  /* da2->ops->createinterpolation = da->ops->createinterpolation; this causes problem with SNESVI */
  da2->ops->getcoloring = da->ops->getcoloring;
  dd2->interptype       = dd->interptype;

  /* copy fill information if given (arrays are dfill[w]+w+1 entries long) */
  if (dd->dfill) {
    ierr = PetscMalloc1(dd->dfill[dd->w]+dd->w+1,&dd2->dfill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->dfill,dd->dfill,(dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->ofill) {
    ierr = PetscMalloc1(dd->ofill[dd->w]+dd->w+1,&dd2->ofill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->ofill,dd->ofill,(dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  /* copy the refine information */
  dd2->coarsen_x = dd2->refine_x = dd->refine_x;
  dd2->coarsen_y = dd2->refine_y = dd->refine_y;
  dd2->coarsen_z = dd2->refine_z = dd->refine_z;

  /* If a per-level refinement-factor hierarchy was supplied, select the factors
     for the new DM from it. The current level index is levelup-leveldown; the
     refined DM uses the factor at index level+1 for further refinement and the
     one at index level for coarsening back. The hierarchy array is deep-copied. */
  if (dd->refine_z_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_z_hier_n) {
      dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_z_hier_n) {
      dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown];
    }
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    ierr = PetscMalloc1(dd2->refine_z_hier_n,&dd2->refine_z_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_z_hier,dd->refine_z_hier,dd2->refine_z_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_y_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_y_hier_n) {
      dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_y_hier_n) {
      dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown];
    }
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    ierr = PetscMalloc1(dd2->refine_y_hier_n,&dd2->refine_y_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_y_hier,dd->refine_y_hier,dd2->refine_y_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_x_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_x_hier_n) {
      dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_x_hier_n) {
      dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown];
    }
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    ierr = PetscMalloc1(dd2->refine_x_hier_n,&dd2->refine_x_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_x_hier,dd->refine_x_hier,dd2->refine_x_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }


  /* copy vector type information */
  ierr = DMSetVecType(da2,da->vectype);CHKERRQ(ierr);

  /* NOTE(review): shallow pointer copy of lf/lj — ownership is not visible here;
     confirm these arrays outlive da2 and are not double-freed */
  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  /* the refined DM is one level "up" from da */
  da2->leveldown = da->leveldown;
  da2->levelup   = da->levelup + 1;

  ierr = DMSetUp(da2);CHKERRQ(ierr);

  /* interpolate coordinates if they are set on the coarse grid */
  if (da->coordinates) {
    DM  cdaf,cdac;
    Vec coordsc,coordsf;
    Mat II;

    ierr = DMGetCoordinateDM(da,&cdac);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da,&coordsc);CHKERRQ(ierr);
    ierr = DMGetCoordinateDM(da2,&cdaf);CHKERRQ(ierr);
    /* force creation of the coordinate vector */
    ierr = DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da2,&coordsf);CHKERRQ(ierr);
    ierr = DMCreateInterpolation(cdac,cdaf,&II,NULL);CHKERRQ(ierr);
    ierr = MatInterpolate(II,coordsc,coordsf);CHKERRQ(ierr);
    ierr = MatDestroy(&II);CHKERRQ(ierr);
  }

  /* propagate per-field names to the refined DM */
  for (i=0; i<da->bs; i++) {
    const char *fieldname;
    ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr);
    ierr = DMDASetFieldName(da2,i,fieldname);CHKERRQ(ierr);
  }

  *daref = da2;
  PetscFunctionReturn(0);
}
1128 
1129 
#undef __FUNCT__
#define __FUNCT__ "DMCoarsen_DA"
/*
  DMCoarsen_DA - DMDA implementation of DMCoarsen(): creates a coarsened DMDA with
  the same process grid, boundary types, dof count, and stencil as the input.

  Note: the comm argument is not used; the coarsened DM is created on the
  communicator of da.
*/
PetscErrorCode  DMCoarsen_DA(DM da, MPI_Comm comm,DM *daref)
{
  PetscErrorCode ierr;
  PetscInt       M,N,P,i,dim;
  DM             da2;
  DM_DA          *dd = (DM_DA*)da->data,*dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(daref,3);

  ierr = DMGetDimension(da, &dim);CHKERRQ(ierr);
  /* Coarse global sizes: inverse of the formulas used in DMRefine_DA —
     cell-based (size/coarsen) for periodic or Q0, vertex-based
     (1 + (size-1)/coarsen) otherwise. Dimensions beyond dim collapse to 1. */
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->M / dd->coarsen_x;
  } else {
    M = 1 + (dd->M - 1) / dd->coarsen_x;
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->N / dd->coarsen_y;
    } else {
      N = 1;
    }
  } else {
    N = 1 + (dd->N - 1) / dd->coarsen_y;
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->P / dd->coarsen_z;
    } else {
      P = 1;
    }
  } else {
    P = 1 + (dd->P - 1) / dd->coarsen_z;
  }
  /* Create the coarsened DMDA mirroring da's layout parameters */
  ierr = DMDACreate(PetscObjectComm((PetscObject)da),&da2);CHKERRQ(ierr);
  ierr = DMSetOptionsPrefix(da2,((PetscObject)da)->prefix);CHKERRQ(ierr);
  ierr = DMSetDimension(da2,dim);CHKERRQ(ierr);
  ierr = DMDASetSizes(da2,M,N,P);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(da2,dd->m,dd->n,dd->p);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz);CHKERRQ(ierr);
  ierr = DMDASetDof(da2,dd->w);CHKERRQ(ierr);
  ierr = DMDASetStencilType(da2,dd->stencil_type);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(da2,dd->s);CHKERRQ(ierr);
  /* Compute per-process ownership ranges on the coarse grid that are compatible
     with da's fine partition (one call per active dimension) */
  if (dim == 3) {
    PetscInt *lx,*ly,*lz;
    ierr = PetscMalloc3(dd->m,&lx,dd->n,&ly,dd->p,&lz);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_z,dd->p,dd->lz,lz);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,lz);CHKERRQ(ierr);
    ierr = PetscFree3(lx,ly,lz);CHKERRQ(ierr);
  } else if (dim == 2) {
    PetscInt *lx,*ly;
    ierr = PetscMalloc2(dd->m,&lx,dd->n,&ly);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,NULL);CHKERRQ(ierr);
    ierr = PetscFree2(lx,ly);CHKERRQ(ierr);
  } else if (dim == 1) {
    PetscInt *lx;
    ierr = PetscMalloc1(dd->m,&lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,NULL,NULL);CHKERRQ(ierr);
    ierr = PetscFree(lx);CHKERRQ(ierr);
  }
  dd2 = (DM_DA*)da2->data;

  /* allow overloaded (user replaced) operations to be inherited by coarsened clones; why are only some inherited and not all? */
  /* da2->ops->createinterpolation = da->ops->createinterpolation; copying this one causes trouble for DMSetVI */
  da2->ops->creatematrix = da->ops->creatematrix;
  da2->ops->getcoloring  = da->ops->getcoloring;
  dd2->interptype        = dd->interptype;

  /* copy fill information if given (arrays are dfill[w]+w+1 entries long) */
  if (dd->dfill) {
    ierr = PetscMalloc1(dd->dfill[dd->w]+dd->w+1,&dd2->dfill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->dfill,dd->dfill,(dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->ofill) {
    ierr = PetscMalloc1(dd->ofill[dd->w]+dd->w+1,&dd2->ofill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->ofill,dd->ofill,(dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  /* copy the refine information */
  dd2->coarsen_x = dd2->refine_x = dd->coarsen_x;
  dd2->coarsen_y = dd2->refine_y = dd->coarsen_y;
  dd2->coarsen_z = dd2->refine_z = dd->coarsen_z;

  /* If a per-level refinement-factor hierarchy was supplied, select the factors
     for the new (coarser) DM: it sits one level below da, so its refine factor
     comes from index level-1 and its coarsen factor from index level-2, where
     level = levelup-leveldown. The hierarchy array is deep-copied. */
  if (dd->refine_z_hier) {
    if (da->levelup - da->leveldown -1 > -1 && da->levelup - da->leveldown - 1< dd->refine_z_hier_n) {
      dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown - 1];
    }
    if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_z_hier_n) {
      dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown - 2];
    }
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    ierr = PetscMalloc1(dd2->refine_z_hier_n,&dd2->refine_z_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_z_hier,dd->refine_z_hier,dd2->refine_z_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_y_hier) {
    if (da->levelup - da->leveldown - 1 > -1 && da->levelup - da->leveldown - 1< dd->refine_y_hier_n) {
      dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown - 1];
    }
    if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_y_hier_n) {
      dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown - 2];
    }
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    ierr = PetscMalloc1(dd2->refine_y_hier_n,&dd2->refine_y_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_y_hier,dd->refine_y_hier,dd2->refine_y_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_x_hier) {
    if (da->levelup - da->leveldown - 1 > -1 && da->levelup - da->leveldown - 1 < dd->refine_x_hier_n) {
      dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown - 1];
    }
    if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_x_hier_n) {
      dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown - 2];
    }
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    ierr = PetscMalloc1(dd2->refine_x_hier_n,&dd2->refine_x_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_x_hier,dd->refine_x_hier,dd2->refine_x_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }

  /* copy vector type information */
  ierr = DMSetVecType(da2,da->vectype);CHKERRQ(ierr);

  /* NOTE(review): shallow pointer copy of lf/lj — ownership is not visible here;
     confirm these arrays outlive da2 and are not double-freed */
  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  /* the coarsened DM is one level "down" from da */
  da2->leveldown = da->leveldown + 1;
  da2->levelup   = da->levelup;

  ierr = DMSetUp(da2);CHKERRQ(ierr);

  /* inject coordinates if they are set on the fine grid */
  if (da->coordinates) {
    DM         cdaf,cdac;
    Vec        coordsc,coordsf;
    Mat        inject;
    VecScatter vscat;

    ierr = DMGetCoordinateDM(da,&cdaf);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da,&coordsf);CHKERRQ(ierr);
    ierr = DMGetCoordinateDM(da2,&cdac);CHKERRQ(ierr);
    /* force creation of the coordinate vector */
    ierr = DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da2,&coordsc);CHKERRQ(ierr);

    /* injection is applied via its underlying VecScatter, fine -> coarse */
    ierr = DMCreateInjection(cdac,cdaf,&inject);CHKERRQ(ierr);
    ierr = MatScatterGetVecScatter(inject,&vscat);CHKERRQ(ierr);
    ierr = VecScatterBegin(vscat,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(vscat,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = MatDestroy(&inject);CHKERRQ(ierr);
  }

  /* propagate per-field names to the coarsened DM */
  for (i=0; i<da->bs; i++) {
    const char *fieldname;
    ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr);
    ierr = DMDASetFieldName(da2,i,fieldname);CHKERRQ(ierr);
  }

  *daref = da2;
  PetscFunctionReturn(0);
}
1295 
1296 #undef __FUNCT__
1297 #define __FUNCT__ "DMRefineHierarchy_DA"
1298 PetscErrorCode  DMRefineHierarchy_DA(DM da,PetscInt nlevels,DM daf[])
1299 {
1300   PetscErrorCode ierr;
1301   PetscInt       i,n,*refx,*refy,*refz;
1302 
1303   PetscFunctionBegin;
1304   PetscValidHeaderSpecific(da,DM_CLASSID,1);
1305   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
1306   if (nlevels == 0) PetscFunctionReturn(0);
1307   PetscValidPointer(daf,3);
1308 
1309   /* Get refinement factors, defaults taken from the coarse DMDA */
1310   ierr = PetscMalloc3(nlevels,&refx,nlevels,&refy,nlevels,&refz);CHKERRQ(ierr);
1311   for (i=0; i<nlevels; i++) {
1312     ierr = DMDAGetRefinementFactor(da,&refx[i],&refy[i],&refz[i]);CHKERRQ(ierr);
1313   }
1314   n    = nlevels;
1315   ierr = PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_x",refx,&n,NULL);CHKERRQ(ierr);
1316   n    = nlevels;
1317   ierr = PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_y",refy,&n,NULL);CHKERRQ(ierr);
1318   n    = nlevels;
1319   ierr = PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_z",refz,&n,NULL);CHKERRQ(ierr);
1320 
1321   ierr = DMDASetRefinementFactor(da,refx[0],refy[0],refz[0]);CHKERRQ(ierr);
1322   ierr = DMRefine(da,PetscObjectComm((PetscObject)da),&daf[0]);CHKERRQ(ierr);
1323   for (i=1; i<nlevels; i++) {
1324     ierr = DMDASetRefinementFactor(daf[i-1],refx[i],refy[i],refz[i]);CHKERRQ(ierr);
1325     ierr = DMRefine(daf[i-1],PetscObjectComm((PetscObject)da),&daf[i]);CHKERRQ(ierr);
1326   }
1327   ierr = PetscFree3(refx,refy,refz);CHKERRQ(ierr);
1328   PetscFunctionReturn(0);
1329 }
1330 
1331 #undef __FUNCT__
1332 #define __FUNCT__ "DMCoarsenHierarchy_DA"
1333 PetscErrorCode  DMCoarsenHierarchy_DA(DM da,PetscInt nlevels,DM dac[])
1334 {
1335   PetscErrorCode ierr;
1336   PetscInt       i;
1337 
1338   PetscFunctionBegin;
1339   PetscValidHeaderSpecific(da,DM_CLASSID,1);
1340   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
1341   if (nlevels == 0) PetscFunctionReturn(0);
1342   PetscValidPointer(dac,3);
1343   ierr = DMCoarsen(da,PetscObjectComm((PetscObject)da),&dac[0]);CHKERRQ(ierr);
1344   for (i=1; i<nlevels; i++) {
1345     ierr = DMCoarsen(dac[i-1],PetscObjectComm((PetscObject)da),&dac[i]);CHKERRQ(ierr);
1346   }
1347   PetscFunctionReturn(0);
1348 }
1349