xref: /petsc/src/dm/impls/da/da.c (revision c4e6da2c21f60ca208d50527ab05cf373da374c0)
1 #include <petsc/private/dmdaimpl.h>    /*I   "petscdmda.h"   I*/
2 
3 /*@
4   DMDASetSizes - Sets the global sizes
5 
6   Logically Collective on DMDA
7 
8   Input Parameters:
9 + da - the DMDA
10 . M - the global X size (or PETSC_DECIDE)
11 . N - the global Y size (or PETSC_DECIDE)
12 - P - the global Z size (or PETSC_DECIDE)
13 
14   Level: intermediate
15 
16 .seealso: DMDAGetSize(), PetscSplitOwnership()
17 @*/
18 PetscErrorCode  DMDASetSizes(DM da, PetscInt M, PetscInt N, PetscInt P)
19 {
20   DM_DA *dd = (DM_DA*)da->data;
21 
22   PetscFunctionBegin;
23   PetscValidHeaderSpecificType(da, DM_CLASSID, 1,DMDA);
24   PetscValidLogicalCollectiveInt(da,M,2);
25   PetscValidLogicalCollectiveInt(da,N,3);
26   PetscValidLogicalCollectiveInt(da,P,4);
27   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
28 
29   dd->M = M;
30   dd->N = N;
31   dd->P = P;
32   PetscFunctionReturn(0);
33 }
34 
/*@
  DMDASetNumProcs - Sets the number of processes in each dimension

  Logically Collective on DMDA

  Input Parameters:
+ da - the DMDA
. m - the number of X procs (or PETSC_DECIDE)
. n - the number of Y procs (or PETSC_DECIDE)
- p - the number of Z procs (or PETSC_DECIDE)

  Notes:
  Must be called before DMSetUp(). In 2d, if exactly one of m and n is given,
  the other is derived so that m*n equals the communicator size.

.seealso: DMDASetSizes(), DMDAGetSize(), PetscSplitOwnership()

  Level: intermediate
@*/
PetscErrorCode  DMDASetNumProcs(DM da, PetscInt m, PetscInt n, PetscInt p)
{
  DM_DA          *dd = (DM_DA*)da->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1,DMDA);
  PetscValidLogicalCollectiveInt(da,m,2);
  PetscValidLogicalCollectiveInt(da,n,3);
  PetscValidLogicalCollectiveInt(da,p,4);
  if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
  dd->m = m;
  dd->n = n;
  dd->p = p;
  /* 2d only: fill in the unspecified direction so that the process grid covers the whole communicator */
  if (da->dim == 2) {
    PetscMPIInt size;
    ierr = MPI_Comm_size(PetscObjectComm((PetscObject)da),&size);CHKERRQ(ierr);
    if ((dd->m > 0) && (dd->n < 0)) {
      /* m fixed by the caller: n must be exactly size/m */
      dd->n = size/dd->m;
      if (dd->n*dd->m != size) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%D processes in X direction not divisible into comm size %d",m,size);
    }
    if ((dd->n > 0) && (dd->m < 0)) {
      /* n fixed by the caller: m must be exactly size/n */
      dd->m = size/dd->n;
      if (dd->n*dd->m != size) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%D processes in Y direction not divisible into comm size %d",n,size);
    }
  }
  PetscFunctionReturn(0);
}
78 
/*@
  DMDASetBoundaryType - Sets the type of ghost nodes on domain boundaries.

  Logically Collective on DMDA

  Input Parameters:
+ da    - The DMDA
- bx,by,bz - One of DM_BOUNDARY_NONE, DM_BOUNDARY_GHOSTED, DM_BOUNDARY_PERIODIC

  Notes:
  Must be called before DMSetUp().

  Level: intermediate

.keywords:  distributed array, periodicity
.seealso: DMDACreate(), DMDestroy(), DMDA, DMBoundaryType
@*/
PetscErrorCode  DMDASetBoundaryType(DM da,DMBoundaryType bx,DMBoundaryType by,DMBoundaryType bz)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
  PetscValidLogicalCollectiveEnum(da,bx,2);
  PetscValidLogicalCollectiveEnum(da,by,3);
  PetscValidLogicalCollectiveEnum(da,bz,4);
  if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
  dd->bx = bx;
  dd->by = by;
  dd->bz = bz;
  PetscFunctionReturn(0);
}
108 
109 /*@
110   DMDASetDof - Sets the number of degrees of freedom per vertex
111 
112   Not collective
113 
114   Input Parameters:
115 + da  - The DMDA
116 - dof - Number of degrees of freedom
117 
118   Level: intermediate
119 
120 .keywords:  distributed array, degrees of freedom
121 .seealso: DMDAGetDof(), DMDACreate(), DMDestroy(), DMDA
122 @*/
123 PetscErrorCode  DMDASetDof(DM da, PetscInt dof)
124 {
125   DM_DA *dd = (DM_DA*)da->data;
126 
127   PetscFunctionBegin;
128   PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
129   PetscValidLogicalCollectiveInt(da,dof,2);
130   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
131   dd->w  = dof;
132   da->bs = dof;
133   PetscFunctionReturn(0);
134 }
135 
136 /*@
137   DMDAGetDof - Gets the number of degrees of freedom per vertex
138 
139   Not collective
140 
141   Input Parameter:
142 . da  - The DMDA
143 
144   Output Parameter:
145 . dof - Number of degrees of freedom
146 
147   Level: intermediate
148 
149 .keywords:  distributed array, degrees of freedom
150 .seealso: DMDASetDof(), DMDACreate(), DMDestroy(), DMDA
151 @*/
152 PetscErrorCode DMDAGetDof(DM da, PetscInt *dof)
153 {
154   DM_DA *dd = (DM_DA *) da->data;
155 
156   PetscFunctionBegin;
157   PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
158   PetscValidPointer(dof,2);
159   *dof = dd->w;
160   PetscFunctionReturn(0);
161 }
162 
163 /*@
164   DMDAGetOverlap - Gets the size of the per-processor overlap.
165 
166   Not collective
167 
168   Input Parameters:
169 . da  - The DMDA
170 
171   Output Parameters:
172 + x   - Overlap in the x direction
173 . y   - Overlap in the y direction
174 - z   - Overlap in the z direction
175 
176   Level: intermediate
177 
178 .keywords:  distributed array, overlap, domain decomposition
179 .seealso: DMDACreateDomainDecomposition(), DMDASetOverlap(), DMDA
180 @*/
181 PetscErrorCode  DMDAGetOverlap(DM da,PetscInt *x,PetscInt *y,PetscInt *z)
182 {
183   DM_DA *dd = (DM_DA*)da->data;
184 
185   PetscFunctionBegin;
186   PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
187   if (x) *x = dd->xol;
188   if (y) *y = dd->yol;
189   if (z) *z = dd->zol;
190   PetscFunctionReturn(0);
191 }
192 
/*@
  DMDASetOverlap - Sets the size of the per-processor overlap.

  Logically Collective on DMDA

  Input Parameters:
+ da  - The DMDA
. x   - Overlap in the x direction
. y   - Overlap in the y direction
- z   - Overlap in the z direction

  Level: intermediate

.keywords:  distributed array, overlap, domain decomposition
.seealso: DMDACreateDomainDecomposition(), DMDAGetOverlap(), DMDA
@*/
PetscErrorCode  DMDASetOverlap(DM da,PetscInt x,PetscInt y,PetscInt z)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
  PetscValidLogicalCollectiveInt(da,x,2);
  PetscValidLogicalCollectiveInt(da,y,3);
  PetscValidLogicalCollectiveInt(da,z,4);
  dd->xol = x;
  dd->yol = y;
  dd->zol = z;
  PetscFunctionReturn(0);
}
223 
224 
225 /*@
226   DMDAGetNumLocalSubDomains - Gets the number of local subdomains created upon decomposition.
227 
228   Not collective
229 
230   Input Parameters:
231 . da  - The DMDA
232 
233   Output Parameters:
234 + Nsub   - Number of local subdomains created upon decomposition
235 
236   Level: intermediate
237 
238 .keywords:  distributed array, domain decomposition
239 .seealso: DMDACreateDomainDecomposition(), DMDASetNumLocalSubDomains(), DMDA
240 @*/
241 PetscErrorCode  DMDAGetNumLocalSubDomains(DM da,PetscInt *Nsub)
242 {
243   DM_DA *dd = (DM_DA*)da->data;
244 
245   PetscFunctionBegin;
246   PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
247   if (Nsub) *Nsub = dd->Nsub;
248   PetscFunctionReturn(0);
249 }
250 
/*@
  DMDASetNumLocalSubDomains - Sets the number of local subdomains created upon decomposition.

  Logically Collective on DMDA

  Input Parameters:
+ da  - The DMDA
- Nsub - The number of local subdomains requested

  Level: intermediate

.keywords:  distributed array, domain decomposition
.seealso: DMDACreateDomainDecomposition(), DMDAGetNumLocalSubDomains(), DMDA
@*/
PetscErrorCode  DMDASetNumLocalSubDomains(DM da,PetscInt Nsub)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
  PetscValidLogicalCollectiveInt(da,Nsub,2);
  dd->Nsub = Nsub;
  PetscFunctionReturn(0);
}
275 
/*@
  DMDASetOffset - Sets the index offset of the DA.

  Collective on DA

  Input Parameters:
+ da  - The DMDA
. xo  - The offset in the x direction
. yo  - The offset in the y direction
. zo  - The offset in the z direction
. Mo  - The global size in the x direction (matching DMDAGetOffset())
. No  - The global size in the y direction (matching DMDAGetOffset())
- Po  - The global size in the z direction (matching DMDAGetOffset())

  Level: intermediate

  Notes:
    This is used primarily to overlap a computation on a local DA with that on a global DA without
  changing boundary conditions or subdomain features that depend upon the global offsets.

.keywords:  distributed array, degrees of freedom
.seealso: DMDAGetOffset(), DMDAVecGetArray()
@*/
PetscErrorCode  DMDASetOffset(DM da, PetscInt xo, PetscInt yo, PetscInt zo, PetscInt Mo, PetscInt No, PetscInt Po)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
  PetscValidLogicalCollectiveInt(da,xo,2);
  PetscValidLogicalCollectiveInt(da,yo,3);
  PetscValidLogicalCollectiveInt(da,zo,4);
  PetscValidLogicalCollectiveInt(da,Mo,5);
  PetscValidLogicalCollectiveInt(da,No,6);
  PetscValidLogicalCollectiveInt(da,Po,7);
  dd->xo = xo;
  dd->yo = yo;
  dd->zo = zo;
  dd->Mo = Mo;
  dd->No = No;
  dd->Po = Po;

  /* propagate the same offsets to the coordinate DM so coordinates stay aligned with the field DM */
  if (da->coordinateDM) {
    ierr = DMDASetOffset(da->coordinateDM,xo,yo,zo,Mo,No,Po);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
321 
322 /*@
323   DMDAGetOffset - Gets the index offset of the DA.
324 
325   Not collective
326 
327   Input Parameter:
328 . da  - The DMDA
329 
330   Output Parameters:
331 + xo  - The offset in the x direction
332 . yo  - The offset in the y direction
333 . zo  - The offset in the z direction
334 . Mo  - The global size in the x direction
335 . No  - The global size in the y direction
336 - Po  - The global size in the z direction
337 
338   Level: intermediate
339 
340 .keywords:  distributed array, degrees of freedom
341 .seealso: DMDASetOffset(), DMDAVecGetArray()
342 @*/
343 PetscErrorCode  DMDAGetOffset(DM da,PetscInt *xo,PetscInt *yo,PetscInt *zo,PetscInt *Mo,PetscInt *No,PetscInt *Po)
344 {
345   DM_DA *dd = (DM_DA*)da->data;
346 
347   PetscFunctionBegin;
348   PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
349   if (xo) *xo = dd->xo;
350   if (yo) *yo = dd->yo;
351   if (zo) *zo = dd->zo;
352   if (Mo) *Mo = dd->Mo;
353   if (No) *No = dd->No;
354   if (Po) *Po = dd->Po;
355   PetscFunctionReturn(0);
356 }
357 
/*@
  DMDAGetNonOverlappingRegion - Gets the indices of the nonoverlapping region of a subdomain DM.

  Not collective

  Input Parameter:
. da  - The DMDA

  Output Parameters:
+ xs  - The start of the region in x
. ys  - The start of the region in y
. zs  - The start of the region in z
. xm  - The size of the region in x
. ym  - The size of the region in y
- zm  - The size of the region in z

  Level: intermediate

.keywords:  distributed array, degrees of freedom
.seealso: DMDAGetOffset(), DMDAVecGetArray()
@*/
PetscErrorCode  DMDAGetNonOverlappingRegion(DM da, PetscInt *xs, PetscInt *ys, PetscInt *zs, PetscInt *xm, PetscInt *ym, PetscInt *zm)
{
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
  if (xs) *xs = dd->nonxs;
  if (ys) *ys = dd->nonys;
  if (zs) *zs = dd->nonzs;
  if (xm) *xm = dd->nonxm;
  if (ym) *ym = dd->nonym;
  if (zm) *zm = dd->nonzm;
  PetscFunctionReturn(0);
}
393 
394 
/*@
  DMDASetNonOverlappingRegion - Sets the indices of the nonoverlapping region of a subdomain DM.

  Collective on DA

  Input Parameters:
+ da  - The DMDA
. xs  - The start of the region in x
. ys  - The start of the region in y
. zs  - The start of the region in z
. xm  - The size of the region in x
. ym  - The size of the region in y
- zm  - The size of the region in z

  Level: intermediate

.keywords:  distributed array, degrees of freedom
.seealso: DMDAGetOffset(), DMDAVecGetArray()
@*/
PetscErrorCode  DMDASetNonOverlappingRegion(DM da, PetscInt xs, PetscInt ys, PetscInt zs, PetscInt xm, PetscInt ym, PetscInt zm)
{
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
  PetscValidLogicalCollectiveInt(da,xs,2);
  PetscValidLogicalCollectiveInt(da,ys,3);
  PetscValidLogicalCollectiveInt(da,zs,4);
  PetscValidLogicalCollectiveInt(da,xm,5);
  PetscValidLogicalCollectiveInt(da,ym,6);
  PetscValidLogicalCollectiveInt(da,zm,7);
  dd->nonxs = xs;
  dd->nonys = ys;
  dd->nonzs = zs;
  dd->nonxm = xm;
  dd->nonym = ym;
  dd->nonzm = zm;

  PetscFunctionReturn(0);
}
435 
436 /*@
437   DMDASetStencilType - Sets the type of the communication stencil
438 
439   Logically Collective on DMDA
440 
441   Input Parameter:
442 + da    - The DMDA
443 - stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR.
444 
445   Level: intermediate
446 
447 .keywords:  distributed array, stencil
448 .seealso: DMDACreate(), DMDestroy(), DMDA
449 @*/
450 PetscErrorCode  DMDASetStencilType(DM da, DMDAStencilType stype)
451 {
452   DM_DA *dd = (DM_DA*)da->data;
453 
454   PetscFunctionBegin;
455   PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
456   PetscValidLogicalCollectiveEnum(da,stype,2);
457   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
458   dd->stencil_type = stype;
459   PetscFunctionReturn(0);
460 }
461 
462 /*@
463   DMDAGetStencilType - Gets the type of the communication stencil
464 
465   Not collective
466 
467   Input Parameter:
468 . da    - The DMDA
469 
470   Output Parameter:
471 . stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR.
472 
473   Level: intermediate
474 
475 .keywords:  distributed array, stencil
476 .seealso: DMDACreate(), DMDestroy(), DMDA
477 @*/
478 PetscErrorCode DMDAGetStencilType(DM da, DMDAStencilType *stype)
479 {
480   DM_DA *dd = (DM_DA*)da->data;
481 
482   PetscFunctionBegin;
483   PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
484   PetscValidPointer(stype,2);
485   *stype = dd->stencil_type;
486   PetscFunctionReturn(0);
487 }
488 
489 /*@
490   DMDASetStencilWidth - Sets the width of the communication stencil
491 
492   Logically Collective on DMDA
493 
494   Input Parameter:
495 + da    - The DMDA
496 - width - The stencil width
497 
498   Level: intermediate
499 
500 .keywords:  distributed array, stencil
501 .seealso: DMDACreate(), DMDestroy(), DMDA
502 @*/
503 PetscErrorCode  DMDASetStencilWidth(DM da, PetscInt width)
504 {
505   DM_DA *dd = (DM_DA*)da->data;
506 
507   PetscFunctionBegin;
508   PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
509   PetscValidLogicalCollectiveInt(da,width,2);
510   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
511   dd->s = width;
512   PetscFunctionReturn(0);
513 }
514 
515 /*@
516   DMDAGetStencilWidth - Gets the width of the communication stencil
517 
518   Not collective
519 
520   Input Parameter:
521 . da    - The DMDA
522 
523   Output Parameter:
524 . width - The stencil width
525 
526   Level: intermediate
527 
528 .keywords:  distributed array, stencil
529 .seealso: DMDACreate(), DMDestroy(), DMDA
530 @*/
531 PetscErrorCode DMDAGetStencilWidth(DM da, PetscInt *width)
532 {
533   DM_DA *dd = (DM_DA *) da->data;
534 
535   PetscFunctionBegin;
536   PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
537   PetscValidPointer(width,2);
538   *width = dd->s;
539   PetscFunctionReturn(0);
540 }
541 
542 static PetscErrorCode DMDACheckOwnershipRanges_Private(DM da,PetscInt M,PetscInt m,const PetscInt lx[])
543 {
544   PetscInt i,sum;
545 
546   PetscFunctionBegin;
547   if (M < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Global dimension not set");
548   for (i=sum=0; i<m; i++) sum += lx[i];
549   if (sum != M) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_INCOMP,"Ownership ranges sum to %D but global dimension is %D",sum,M);
550   PetscFunctionReturn(0);
551 }
552 
/*@
  DMDASetOwnershipRanges - Sets the number of nodes in each direction on each process

  Logically Collective on DMDA

  Input Parameters:
+ da - The DMDA
. lx - array containing number of nodes in the X direction on each process, or NULL. If non-null, must be of length da->m
. ly - array containing number of nodes in the Y direction on each process, or NULL. If non-null, must be of length da->n
- lz - array containing number of nodes in the Z direction on each process, or NULL. If non-null, must be of length da->p.

  Level: intermediate

  Note: these numbers are NOT multiplied by the number of dof per node.
  Must be called after DMDASetNumProcs() (for each non-NULL direction) and before DMSetUp().

.keywords:  distributed array
.seealso: DMDACreate(), DMDestroy(), DMDA
@*/
PetscErrorCode  DMDASetOwnershipRanges(DM da, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[])
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
  if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
  if (lx) {
    if (dd->m < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
    ierr = DMDACheckOwnershipRanges_Private(da,dd->M,dd->m,lx);CHKERRQ(ierr);
    /* allocate the stored range array lazily on first use, then copy the caller's data */
    if (!dd->lx) {
      ierr = PetscMalloc1(dd->m, &dd->lx);CHKERRQ(ierr);
    }
    ierr = PetscMemcpy(dd->lx, lx, dd->m*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (ly) {
    if (dd->n < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
    ierr = DMDACheckOwnershipRanges_Private(da,dd->N,dd->n,ly);CHKERRQ(ierr);
    if (!dd->ly) {
      ierr = PetscMalloc1(dd->n, &dd->ly);CHKERRQ(ierr);
    }
    ierr = PetscMemcpy(dd->ly, ly, dd->n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (lz) {
    if (dd->p < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
    ierr = DMDACheckOwnershipRanges_Private(da,dd->P,dd->p,lz);CHKERRQ(ierr);
    if (!dd->lz) {
      ierr = PetscMalloc1(dd->p, &dd->lz);CHKERRQ(ierr);
    }
    ierr = PetscMemcpy(dd->lz, lz, dd->p*sizeof(PetscInt));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
605 
/*@
       DMDASetInterpolationType - Sets the type of interpolation that will be
          returned by DMCreateInterpolation()

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  ctype - DMDA_Q1 and DMDA_Q0 are currently the only supported forms

   Level: intermediate

   Notes:
    you should call this on the coarser of the two DMDAs you pass to DMCreateInterpolation()

.keywords:  distributed array, interpolation

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType
@*/
PetscErrorCode  DMDASetInterpolationType(DM da,DMDAInterpolationType ctype)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
  PetscValidLogicalCollectiveEnum(da,ctype,2);
  dd->interptype = ctype;
  PetscFunctionReturn(0);
}
635 
636 /*@
637        DMDAGetInterpolationType - Gets the type of interpolation that will be
638           used by DMCreateInterpolation()
639 
640    Not Collective
641 
642    Input Parameter:
643 .  da - distributed array
644 
645    Output Parameter:
646 .  ctype - interpolation type (DMDA_Q1 and DMDA_Q0 are currently the only supported forms)
647 
648    Level: intermediate
649 
650 .keywords:  distributed array, interpolation
651 
652 .seealso: DMDA, DMDAInterpolationType, DMDASetInterpolationType(), DMCreateInterpolation()
653 @*/
654 PetscErrorCode  DMDAGetInterpolationType(DM da,DMDAInterpolationType *ctype)
655 {
656   DM_DA *dd = (DM_DA*)da->data;
657 
658   PetscFunctionBegin;
659   PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
660   PetscValidPointer(ctype,2);
661   *ctype = dd->interptype;
662   PetscFunctionReturn(0);
663 }
664 
665 /*@C
666       DMDAGetNeighbors - Gets an array containing the MPI rank of all the current
667         processes neighbors.
668 
669     Not Collective
670 
671    Input Parameter:
672 .     da - the DMDA object
673 
674    Output Parameters:
675 .     ranks - the neighbors ranks, stored with the x index increasing most rapidly.
676               this process itself is in the list
677 
678    Notes:
679     In 2d the array is of length 9, in 3d of length 27
680           Not supported in 1d
681           Do not free the array, it is freed when the DMDA is destroyed.
682 
683    Fortran Notes:
684     In fortran you must pass in an array of the appropriate length.
685 
686    Level: intermediate
687 
688 @*/
689 PetscErrorCode  DMDAGetNeighbors(DM da,const PetscMPIInt *ranks[])
690 {
691   DM_DA *dd = (DM_DA*)da->data;
692 
693   PetscFunctionBegin;
694   PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
695   *ranks = dd->neighbors;
696   PetscFunctionReturn(0);
697 }
698 
699 /*@C
700       DMDAGetOwnershipRanges - Gets the ranges of indices in the x, y and z direction that are owned by each process
701 
702     Not Collective
703 
704    Input Parameter:
705 .     da - the DMDA object
706 
707    Output Parameter:
708 +     lx - ownership along x direction (optional)
709 .     ly - ownership along y direction (optional)
710 -     lz - ownership along z direction (optional)
711 
712    Level: intermediate
713 
714     Note: these correspond to the optional final arguments passed to DMDACreate(), DMDACreate2d(), DMDACreate3d()
715 
716     In Fortran one must pass in arrays lx, ly, and lz that are long enough to hold the values; the sixth, seventh and
717     eighth arguments from DMDAGetInfo()
718 
719      In C you should not free these arrays, nor change the values in them. They will only have valid values while the
720     DMDA they came from still exists (has not been destroyed).
721 
722     These numbers are NOT multiplied by the number of dof per node.
723 
724      Not available from Fortran
725 
726 .seealso: DMDAGetCorners(), DMDAGetGhostCorners(), DMDACreate(), DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), VecGetOwnershipRanges()
727 @*/
728 PetscErrorCode  DMDAGetOwnershipRanges(DM da,const PetscInt *lx[],const PetscInt *ly[],const PetscInt *lz[])
729 {
730   DM_DA *dd = (DM_DA*)da->data;
731 
732   PetscFunctionBegin;
733   PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
734   if (lx) *lx = dd->lx;
735   if (ly) *ly = dd->ly;
736   if (lz) *lz = dd->lz;
737   PetscFunctionReturn(0);
738 }
739 
740 /*@
741      DMDASetRefinementFactor - Set the ratios that the DMDA grid is refined
742 
743     Logically Collective on DMDA
744 
745   Input Parameters:
746 +    da - the DMDA object
747 .    refine_x - ratio of fine grid to coarse in x direction (2 by default)
748 .    refine_y - ratio of fine grid to coarse in y direction (2 by default)
749 -    refine_z - ratio of fine grid to coarse in z direction (2 by default)
750 
751   Options Database:
752 +  -da_refine_x - refinement ratio in x direction
753 .  -da_refine_y - refinement ratio in y direction
754 -  -da_refine_z - refinement ratio in z direction
755 
756   Level: intermediate
757 
758     Notes:
759     Pass PETSC_IGNORE to leave a value unchanged
760 
761 .seealso: DMRefine(), DMDAGetRefinementFactor()
762 @*/
763 PetscErrorCode  DMDASetRefinementFactor(DM da, PetscInt refine_x, PetscInt refine_y,PetscInt refine_z)
764 {
765   DM_DA *dd = (DM_DA*)da->data;
766 
767   PetscFunctionBegin;
768   PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
769   PetscValidLogicalCollectiveInt(da,refine_x,2);
770   PetscValidLogicalCollectiveInt(da,refine_y,3);
771   PetscValidLogicalCollectiveInt(da,refine_z,4);
772 
773   if (refine_x > 0) dd->refine_x = refine_x;
774   if (refine_y > 0) dd->refine_y = refine_y;
775   if (refine_z > 0) dd->refine_z = refine_z;
776   PetscFunctionReturn(0);
777 }
778 
779 /*@C
780      DMDAGetRefinementFactor - Gets the ratios that the DMDA grid is refined
781 
782     Not Collective
783 
784   Input Parameter:
785 .    da - the DMDA object
786 
787   Output Parameters:
788 +    refine_x - ratio of fine grid to coarse in x direction (2 by default)
789 .    refine_y - ratio of fine grid to coarse in y direction (2 by default)
790 -    refine_z - ratio of fine grid to coarse in z direction (2 by default)
791 
792   Level: intermediate
793 
794     Notes:
795     Pass NULL for values you do not need
796 
797 .seealso: DMRefine(), DMDASetRefinementFactor()
798 @*/
799 PetscErrorCode  DMDAGetRefinementFactor(DM da, PetscInt *refine_x, PetscInt *refine_y,PetscInt *refine_z)
800 {
801   DM_DA *dd = (DM_DA*)da->data;
802 
803   PetscFunctionBegin;
804   PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
805   if (refine_x) *refine_x = dd->refine_x;
806   if (refine_y) *refine_y = dd->refine_y;
807   if (refine_z) *refine_z = dd->refine_z;
808   PetscFunctionReturn(0);
809 }
810 
/*@C
     DMDASetGetMatrix - Sets the routine used by the DMDA to allocate a matrix.

    Logically Collective on DMDA

  Input Parameters:
+    da - the DMDA object
-    f - the function that allocates the matrix for that specific DMDA

  Level: developer

   Notes:
    See DMDASetBlockFills() that provides a simple way to provide the nonzero structure for
       the diagonal and off-diagonal blocks of the matrix

   The routine is stored in the DM's function table and invoked by DMCreateMatrix().

   Not supported from Fortran

.seealso: DMCreateMatrix(), DMDASetBlockFills()
@*/
PetscErrorCode  DMDASetGetMatrix(DM da,PetscErrorCode (*f)(DM, Mat*))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
  da->ops->creatematrix = f;
  PetscFunctionReturn(0);
}
837 
/*
  Creates "balanced" ownership ranges after refinement, constrained by the need for the
  fine grid boundaries to fall within one stencil width of the coarse partition.

  Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.

  Input:  lc[] - coarse ownership counts over m processes
  Output: lf[] - fine ownership counts over the same m processes
*/
static PetscErrorCode DMDARefineOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lc[],PetscInt lf[])
{
  PetscInt       i,totalc = 0,remaining,startc = 0,startf = 0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (ratio < 1) SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %D must be at least 1",ratio);
  if (ratio == 1) {
    /* no refinement: the fine partition is identical to the coarse one */
    ierr = PetscMemcpy(lf,lc,m*sizeof(lc[0]));CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  for (i=0; i<m; i++) totalc += lc[i];
  /* total number of fine nodes: in the non-periodic case the endpoint is shared, hence the (!periodic) terms */
  remaining = (!periodic) + ratio * (totalc - (!periodic));
  for (i=0; i<m; i++) {
    /* ideal (ceiling-divided) share of the remaining fine nodes for this process */
    PetscInt want = remaining/(m-i) + !!(remaining%(m-i));
    if (i == m-1) lf[i] = want;
    else {
      const PetscInt nextc = startc + lc[i];
      /* Move the first fine node of the next subdomain to the right until the coarse node on its left is within one
       * coarse stencil width of the first coarse node in the next subdomain. */
      while ((startf+want)/ratio < nextc - stencil_width) want++;
      /* Move the last fine node in the current subdomain to the left until the coarse node on its right is within one
       * coarse stencil width of the last coarse node in the current subdomain. */
      while ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width) want--;
      /* Make sure all constraints are satisfied */
      if (want < 0 || want > remaining || ((startf+want)/ratio < nextc - stencil_width)
          || ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible refined ownership range");
    }
    lf[i]      = want;
    startc    += lc[i];
    startf    += lf[i];
    remaining -= lf[i];
  }
  PetscFunctionReturn(0);
}
879 
/*
  Creates "balanced" ownership ranges after coarsening, constrained by the need for the
  fine grid boundaries to fall within one stencil width of the coarse partition.

  Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.

  Input:  lf[] - fine ownership counts over m processes
  Output: lc[] - coarse ownership counts over the same m processes
*/
static PetscErrorCode DMDACoarsenOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lf[],PetscInt lc[])
{
  PetscInt       i,totalf,remaining,startc,startf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (ratio < 1) SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %D must be at least 1",ratio);
  if (ratio == 1) {
    /* no coarsening: the coarse partition is identical to the fine one */
    ierr = PetscMemcpy(lc,lf,m*sizeof(lf[0]));CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  for (i=0,totalf=0; i<m; i++) totalf += lf[i];
  /* total number of coarse nodes; the (!periodic) terms account for the shared endpoint in the non-periodic case */
  remaining = (!periodic) + (totalf - (!periodic)) / ratio;
  for (i=0,startc=0,startf=0; i<m; i++) {
    /* ideal (ceiling-divided) share of the remaining coarse nodes for this process */
    PetscInt want = remaining/(m-i) + !!(remaining%(m-i));
    if (i == m-1) lc[i] = want;
    else {
      const PetscInt nextf = startf+lf[i];
      /* Slide first coarse node of next subdomain to the left until the coarse node to the left of the first fine
       * node is within one stencil width. */
      while (nextf/ratio < startc+want-stencil_width) want--;
      /* Slide the last coarse node of the current subdomain to the right until the coarse node to the right of the last
       * fine node is within one stencil width. */
      while ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width) want++;
      /* Make sure all constraints are satisfied */
      if (want < 0 || want > remaining
          || (nextf/ratio < startc+want-stencil_width) || ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible coarsened ownership range");
    }
    lc[i]      = want;
    startc    += lc[i];
    startf    += lf[i];
    remaining -= lc[i];
  }
  PetscFunctionReturn(0);
}
920 
/*
   DMRefine_DA - Builds a refined DMDA from da: computes the refined global sizes
   and per-process ownership ranges, clones the layout options (boundary type,
   dof, stencil), copies fill and refinement-factor information, then interpolates
   coordinates and copies field names onto the new DM.

   NOTE(review): the comm argument is unused; the refined DM is always created on
   da's own communicator.
*/
PetscErrorCode  DMRefine_DA(DM da,MPI_Comm comm,DM *daref)
{
  PetscErrorCode ierr;
  PetscInt       M,N,P,i,dim;
  DM             da2;
  DM_DA          *dd = (DM_DA*)da->data,*dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
  PetscValidPointer(daref,3);

  ierr = DMGetDimension(da, &dim);CHKERRQ(ierr);
  /* Refined global sizes: periodic (or piecewise-constant Q0) grids are cell-based,
     so each used dimension scales as refine*M; otherwise the grid is vertex-based
     and the endpoints are shared, giving 1 + refine*(M-1). Unused dimensions stay 1. */
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->refine_x*dd->M;
  } else {
    M = 1 + dd->refine_x*(dd->M - 1);
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->refine_y*dd->N;
    } else {
      N = 1;
    }
  } else {
    N = 1 + dd->refine_y*(dd->N - 1);
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->refine_z*dd->P;
    } else {
      P = 1;
    }
  } else {
    P = 1 + dd->refine_z*(dd->P - 1);
  }
  /* Create the fine DM with the same process grid and discretization options as da */
  ierr = DMDACreate(PetscObjectComm((PetscObject)da),&da2);CHKERRQ(ierr);
  ierr = DMSetOptionsPrefix(da2,((PetscObject)da)->prefix);CHKERRQ(ierr);
  ierr = DMSetDimension(da2,dim);CHKERRQ(ierr);
  ierr = DMDASetSizes(da2,M,N,P);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(da2,dd->m,dd->n,dd->p);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz);CHKERRQ(ierr);
  ierr = DMDASetDof(da2,dd->w);CHKERRQ(ierr);
  ierr = DMDASetStencilType(da2,dd->stencil_type);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(da2,dd->s);CHKERRQ(ierr);
  /* Derive fine-grid per-process ownership ranges from the coarse decomposition so
     that interpolation stencils stay local enough (see DMDARefineOwnershipRanges) */
  if (dim == 3) {
    PetscInt *lx,*ly,*lz;
    ierr = PetscMalloc3(dd->m,&lx,dd->n,&ly,dd->p,&lz);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_z,dd->p,dd->lz,lz);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,lz);CHKERRQ(ierr);
    ierr = PetscFree3(lx,ly,lz);CHKERRQ(ierr);
  } else if (dim == 2) {
    PetscInt *lx,*ly;
    ierr = PetscMalloc2(dd->m,&lx,dd->n,&ly);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,NULL);CHKERRQ(ierr);
    ierr = PetscFree2(lx,ly);CHKERRQ(ierr);
  } else if (dim == 1) {
    PetscInt *lx;
    ierr = PetscMalloc1(dd->m,&lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,NULL,NULL);CHKERRQ(ierr);
    ierr = PetscFree(lx);CHKERRQ(ierr);
  }
  dd2 = (DM_DA*)da2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones */
  da2->ops->creatematrix = da->ops->creatematrix;
  /* da2->ops->createinterpolation = da->ops->createinterpolation; this causes problem with SNESVI */
  da2->ops->getcoloring = da->ops->getcoloring;
  dd2->interptype       = dd->interptype;

  /* copy fill information if given */
  /* dfill/ofill are CSR-like tables of size dfill[w]+w+1 (row pointers plus entries) */
  if (dd->dfill) {
    ierr = PetscMalloc1(dd->dfill[dd->w]+dd->w+1,&dd2->dfill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->dfill,dd->dfill,(dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->ofill) {
    ierr = PetscMalloc1(dd->ofill[dd->w]+dd->w+1,&dd2->ofill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->ofill,dd->ofill,(dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  /* copy the refine information */
  dd2->coarsen_x = dd2->refine_x = dd->refine_x;
  dd2->coarsen_y = dd2->refine_y = dd->refine_y;
  dd2->coarsen_z = dd2->refine_z = dd->refine_z;

  /* Per-level overrides: refine_*_hier[] is indexed by grid level (levelup - leveldown).
     The new DM sits one level finer, so its refine factor is entry level+1 and its
     coarsen factor (back to this level) is entry level, when those entries exist. */
  if (dd->refine_z_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_z_hier_n) {
      dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_z_hier_n) {
      dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown];
    }
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    ierr = PetscMalloc1(dd2->refine_z_hier_n,&dd2->refine_z_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_z_hier,dd->refine_z_hier,dd2->refine_z_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_y_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_y_hier_n) {
      dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_y_hier_n) {
      dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown];
    }
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    ierr = PetscMalloc1(dd2->refine_y_hier_n,&dd2->refine_y_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_y_hier,dd->refine_y_hier,dd2->refine_y_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_x_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_x_hier_n) {
      dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_x_hier_n) {
      dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown];
    }
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    ierr = PetscMalloc1(dd2->refine_x_hier_n,&dd2->refine_x_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_x_hier,dd->refine_x_hier,dd2->refine_x_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }


  /* copy vector type information */
  ierr = DMSetVecType(da2,da->vectype);CHKERRQ(ierr);

  dd2->lf = dd->lf;  /* NOTE(review): pointer copies, not deep copies -- function pointers/state shared with da; confirm lifetime */
  dd2->lj = dd->lj;

  da2->leveldown = da->leveldown;
  da2->levelup   = da->levelup + 1;  /* the new DM is one refinement level finer */

  ierr = DMSetUp(da2);CHKERRQ(ierr);

  /* interpolate coordinates if they are set on the coarse grid */
  if (da->coordinates) {
    DM  cdaf,cdac;
    Vec coordsc,coordsf;
    Mat II;

    ierr = DMGetCoordinateDM(da,&cdac);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da,&coordsc);CHKERRQ(ierr);
    ierr = DMGetCoordinateDM(da2,&cdaf);CHKERRQ(ierr);
    /* force creation of the coordinate vector */
    ierr = DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da2,&coordsf);CHKERRQ(ierr);
    /* overwrite the placeholder uniform coordinates with interpolated coarse coordinates */
    ierr = DMCreateInterpolation(cdac,cdaf,&II,NULL);CHKERRQ(ierr);
    ierr = MatInterpolate(II,coordsc,coordsf);CHKERRQ(ierr);
    ierr = MatDestroy(&II);CHKERRQ(ierr);
  }

  /* carry over per-field names */
  for (i=0; i<da->bs; i++) {
    const char *fieldname;
    ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr);
    ierr = DMDASetFieldName(da2,i,fieldname);CHKERRQ(ierr);
  }

  *daref = da2;
  PetscFunctionReturn(0);
}
1081 
1082 
/*
   DMCoarsen_DA - Builds a coarsened DMDA from da: computes the coarse global sizes
   and per-process ownership ranges, clones layout options, copies fill and
   refinement-factor information, then injects coordinates and copies field names
   onto the new DM.

   NOTE(review): the comm argument is unused; the coarse DM is always created on
   da's own communicator.
*/
PetscErrorCode  DMCoarsen_DA(DM da, MPI_Comm comm,DM *daref)
{
  PetscErrorCode ierr;
  PetscInt       M,N,P,i,dim;
  DM             da2;
  DM_DA          *dd = (DM_DA*)da->data,*dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
  PetscValidPointer(daref,3);

  ierr = DMGetDimension(da, &dim);CHKERRQ(ierr);
  /* Coarse global sizes: inverse of the refinement rule -- periodic (or Q0) grids
     divide the cell count, vertex-based grids keep the shared endpoints:
     M_coarse = 1 + (M-1)/coarsen. Unused dimensions stay 1. */
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->M / dd->coarsen_x;
  } else {
    M = 1 + (dd->M - 1) / dd->coarsen_x;
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->N / dd->coarsen_y;
    } else {
      N = 1;
    }
  } else {
    N = 1 + (dd->N - 1) / dd->coarsen_y;
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->P / dd->coarsen_z;
    } else {
      P = 1;
    }
  } else {
    P = 1 + (dd->P - 1) / dd->coarsen_z;
  }
  /* Create the coarse DM with the same process grid and discretization options as da */
  ierr = DMDACreate(PetscObjectComm((PetscObject)da),&da2);CHKERRQ(ierr);
  ierr = DMSetOptionsPrefix(da2,((PetscObject)da)->prefix);CHKERRQ(ierr);
  ierr = DMSetDimension(da2,dim);CHKERRQ(ierr);
  ierr = DMDASetSizes(da2,M,N,P);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(da2,dd->m,dd->n,dd->p);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz);CHKERRQ(ierr);
  ierr = DMDASetDof(da2,dd->w);CHKERRQ(ierr);
  ierr = DMDASetStencilType(da2,dd->stencil_type);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(da2,dd->s);CHKERRQ(ierr);
  /* Derive coarse-grid per-process ownership ranges compatible with the fine
     decomposition (see DMDACoarsenOwnershipRanges) */
  if (dim == 3) {
    PetscInt *lx,*ly,*lz;
    ierr = PetscMalloc3(dd->m,&lx,dd->n,&ly,dd->p,&lz);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_z,dd->p,dd->lz,lz);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,lz);CHKERRQ(ierr);
    ierr = PetscFree3(lx,ly,lz);CHKERRQ(ierr);
  } else if (dim == 2) {
    PetscInt *lx,*ly;
    ierr = PetscMalloc2(dd->m,&lx,dd->n,&ly);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,NULL);CHKERRQ(ierr);
    ierr = PetscFree2(lx,ly);CHKERRQ(ierr);
  } else if (dim == 1) {
    PetscInt *lx;
    ierr = PetscMalloc1(dd->m,&lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,NULL,NULL);CHKERRQ(ierr);
    ierr = PetscFree(lx);CHKERRQ(ierr);
  }
  dd2 = (DM_DA*)da2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones; why are only some inherited and not all? */
  /* da2->ops->createinterpolation = da->ops->createinterpolation; copying this one causes trouble for DMSetVI */
  da2->ops->creatematrix = da->ops->creatematrix;
  da2->ops->getcoloring  = da->ops->getcoloring;
  dd2->interptype        = dd->interptype;

  /* copy fill information if given */
  /* dfill/ofill are CSR-like tables of size dfill[w]+w+1 (row pointers plus entries) */
  if (dd->dfill) {
    ierr = PetscMalloc1(dd->dfill[dd->w]+dd->w+1,&dd2->dfill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->dfill,dd->dfill,(dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->ofill) {
    ierr = PetscMalloc1(dd->ofill[dd->w]+dd->w+1,&dd2->ofill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->ofill,dd->ofill,(dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  /* copy the refine information */
  /* the coarse DM refines back by the same factors da coarsened by */
  dd2->coarsen_x = dd2->refine_x = dd->coarsen_x;
  dd2->coarsen_y = dd2->refine_y = dd->coarsen_y;
  dd2->coarsen_z = dd2->refine_z = dd->coarsen_z;

  /* Per-level overrides: refine_*_hier[] is indexed by grid level (levelup - leveldown).
     The new DM sits one level coarser, so its refine factor (back to this level) is
     entry level-1 and its coarsen factor is entry level-2, when those entries exist. */
  if (dd->refine_z_hier) {
    if (da->levelup - da->leveldown -1 > -1 && da->levelup - da->leveldown - 1< dd->refine_z_hier_n) {
      dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown - 1];
    }
    if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_z_hier_n) {
      dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown - 2];
    }
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    ierr = PetscMalloc1(dd2->refine_z_hier_n,&dd2->refine_z_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_z_hier,dd->refine_z_hier,dd2->refine_z_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_y_hier) {
    if (da->levelup - da->leveldown - 1 > -1 && da->levelup - da->leveldown - 1< dd->refine_y_hier_n) {
      dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown - 1];
    }
    if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_y_hier_n) {
      dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown - 2];
    }
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    ierr = PetscMalloc1(dd2->refine_y_hier_n,&dd2->refine_y_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_y_hier,dd->refine_y_hier,dd2->refine_y_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_x_hier) {
    if (da->levelup - da->leveldown - 1 > -1 && da->levelup - da->leveldown - 1 < dd->refine_x_hier_n) {
      dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown - 1];
    }
    if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_x_hier_n) {
      dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown - 2];
    }
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    ierr = PetscMalloc1(dd2->refine_x_hier_n,&dd2->refine_x_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_x_hier,dd->refine_x_hier,dd2->refine_x_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }

  /* copy vector type information */
  ierr = DMSetVecType(da2,da->vectype);CHKERRQ(ierr);

  dd2->lf = dd->lf;  /* NOTE(review): pointer copies, not deep copies -- function pointers/state shared with da; confirm lifetime */
  dd2->lj = dd->lj;

  da2->leveldown = da->leveldown + 1;  /* the new DM is one coarsening level below da */
  da2->levelup   = da->levelup;

  ierr = DMSetUp(da2);CHKERRQ(ierr);

  /* inject coordinates if they are set on the fine grid */
  if (da->coordinates) {
    DM         cdaf,cdac;
    Vec        coordsc,coordsf;
    Mat        inject;
    VecScatter vscat;

    ierr = DMGetCoordinateDM(da,&cdaf);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da,&coordsf);CHKERRQ(ierr);
    ierr = DMGetCoordinateDM(da2,&cdac);CHKERRQ(ierr);
    /* force creation of the coordinate vector */
    ierr = DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da2,&coordsc);CHKERRQ(ierr);

    /* restrict the fine coordinates onto the coarse grid via the injection scatter */
    ierr = DMCreateInjection(cdac,cdaf,&inject);CHKERRQ(ierr);
    ierr = MatScatterGetVecScatter(inject,&vscat);CHKERRQ(ierr);
    ierr = VecScatterBegin(vscat,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(vscat,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = MatDestroy(&inject);CHKERRQ(ierr);
  }

  /* carry over per-field names */
  for (i=0; i<da->bs; i++) {
    const char *fieldname;
    ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr);
    ierr = DMDASetFieldName(da2,i,fieldname);CHKERRQ(ierr);
  }

  *daref = da2;
  PetscFunctionReturn(0);
}
1246 
1247 PetscErrorCode  DMRefineHierarchy_DA(DM da,PetscInt nlevels,DM daf[])
1248 {
1249   PetscErrorCode ierr;
1250   PetscInt       i,n,*refx,*refy,*refz;
1251 
1252   PetscFunctionBegin;
1253   PetscValidHeaderSpecific(da,DM_CLASSID,1);
1254   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
1255   if (nlevels == 0) PetscFunctionReturn(0);
1256   PetscValidPointer(daf,3);
1257 
1258   /* Get refinement factors, defaults taken from the coarse DMDA */
1259   ierr = PetscMalloc3(nlevels,&refx,nlevels,&refy,nlevels,&refz);CHKERRQ(ierr);
1260   for (i=0; i<nlevels; i++) {
1261     ierr = DMDAGetRefinementFactor(da,&refx[i],&refy[i],&refz[i]);CHKERRQ(ierr);
1262   }
1263   n    = nlevels;
1264   ierr = PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_x",refx,&n,NULL);CHKERRQ(ierr);
1265   n    = nlevels;
1266   ierr = PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_y",refy,&n,NULL);CHKERRQ(ierr);
1267   n    = nlevels;
1268   ierr = PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_z",refz,&n,NULL);CHKERRQ(ierr);
1269 
1270   ierr = DMDASetRefinementFactor(da,refx[0],refy[0],refz[0]);CHKERRQ(ierr);
1271   ierr = DMRefine(da,PetscObjectComm((PetscObject)da),&daf[0]);CHKERRQ(ierr);
1272   for (i=1; i<nlevels; i++) {
1273     ierr = DMDASetRefinementFactor(daf[i-1],refx[i],refy[i],refz[i]);CHKERRQ(ierr);
1274     ierr = DMRefine(daf[i-1],PetscObjectComm((PetscObject)da),&daf[i]);CHKERRQ(ierr);
1275   }
1276   ierr = PetscFree3(refx,refy,refz);CHKERRQ(ierr);
1277   PetscFunctionReturn(0);
1278 }
1279 
1280 PetscErrorCode  DMCoarsenHierarchy_DA(DM da,PetscInt nlevels,DM dac[])
1281 {
1282   PetscErrorCode ierr;
1283   PetscInt       i;
1284 
1285   PetscFunctionBegin;
1286   PetscValidHeaderSpecific(da,DM_CLASSID,1);
1287   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
1288   if (nlevels == 0) PetscFunctionReturn(0);
1289   PetscValidPointer(dac,3);
1290   ierr = DMCoarsen(da,PetscObjectComm((PetscObject)da),&dac[0]);CHKERRQ(ierr);
1291   for (i=1; i<nlevels; i++) {
1292     ierr = DMCoarsen(dac[i-1],PetscObjectComm((PetscObject)da),&dac[i]);CHKERRQ(ierr);
1293   }
1294   PetscFunctionReturn(0);
1295 }
1296 
1297 #include <petscgll.h>
1298 
1299 PetscErrorCode DMDASetGLLCoordinates_1d(DM dm,PetscGLL *gll)
1300 {
1301   PetscErrorCode ierr;
1302   PetscInt       i,j,n = gll->n,xs,xn,q;
1303   PetscScalar    *xx;
1304   PetscReal      h;
1305   Vec            x;
1306   DM_DA          *da = (DM_DA*)dm->data;
1307 
1308   PetscFunctionBegin;
1309   if (da->bx != DM_BOUNDARY_PERIODIC) {
1310     ierr = DMDAGetInfo(dm,NULL,&q,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);CHKERRQ(ierr);
1311     q    = (q-1)/(n-1);  /* number of spectral elements */
1312     h    = 2.0/q;
1313     ierr = DMDAGetCorners(dm,&xs,NULL,NULL,&xn,NULL,NULL);CHKERRQ(ierr);
1314     xs   = xs/(n-1);
1315     xn   = xn/(n-1);
1316     ierr = DMDASetUniformCoordinates(dm,-1.,1.,0.,0.,0.,0.);CHKERRQ(ierr);
1317     ierr = DMGetCoordinates(dm,&x);CHKERRQ(ierr);
1318     ierr = DMDAVecGetArray(dm,x,&xx);CHKERRQ(ierr);
1319 
1320     /* loop over local spectral elements */
1321     for (j=xs; j<xs+xn; j++) {
1322       /*
1323        Except for the first process, each process starts on the second GLL point of the first element on that process
1324        */
1325       for (i= (j == xs && xs > 0)? 1 : 0; i<n; i++) {
1326         xx[j*(n-1) + i] = -1.0 + h*j + h*(gll->nodes[i]+1.0)/2.;
1327       }
1328     }
1329     ierr = DMDAVecRestoreArray(dm,x,&xx);CHKERRQ(ierr);
1330   } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented for periodic");
1331   PetscFunctionReturn(0);
1332 }
1333 
1334 /*@
1335 
1336      DMDASetGLLCoordinates - Sets the global coordinates from -1 to 1 to the GLL points of as many GLL elements that fit the number of grid points
1337 
1338    Collective on DM
1339 
1340    Input Parameters:
1341 +   da - the DMDA object
1342 -   gll - the GLL object
1343 
1344    Notes:
1345     the parallel decomposition of grid points must correspond to the degree of the GLL. That is, the number of grid points
          on each process must be divisible by the number of GLL elements needed per process. This depends on whether the DM is
1347           periodic or not.
1348 
1349    Level: advanced
1350 
1351 .seealso:   DMDACreate(), PetscGLLCreate(), DMGetCoordinates()
1352 @*/
1353 PetscErrorCode DMDASetGLLCoordinates(DM da,PetscGLL *gll)
1354 {
1355   PetscErrorCode ierr;
1356 
1357   PetscFunctionBegin;
1358   if (da->dim == 1) {
1359     ierr = DMDASetGLLCoordinates_1d(da,gll);CHKERRQ(ierr);
1360   } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented for 2 or 3d");
1361   PetscFunctionReturn(0);
1362 }
1363