xref: /petsc/src/dm/impls/da/dacorn.c (revision 7b6bb2c608b6fc6714ef38fda02c2dbb91c82665)
1 
2 /*
3   Code for manipulating distributed regular arrays in parallel.
4 */
5 
6 #include <private/daimpl.h>    /*I   "petscdmda.h"   I*/
7 
8 #undef __FUNCT__
9 #define __FUNCT__ "DMDASetCoordinates"
10 /*@
11    DMDASetCoordinates - Sets into the DMDA a vector that indicates the
12       coordinates of the local nodes (NOT including ghost nodes).
13 
14    Collective on DMDA
15 
   Input Parameters:
17 +  da - the distributed array
18 -  c - coordinate vector
19 
20    Note:
21     The coordinates should NOT include those for all ghost points
22 
23   Level: intermediate
24 
25 .keywords: distributed array, get, corners, nodes, local indices, coordinates
26 
.seealso: DMDASetGhostCoordinates(), DMDAGetGhostCorners(), DMDAGetCoordinates(), DMDASetUniformCoordinates(), DMDAGetGhostedCoordinates(), DMDAGetCoordinateDA()
28 @*/
29 PetscErrorCode  DMDASetCoordinates(DM da,Vec c)
30 {
31   PetscErrorCode ierr;
32   DM_DA          *dd = (DM_DA*)da->data;
33 
34   PetscFunctionBegin;
35   PetscValidHeaderSpecific(da,DM_CLASSID,1);
36   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
37   ierr = PetscObjectReference((PetscObject)c);CHKERRQ(ierr);
38   ierr = VecDestroy(&dd->coordinates);CHKERRQ(ierr);
39   dd->coordinates = c;
40   ierr = VecSetBlockSize(c,dd->dim);CHKERRQ(ierr);
41   ierr = VecDestroy(&dd->ghosted_coordinates);CHKERRQ(ierr);
42   PetscFunctionReturn(0);
43 }
44 
45 #undef __FUNCT__
46 #define __FUNCT__ "DMDASetGhostedCoordinates"
47 /*@
48    DMDASetGhostedCoordinates - Sets into the DMDA a vector that indicates the
49       coordinates of the local nodes, including ghost nodes.
50 
51    Collective on DMDA
52 
   Input Parameters:
54 +  da - the distributed array
55 -  c - coordinate vector
56 
57    Note:
58     The coordinates of interior ghost points can be set using DMDASetCoordinates()
59     followed by DMDAGetGhostedCoordinates().  This is intended to enable the setting
60     of ghost coordinates outside of the domain.
61 
62     Non-ghosted coordinates, if set, are assumed still valid.
63 
64   Level: intermediate
65 
66 .keywords: distributed array, get, corners, nodes, local indices, coordinates
67 
.seealso: DMDASetCoordinates(), DMDAGetGhostCorners(), DMDAGetCoordinates(), DMDASetUniformCoordinates(), DMDAGetGhostedCoordinates(), DMDAGetCoordinateDA()
69 @*/
70 PetscErrorCode  DMDASetGhostedCoordinates(DM da,Vec c)
71 {
72   PetscErrorCode ierr;
73   DM_DA          *dd = (DM_DA*)da->data;
74 
75   PetscFunctionBegin;
76   PetscValidHeaderSpecific(da,DM_CLASSID,1);
77   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
78   ierr = PetscObjectReference((PetscObject)c);CHKERRQ(ierr);
79   ierr = VecDestroy(&dd->ghosted_coordinates);CHKERRQ(ierr);
80   dd->ghosted_coordinates = c;
81   ierr = VecSetBlockSize(c,dd->dim);CHKERRQ(ierr);
82   PetscFunctionReturn(0);
83 }
84 
85 #undef __FUNCT__
86 #define __FUNCT__ "DMDAGetCoordinates"
87 /*@
88    DMDAGetCoordinates - Gets the node coordinates associated with a DMDA.
89 
90    Not Collective
91 
92    Input Parameter:
93 .  da - the distributed array
94 
95    Output Parameter:
96 .  c - coordinate vector
97 
98    Note:
99     Each process has only the coordinates for its local nodes (does NOT have the
100   coordinates for the ghost nodes).
101 
102     For two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
103     and (x_0,y_0,z_0,x_1,y_1,z_1...)
104 
105   Level: intermediate
106 
107 .keywords: distributed array, get, corners, nodes, local indices, coordinates
108 
109 .seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetGhostedCoordinates(), DMDAGetCoordinateDA()
110 @*/
111 PetscErrorCode  DMDAGetCoordinates(DM da,Vec *c)
112 {
113   DM_DA          *dd = (DM_DA*)da->data;
114   PetscFunctionBegin;
115   PetscValidHeaderSpecific(da,DM_CLASSID,1);
116   PetscValidPointer(c,2);
117   *c = dd->coordinates;
118   PetscFunctionReturn(0);
119 }
120 
121 #undef __FUNCT__
122 #define __FUNCT__ "DMDAGetCoordinateDA"
123 /*@
124    DMDAGetCoordinateDA - Gets the DMDA that scatters between global and local DMDA coordinates
125 
126    Collective on DMDA
127 
128    Input Parameter:
129 .  da - the distributed array
130 
131    Output Parameter:
132 .  dac - coordinate DMDA
133 
134   Level: intermediate
135 
136 .keywords: distributed array, get, corners, nodes, local indices, coordinates
137 
138 .seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetCoordinates(), DMDAGetGhostedCoordinates()
139 @*/
140 PetscErrorCode  DMDAGetCoordinateDA(DM da,DM *cda)
141 {
142   PetscMPIInt    size;
143   PetscErrorCode ierr;
144   DM_DA          *dd = (DM_DA*)da->data;
145 
146   PetscFunctionBegin;
147   if (!dd->da_coordinates) {
148     ierr = MPI_Comm_size(((PetscObject)da)->comm,&size);CHKERRQ(ierr);
149     if (dd->dim == 1) {
150       PetscInt            s,m,*lc,l;
151       DMDABoundaryType bx;
152       ierr = DMDAGetInfo(da,0,&m,0,0,0,0,0,0,&s,&bx,0,0,0);CHKERRQ(ierr);
153       ierr = DMDAGetCorners(da,0,0,0,&l,0,0);CHKERRQ(ierr);
154       ierr = PetscMalloc(size*sizeof(PetscInt),&lc);CHKERRQ(ierr);
155       ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
156       ierr = DMDACreate1d(((PetscObject)da)->comm,bx,m,1,s,lc,&dd->da_coordinates);CHKERRQ(ierr);
157       ierr = PetscFree(lc);CHKERRQ(ierr);
158     } else if (dd->dim == 2) {
159       PetscInt            i,s,m,*lc,*ld,l,k,n,M,N;
160       DMDABoundaryType bx,by;
161       ierr = DMDAGetInfo(da,0,&m,&n,0,&M,&N,0,0,&s,&bx,&by,0,0);CHKERRQ(ierr);
162       ierr = DMDAGetCorners(da,0,0,0,&l,&k,0);CHKERRQ(ierr);
163       ierr = PetscMalloc2(size,PetscInt,&lc,size,PetscInt,&ld);CHKERRQ(ierr);
164       /* only first M values in lc matter */
165       ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
166       /* every Mth value in ld matters */
167       ierr = MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
168       for ( i=0; i<N; i++) {
169         ld[i] = ld[M*i];
170       }
171       ierr = DMDACreate2d(((PetscObject)da)->comm,bx,by,DMDA_STENCIL_BOX,m,n,M,N,2,s,lc,ld,&dd->da_coordinates);CHKERRQ(ierr);
172       ierr = PetscFree2(lc,ld);CHKERRQ(ierr);
173     } else if (dd->dim == 3) {
174       PetscInt            i,s,m,*lc,*ld,*le,l,k,q,n,M,N,P,p;
175       DMDABoundaryType bx,by,bz;
176       ierr = DMDAGetInfo(da,0,&m,&n,&p,&M,&N,&P,0,&s,&bx,&by,&bz,0);CHKERRQ(ierr);
177       ierr = DMDAGetCorners(da,0,0,0,&l,&k,&q);CHKERRQ(ierr);
178       ierr = PetscMalloc3(size,PetscInt,&lc,size,PetscInt,&ld,size,PetscInt,&le);CHKERRQ(ierr);
179       /* only first M values in lc matter */
180       ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
181       /* every Mth value in ld matters */
182       ierr = MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
183       for ( i=0; i<N; i++) {
184         ld[i] = ld[M*i];
185       }
186       ierr = MPI_Allgather(&q,1,MPIU_INT,le,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
187       for ( i=0; i<P; i++) {
188         le[i] = le[M*N*i];
189       }
190       ierr = DMDACreate3d(((PetscObject)da)->comm,bx,by,bz,DMDA_STENCIL_BOX,m,n,p,M,N,P,3,s,lc,ld,le,&dd->da_coordinates);CHKERRQ(ierr);
191       ierr = PetscFree3(lc,ld,le);CHKERRQ(ierr);
192     }
193   }
194   *cda = dd->da_coordinates;
195   PetscFunctionReturn(0);
196 }
197 
198 
199 #undef __FUNCT__
200 #define __FUNCT__ "DMDAGetGhostedCoordinates"
201 /*@
202    DMDAGetGhostedCoordinates - Gets the node coordinates associated with a DMDA.
203 
204    Collective on DMDA
205 
206    Input Parameter:
207 .  da - the distributed array
208 
209    Output Parameter:
210 .  c - coordinate vector
211 
212    Note:
213     Each process has only the coordinates for its local AND ghost nodes
214 
215     For two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
216     and (x_0,y_0,z_0,x_1,y_1,z_1...)
217 
218   Level: intermediate
219 
220 .keywords: distributed array, get, corners, nodes, local indices, coordinates
221 
222 .seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetCoordinates(), DMDAGetCoordinateDA()
223 @*/
224 PetscErrorCode  DMDAGetGhostedCoordinates(DM da,Vec *c)
225 {
226   PetscErrorCode ierr;
227   DM_DA          *dd = (DM_DA*)da->data;
228 
229   PetscFunctionBegin;
230   PetscValidHeaderSpecific(da,DM_CLASSID,1);
231   PetscValidPointer(c,2);
232   if (!dd->coordinates) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ORDER,"You must call DMDASetCoordinates() before this call");
233   if (!dd->ghosted_coordinates) {
234     DM dac;
235     ierr = DMDAGetCoordinateDA(da,&dac);CHKERRQ(ierr);
236     ierr = DMCreateLocalVector(dac,&dd->ghosted_coordinates);CHKERRQ(ierr);
237     ierr = DMGlobalToLocalBegin(dac,dd->coordinates,INSERT_VALUES,dd->ghosted_coordinates);CHKERRQ(ierr);
238     ierr = DMGlobalToLocalEnd(dac,dd->coordinates,INSERT_VALUES,dd->ghosted_coordinates);CHKERRQ(ierr);
239   }
240   *c = dd->ghosted_coordinates;
241   PetscFunctionReturn(0);
242 }
243 
244 #undef __FUNCT__
245 #define __FUNCT__ "DMDASetFieldName"
246 /*@C
247    DMDASetFieldName - Sets the names of individual field components in multicomponent
248    vectors associated with a DMDA.
249 
250    Not Collective
251 
252    Input Parameters:
253 +  da - the distributed array
254 .  nf - field number for the DMDA (0, 1, ... dof-1), where dof indicates the
255         number of degrees of freedom per node within the DMDA
256 -  names - the name of the field (component)
257 
258   Level: intermediate
259 
260 .keywords: distributed array, get, component name
261 
262 .seealso: DMDAGetFieldName()
263 @*/
264 PetscErrorCode  DMDASetFieldName(DM da,PetscInt nf,const char name[])
265 {
266   PetscErrorCode ierr;
267   DM_DA          *dd = (DM_DA*)da->data;
268 
269   PetscFunctionBegin;
270    PetscValidHeaderSpecific(da,DM_CLASSID,1);
271   if (nf < 0 || nf >= dd->w) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf);
272   ierr = PetscFree(dd->fieldname[nf]);CHKERRQ(ierr);
273   ierr = PetscStrallocpy(name,&dd->fieldname[nf]);CHKERRQ(ierr);
274   PetscFunctionReturn(0);
275 }
276 
277 #undef __FUNCT__
278 #define __FUNCT__ "DMDAGetFieldName"
279 /*@C
280    DMDAGetFieldName - Gets the names of individual field components in multicomponent
281    vectors associated with a DMDA.
282 
283    Not Collective
284 
   Input Parameters:
286 +  da - the distributed array
287 -  nf - field number for the DMDA (0, 1, ... dof-1), where dof indicates the
288         number of degrees of freedom per node within the DMDA
289 
290    Output Parameter:
291 .  names - the name of the field (component)
292 
293   Level: intermediate
294 
295 .keywords: distributed array, get, component name
296 
297 .seealso: DMDASetFieldName()
298 @*/
299 PetscErrorCode  DMDAGetFieldName(DM da,PetscInt nf,const char **name)
300 {
301   DM_DA          *dd = (DM_DA*)da->data;
302 
303   PetscFunctionBegin;
304   PetscValidHeaderSpecific(da,DM_CLASSID,1);
305   PetscValidPointer(name,3);
306   if (nf < 0 || nf >= dd->w) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf);
307   *name = dd->fieldname[nf];
308   PetscFunctionReturn(0);
309 }
310 
311 #undef __FUNCT__
312 #define __FUNCT__ "DMDAGetCorners"
313 /*@
314    DMDAGetCorners - Returns the global (x,y,z) indices of the lower left
315    corner of the local region, excluding ghost points.
316 
317    Not Collective
318 
319    Input Parameter:
320 .  da - the distributed array
321 
322    Output Parameters:
323 +  x,y,z - the corner indices (where y and z are optional; these are used
324            for 2D and 3D problems)
325 -  m,n,p - widths in the corresponding directions (where n and p are optional;
326            these are used for 2D and 3D problems)
327 
328    Note:
329    The corner information is independent of the number of degrees of
330    freedom per node set with the DMDACreateXX() routine. Thus the x, y, z, and
331    m, n, p can be thought of as coordinates on a logical grid, where each
332    grid point has (potentially) several degrees of freedom.
333    Any of y, z, n, and p can be passed in as PETSC_NULL if not needed.
334 
335   Level: beginner
336 
337 .keywords: distributed array, get, corners, nodes, local indices
338 
339 .seealso: DMDAGetGhostCorners(), DMDAGetOwnershipRanges()
340 @*/
341 PetscErrorCode  DMDAGetCorners(DM da,PetscInt *x,PetscInt *y,PetscInt *z,PetscInt *m,PetscInt *n,PetscInt *p)
342 {
343   PetscInt w;
344   DM_DA    *dd = (DM_DA*)da->data;
345 
346   PetscFunctionBegin;
347   PetscValidHeaderSpecific(da,DM_CLASSID,1);
348   /* since the xs, xe ... have all been multiplied by the number of degrees
349      of freedom per cell, w = dd->w, we divide that out before returning.*/
350   w = dd->w;
351   if (x) *x = dd->xs/w; if(m) *m = (dd->xe - dd->xs)/w;
352   /* the y and z have NOT been multiplied by w */
353   if (y) *y = dd->ys;   if (n) *n = (dd->ye - dd->ys);
354   if (z) *z = dd->zs;   if (p) *p = (dd->ze - dd->zs);
355   PetscFunctionReturn(0);
356 }
357 
358 #undef __FUNCT__
359 #define __FUNCT__ "DMDAGetLocalBoundingBox"
360 /*@
361    DMDAGetLocalBoundingBox - Returns the local bounding box for the DMDA.
362 
363    Not Collective
364 
365    Input Parameter:
366 .  da - the distributed array
367 
368    Output Parameters:
369 +  lmin - local minimum coordinates (length dim, optional)
-  lmax - local maximum coordinates (length dim, optional)
371 
372   Level: beginner
373 
374 .keywords: distributed array, get, coordinates
375 
376 .seealso: DMDAGetCoordinateDA(), DMDAGetCoordinates(), DMDAGetBoundingBox()
377 @*/
378 PetscErrorCode  DMDAGetLocalBoundingBox(DM da,PetscReal lmin[],PetscReal lmax[])
379 {
380   PetscErrorCode    ierr;
381   Vec               coords  = PETSC_NULL;
382   PetscInt          dim,i,j;
383   const PetscScalar *local_coords;
384   PetscReal         min[3]={PETSC_MAX_REAL,PETSC_MAX_REAL,PETSC_MAX_REAL},max[3]={PETSC_MIN_REAL,PETSC_MIN_REAL,PETSC_MIN_REAL};
385   PetscInt          N,Ni;
386   DM_DA             *dd = (DM_DA*)da->data;
387 
388   PetscFunctionBegin;
389   PetscValidHeaderSpecific(da,DM_CLASSID,1);
390   dim = dd->dim;
391   ierr = DMDAGetCoordinates(da,&coords);CHKERRQ(ierr);
392   ierr = VecGetArrayRead(coords,&local_coords);CHKERRQ(ierr);
393   ierr = VecGetLocalSize(coords,&N);CHKERRQ(ierr);
394   Ni = N/dim;
395   for (i=0; i<Ni; i++) {
396     for (j=0; j<dim; j++) {
397       min[j] = PetscMin(min[j],PetscRealPart(local_coords[i*dim+j]));CHKERRQ(ierr);
398       max[j] = PetscMax(min[j],PetscRealPart(local_coords[i*dim+j]));CHKERRQ(ierr);
399     }
400   }
401   ierr = VecRestoreArrayRead(coords,&local_coords);CHKERRQ(ierr);
402   if (lmin) {ierr = PetscMemcpy(lmin,min,dim*sizeof(PetscReal));CHKERRQ(ierr);}
403   if (lmax) {ierr = PetscMemcpy(lmax,max,dim*sizeof(PetscReal));CHKERRQ(ierr);}
404   PetscFunctionReturn(0);
405 }
406 
407 #undef __FUNCT__
408 #define __FUNCT__ "DMDAGetBoundingBox"
409 /*@
410    DMDAGetBoundingBox - Returns the global bounding box for the DMDA.
411 
412    Collective on DMDA
413 
414    Input Parameter:
415 .  da - the distributed array
416 
417    Output Parameters:
418 +  gmin - global minimum coordinates (length dim, optional)
-  gmax - global maximum coordinates (length dim, optional)
420 
421   Level: beginner
422 
423 .keywords: distributed array, get, coordinates
424 
425 .seealso: DMDAGetCoordinateDA(), DMDAGetCoordinates(), DMDAGetLocalBoundingBox()
426 @*/
427 PetscErrorCode  DMDAGetBoundingBox(DM da,PetscReal gmin[],PetscReal gmax[])
428 {
429   PetscErrorCode ierr;
430   PetscMPIInt    count;
431   PetscReal      lmin[3],lmax[3];
432   DM_DA          *dd = (DM_DA*)da->data;
433 
434   PetscFunctionBegin;
435   PetscValidHeaderSpecific(da,DM_CLASSID,1);
436   count = PetscMPIIntCast(dd->dim);
437   ierr = DMDAGetLocalBoundingBox(da,lmin,lmax);CHKERRQ(ierr);
438   if (gmin) {ierr = MPI_Allreduce(lmin,gmin,count,MPIU_REAL,MPIU_MIN,((PetscObject)da)->comm);CHKERRQ(ierr);}
439   if (gmax) {ierr = MPI_Allreduce(lmax,gmax,count,MPIU_REAL,MPIU_MAX,((PetscObject)da)->comm);CHKERRQ(ierr);}
440   PetscFunctionReturn(0);
441 }
442