xref: /petsc/src/dm/impls/da/dacorn.c (revision 3c48a1e8da19189ff2402a4e41a2fc082d52c349)
1 
2 /*
3   Code for manipulating distributed regular arrays in parallel.
4 */
5 
6 #include "private/daimpl.h"    /*I   "petscdmda.h"   I*/
7 
#undef __FUNCT__
#define __FUNCT__ "DMDASetCoordinates"
/*@
   DMDASetCoordinates - Sets into the DMDA a vector that indicates the
      coordinates of the local nodes (NOT including ghost nodes).

   Collective on DMDA

   Input Parameters:
+  da - the distributed array
-  c - coordinate vector

   Note:
    The coordinates should NOT include those for all ghost points

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DMDASetGhostCoordinates(), DMDAGetGhostCorners(), DMDAGetCoordinates(), DMDASetUniformCoordinates(), DMDAGetGhostedCoordinates(), DMDAGetCoordinateDA()
@*/
PetscErrorCode  DMDASetCoordinates(DM da,Vec c)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference c BEFORE destroying the old vector so that re-setting the
     currently stored vector (c == dd->coordinates) remains safe */
  ierr = PetscObjectReference((PetscObject)c);CHKERRQ(ierr);
  if (dd->coordinates) {ierr = VecDestroy(dd->coordinates);CHKERRQ(ierr);}
  dd->coordinates = c;
  /* block size = spatial dimension, since coordinates are interlaced per node */
  ierr = VecSetBlockSize(c,dd->dim);CHKERRQ(ierr);
  if (dd->ghosted_coordinates) { /* The ghosted coordinates are no longer valid */
    ierr = VecDestroy(dd->ghosted_coordinates);CHKERRQ(ierr);
    dd->ghosted_coordinates = PETSC_NULL;
  }
  PetscFunctionReturn(0);
}
47 
#undef __FUNCT__
#define __FUNCT__ "DMDASetGhostedCoordinates"
/*@
   DMDASetGhostedCoordinates - Sets into the DMDA a vector that indicates the
      coordinates of the local nodes, including ghost nodes.

   Collective on DMDA

   Input Parameters:
+  da - the distributed array
-  c - coordinate vector

   Note:
    The coordinates of interior ghost points can be set using DMDASetCoordinates()
    followed by DMDAGetGhostedCoordinates().  This is intended to enable the setting
    of ghost coordinates outside of the domain.

    Non-ghosted coordinates, if set, are assumed still valid.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DMDASetCoordinates(), DMDAGetGhostCorners(), DMDAGetCoordinates(), DMDASetUniformCoordinates(), DMDAGetGhostedCoordinates(), DMDAGetCoordinateDA()
@*/
PetscErrorCode  DMDASetGhostedCoordinates(DM da,Vec c)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference c BEFORE destroying the old vector so that re-setting the
     currently stored vector (c == dd->ghosted_coordinates) remains safe */
  ierr = PetscObjectReference((PetscObject)c);CHKERRQ(ierr);
  if (dd->ghosted_coordinates) {ierr = VecDestroy(dd->ghosted_coordinates);CHKERRQ(ierr);}
  dd->ghosted_coordinates = c;
  /* block size = spatial dimension, since coordinates are interlaced per node */
  ierr = VecSetBlockSize(c,dd->dim);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
87 
#undef __FUNCT__
#define __FUNCT__ "DMDAGetCoordinates"
/*@
   DMDAGetCoordinates - Gets the node coordinates associated with a DMDA.

   Not Collective

   Input Parameter:
.  da - the distributed array

   Output Parameter:
.  c - coordinate vector

   Note:
    Each process has only the coordinates for its local nodes (does NOT have the
  coordinates for the ghost nodes).

    For two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
    and (x_0,y_0,z_0,x_1,y_1,z_1...)

    The returned vector is the DMDA's internal one; no new reference is taken,
    so the caller must not destroy it.  It is PETSC_NULL if coordinates have
    not been set.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetGhostedCoordinates(), DMDAGetCoordinateDA()
@*/
PetscErrorCode  DMDAGetCoordinates(DM da,Vec *c)
{
  DM_DA          *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* hand back the stored pointer directly (possibly PETSC_NULL) */
  *c = dd->coordinates;
  PetscFunctionReturn(0);
}
123 
124 #undef __FUNCT__
125 #define __FUNCT__ "DMDAGetCoordinateDA"
126 /*@
127    DMDAGetCoordinateDA - Gets the DMDA that scatters between global and local DMDA coordinates
128 
129    Collective on DMDA
130 
131    Input Parameter:
132 .  da - the distributed array
133 
134    Output Parameter:
135 .  dac - coordinate DMDA
136 
137   Level: intermediate
138 
139 .keywords: distributed array, get, corners, nodes, local indices, coordinates
140 
141 .seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetCoordinates(), DMDAGetGhostedCoordinates()
142 @*/
143 PetscErrorCode  DMDAGetCoordinateDA(DM da,DM *cda)
144 {
145   PetscMPIInt    size;
146   PetscErrorCode ierr;
147   DM_DA          *dd = (DM_DA*)da->data;
148 
149   PetscFunctionBegin;
150   if (!dd->da_coordinates) {
151     ierr = MPI_Comm_size(((PetscObject)da)->comm,&size);CHKERRQ(ierr);
152     if (dd->dim == 1) {
153       PetscInt            s,m,*lc,l;
154       DMDABoundaryType bx;
155       ierr = DMDAGetInfo(da,0,&m,0,0,0,0,0,0,&s,&bx,0,0,0);CHKERRQ(ierr);
156       ierr = DMDAGetCorners(da,0,0,0,&l,0,0);CHKERRQ(ierr);
157       ierr = PetscMalloc(size*sizeof(PetscInt),&lc);CHKERRQ(ierr);
158       ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
159       ierr = DMDACreate1d(((PetscObject)da)->comm,bx,m,1,s,lc,&dd->da_coordinates);CHKERRQ(ierr);
160       ierr = PetscFree(lc);CHKERRQ(ierr);
161     } else if (dd->dim == 2) {
162       PetscInt            i,s,m,*lc,*ld,l,k,n,M,N;
163       DMDABoundaryType bx,by;
164       ierr = DMDAGetInfo(da,0,&m,&n,0,&M,&N,0,0,&s,&bx,&by,0,0);CHKERRQ(ierr);
165       ierr = DMDAGetCorners(da,0,0,0,&l,&k,0);CHKERRQ(ierr);
166       ierr = PetscMalloc2(size,PetscInt,&lc,size,PetscInt,&ld);CHKERRQ(ierr);
167       /* only first M values in lc matter */
168       ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
169       /* every Mth value in ld matters */
170       ierr = MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
171       for ( i=0; i<N; i++) {
172         ld[i] = ld[M*i];
173       }
174       ierr = DMDACreate2d(((PetscObject)da)->comm,bx,by,DMDA_STENCIL_BOX,m,n,M,N,2,s,lc,ld,&dd->da_coordinates);CHKERRQ(ierr);
175       ierr = PetscFree2(lc,ld);CHKERRQ(ierr);
176     } else if (dd->dim == 3) {
177       PetscInt            i,s,m,*lc,*ld,*le,l,k,q,n,M,N,P,p;
178       DMDABoundaryType bx,by,bz;
179       ierr = DMDAGetInfo(da,0,&m,&n,&p,&M,&N,&P,0,&s,&bx,&by,&bz,0);CHKERRQ(ierr);
180       ierr = DMDAGetCorners(da,0,0,0,&l,&k,&q);CHKERRQ(ierr);
181       ierr = PetscMalloc3(size,PetscInt,&lc,size,PetscInt,&ld,size,PetscInt,&le);CHKERRQ(ierr);
182       /* only first M values in lc matter */
183       ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
184       /* every Mth value in ld matters */
185       ierr = MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
186       for ( i=0; i<N; i++) {
187         ld[i] = ld[M*i];
188       }
189       ierr = MPI_Allgather(&q,1,MPIU_INT,le,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
190       for ( i=0; i<P; i++) {
191         le[i] = le[M*N*i];
192       }
193       ierr = DMDACreate3d(((PetscObject)da)->comm,bx,by,bz,DMDA_STENCIL_BOX,m,n,p,M,N,P,3,s,lc,ld,le,&dd->da_coordinates);CHKERRQ(ierr);
194       ierr = PetscFree3(lc,ld,le);CHKERRQ(ierr);
195     }
196   }
197   *cda = dd->da_coordinates;
198   PetscFunctionReturn(0);
199 }
200 
201 
#undef __FUNCT__
#define __FUNCT__ "DMDAGetGhostedCoordinates"
/*@
   DMDAGetGhostedCoordinates - Gets the node coordinates associated with a DMDA.

   Collective on DMDA

   Input Parameter:
.  da - the distributed array

   Output Parameter:
.  c - coordinate vector

   Note:
    Each process has only the coordinates for its local AND ghost nodes

    For two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
    and (x_0,y_0,z_0,x_1,y_1,z_1...)

    The ghosted vector is created lazily (and filled from the non-ghosted
    coordinates) on first call, then cached; no new reference is taken, so the
    caller must not destroy it.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetCoordinates(), DMDAGetCoordinateDA()
@*/
PetscErrorCode  DMDAGetGhostedCoordinates(DM da,Vec *c)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(c,2);
  if (!dd->coordinates) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ORDER,"You must call DMDASetCoordinates() before this call");
  if (!dd->ghosted_coordinates) {
    DM dac;
    /* scatter the owned coordinates into a local (ghosted) vector using the
       coordinate DMDA's global-to-local map */
    ierr = DMDAGetCoordinateDA(da,&dac);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(dac,&dd->ghosted_coordinates);CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(dac,dd->coordinates,INSERT_VALUES,dd->ghosted_coordinates);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(dac,dd->coordinates,INSERT_VALUES,dd->ghosted_coordinates);CHKERRQ(ierr);
  }
  *c = dd->ghosted_coordinates;
  PetscFunctionReturn(0);
}
246 
247 #undef __FUNCT__
248 #define __FUNCT__ "DMDASetFieldName"
249 /*@C
250    DMDASetFieldName - Sets the names of individual field components in multicomponent
251    vectors associated with a DMDA.
252 
253    Not Collective
254 
255    Input Parameters:
256 +  da - the distributed array
257 .  nf - field number for the DMDA (0, 1, ... dof-1), where dof indicates the
258         number of degrees of freedom per node within the DMDA
259 -  names - the name of the field (component)
260 
261   Level: intermediate
262 
263 .keywords: distributed array, get, component name
264 
265 .seealso: DMDAGetFieldName()
266 @*/
267 PetscErrorCode  DMDASetFieldName(DM da,PetscInt nf,const char name[])
268 {
269   PetscErrorCode ierr;
270   DM_DA          *dd = (DM_DA*)da->data;
271 
272   PetscFunctionBegin;
273    PetscValidHeaderSpecific(da,DM_CLASSID,1);
274   if (nf < 0 || nf >= dd->w) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf);
275   ierr = PetscFree(dd->fieldname[nf]);CHKERRQ(ierr);
276   ierr = PetscStrallocpy(name,&dd->fieldname[nf]);CHKERRQ(ierr);
277   PetscFunctionReturn(0);
278 }
279 
#undef __FUNCT__
#define __FUNCT__ "DMDAGetFieldName"
/*@C
   DMDAGetFieldName - Gets the names of individual field components in multicomponent
   vectors associated with a DMDA.

   Not Collective

   Input Parameters:
+  da - the distributed array
-  nf - field number for the DMDA (0, 1, ... dof-1), where dof indicates the
        number of degrees of freedom per node within the DMDA

   Output Parameter:
.  name - the name of the field (component); points at the DMDA's internal
          storage, so the caller must not free it

  Level: intermediate

.keywords: distributed array, get, component name

.seealso: DMDASetFieldName()
@*/
PetscErrorCode  DMDAGetFieldName(DM da,PetscInt nf,const char **name)
{
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(name,3);
  if (nf < 0 || nf >= dd->w) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf);
  *name = dd->fieldname[nf];
  PetscFunctionReturn(0);
}
313 
314 #undef __FUNCT__
315 #define __FUNCT__ "DMDAGetCorners"
316 /*@
317    DMDAGetCorners - Returns the global (x,y,z) indices of the lower left
318    corner of the local region, excluding ghost points.
319 
320    Not Collective
321 
322    Input Parameter:
323 .  da - the distributed array
324 
325    Output Parameters:
326 +  x,y,z - the corner indices (where y and z are optional; these are used
327            for 2D and 3D problems)
328 -  m,n,p - widths in the corresponding directions (where n and p are optional;
329            these are used for 2D and 3D problems)
330 
331    Note:
332    The corner information is independent of the number of degrees of
333    freedom per node set with the DMDACreateXX() routine. Thus the x, y, z, and
334    m, n, p can be thought of as coordinates on a logical grid, where each
335    grid point has (potentially) several degrees of freedom.
336    Any of y, z, n, and p can be passed in as PETSC_NULL if not needed.
337 
338   Level: beginner
339 
340 .keywords: distributed array, get, corners, nodes, local indices
341 
342 .seealso: DMDAGetGhostCorners(), DMDAGetOwnershipRanges()
343 @*/
344 PetscErrorCode  DMDAGetCorners(DM da,PetscInt *x,PetscInt *y,PetscInt *z,PetscInt *m,PetscInt *n,PetscInt *p)
345 {
346   PetscInt w;
347   DM_DA    *dd = (DM_DA*)da->data;
348 
349   PetscFunctionBegin;
350   PetscValidHeaderSpecific(da,DM_CLASSID,1);
351   /* since the xs, xe ... have all been multiplied by the number of degrees
352      of freedom per cell, w = dd->w, we divide that out before returning.*/
353   w = dd->w;
354   if (x) *x = dd->xs/w; if(m) *m = (dd->xe - dd->xs)/w;
355   /* the y and z have NOT been multiplied by w */
356   if (y) *y = dd->ys;   if (n) *n = (dd->ye - dd->ys);
357   if (z) *z = dd->zs;   if (p) *p = (dd->ze - dd->zs);
358   PetscFunctionReturn(0);
359 }
360 
361 #undef __FUNCT__
362 #define __FUNCT__ "DMDAGetLocalBoundingBox"
363 /*@
364    DMDAGetLocalBoundingBox - Returns the local bounding box for the DMDA.
365 
366    Not Collective
367 
368    Input Parameter:
369 .  da - the distributed array
370 
371    Output Parameters:
372 +  lmin - local minimum coordinates (length dim, optional)
373 -  lmax - local maximim coordinates (length dim, optional)
374 
375   Level: beginner
376 
377 .keywords: distributed array, get, coordinates
378 
379 .seealso: DMDAGetCoordinateDA(), DMDAGetCoordinates(), DMDAGetBoundingBox()
380 @*/
381 PetscErrorCode  DMDAGetLocalBoundingBox(DM da,PetscReal lmin[],PetscReal lmax[])
382 {
383   PetscErrorCode    ierr;
384   Vec               coords  = PETSC_NULL;
385   PetscInt          dim,i,j;
386   const PetscScalar *local_coords;
387   PetscReal         min[3]={PETSC_MAX,PETSC_MAX,PETSC_MAX},max[3]={PETSC_MIN,PETSC_MIN,PETSC_MIN};
388   PetscInt          N,Ni;
389   DM_DA             *dd = (DM_DA*)da->data;
390 
391   PetscFunctionBegin;
392   PetscValidHeaderSpecific(da,DM_CLASSID,1);
393   dim = dd->dim;
394   ierr = DMDAGetCoordinates(da,&coords);CHKERRQ(ierr);
395   ierr = VecGetArrayRead(coords,&local_coords);CHKERRQ(ierr);
396   ierr = VecGetLocalSize(coords,&N);CHKERRQ(ierr);
397   Ni = N/dim;
398   for (i=0; i<Ni; i++) {
399     for (j=0; j<dim; j++) {
400       min[j] = PetscMin(min[j],PetscRealPart(local_coords[i*dim+j]));CHKERRQ(ierr);
401       max[j] = PetscMax(min[j],PetscRealPart(local_coords[i*dim+j]));CHKERRQ(ierr);
402     }
403   }
404   ierr = VecRestoreArrayRead(coords,&local_coords);CHKERRQ(ierr);
405   if (lmin) {ierr = PetscMemcpy(lmin,min,dim*sizeof(PetscReal));CHKERRQ(ierr);}
406   if (lmax) {ierr = PetscMemcpy(lmax,max,dim*sizeof(PetscReal));CHKERRQ(ierr);}
407   PetscFunctionReturn(0);
408 }
409 
#undef __FUNCT__
#define __FUNCT__ "DMDAGetBoundingBox"
/*@
   DMDAGetBoundingBox - Returns the global bounding box for the DMDA.

   Collective on DMDA

   Input Parameter:
.  da - the distributed array

   Output Parameters:
+  gmin - global minimum coordinates (length dim, optional)
-  gmax - global maximum coordinates (length dim, optional)

  Level: beginner

.keywords: distributed array, get, coordinates

.seealso: DMDAGetCoordinateDA(), DMDAGetCoordinates(), DMDAGetLocalBoundingBox()
@*/
PetscErrorCode  DMDAGetBoundingBox(DM da,PetscReal gmin[],PetscReal gmax[])
{
  PetscErrorCode ierr;
  PetscMPIInt    count;
  PetscReal      lmin[3],lmax[3];
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* MPI wants an int count, while dd->dim is a PetscInt */
  count = PetscMPIIntCast(dd->dim);
  ierr = DMDAGetLocalBoundingBox(da,lmin,lmax);CHKERRQ(ierr);
  /* reduce the per-process boxes componentwise over the DMDA's communicator */
  if (gmin) {ierr = MPI_Allreduce(lmin,gmin,count,MPIU_REAL,MPI_MIN,((PetscObject)da)->comm);CHKERRQ(ierr);}
  if (gmax) {ierr = MPI_Allreduce(lmax,gmax,count,MPIU_REAL,MPI_MAX,((PetscObject)da)->comm);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
445