xref: /petsc/src/dm/impls/da/dacorn.c (revision 8b8307b2cbff7ae63ec0459e534a4a6ccda2943f)
1 #define PETSCDM_DLL
2 
3 /*
4   Code for manipulating distributed regular arrays in parallel.
5 */
6 
7 #include "private/daimpl.h"    /*I   "petscdm.h"   I*/
8 
#undef __FUNCT__
#define __FUNCT__ "DMDASetCoordinates"
/*@
   DMDASetCoordinates - Sets into the DMDA a vector that indicates the
      coordinates of the local nodes (NOT including ghost nodes).

   Collective on DMDA

   Input Parameter:
+  da - the distributed array
-  c - coordinate vector

   Notes:
    The coordinates should NOT include those for all ghost points.

    The DMDA takes its own reference to c; the caller may destroy its
    reference afterwards. Any previously cached ghosted coordinate vector
    is discarded since it no longer matches the new coordinates.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DMDAGetGhostCorners(), DMDAGetCoordinates(), DMDASetUniformCoordinates(), DMDAGetGhostedCoordinates(), DMDAGetCoordinateDA()
@*/
PetscErrorCode PETSCDM_DLLEXPORT DMDASetCoordinates(DM da,Vec c)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* reference c BEFORE destroying the old vector so that passing the
     currently-set vector back in again is safe */
  ierr = PetscObjectReference((PetscObject)c);CHKERRQ(ierr);
  if (dd->coordinates) {ierr = VecDestroy(dd->coordinates);CHKERRQ(ierr);}
  dd->coordinates = c;
  ierr = VecSetBlockSize(c,dd->dim);CHKERRQ(ierr);
  if (dd->ghosted_coordinates) { /* The ghosted coordinates are no longer valid */
    ierr = VecDestroy(dd->ghosted_coordinates);CHKERRQ(ierr);
    dd->ghosted_coordinates = PETSC_NULL;
  }
  PetscFunctionReturn(0);
}
48 
49 #undef __FUNCT__
50 #define __FUNCT__ "DMDAGetCoordinates"
51 /*@
52    DMDAGetCoordinates - Gets the node coordinates associated with a DMDA.
53 
54    Not Collective
55 
56    Input Parameter:
57 .  da - the distributed array
58 
59    Output Parameter:
60 .  c - coordinate vector
61 
62    Note:
63     Each process has only the coordinates for its local nodes (does NOT have the
64   coordinates for the ghost nodes).
65 
66     For two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
67     and (x_0,y_0,z_0,x_1,y_1,z_1...)
68 
69   Level: intermediate
70 
71 .keywords: distributed array, get, corners, nodes, local indices, coordinates
72 
73 .seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetGhostedCoordinates(), DMDAGetCoordinateDA()
74 @*/
75 PetscErrorCode PETSCDM_DLLEXPORT DMDAGetCoordinates(DM da,Vec *c)
76 {
77   DM_DA          *dd = (DM_DA*)da->data;
78   PetscFunctionBegin;
79   PetscValidHeaderSpecific(da,DM_CLASSID,1);
80   PetscValidPointer(c,2);
81   *c = dd->coordinates;
82   PetscFunctionReturn(0);
83 }
84 
#undef __FUNCT__
#define __FUNCT__ "DMDAGetCoordinateDA"
/*@
   DMDAGetCoordinateDA - Gets the DMDA that scatters between global and local DMDA coordinates

   Collective on DMDA

   Input Parameter:
.  da - the distributed array

   Output Parameter:
.  dac - coordinate DMDA

   Notes:
    The coordinate DMDA is created lazily on first call and cached; it has
    the same processor decomposition as da but dof equal to the spatial
    dimension (1, 2, or 3).

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetCoordinates(), DMDAGetGhostedCoordinates()
@*/
PetscErrorCode PETSCDM_DLLEXPORT DMDAGetCoordinateDA(DM da,DM *cda)
{
  PetscMPIInt    size;
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  if (!dd->da_coordinates) {
    /* Build a coordinate DMDA matching da's decomposition.  The ownership
       widths of da are gathered from all ranks and compacted; this relies on
       DMDA's rank ordering being x-fastest (rank = i + j*M + k*M*N) --
       NOTE(review): assumed from the compaction strides below, confirm
       against the DMDA rank layout. */
    ierr = MPI_Comm_size(((PetscObject)da)->comm,&size);CHKERRQ(ierr);
    if (dd->dim == 1) {
      PetscInt            s,m,*lc,l;
      DMDAPeriodicType pt;
      ierr = DMDAGetInfo(da,0,&m,0,0,0,0,0,0,&s,&pt,0);CHKERRQ(ierr);
      ierr = DMDAGetCorners(da,0,0,0,&l,0,0);CHKERRQ(ierr);
      ierr = PetscMalloc(size*sizeof(PetscInt),&lc);CHKERRQ(ierr);
      /* every rank's local width in x; in 1d all of lc is meaningful */
      ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
      ierr = DMDACreate1d(((PetscObject)da)->comm,pt,m,1,s,lc,&dd->da_coordinates);CHKERRQ(ierr);
      ierr = PetscFree(lc);CHKERRQ(ierr);
    } else if (dd->dim == 2) {
      PetscInt            i,s,m,*lc,*ld,l,k,n,M,N;
      DMDAPeriodicType pt;
      ierr = DMDAGetInfo(da,0,&m,&n,0,&M,&N,0,0,&s,&pt,0);CHKERRQ(ierr);
      ierr = DMDAGetCorners(da,0,0,0,&l,&k,0);CHKERRQ(ierr);
      ierr = PetscMalloc2(size,PetscInt,&lc,size,PetscInt,&ld);CHKERRQ(ierr);
      /* only first M values in lc matter */
      ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
      /* every Mth value in ld matters */
      ierr = MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
      /* compact the y widths into the first N entries */
      for ( i=0; i<N; i++) {
        ld[i] = ld[M*i];
      }
      ierr = DMDACreate2d(((PetscObject)da)->comm,pt,DMDA_STENCIL_BOX,m,n,M,N,2,s,lc,ld,&dd->da_coordinates);CHKERRQ(ierr);
      ierr = PetscFree2(lc,ld);CHKERRQ(ierr);
    } else if (dd->dim == 3) {
      PetscInt            i,s,m,*lc,*ld,*le,l,k,q,n,M,N,P,p;
      DMDAPeriodicType pt;
      ierr = DMDAGetInfo(da,0,&m,&n,&p,&M,&N,&P,0,&s,&pt,0);CHKERRQ(ierr);
      ierr = DMDAGetCorners(da,0,0,0,&l,&k,&q);CHKERRQ(ierr);
      ierr = PetscMalloc3(size,PetscInt,&lc,size,PetscInt,&ld,size,PetscInt,&le);CHKERRQ(ierr);
      /* only first M values in lc matter */
      ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
      /* every Mth value in ld matters */
      ierr = MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
      /* compact the y widths into the first N entries */
      for ( i=0; i<N; i++) {
        ld[i] = ld[M*i];
      }
      /* every (M*N)th value in le matters; compact into the first P entries */
      ierr = MPI_Allgather(&q,1,MPIU_INT,le,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
      for ( i=0; i<P; i++) {
        le[i] = le[M*N*i];
      }
      ierr = DMDACreate3d(((PetscObject)da)->comm,pt,DMDA_STENCIL_BOX,m,n,p,M,N,P,3,s,lc,ld,le,&dd->da_coordinates);CHKERRQ(ierr);
      ierr = PetscFree3(lc,ld,le);CHKERRQ(ierr);
    }
  }
  *cda = dd->da_coordinates;
  PetscFunctionReturn(0);
}
161 
162 
163 #undef __FUNCT__
164 #define __FUNCT__ "DMDAGetGhostedCoordinates"
165 /*@
166    DMDAGetGhostedCoordinates - Gets the node coordinates associated with a DMDA.
167 
168    Collective on DMDA
169 
170    Input Parameter:
171 .  da - the distributed array
172 
173    Output Parameter:
174 .  c - coordinate vector
175 
176    Note:
177     Each process has only the coordinates for its local AND ghost nodes
178 
179     For two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
180     and (x_0,y_0,z_0,x_1,y_1,z_1...)
181 
182   Level: intermediate
183 
184 .keywords: distributed array, get, corners, nodes, local indices, coordinates
185 
186 .seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetCoordinates(), DMDAGetCoordinateDA()
187 @*/
188 PetscErrorCode PETSCDM_DLLEXPORT DMDAGetGhostedCoordinates(DM da,Vec *c)
189 {
190   PetscErrorCode ierr;
191   DM_DA          *dd = (DM_DA*)da->data;
192 
193   PetscFunctionBegin;
194   PetscValidHeaderSpecific(da,DM_CLASSID,1);
195   PetscValidPointer(c,2);
196   if (!dd->coordinates) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ORDER,"You must call DMDASetCoordinates() before this call");
197   if (!dd->ghosted_coordinates) {
198     DM dac;
199     ierr = DMDAGetCoordinateDA(da,&dac);CHKERRQ(ierr);
200     ierr = DMCreateLocalVector(dac,&dd->ghosted_coordinates);CHKERRQ(ierr);
201     ierr = DMGlobalToLocalBegin(dac,dd->coordinates,INSERT_VALUES,dd->ghosted_coordinates);CHKERRQ(ierr);
202     ierr = DMGlobalToLocalEnd(dac,dd->coordinates,INSERT_VALUES,dd->ghosted_coordinates);CHKERRQ(ierr);
203   }
204   *c = dd->ghosted_coordinates;
205   PetscFunctionReturn(0);
206 }
207 
208 #undef __FUNCT__
209 #define __FUNCT__ "DMDASetFieldName"
210 /*@C
211    DMDASetFieldName - Sets the names of individual field components in multicomponent
212    vectors associated with a DMDA.
213 
214    Not Collective
215 
216    Input Parameters:
217 +  da - the distributed array
218 .  nf - field number for the DMDA (0, 1, ... dof-1), where dof indicates the
219         number of degrees of freedom per node within the DMDA
220 -  names - the name of the field (component)
221 
222   Level: intermediate
223 
224 .keywords: distributed array, get, component name
225 
226 .seealso: DMDAGetFieldName()
227 @*/
228 PetscErrorCode PETSCDM_DLLEXPORT DMDASetFieldName(DM da,PetscInt nf,const char name[])
229 {
230   PetscErrorCode ierr;
231   DM_DA          *dd = (DM_DA*)da->data;
232 
233   PetscFunctionBegin;
234    PetscValidHeaderSpecific(da,DM_CLASSID,1);
235   if (nf < 0 || nf >= dd->w) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf);
236   if (dd->fieldname[nf]) {ierr = PetscFree(dd->fieldname[nf]);CHKERRQ(ierr);}
237    ierr = PetscStrallocpy(name,&dd->fieldname[nf]);CHKERRQ(ierr);
238   PetscFunctionReturn(0);
239 }
240 
241 #undef __FUNCT__
242 #define __FUNCT__ "DMDAGetFieldName"
243 /*@C
244    DMDAGetFieldName - Gets the names of individual field components in multicomponent
245    vectors associated with a DMDA.
246 
247    Not Collective
248 
249    Input Parameter:
250 +  da - the distributed array
251 -  nf - field number for the DMDA (0, 1, ... dof-1), where dof indicates the
252         number of degrees of freedom per node within the DMDA
253 
254    Output Parameter:
255 .  names - the name of the field (component)
256 
257   Level: intermediate
258 
259 .keywords: distributed array, get, component name
260 
261 .seealso: DMDASetFieldName()
262 @*/
263 PetscErrorCode PETSCDM_DLLEXPORT DMDAGetFieldName(DM da,PetscInt nf,const char **name)
264 {
265   DM_DA          *dd = (DM_DA*)da->data;
266 
267   PetscFunctionBegin;
268   PetscValidHeaderSpecific(da,DM_CLASSID,1);
269   PetscValidPointer(name,3);
270   if (nf < 0 || nf >= dd->w) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf);
271   *name = dd->fieldname[nf];
272   PetscFunctionReturn(0);
273 }
274 
275 #undef __FUNCT__
276 #define __FUNCT__ "DMDAGetCorners"
277 /*@
278    DMDAGetCorners - Returns the global (x,y,z) indices of the lower left
279    corner of the local region, excluding ghost points.
280 
281    Not Collective
282 
283    Input Parameter:
284 .  da - the distributed array
285 
286    Output Parameters:
287 +  x,y,z - the corner indices (where y and z are optional; these are used
288            for 2D and 3D problems)
289 -  m,n,p - widths in the corresponding directions (where n and p are optional;
290            these are used for 2D and 3D problems)
291 
292    Note:
293    The corner information is independent of the number of degrees of
294    freedom per node set with the DMDACreateXX() routine. Thus the x, y, z, and
295    m, n, p can be thought of as coordinates on a logical grid, where each
296    grid point has (potentially) several degrees of freedom.
297    Any of y, z, n, and p can be passed in as PETSC_NULL if not needed.
298 
299   Level: beginner
300 
301 .keywords: distributed array, get, corners, nodes, local indices
302 
303 .seealso: DMDAGetGhostCorners(), DMDAGetOwnershipRanges()
304 @*/
305 PetscErrorCode PETSCDM_DLLEXPORT DMDAGetCorners(DM da,PetscInt *x,PetscInt *y,PetscInt *z,PetscInt *m,PetscInt *n,PetscInt *p)
306 {
307   PetscInt w;
308   DM_DA    *dd = (DM_DA*)da->data;
309 
310   PetscFunctionBegin;
311   PetscValidHeaderSpecific(da,DM_CLASSID,1);
312   /* since the xs, xe ... have all been multiplied by the number of degrees
313      of freedom per cell, w = dd->w, we divide that out before returning.*/
314   w = dd->w;
315   if (x) *x = dd->xs/w; if(m) *m = (dd->xe - dd->xs)/w;
316   /* the y and z have NOT been multiplied by w */
317   if (y) *y = dd->ys;   if (n) *n = (dd->ye - dd->ys);
318   if (z) *z = dd->zs;   if (p) *p = (dd->ze - dd->zs);
319   PetscFunctionReturn(0);
320 }
321 
322 #undef __FUNCT__
323 #define __FUNCT__ "DMDAGetLocalBoundingBox"
324 /*@
325    DMDAGetLocalBoundingBox - Returns the local bounding box for the DMDA.
326 
327    Not Collective
328 
329    Input Parameter:
330 .  da - the distributed array
331 
332    Output Parameters:
333 +  lmin - local minimum coordinates (length dim, optional)
334 -  lmax - local maximim coordinates (length dim, optional)
335 
336   Level: beginner
337 
338 .keywords: distributed array, get, coordinates
339 
340 .seealso: DMDAGetCoordinateDA(), DMDAGetCoordinates(), DMDAGetBoundingBox()
341 @*/
342 PetscErrorCode PETSCDM_DLLEXPORT DMDAGetLocalBoundingBox(DM da,PetscReal lmin[],PetscReal lmax[])
343 {
344   PetscErrorCode    ierr;
345   Vec               coords  = PETSC_NULL;
346   PetscInt          dim,i,j;
347   const PetscScalar *local_coords;
348   PetscReal         min[3]={PETSC_MAX,PETSC_MAX,PETSC_MAX},max[3]={PETSC_MIN,PETSC_MIN,PETSC_MIN};
349   PetscInt          N,Ni;
350   DM_DA             *dd = (DM_DA*)da->data;
351 
352   PetscFunctionBegin;
353   PetscValidHeaderSpecific(da,DM_CLASSID,1);
354   dim = dd->dim;
355   ierr = DMDAGetCoordinates(da,&coords);CHKERRQ(ierr);
356   ierr = VecGetArrayRead(coords,&local_coords);CHKERRQ(ierr);
357   ierr = VecGetLocalSize(coords,&N);CHKERRQ(ierr);
358   Ni = N/dim;
359   for (i=0; i<Ni; i++) {
360     for (j=0; j<dim; j++) {
361       min[j] = PetscMin(min[j],PetscRealPart(local_coords[i*dim+j]));CHKERRQ(ierr);
362       max[j] = PetscMax(min[j],PetscRealPart(local_coords[i*dim+j]));CHKERRQ(ierr);
363     }
364   }
365   ierr = VecRestoreArrayRead(coords,&local_coords);CHKERRQ(ierr);
366   if (lmin) {ierr = PetscMemcpy(lmin,min,dim*sizeof(PetscReal));CHKERRQ(ierr);}
367   if (lmax) {ierr = PetscMemcpy(lmax,max,dim*sizeof(PetscReal));CHKERRQ(ierr);}
368   PetscFunctionReturn(0);
369 }
370 
371 #undef __FUNCT__
372 #define __FUNCT__ "DMDAGetBoundingBox"
373 /*@
374    DMDAGetBoundingBox - Returns the global bounding box for the DMDA.
375 
376    Collective on DMDA
377 
378    Input Parameter:
379 .  da - the distributed array
380 
381    Output Parameters:
382 +  gmin - global minimum coordinates (length dim, optional)
383 -  gmax - global maximim coordinates (length dim, optional)
384 
385   Level: beginner
386 
387 .keywords: distributed array, get, coordinates
388 
389 .seealso: DMDAGetCoordinateDA(), DMDAGetCoordinates(), DMDAGetLocalBoundingBox()
390 @*/
391 PetscErrorCode PETSCDM_DLLEXPORT DMDAGetBoundingBox(DM da,PetscReal gmin[],PetscReal gmax[])
392 {
393   PetscErrorCode ierr;
394   PetscMPIInt    count;
395   PetscReal      lmin[3],lmax[3];
396   DM_DA          *dd = (DM_DA*)da->data;
397 
398   PetscFunctionBegin;
399   PetscValidHeaderSpecific(da,DM_CLASSID,1);
400   count = PetscMPIIntCast(dd->dim);
401   ierr = DMDAGetLocalBoundingBox(da,lmin,lmax);CHKERRQ(ierr);
402   if (gmin) {ierr = MPI_Allreduce(lmin,gmin,count,MPIU_REAL,MPI_MIN,((PetscObject)da)->comm);CHKERRQ(ierr);}
403   if (gmax) {ierr = MPI_Allreduce(lmax,gmax,count,MPIU_REAL,MPI_MAX,((PetscObject)da)->comm);CHKERRQ(ierr);}
404   PetscFunctionReturn(0);
405 }
406