xref: /petsc/src/dm/impls/da/dagtol.c (revision bebe2cf65d55febe21a5af8db2bd2e168caaa2e7)
1 
2 /*
3   Code for manipulating distributed regular arrays in parallel.
4 */
5 
6 #include <petsc/private/dmdaimpl.h>    /*I   "petscdmda.h"   I*/
7 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin_DA"
/*
   DMGlobalToLocalBegin_DA - Begins the scatter of values from the DMDA global
   vector into the ghosted local vector, using the DMDA's precomputed
   global-to-local scatter context (dd->gtol).  Must be completed by a
   matching DMGlobalToLocalEnd_DA() call.

   Input Parameters:
+  da   - the distributed array
.  g    - the global vector (source)
-  mode - INSERT_VALUES or ADD_VALUES, forwarded to VecScatterBegin()

   Output Parameter:
.  l - the local (ghosted) vector (destination)
*/
PetscErrorCode  DMGlobalToLocalBegin_DA(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;  /* DMDA-specific data, holds the gtol scatter */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidHeaderSpecific(g,VEC_CLASSID,2);
  PetscValidHeaderSpecific(l,VEC_CLASSID,4);
  /* SCATTER_FORWARD moves data global -> local through the gtol context */
  ierr = VecScatterBegin(dd->gtol,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
22 
23 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd_DA"
/*
   DMGlobalToLocalEnd_DA - Completes the global-to-local scatter started by
   DMGlobalToLocalBegin_DA().  Arguments must match the corresponding Begin
   call so the same scatter context/direction is finished.

   Input Parameters:
+  da   - the distributed array
.  g    - the global vector (source)
-  mode - INSERT_VALUES or ADD_VALUES, forwarded to VecScatterEnd()

   Output Parameter:
.  l - the local (ghosted) vector (destination)
*/
PetscErrorCode  DMGlobalToLocalEnd_DA(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;  /* DMDA-specific data, holds the gtol scatter */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidHeaderSpecific(g,VEC_CLASSID,2);
  PetscValidHeaderSpecific(l,VEC_CLASSID,4);
  /* Completes the communication begun in DMGlobalToLocalBegin_DA() */
  ierr = VecScatterEnd(dd->gtol,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
38 
39 #undef __FUNCT__
40 #define __FUNCT__ "DMLocalToGlobalBegin_DA"
41 PetscErrorCode  DMLocalToGlobalBegin_DA(DM da,Vec l,InsertMode mode,Vec g)
42 {
43   PetscErrorCode ierr;
44   DM_DA          *dd = (DM_DA*)da->data;
45 
46   PetscFunctionBegin;
47   PetscValidHeaderSpecific(da,DM_CLASSID,1);
48   PetscValidHeaderSpecific(l,VEC_CLASSID,2);
49   PetscValidHeaderSpecific(g,VEC_CLASSID,3);
50   if (mode == ADD_VALUES) {
51     ierr = VecScatterBegin(dd->gtol,l,g,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
52   } else if (mode == INSERT_VALUES) {
53     if (dd->bx != DM_BOUNDARY_GHOSTED && dd->bx != DM_BOUNDARY_NONE && dd->s > 0 && dd->m == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallism in x direction");
54     if (dd->bx != DM_BOUNDARY_GHOSTED && dd->by != DM_BOUNDARY_NONE && dd->s > 0 && dd->n == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallism in y direction");
55     if (dd->bx != DM_BOUNDARY_GHOSTED && dd->bz != DM_BOUNDARY_NONE && dd->s > 0 && dd->p == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallism in z direction");
56     ierr = VecScatterBegin(dd->gtol,l,g,INSERT_VALUES,SCATTER_REVERSE_LOCAL);CHKERRQ(ierr);
57   } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented");
58   PetscFunctionReturn(0);
59 }
60 
61 #undef __FUNCT__
62 #define __FUNCT__ "DMLocalToGlobalEnd_DA"
63 PetscErrorCode  DMLocalToGlobalEnd_DA(DM da,Vec l,InsertMode mode,Vec g)
64 {
65   PetscErrorCode ierr;
66   DM_DA          *dd = (DM_DA*)da->data;
67 
68   PetscFunctionBegin;
69   PetscValidHeaderSpecific(da,DM_CLASSID,1);
70   PetscValidHeaderSpecific(l,VEC_CLASSID,2);
71   PetscValidHeaderSpecific(g,VEC_CLASSID,3);
72   if (mode == ADD_VALUES) {
73     ierr = VecScatterEnd(dd->gtol,l,g,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
74   } else if (mode == INSERT_VALUES) {
75     ierr = VecScatterEnd(dd->gtol,l,g,INSERT_VALUES,SCATTER_REVERSE_LOCAL);CHKERRQ(ierr);
76   } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented");
77   PetscFunctionReturn(0);
78 }
79 
extern PetscErrorCode DMDAGetNatural_Private(DM,PetscInt*,IS*);
#undef __FUNCT__
#define __FUNCT__ "DMDAGlobalToNatural_Create"
/*
   DMDAGlobalToNatural_Create - Create the global to natural scatter object

   Collective on DMDA

   Input Parameter:
.  da - the distributed array context

   Level: developer

   Notes: This is an internal routine called by DMDAGlobalToNatural() to
     create the scatter context (stored in dd->gton).  Requires that
     DMDACreateNaturalVector() has already been called so dd->natural exists.

.keywords: distributed array, global to local, begin

.seealso: DMDAGlobalToNaturalBegin(), DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
*/
PetscErrorCode DMDAGlobalToNatural_Create(DM da)
{
  PetscErrorCode ierr;
  PetscInt       m,start,Nlocal;
  IS             from,to;
  Vec            global;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* The natural-ordering vector defines the target layout; it must exist first */
  if (!dd->natural) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ORDER,"Natural layout vector not yet created; cannot scatter into it");

  /* create the scatter context */
  ierr = VecGetLocalSize(dd->natural,&m);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(dd->natural,&start,NULL);CHKERRQ(ierr);

  /* 'to' indexes where each locally owned natural entry comes from in PETSc ordering */
  ierr = DMDAGetNatural_Private(da,&Nlocal,&to);CHKERRQ(ierr);
  /* Sanity check: the private routine's count must match the natural vector's local size */
  if (Nlocal != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal error: Nlocal %D local vector size %D",Nlocal,m);
  /* 'from' is simply this process's contiguous ownership range [start, start+m) */
  ierr = ISCreateStride(PetscObjectComm((PetscObject)da),m,start,1,&from);CHKERRQ(ierr);
  /* Temporary array-less template vector with the global layout, used only so
     VecScatterCreate() can see the source parallel layout; destroyed below */
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)da),dd->w,dd->Nlocal,PETSC_DETERMINE,0,&global);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,dd->natural,to,&dd->gton);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
127 
128 #undef __FUNCT__
129 #define __FUNCT__ "DMDAGlobalToNaturalBegin"
130 /*@
131    DMDAGlobalToNaturalBegin - Maps values from the global vector to a global vector
132    in the "natural" grid ordering. Must be followed by
133    DMDAGlobalToNaturalEnd() to complete the exchange.
134 
135    Neighbor-wise Collective on DMDA
136 
137    Input Parameters:
138 +  da - the distributed array context
139 .  g - the global vector
140 -  mode - one of INSERT_VALUES or ADD_VALUES
141 
142    Output Parameter:
143 .  l  - the natural ordering values
144 
145    Level: advanced
146 
147    Notes:
148    The global and natrual vectors used here need not be the same as those
149    obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
150    must have the same parallel data layout; they could, for example, be
151    obtained with VecDuplicate() from the DMDA originating vectors.
152 
153    You must call DMDACreateNaturalVector() before using this routine
154 
155 .keywords: distributed array, global to local, begin
156 
157 .seealso: DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
158           DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
159 
160 @*/
161 PetscErrorCode  DMDAGlobalToNaturalBegin(DM da,Vec g,InsertMode mode,Vec l)
162 {
163   PetscErrorCode ierr;
164   DM_DA          *dd = (DM_DA*)da->data;
165 
166   PetscFunctionBegin;
167   PetscValidHeaderSpecific(da,DM_CLASSID,1);
168   PetscValidHeaderSpecific(l,VEC_CLASSID,2);
169   PetscValidHeaderSpecific(g,VEC_CLASSID,4);
170   if (!dd->gton) {
171     /* create the scatter context */
172     ierr = DMDAGlobalToNatural_Create(da);CHKERRQ(ierr);
173   }
174   ierr = VecScatterBegin(dd->gton,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
175   PetscFunctionReturn(0);
176 }
177 
178 #undef __FUNCT__
179 #define __FUNCT__ "DMDAGlobalToNaturalEnd"
180 /*@
181    DMDAGlobalToNaturalEnd - Maps values from the global vector to a global vector
182    in the natural ordering. Must be preceeded by DMDAGlobalToNaturalBegin().
183 
184    Neighbor-wise Collective on DMDA
185 
186    Input Parameters:
187 +  da - the distributed array context
188 .  g - the global vector
189 -  mode - one of INSERT_VALUES or ADD_VALUES
190 
191    Output Parameter:
192 .  l  - the global values in the natural ordering
193 
194    Level: advanced
195 
196    Notes:
197    The global and local vectors used here need not be the same as those
198    obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
199    must have the same parallel data layout; they could, for example, be
200    obtained with VecDuplicate() from the DMDA originating vectors.
201 
202 .keywords: distributed array, global to local, end
203 
204 .seealso: DMDAGlobalToNaturalBegin(), DMLocalToGlobalBegin(), DMDACreate2d(),
205           DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
206 
207 @*/
208 PetscErrorCode  DMDAGlobalToNaturalEnd(DM da,Vec g,InsertMode mode,Vec l)
209 {
210   PetscErrorCode ierr;
211   DM_DA          *dd = (DM_DA*)da->data;
212 
213   PetscFunctionBegin;
214   PetscValidHeaderSpecific(da,DM_CLASSID,1);
215   PetscValidHeaderSpecific(l,VEC_CLASSID,2);
216   PetscValidHeaderSpecific(g,VEC_CLASSID,4);
217   ierr = VecScatterEnd(dd->gton,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
218   PetscFunctionReturn(0);
219 }
220 
221 #undef __FUNCT__
222 #define __FUNCT__ "DMDANaturalToGlobalBegin"
223 /*@
224    DMDANaturalToGlobalBegin - Maps values from a global vector in the "natural" ordering
225    to a global vector in the PETSc DMDA grid ordering. Must be followed by
226    DMDANaturalToGlobalEnd() to complete the exchange.
227 
228    Neighbor-wise Collective on DMDA
229 
230    Input Parameters:
231 +  da - the distributed array context
232 .  g - the global vector in a natural ordering
233 -  mode - one of INSERT_VALUES or ADD_VALUES
234 
235    Output Parameter:
236 .  l  - the values in the DMDA ordering
237 
238    Level: advanced
239 
240    Notes:
241    The global and natural vectors used here need not be the same as those
242    obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
243    must have the same parallel data layout; they could, for example, be
244    obtained with VecDuplicate() from the DMDA originating vectors.
245 
246 .keywords: distributed array, global to local, begin
247 
248 .seealso: DMDAGlobalToNaturalEnd(), DMDAGlobalToNaturalBegin(), DMLocalToGlobalBegin(), DMDACreate2d(),
249           DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
250 
251 @*/
252 PetscErrorCode  DMDANaturalToGlobalBegin(DM da,Vec g,InsertMode mode,Vec l)
253 {
254   PetscErrorCode ierr;
255   DM_DA          *dd = (DM_DA*)da->data;
256 
257   PetscFunctionBegin;
258   PetscValidHeaderSpecific(da,DM_CLASSID,1);
259   PetscValidHeaderSpecific(l,VEC_CLASSID,2);
260   PetscValidHeaderSpecific(g,VEC_CLASSID,4);
261   if (!dd->gton) {
262     /* create the scatter context */
263     ierr = DMDAGlobalToNatural_Create(da);CHKERRQ(ierr);
264   }
265   ierr = VecScatterBegin(dd->gton,g,l,mode,SCATTER_REVERSE);CHKERRQ(ierr);
266   PetscFunctionReturn(0);
267 }
268 
269 #undef __FUNCT__
270 #define __FUNCT__ "DMDANaturalToGlobalEnd"
271 /*@
272    DMDANaturalToGlobalEnd - Maps values from the natural ordering global vector
273    to a global vector in the PETSc DMDA ordering. Must be preceeded by DMDANaturalToGlobalBegin().
274 
275    Neighbor-wise Collective on DMDA
276 
277    Input Parameters:
278 +  da - the distributed array context
279 .  g - the global vector in a natural ordering
280 -  mode - one of INSERT_VALUES or ADD_VALUES
281 
282    Output Parameter:
283 .  l  - the global values in the PETSc DMDA ordering
284 
285    Level: intermediate
286 
287    Notes:
288    The global and local vectors used here need not be the same as those
289    obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
290    must have the same parallel data layout; they could, for example, be
291    obtained with VecDuplicate() from the DMDA originating vectors.
292 
293 .keywords: distributed array, global to local, end
294 
295 .seealso: DMDAGlobalToNaturalBegin(), DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
296           DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
297 
298 @*/
299 PetscErrorCode  DMDANaturalToGlobalEnd(DM da,Vec g,InsertMode mode,Vec l)
300 {
301   PetscErrorCode ierr;
302   DM_DA          *dd = (DM_DA*)da->data;
303 
304   PetscFunctionBegin;
305   PetscValidHeaderSpecific(da,DM_CLASSID,1);
306   PetscValidHeaderSpecific(l,VEC_CLASSID,2);
307   PetscValidHeaderSpecific(g,VEC_CLASSID,4);
308   ierr = VecScatterEnd(dd->gton,g,l,mode,SCATTER_REVERSE);CHKERRQ(ierr);
309   PetscFunctionReturn(0);
310 }
311 
312