xref: /petsc/src/dm/impls/da/dagtol.c (revision 609bdbee21ea3be08735c64dbe00a9ab27759925)
1 
2 /*
3   Code for manipulating distributed regular arrays in parallel.
4 */
5 
6 #include <petsc/private/dmdaimpl.h>    /*I   "petscdmda.h"   I*/
7 
8 PetscErrorCode  DMGlobalToLocalBegin_DA(DM da,Vec g,InsertMode mode,Vec l)
9 {
10   PetscErrorCode ierr;
11   DM_DA          *dd = (DM_DA*)da->data;
12 
13   PetscFunctionBegin;
14   PetscValidHeaderSpecific(da,DM_CLASSID,1);
15   PetscValidHeaderSpecific(g,VEC_CLASSID,2);
16   PetscValidHeaderSpecific(l,VEC_CLASSID,4);
17   ierr = VecScatterBegin(dd->gtol,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
18   PetscFunctionReturn(0);
19 }
20 
21 
22 PetscErrorCode  DMGlobalToLocalEnd_DA(DM da,Vec g,InsertMode mode,Vec l)
23 {
24   PetscErrorCode ierr;
25   DM_DA          *dd = (DM_DA*)da->data;
26 
27   PetscFunctionBegin;
28   PetscValidHeaderSpecific(da,DM_CLASSID,1);
29   PetscValidHeaderSpecific(g,VEC_CLASSID,2);
30   PetscValidHeaderSpecific(l,VEC_CLASSID,4);
31   ierr = VecScatterEnd(dd->gtol,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
32   PetscFunctionReturn(0);
33 }
34 
35 PetscErrorCode  DMLocalToGlobalBegin_DA(DM da,Vec l,InsertMode mode,Vec g)
36 {
37   PetscErrorCode ierr;
38   DM_DA          *dd = (DM_DA*)da->data;
39 
40   PetscFunctionBegin;
41   PetscValidHeaderSpecific(da,DM_CLASSID,1);
42   PetscValidHeaderSpecific(l,VEC_CLASSID,2);
43   PetscValidHeaderSpecific(g,VEC_CLASSID,3);
44   if (mode == ADD_VALUES) {
45     ierr = VecScatterBegin(dd->gtol,l,g,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
46   } else if (mode == INSERT_VALUES) {
47     if (dd->bx != DM_BOUNDARY_GHOSTED && dd->bx != DM_BOUNDARY_NONE && dd->s > 0 && dd->m == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in x direction");
48     if (dd->bx != DM_BOUNDARY_GHOSTED && dd->by != DM_BOUNDARY_NONE && dd->s > 0 && dd->n == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in y direction");
49     if (dd->bx != DM_BOUNDARY_GHOSTED && dd->bz != DM_BOUNDARY_NONE && dd->s > 0 && dd->p == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in z direction");
50     ierr = VecScatterBegin(dd->gtol,l,g,INSERT_VALUES,SCATTER_REVERSE_LOCAL);CHKERRQ(ierr);
51   } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented");
52   PetscFunctionReturn(0);
53 }
54 
55 PetscErrorCode  DMLocalToGlobalEnd_DA(DM da,Vec l,InsertMode mode,Vec g)
56 {
57   PetscErrorCode ierr;
58   DM_DA          *dd = (DM_DA*)da->data;
59 
60   PetscFunctionBegin;
61   PetscValidHeaderSpecific(da,DM_CLASSID,1);
62   PetscValidHeaderSpecific(l,VEC_CLASSID,2);
63   PetscValidHeaderSpecific(g,VEC_CLASSID,3);
64   if (mode == ADD_VALUES) {
65     ierr = VecScatterEnd(dd->gtol,l,g,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
66   } else if (mode == INSERT_VALUES) {
67     ierr = VecScatterEnd(dd->gtol,l,g,INSERT_VALUES,SCATTER_REVERSE_LOCAL);CHKERRQ(ierr);
68   } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented");
69   PetscFunctionReturn(0);
70 }
71 
72 extern PetscErrorCode DMDAGetNatural_Private(DM,PetscInt*,IS*);
73 /*
74    DMDAGlobalToNatural_Create - Create the global to natural scatter object
75 
76    Collective on DMDA
77 
78    Input Parameter:
79 .  da - the distributed array context
80 
81    Level: developer
82 
83    Notes: This is an internal routine called by DMDAGlobalToNatural() to
84      create the scatter context.
85 
86 .keywords: distributed array, global to local, begin
87 
88 .seealso: DMDAGlobalToNaturalBegin(), DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
89           DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
90 */
91 PetscErrorCode DMDAGlobalToNatural_Create(DM da)
92 {
93   PetscErrorCode ierr;
94   PetscInt       m,start,Nlocal;
95   IS             from,to;
96   Vec            global;
97   DM_DA          *dd = (DM_DA*)da->data;
98 
99   PetscFunctionBegin;
100   PetscValidHeaderSpecific(da,DM_CLASSID,1);
101   if (!dd->natural) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ORDER,"Natural layout vector not yet created; cannot scatter into it");
102 
103   /* create the scatter context */
104   ierr = VecGetLocalSize(dd->natural,&m);CHKERRQ(ierr);
105   ierr = VecGetOwnershipRange(dd->natural,&start,NULL);CHKERRQ(ierr);
106 
107   ierr = DMDAGetNatural_Private(da,&Nlocal,&to);CHKERRQ(ierr);
108   if (Nlocal != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal error: Nlocal %D local vector size %D",Nlocal,m);
109   ierr = ISCreateStride(PetscObjectComm((PetscObject)da),m,start,1,&from);CHKERRQ(ierr);
110   ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)da),dd->w,dd->Nlocal,PETSC_DETERMINE,0,&global);CHKERRQ(ierr);
111   ierr = VecScatterCreate(global,from,dd->natural,to,&dd->gton);CHKERRQ(ierr);
112   ierr = VecDestroy(&global);CHKERRQ(ierr);
113   ierr = ISDestroy(&from);CHKERRQ(ierr);
114   ierr = ISDestroy(&to);CHKERRQ(ierr);
115   PetscFunctionReturn(0);
116 }
117 
118 /*@
119    DMDAGlobalToNaturalBegin - Maps values from the global vector to a global vector
120    in the "natural" grid ordering. Must be followed by
121    DMDAGlobalToNaturalEnd() to complete the exchange.
122 
123    Neighbor-wise Collective on DMDA
124 
125    Input Parameters:
126 +  da - the distributed array context
127 .  g - the global vector
128 -  mode - one of INSERT_VALUES or ADD_VALUES
129 
130    Output Parameter:
131 .  l  - the natural ordering values
132 
133    Level: advanced
134 
135    Notes:
136    The global and natural vectors used here need not be the same as those
137    obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
138    must have the same parallel data layout; they could, for example, be
139    obtained with VecDuplicate() from the DMDA originating vectors.
140 
141    You must call DMDACreateNaturalVector() before using this routine
142 
143 .keywords: distributed array, global to local, begin
144 
145 .seealso: DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
146           DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
147 
148 @*/
149 PetscErrorCode  DMDAGlobalToNaturalBegin(DM da,Vec g,InsertMode mode,Vec l)
150 {
151   PetscErrorCode ierr;
152   DM_DA          *dd = (DM_DA*)da->data;
153 
154   PetscFunctionBegin;
155   PetscValidHeaderSpecific(da,DM_CLASSID,1);
156   PetscValidHeaderSpecific(l,VEC_CLASSID,2);
157   PetscValidHeaderSpecific(g,VEC_CLASSID,4);
158   if (!dd->gton) {
159     /* create the scatter context */
160     ierr = DMDAGlobalToNatural_Create(da);CHKERRQ(ierr);
161   }
162   ierr = VecScatterBegin(dd->gton,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
163   PetscFunctionReturn(0);
164 }
165 
166 /*@
167    DMDAGlobalToNaturalEnd - Maps values from the global vector to a global vector
168    in the natural ordering. Must be preceded by DMDAGlobalToNaturalBegin().
169 
170    Neighbor-wise Collective on DMDA
171 
172    Input Parameters:
173 +  da - the distributed array context
174 .  g - the global vector
175 -  mode - one of INSERT_VALUES or ADD_VALUES
176 
177    Output Parameter:
178 .  l  - the global values in the natural ordering
179 
180    Level: advanced
181 
182    Notes:
183    The global and local vectors used here need not be the same as those
184    obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
185    must have the same parallel data layout; they could, for example, be
186    obtained with VecDuplicate() from the DMDA originating vectors.
187 
188 .keywords: distributed array, global to local, end
189 
190 .seealso: DMDAGlobalToNaturalBegin(), DMLocalToGlobalBegin(), DMDACreate2d(),
191           DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
192 
193 @*/
194 PetscErrorCode  DMDAGlobalToNaturalEnd(DM da,Vec g,InsertMode mode,Vec l)
195 {
196   PetscErrorCode ierr;
197   DM_DA          *dd = (DM_DA*)da->data;
198 
199   PetscFunctionBegin;
200   PetscValidHeaderSpecific(da,DM_CLASSID,1);
201   PetscValidHeaderSpecific(l,VEC_CLASSID,2);
202   PetscValidHeaderSpecific(g,VEC_CLASSID,4);
203   ierr = VecScatterEnd(dd->gton,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
204   PetscFunctionReturn(0);
205 }
206 
207 /*@
208    DMDANaturalToGlobalBegin - Maps values from a global vector in the "natural" ordering
209    to a global vector in the PETSc DMDA grid ordering. Must be followed by
210    DMDANaturalToGlobalEnd() to complete the exchange.
211 
212    Neighbor-wise Collective on DMDA
213 
214    Input Parameters:
215 +  da - the distributed array context
216 .  g - the global vector in a natural ordering
217 -  mode - one of INSERT_VALUES or ADD_VALUES
218 
219    Output Parameter:
220 .  l  - the values in the DMDA ordering
221 
222    Level: advanced
223 
224    Notes:
225    The global and natural vectors used here need not be the same as those
226    obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
227    must have the same parallel data layout; they could, for example, be
228    obtained with VecDuplicate() from the DMDA originating vectors.
229 
230 .keywords: distributed array, global to local, begin
231 
232 .seealso: DMDAGlobalToNaturalEnd(), DMDAGlobalToNaturalBegin(), DMLocalToGlobalBegin(), DMDACreate2d(),
233           DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
234 
235 @*/
236 PetscErrorCode  DMDANaturalToGlobalBegin(DM da,Vec g,InsertMode mode,Vec l)
237 {
238   PetscErrorCode ierr;
239   DM_DA          *dd = (DM_DA*)da->data;
240 
241   PetscFunctionBegin;
242   PetscValidHeaderSpecific(da,DM_CLASSID,1);
243   PetscValidHeaderSpecific(l,VEC_CLASSID,2);
244   PetscValidHeaderSpecific(g,VEC_CLASSID,4);
245   if (!dd->gton) {
246     /* create the scatter context */
247     ierr = DMDAGlobalToNatural_Create(da);CHKERRQ(ierr);
248   }
249   ierr = VecScatterBegin(dd->gton,g,l,mode,SCATTER_REVERSE);CHKERRQ(ierr);
250   PetscFunctionReturn(0);
251 }
252 
253 /*@
254    DMDANaturalToGlobalEnd - Maps values from the natural ordering global vector
255    to a global vector in the PETSc DMDA ordering. Must be preceded by DMDANaturalToGlobalBegin().
256 
257    Neighbor-wise Collective on DMDA
258 
259    Input Parameters:
260 +  da - the distributed array context
261 .  g - the global vector in a natural ordering
262 -  mode - one of INSERT_VALUES or ADD_VALUES
263 
264    Output Parameter:
265 .  l  - the global values in the PETSc DMDA ordering
266 
267    Level: intermediate
268 
269    Notes:
270    The global and local vectors used here need not be the same as those
271    obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
272    must have the same parallel data layout; they could, for example, be
273    obtained with VecDuplicate() from the DMDA originating vectors.
274 
275 .keywords: distributed array, global to local, end
276 
277 .seealso: DMDAGlobalToNaturalBegin(), DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
278           DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
279 
280 @*/
281 PetscErrorCode  DMDANaturalToGlobalEnd(DM da,Vec g,InsertMode mode,Vec l)
282 {
283   PetscErrorCode ierr;
284   DM_DA          *dd = (DM_DA*)da->data;
285 
286   PetscFunctionBegin;
287   PetscValidHeaderSpecific(da,DM_CLASSID,1);
288   PetscValidHeaderSpecific(l,VEC_CLASSID,2);
289   PetscValidHeaderSpecific(g,VEC_CLASSID,4);
290   ierr = VecScatterEnd(dd->gton,g,l,mode,SCATTER_REVERSE);CHKERRQ(ierr);
291   PetscFunctionReturn(0);
292 }
293 
294