xref: /petsc/src/dm/impls/da/dagtol.c (revision 487a658c8b32ba712a1dc8280daad2fd70c1dcd9)
1 /*
2   Code for manipulating distributed regular arrays in parallel.
3 */
4 
5 #include <petsc/private/dmdaimpl.h>    /*I   "petscdmda.h"   I*/
6 
7 PetscErrorCode  DMGlobalToLocalBegin_DA(DM da,Vec g,InsertMode mode,Vec l)
8 {
9   PetscErrorCode ierr;
10   DM_DA          *dd = (DM_DA*)da->data;
11 
12   PetscFunctionBegin;
13   PetscValidHeaderSpecific(da,DM_CLASSID,1);
14   PetscValidHeaderSpecific(g,VEC_CLASSID,2);
15   PetscValidHeaderSpecific(l,VEC_CLASSID,4);
16   ierr = VecScatterBegin(dd->gtol,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
17   PetscFunctionReturn(0);
18 }
19 
20 PetscErrorCode  DMGlobalToLocalEnd_DA(DM da,Vec g,InsertMode mode,Vec l)
21 {
22   PetscErrorCode ierr;
23   DM_DA          *dd = (DM_DA*)da->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(da,DM_CLASSID,1);
27   PetscValidHeaderSpecific(g,VEC_CLASSID,2);
28   PetscValidHeaderSpecific(l,VEC_CLASSID,4);
29   ierr = VecScatterEnd(dd->gtol,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
30   PetscFunctionReturn(0);
31 }
32 
33 PetscErrorCode  DMLocalToGlobalBegin_DA(DM da,Vec l,InsertMode mode,Vec g)
34 {
35   PetscErrorCode ierr;
36   DM_DA          *dd = (DM_DA*)da->data;
37 
38   PetscFunctionBegin;
39   PetscValidHeaderSpecific(da,DM_CLASSID,1);
40   PetscValidHeaderSpecific(l,VEC_CLASSID,2);
41   PetscValidHeaderSpecific(g,VEC_CLASSID,4);
42   if (mode == ADD_VALUES) {
43     ierr = VecScatterBegin(dd->gtol,l,g,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
44   } else if (mode == INSERT_VALUES) {
45     if (dd->bx != DM_BOUNDARY_GHOSTED && dd->bx != DM_BOUNDARY_NONE && dd->s > 0 && dd->m == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in x direction");
46     if (dd->bx != DM_BOUNDARY_GHOSTED && dd->by != DM_BOUNDARY_NONE && dd->s > 0 && dd->n == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in y direction");
47     if (dd->bx != DM_BOUNDARY_GHOSTED && dd->bz != DM_BOUNDARY_NONE && dd->s > 0 && dd->p == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in z direction");
48     ierr = VecScatterBegin(dd->gtol,l,g,INSERT_VALUES,SCATTER_REVERSE_LOCAL);CHKERRQ(ierr);
49   } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented");
50   PetscFunctionReturn(0);
51 }
52 
53 PetscErrorCode  DMLocalToGlobalEnd_DA(DM da,Vec l,InsertMode mode,Vec g)
54 {
55   PetscErrorCode ierr;
56   DM_DA          *dd = (DM_DA*)da->data;
57 
58   PetscFunctionBegin;
59   PetscValidHeaderSpecific(da,DM_CLASSID,1);
60   PetscValidHeaderSpecific(l,VEC_CLASSID,2);
61   PetscValidHeaderSpecific(g,VEC_CLASSID,4);
62   if (mode == ADD_VALUES) {
63     ierr = VecScatterEnd(dd->gtol,l,g,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
64   } else if (mode == INSERT_VALUES) {
65     ierr = VecScatterEnd(dd->gtol,l,g,INSERT_VALUES,SCATTER_REVERSE_LOCAL);CHKERRQ(ierr);
66   } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented");
67   PetscFunctionReturn(0);
68 }
69 
70 extern PetscErrorCode DMDAGetNatural_Private(DM,PetscInt*,IS*);
71 /*
72    DMDAGlobalToNatural_Create - Create the global to natural scatter object
73 
74    Collective on DMDA
75 
76    Input Parameter:
77 .  da - the distributed array context
78 
79    Level: developer
80 
81    Notes:
82     This is an internal routine called by DMDAGlobalToNatural() to
83      create the scatter context.
84 
85 .keywords: distributed array, global to local, begin
86 
87 .seealso: DMDAGlobalToNaturalBegin(), DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
88           DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
89 */
90 PetscErrorCode DMDAGlobalToNatural_Create(DM da)
91 {
92   PetscErrorCode ierr;
93   PetscInt       m,start,Nlocal;
94   IS             from,to;
95   Vec            global;
96   DM_DA          *dd = (DM_DA*)da->data;
97 
98   PetscFunctionBegin;
99   PetscValidHeaderSpecific(da,DM_CLASSID,1);
100   if (!dd->natural) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ORDER,"Natural layout vector not yet created; cannot scatter into it");
101 
102   /* create the scatter context */
103   ierr = VecGetLocalSize(dd->natural,&m);CHKERRQ(ierr);
104   ierr = VecGetOwnershipRange(dd->natural,&start,NULL);CHKERRQ(ierr);
105 
106   ierr = DMDAGetNatural_Private(da,&Nlocal,&to);CHKERRQ(ierr);
107   if (Nlocal != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal error: Nlocal %D local vector size %D",Nlocal,m);
108   ierr = ISCreateStride(PetscObjectComm((PetscObject)da),m,start,1,&from);CHKERRQ(ierr);
109   ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)da),dd->w,dd->Nlocal,PETSC_DETERMINE,0,&global);CHKERRQ(ierr);
110   ierr = VecScatterCreate(global,from,dd->natural,to,&dd->gton);CHKERRQ(ierr);
111   ierr = VecDestroy(&global);CHKERRQ(ierr);
112   ierr = ISDestroy(&from);CHKERRQ(ierr);
113   ierr = ISDestroy(&to);CHKERRQ(ierr);
114   PetscFunctionReturn(0);
115 }
116 
117 /*@
118    DMDAGlobalToNaturalBegin - Maps values from the global vector to a global vector
119    in the "natural" grid ordering. Must be followed by
120    DMDAGlobalToNaturalEnd() to complete the exchange.
121 
122    Neighbor-wise Collective on DMDA
123 
124    Input Parameters:
125 +  da - the distributed array context
126 .  g - the global vector
127 -  mode - one of INSERT_VALUES or ADD_VALUES
128 
129    Output Parameter:
130 .  n  - the natural ordering values
131 
132    Level: advanced
133 
134    Notes:
135    The global and natural vectors used here need not be the same as those
136    obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
137    must have the same parallel data layout; they could, for example, be
138    obtained with VecDuplicate() from the DMDA originating vectors.
139 
140    You must call DMDACreateNaturalVector() before using this routine
141 
142 .keywords: distributed array, global to local, begin
143 
144 .seealso: DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
145           DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
146 
147 @*/
148 PetscErrorCode  DMDAGlobalToNaturalBegin(DM da,Vec g,InsertMode mode,Vec n)
149 {
150   PetscErrorCode ierr;
151   DM_DA          *dd = (DM_DA*)da->data;
152 
153   PetscFunctionBegin;
154   PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
155   PetscValidHeaderSpecific(g,VEC_CLASSID,2);
156   PetscValidHeaderSpecific(n,VEC_CLASSID,4);
157   if (!dd->gton) {
158     /* create the scatter context */
159     ierr = DMDAGlobalToNatural_Create(da);CHKERRQ(ierr);
160   }
161   ierr = VecScatterBegin(dd->gton,g,n,mode,SCATTER_FORWARD);CHKERRQ(ierr);
162   PetscFunctionReturn(0);
163 }
164 
165 /*@
166    DMDAGlobalToNaturalEnd - Maps values from the global vector to a global vector
167    in the natural ordering. Must be preceded by DMDAGlobalToNaturalBegin().
168 
169    Neighbor-wise Collective on DMDA
170 
171    Input Parameters:
172 +  da - the distributed array context
173 .  g - the global vector
174 -  mode - one of INSERT_VALUES or ADD_VALUES
175 
176    Output Parameter:
177 .  n  - the global values in the natural ordering
178 
179    Level: advanced
180 
181    Notes:
182    The global and local vectors used here need not be the same as those
183    obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
184    must have the same parallel data layout; they could, for example, be
185    obtained with VecDuplicate() from the DMDA originating vectors.
186 
187 .keywords: distributed array, global to local, end
188 
189 .seealso: DMDAGlobalToNaturalBegin(), DMLocalToGlobalBegin(), DMDACreate2d(),
190           DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
191 
192 @*/
193 PetscErrorCode  DMDAGlobalToNaturalEnd(DM da,Vec g,InsertMode mode,Vec n)
194 {
195   PetscErrorCode ierr;
196   DM_DA          *dd = (DM_DA*)da->data;
197 
198   PetscFunctionBegin;
199   PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
200   PetscValidHeaderSpecific(g,VEC_CLASSID,2);
201   PetscValidHeaderSpecific(n,VEC_CLASSID,4);
202   ierr = VecScatterEnd(dd->gton,g,n,mode,SCATTER_FORWARD);CHKERRQ(ierr);
203   PetscFunctionReturn(0);
204 }
205 
206 /*@
207    DMDANaturalToGlobalBegin - Maps values from a global vector in the "natural" ordering
208    to a global vector in the PETSc DMDA grid ordering. Must be followed by
209    DMDANaturalToGlobalEnd() to complete the exchange.
210 
211    Neighbor-wise Collective on DMDA
212 
213    Input Parameters:
214 +  da - the distributed array context
215 .  n - the global vector in a natural ordering
216 -  mode - one of INSERT_VALUES or ADD_VALUES
217 
218    Output Parameter:
219 .  g  - the values in the DMDA ordering
220 
221    Level: advanced
222 
223    Notes:
224    The global and natural vectors used here need not be the same as those
225    obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
226    must have the same parallel data layout; they could, for example, be
227    obtained with VecDuplicate() from the DMDA originating vectors.
228 
229 .keywords: distributed array, global to local, begin
230 
231 .seealso: DMDAGlobalToNaturalEnd(), DMDAGlobalToNaturalBegin(), DMLocalToGlobalBegin(), DMDACreate2d(),
232           DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
233 
234 @*/
235 PetscErrorCode  DMDANaturalToGlobalBegin(DM da,Vec n,InsertMode mode,Vec g)
236 {
237   PetscErrorCode ierr;
238   DM_DA          *dd = (DM_DA*)da->data;
239 
240   PetscFunctionBegin;
241   PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
242   PetscValidHeaderSpecific(n,VEC_CLASSID,2);
243   PetscValidHeaderSpecific(g,VEC_CLASSID,4);
244   if (!dd->gton) {
245     /* create the scatter context */
246     ierr = DMDAGlobalToNatural_Create(da);CHKERRQ(ierr);
247   }
248   ierr = VecScatterBegin(dd->gton,n,g,mode,SCATTER_REVERSE);CHKERRQ(ierr);
249   PetscFunctionReturn(0);
250 }
251 
252 /*@
253    DMDANaturalToGlobalEnd - Maps values from the natural ordering global vector
254    to a global vector in the PETSc DMDA ordering. Must be preceded by DMDANaturalToGlobalBegin().
255 
256    Neighbor-wise Collective on DMDA
257 
258    Input Parameters:
259 +  da - the distributed array context
260 .  n - the global vector in a natural ordering
261 -  mode - one of INSERT_VALUES or ADD_VALUES
262 
263    Output Parameter:
264 .  g  - the global values in the PETSc DMDA ordering
265 
266    Level: advanced
267 
268    Notes:
269    The global and local vectors used here need not be the same as those
270    obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
271    must have the same parallel data layout; they could, for example, be
272    obtained with VecDuplicate() from the DMDA originating vectors.
273 
274 .keywords: distributed array, global to local, end
275 
276 .seealso: DMDAGlobalToNaturalBegin(), DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
277           DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
278 
279 @*/
280 PetscErrorCode  DMDANaturalToGlobalEnd(DM da,Vec n,InsertMode mode,Vec g)
281 {
282   PetscErrorCode ierr;
283   DM_DA          *dd = (DM_DA*)da->data;
284 
285   PetscFunctionBegin;
286   PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
287   PetscValidHeaderSpecific(n,VEC_CLASSID,2);
288   PetscValidHeaderSpecific(g,VEC_CLASSID,4);
289   ierr = VecScatterEnd(dd->gton,n,g,mode,SCATTER_REVERSE);CHKERRQ(ierr);
290   PetscFunctionReturn(0);
291 }
292