xref: /petsc/src/dm/impls/da/dagtol.c (revision feff33ee0b5b037fa8f9f294dede656a2f85cc47)
1 /*
2   Code for manipulating distributed regular arrays in parallel.
3 */
4 
5 #include <petsc/private/dmdaimpl.h>    /*I   "petscdmda.h"   I*/
6 
7 PetscErrorCode  DMGlobalToLocalBegin_DA(DM da,Vec g,InsertMode mode,Vec l)
8 {
9   PetscErrorCode ierr;
10   DM_DA          *dd = (DM_DA*)da->data;
11 
12   PetscFunctionBegin;
13   PetscValidHeaderSpecific(da,DM_CLASSID,1);
14   PetscValidHeaderSpecific(g,VEC_CLASSID,2);
15   PetscValidHeaderSpecific(l,VEC_CLASSID,4);
16   ierr = VecScatterBegin(dd->gtol,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
17   PetscFunctionReturn(0);
18 }
19 
20 PetscErrorCode  DMGlobalToLocalEnd_DA(DM da,Vec g,InsertMode mode,Vec l)
21 {
22   PetscErrorCode ierr;
23   DM_DA          *dd = (DM_DA*)da->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(da,DM_CLASSID,1);
27   PetscValidHeaderSpecific(g,VEC_CLASSID,2);
28   PetscValidHeaderSpecific(l,VEC_CLASSID,4);
29   ierr = VecScatterEnd(dd->gtol,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
30   PetscFunctionReturn(0);
31 }
32 
33 PetscErrorCode  DMLocalToGlobalBegin_DA(DM da,Vec l,InsertMode mode,Vec g)
34 {
35   PetscErrorCode ierr;
36   DM_DA          *dd = (DM_DA*)da->data;
37 
38   PetscFunctionBegin;
39   PetscValidHeaderSpecific(da,DM_CLASSID,1);
40   PetscValidHeaderSpecific(l,VEC_CLASSID,2);
41   PetscValidHeaderSpecific(g,VEC_CLASSID,4);
42   if (mode == ADD_VALUES) {
43     ierr = VecScatterBegin(dd->gtol,l,g,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
44   } else if (mode == INSERT_VALUES) {
45     if (dd->bx != DM_BOUNDARY_GHOSTED && dd->bx != DM_BOUNDARY_NONE && dd->s > 0 && dd->m == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in x direction");
46     if (dd->bx != DM_BOUNDARY_GHOSTED && dd->by != DM_BOUNDARY_NONE && dd->s > 0 && dd->n == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in y direction");
47     if (dd->bx != DM_BOUNDARY_GHOSTED && dd->bz != DM_BOUNDARY_NONE && dd->s > 0 && dd->p == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in z direction");
48     ierr = VecScatterBegin(dd->gtol,l,g,INSERT_VALUES,SCATTER_REVERSE_LOCAL);CHKERRQ(ierr);
49   } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented");
50   PetscFunctionReturn(0);
51 }
52 
53 PetscErrorCode  DMLocalToGlobalEnd_DA(DM da,Vec l,InsertMode mode,Vec g)
54 {
55   PetscErrorCode ierr;
56   DM_DA          *dd = (DM_DA*)da->data;
57 
58   PetscFunctionBegin;
59   PetscValidHeaderSpecific(da,DM_CLASSID,1);
60   PetscValidHeaderSpecific(l,VEC_CLASSID,2);
61   PetscValidHeaderSpecific(g,VEC_CLASSID,4);
62   if (mode == ADD_VALUES) {
63     ierr = VecScatterEnd(dd->gtol,l,g,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
64   } else if (mode == INSERT_VALUES) {
65     ierr = VecScatterEnd(dd->gtol,l,g,INSERT_VALUES,SCATTER_REVERSE_LOCAL);CHKERRQ(ierr);
66   } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented");
67   PetscFunctionReturn(0);
68 }
69 
70 extern PetscErrorCode DMDAGetNatural_Private(DM,PetscInt*,IS*);
71 /*
72    DMDAGlobalToNatural_Create - Create the global to natural scatter object
73 
74    Collective on DMDA
75 
76    Input Parameter:
77 .  da - the distributed array context
78 
79    Level: developer
80 
81    Notes: This is an internal routine called by DMDAGlobalToNatural() to
82      create the scatter context.
83 
84 .keywords: distributed array, global to local, begin
85 
86 .seealso: DMDAGlobalToNaturalBegin(), DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
87           DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
88 */
PetscErrorCode DMDAGlobalToNatural_Create(DM da)
{
  PetscErrorCode ierr;
  PetscInt       m,start,Nlocal;
  IS             from,to;
  Vec            global;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* dd->natural supplies the target (natural-ordering) parallel layout; it must
     have been created (DMDACreateNaturalVector()) before the scatter is built */
  if (!dd->natural) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ORDER,"Natural layout vector not yet created; cannot scatter into it");

  /* create the scatter context */
  ierr = VecGetLocalSize(dd->natural,&m);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(dd->natural,&start,NULL);CHKERRQ(ierr);

  /* 'to' indexes this process's entries of the natural ordering; sanity-check
     that its length agrees with the natural vector's local size */
  ierr = DMDAGetNatural_Private(da,&Nlocal,&to);CHKERRQ(ierr);
  if (Nlocal != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal error: Nlocal %D local vector size %D",Nlocal,m);
  /* 'from' is simply the contiguous ownership range [start, start+m) */
  ierr = ISCreateStride(PetscObjectComm((PetscObject)da),m,start,1,&from);CHKERRQ(ierr);
  /* temporary vector (no array attached, hence the 0) describing the DMDA global
     layout (dd->Nlocal entries locally); used only as the scatter's source template */
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)da),dd->w,dd->Nlocal,PETSC_DETERMINE,0,&global);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,dd->natural,to,&dd->gton);CHKERRQ(ierr);
  /* the scatter keeps what it needs; release the template vector and index sets */
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
115 
116 /*@
117    DMDAGlobalToNaturalBegin - Maps values from the global vector to a global vector
118    in the "natural" grid ordering. Must be followed by
119    DMDAGlobalToNaturalEnd() to complete the exchange.
120 
121    Neighbor-wise Collective on DMDA
122 
123    Input Parameters:
124 +  da - the distributed array context
125 .  g - the global vector
126 -  mode - one of INSERT_VALUES or ADD_VALUES
127 
128    Output Parameter:
129 .  n  - the natural ordering values
130 
131    Level: advanced
132 
133    Notes:
134    The global and natural vectors used here need not be the same as those
135    obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
136    must have the same parallel data layout; they could, for example, be
137    obtained with VecDuplicate() from the DMDA originating vectors.
138 
139    You must call DMDACreateNaturalVector() before using this routine
140 
141 .keywords: distributed array, global to local, begin
142 
143 .seealso: DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
144           DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
145 
146 @*/
147 PetscErrorCode  DMDAGlobalToNaturalBegin(DM da,Vec g,InsertMode mode,Vec n)
148 {
149   PetscErrorCode ierr;
150   DM_DA          *dd = (DM_DA*)da->data;
151 
152   PetscFunctionBegin;
153   PetscValidHeaderSpecific(da,DM_CLASSID,1);
154   PetscValidHeaderSpecific(g,VEC_CLASSID,2);
155   PetscValidHeaderSpecific(n,VEC_CLASSID,4);
156   if (!dd->gton) {
157     /* create the scatter context */
158     ierr = DMDAGlobalToNatural_Create(da);CHKERRQ(ierr);
159   }
160   ierr = VecScatterBegin(dd->gton,g,n,mode,SCATTER_FORWARD);CHKERRQ(ierr);
161   PetscFunctionReturn(0);
162 }
163 
164 /*@
165    DMDAGlobalToNaturalEnd - Maps values from the global vector to a global vector
166    in the natural ordering. Must be preceded by DMDAGlobalToNaturalBegin().
167 
168    Neighbor-wise Collective on DMDA
169 
170    Input Parameters:
171 +  da - the distributed array context
172 .  g - the global vector
173 -  mode - one of INSERT_VALUES or ADD_VALUES
174 
175    Output Parameter:
176 .  n  - the global values in the natural ordering
177 
178    Level: advanced
179 
180    Notes:
181    The global and local vectors used here need not be the same as those
182    obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
183    must have the same parallel data layout; they could, for example, be
184    obtained with VecDuplicate() from the DMDA originating vectors.
185 
186 .keywords: distributed array, global to local, end
187 
188 .seealso: DMDAGlobalToNaturalBegin(), DMLocalToGlobalBegin(), DMDACreate2d(),
189           DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
190 
191 @*/
192 PetscErrorCode  DMDAGlobalToNaturalEnd(DM da,Vec g,InsertMode mode,Vec n)
193 {
194   PetscErrorCode ierr;
195   DM_DA          *dd = (DM_DA*)da->data;
196 
197   PetscFunctionBegin;
198   PetscValidHeaderSpecific(da,DM_CLASSID,1);
199   PetscValidHeaderSpecific(g,VEC_CLASSID,2);
200   PetscValidHeaderSpecific(n,VEC_CLASSID,4);
201   ierr = VecScatterEnd(dd->gton,g,n,mode,SCATTER_FORWARD);CHKERRQ(ierr);
202   PetscFunctionReturn(0);
203 }
204 
205 /*@
206    DMDANaturalToGlobalBegin - Maps values from a global vector in the "natural" ordering
207    to a global vector in the PETSc DMDA grid ordering. Must be followed by
208    DMDANaturalToGlobalEnd() to complete the exchange.
209 
210    Neighbor-wise Collective on DMDA
211 
212    Input Parameters:
213 +  da - the distributed array context
214 .  n - the global vector in a natural ordering
215 -  mode - one of INSERT_VALUES or ADD_VALUES
216 
217    Output Parameter:
218 .  g  - the values in the DMDA ordering
219 
220    Level: advanced
221 
222    Notes:
223    The global and natural vectors used here need not be the same as those
224    obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
225    must have the same parallel data layout; they could, for example, be
226    obtained with VecDuplicate() from the DMDA originating vectors.
227 
228 .keywords: distributed array, global to local, begin
229 
230 .seealso: DMDAGlobalToNaturalEnd(), DMDAGlobalToNaturalBegin(), DMLocalToGlobalBegin(), DMDACreate2d(),
231           DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
232 
233 @*/
234 PetscErrorCode  DMDANaturalToGlobalBegin(DM da,Vec n,InsertMode mode,Vec g)
235 {
236   PetscErrorCode ierr;
237   DM_DA          *dd = (DM_DA*)da->data;
238 
239   PetscFunctionBegin;
240   PetscValidHeaderSpecific(da,DM_CLASSID,1);
241   PetscValidHeaderSpecific(n,VEC_CLASSID,2);
242   PetscValidHeaderSpecific(g,VEC_CLASSID,4);
243   if (!dd->gton) {
244     /* create the scatter context */
245     ierr = DMDAGlobalToNatural_Create(da);CHKERRQ(ierr);
246   }
247   ierr = VecScatterBegin(dd->gton,n,g,mode,SCATTER_REVERSE);CHKERRQ(ierr);
248   PetscFunctionReturn(0);
249 }
250 
251 /*@
252    DMDANaturalToGlobalEnd - Maps values from the natural ordering global vector
253    to a global vector in the PETSc DMDA ordering. Must be preceded by DMDANaturalToGlobalBegin().
254 
255    Neighbor-wise Collective on DMDA
256 
257    Input Parameters:
258 +  da - the distributed array context
259 .  n - the global vector in a natural ordering
260 -  mode - one of INSERT_VALUES or ADD_VALUES
261 
262    Output Parameter:
263 .  g  - the global values in the PETSc DMDA ordering
264 
265    Level: advanced
266 
267    Notes:
268    The global and local vectors used here need not be the same as those
269    obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
270    must have the same parallel data layout; they could, for example, be
271    obtained with VecDuplicate() from the DMDA originating vectors.
272 
273 .keywords: distributed array, global to local, end
274 
275 .seealso: DMDAGlobalToNaturalBegin(), DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
276           DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
277 
278 @*/
279 PetscErrorCode  DMDANaturalToGlobalEnd(DM da,Vec n,InsertMode mode,Vec g)
280 {
281   PetscErrorCode ierr;
282   DM_DA          *dd = (DM_DA*)da->data;
283 
284   PetscFunctionBegin;
285   PetscValidHeaderSpecific(da,DM_CLASSID,1);
286   PetscValidHeaderSpecific(n,VEC_CLASSID,2);
287   PetscValidHeaderSpecific(g,VEC_CLASSID,4);
288   ierr = VecScatterEnd(dd->gton,n,g,mode,SCATTER_REVERSE);CHKERRQ(ierr);
289   PetscFunctionReturn(0);
290 }
291