xref: /petsc/src/dm/impls/da/dagtol.c (revision a1cb98fac0cdf0eb4d3e8a0c8b58f3fe8f800bc6)
1 /*
2   Code for manipulating distributed regular arrays in parallel.
3 */
4 
5 #include <petsc/private/dmdaimpl.h> /*I   "petscdmda.h"   I*/
6 
/* DMGlobalToLocalBegin_DA - DMDA implementation of DMGlobalToLocalBegin(); starts the
   global-to-local scatter that fills the ghosted local vector from the global vector.
   Must be matched by DMGlobalToLocalEnd_DA() with identical arguments. */
PetscErrorCode DMGlobalToLocalBegin_DA(DM da, Vec g, InsertMode mode, Vec l)
{
  DM_DA *dd = (DM_DA *)da->data; /* DMDA-private data; holds the gtol scatter context */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da, DM_CLASSID, 1);
  PetscValidHeaderSpecific(g, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(l, VEC_CLASSID, 4);
  /* forward direction of the global->local scatter; completion happens in *End_DA */
  PetscCall(VecScatterBegin(dd->gtol, g, l, mode, SCATTER_FORWARD));
  PetscFunctionReturn(0);
}
18 
/* DMGlobalToLocalEnd_DA - DMDA implementation of DMGlobalToLocalEnd(); completes the
   scatter started by DMGlobalToLocalBegin_DA().  Arguments must match the Begin call. */
PetscErrorCode DMGlobalToLocalEnd_DA(DM da, Vec g, InsertMode mode, Vec l)
{
  DM_DA *dd = (DM_DA *)da->data; /* DMDA-private data; holds the gtol scatter context */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da, DM_CLASSID, 1);
  PetscValidHeaderSpecific(g, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(l, VEC_CLASSID, 4);
  PetscCall(VecScatterEnd(dd->gtol, g, l, mode, SCATTER_FORWARD));
  PetscFunctionReturn(0);
}
30 
31 PetscErrorCode DMLocalToGlobalBegin_DA(DM da, Vec l, InsertMode mode, Vec g)
32 {
33   DM_DA *dd = (DM_DA *)da->data;
34 
35   PetscFunctionBegin;
36   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
37   PetscValidHeaderSpecific(l, VEC_CLASSID, 2);
38   PetscValidHeaderSpecific(g, VEC_CLASSID, 4);
39   if (mode == ADD_VALUES) {
40     PetscCall(VecScatterBegin(dd->gtol, l, g, ADD_VALUES, SCATTER_REVERSE));
41   } else if (mode == INSERT_VALUES) {
42     PetscCheck(dd->bx == DM_BOUNDARY_GHOSTED || dd->bx == DM_BOUNDARY_NONE || dd->s <= 0 || dd->m != 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Available only for boundary none or with parallelism in x direction");
43     PetscCheck(dd->bx == DM_BOUNDARY_GHOSTED || dd->by == DM_BOUNDARY_NONE || dd->s <= 0 || dd->n != 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Available only for boundary none or with parallelism in y direction");
44     PetscCheck(dd->bx == DM_BOUNDARY_GHOSTED || dd->bz == DM_BOUNDARY_NONE || dd->s <= 0 || dd->p != 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Available only for boundary none or with parallelism in z direction");
45     PetscCall(VecScatterBegin(dd->gtol, l, g, INSERT_VALUES, SCATTER_REVERSE_LOCAL));
46   } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented");
47   PetscFunctionReturn(0);
48 }
49 
/* DMLocalToGlobalEnd_DA - DMDA implementation of DMLocalToGlobalEnd(); completes the
   reverse scatter started by DMLocalToGlobalBegin_DA().  The mode must match the
   Begin call; the boundary-compatibility checks for INSERT_VALUES were already
   performed there and are not repeated here. */
PetscErrorCode DMLocalToGlobalEnd_DA(DM da, Vec l, InsertMode mode, Vec g)
{
  DM_DA *dd = (DM_DA *)da->data; /* DMDA-private data; holds the gtol scatter context */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da, DM_CLASSID, 1);
  PetscValidHeaderSpecific(l, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(g, VEC_CLASSID, 4);
  if (mode == ADD_VALUES) {
    PetscCall(VecScatterEnd(dd->gtol, l, g, ADD_VALUES, SCATTER_REVERSE));
  } else if (mode == INSERT_VALUES) {
    /* local-only reverse scatter: copies owned entries, skips off-process communication */
    PetscCall(VecScatterEnd(dd->gtol, l, g, INSERT_VALUES, SCATTER_REVERSE_LOCAL));
  } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented");
  PetscFunctionReturn(0);
}
65 
66 extern PetscErrorCode DMDAGetNatural_Private(DM, PetscInt *, IS *);
67 /*
68    DMDAGlobalToNatural_Create - Create the global to natural scatter object
69 
70    Collective on da
71 
72    Input Parameter:
73 .  da - the distributed array context
74 
75    Level: developer
76 
77    Notes:
78     This is an internal routine called by DMDAGlobalToNatural() to
79      create the scatter context.
80 
81 .seealso: `DMDAGlobalToNaturalBegin()`, `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
82           `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
83 */
84 PetscErrorCode DMDAGlobalToNatural_Create(DM da)
85 {
86   PetscInt m, start, Nlocal;
87   IS       from, to;
88   Vec      global;
89   DM_DA   *dd = (DM_DA *)da->data;
90 
91   PetscFunctionBegin;
92   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
93   PetscCheck(dd->natural, PetscObjectComm((PetscObject)da), PETSC_ERR_ORDER, "Natural layout vector not yet created; cannot scatter into it");
94 
95   /* create the scatter context */
96   PetscCall(VecGetLocalSize(dd->natural, &m));
97   PetscCall(VecGetOwnershipRange(dd->natural, &start, NULL));
98 
99   PetscCall(DMDAGetNatural_Private(da, &Nlocal, &to));
100   PetscCheck(Nlocal == m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Internal error: Nlocal %" PetscInt_FMT " local vector size %" PetscInt_FMT, Nlocal, m);
101   PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da), m, start, 1, &from));
102   PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)da), dd->w, dd->Nlocal, PETSC_DETERMINE, NULL, &global));
103   PetscCall(VecScatterCreate(global, from, dd->natural, to, &dd->gton));
104   PetscCall(VecDestroy(&global));
105   PetscCall(ISDestroy(&from));
106   PetscCall(ISDestroy(&to));
107   PetscFunctionReturn(0);
108 }
109 
110 /*@
111    DMDAGlobalToNaturalBegin - Maps values from the global vector to a global vector
112    in the "natural" grid ordering. Must be followed by
113    DMDAGlobalToNaturalEnd() to complete the exchange.
114 
115    Neighbor-wise Collective on da
116 
117    Input Parameters:
118 +  da - the distributed array context
119 .  g - the global vector
120 -  mode - one of INSERT_VALUES or ADD_VALUES
121 
122    Output Parameter:
.  n - the natural ordering values
124 
125    Level: advanced
126 
127    Notes:
128    The global and natural vectors used here need not be the same as those
129    obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
130    must have the same parallel data layout; they could, for example, be
131    obtained with VecDuplicate() from the DMDA originating vectors.
132 
133    You must call DMDACreateNaturalVector() before using this routine
134 
135 .seealso: `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
136           `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
137 
138 @*/
139 PetscErrorCode DMDAGlobalToNaturalBegin(DM da, Vec g, InsertMode mode, Vec n)
140 {
141   DM_DA *dd = (DM_DA *)da->data;
142 
143   PetscFunctionBegin;
144   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
145   PetscValidHeaderSpecific(g, VEC_CLASSID, 2);
146   PetscValidHeaderSpecific(n, VEC_CLASSID, 4);
147   if (!dd->gton) {
148     /* create the scatter context */
149     PetscCall(DMDAGlobalToNatural_Create(da));
150   }
151   PetscCall(VecScatterBegin(dd->gton, g, n, mode, SCATTER_FORWARD));
152   PetscFunctionReturn(0);
153 }
154 
155 /*@
156    DMDAGlobalToNaturalEnd - Maps values from the global vector to a global vector
157    in the natural ordering. Must be preceded by DMDAGlobalToNaturalBegin().
158 
159    Neighbor-wise Collective on da
160 
161    Input Parameters:
162 +  da - the distributed array context
163 .  g - the global vector
164 -  mode - one of INSERT_VALUES or ADD_VALUES
165 
166    Output Parameter:
.  n - the global values in the natural ordering
168 
169    Level: advanced
170 
171    Notes:
172    The global and local vectors used here need not be the same as those
173    obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
174    must have the same parallel data layout; they could, for example, be
175    obtained with VecDuplicate() from the DMDA originating vectors.
176 
177 .seealso: `DMDAGlobalToNaturalBegin()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
178           `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
179 
180 @*/
PetscErrorCode DMDAGlobalToNaturalEnd(DM da, Vec g, InsertMode mode, Vec n)
{
  DM_DA *dd = (DM_DA *)da->data; /* dd->gton was created by DMDAGlobalToNaturalBegin() */

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidHeaderSpecific(g, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(n, VEC_CLASSID, 4);
  /* complete the forward (DMDA -> natural ordering) scatter started in Begin */
  PetscCall(VecScatterEnd(dd->gton, g, n, mode, SCATTER_FORWARD));
  PetscFunctionReturn(0);
}
192 
193 /*@
194    DMDANaturalToGlobalBegin - Maps values from a global vector in the "natural" ordering
195    to a global vector in the PETSc DMDA grid ordering. Must be followed by
196    DMDANaturalToGlobalEnd() to complete the exchange.
197 
198    Neighbor-wise Collective on da
199 
200    Input Parameters:
201 +  da - the distributed array context
.  n - the global vector in a natural ordering
203 -  mode - one of INSERT_VALUES or ADD_VALUES
204 
205    Output Parameter:
.  g - the values in the DMDA ordering
207 
208    Level: advanced
209 
210    Notes:
211    The global and natural vectors used here need not be the same as those
212    obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
213    must have the same parallel data layout; they could, for example, be
214    obtained with VecDuplicate() from the DMDA originating vectors.
215 
216 .seealso: `DMDAGlobalToNaturalEnd()`, `DMDAGlobalToNaturalBegin()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
217           `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
218 
219 @*/
220 PetscErrorCode DMDANaturalToGlobalBegin(DM da, Vec n, InsertMode mode, Vec g)
221 {
222   DM_DA *dd = (DM_DA *)da->data;
223 
224   PetscFunctionBegin;
225   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
226   PetscValidHeaderSpecific(n, VEC_CLASSID, 2);
227   PetscValidHeaderSpecific(g, VEC_CLASSID, 4);
228   if (!dd->gton) {
229     /* create the scatter context */
230     PetscCall(DMDAGlobalToNatural_Create(da));
231   }
232   PetscCall(VecScatterBegin(dd->gton, n, g, mode, SCATTER_REVERSE));
233   PetscFunctionReturn(0);
234 }
235 
236 /*@
237    DMDANaturalToGlobalEnd - Maps values from the natural ordering global vector
238    to a global vector in the PETSc DMDA ordering. Must be preceded by DMDANaturalToGlobalBegin().
239 
240    Neighbor-wise Collective on da
241 
242    Input Parameters:
243 +  da - the distributed array context
.  n - the global vector in a natural ordering
245 -  mode - one of INSERT_VALUES or ADD_VALUES
246 
247    Output Parameter:
.  g - the global values in the PETSc DMDA ordering
249 
250    Level: advanced
251 
252    Notes:
253    The global and local vectors used here need not be the same as those
254    obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
255    must have the same parallel data layout; they could, for example, be
256    obtained with VecDuplicate() from the DMDA originating vectors.
257 
258 .seealso: `DMDAGlobalToNaturalBegin()`, `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
259           `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
260 
261 @*/
PetscErrorCode DMDANaturalToGlobalEnd(DM da, Vec n, InsertMode mode, Vec g)
{
  DM_DA *dd = (DM_DA *)da->data; /* dd->gton was created by DMDANaturalToGlobalBegin() */

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidHeaderSpecific(n, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(g, VEC_CLASSID, 4);
  /* complete the reverse (natural -> DMDA ordering) scatter started in Begin */
  PetscCall(VecScatterEnd(dd->gton, n, g, mode, SCATTER_REVERSE));
  PetscFunctionReturn(0);
}
273