xref: /petsc/src/dm/impls/da/dagtol.c (revision 21e3ffae2f3b73c0bd738cf6d0a809700fc04bb0)
1 /*
2   Code for manipulating distributed regular arrays in parallel.
3 */
4 
5 #include <petsc/private/dmdaimpl.h> /*I   "petscdmda.h"   I*/
6 
7 PetscErrorCode DMGlobalToLocalBegin_DA(DM da, Vec g, InsertMode mode, Vec l)
8 {
9   DM_DA *dd = (DM_DA *)da->data;
10 
11   PetscFunctionBegin;
12   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
13   PetscValidHeaderSpecific(g, VEC_CLASSID, 2);
14   PetscValidHeaderSpecific(l, VEC_CLASSID, 4);
15   PetscCall(VecScatterBegin(dd->gtol, g, l, mode, SCATTER_FORWARD));
16   PetscFunctionReturn(PETSC_SUCCESS);
17 }
18 
19 PetscErrorCode DMGlobalToLocalEnd_DA(DM da, Vec g, InsertMode mode, Vec l)
20 {
21   DM_DA *dd = (DM_DA *)da->data;
22 
23   PetscFunctionBegin;
24   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
25   PetscValidHeaderSpecific(g, VEC_CLASSID, 2);
26   PetscValidHeaderSpecific(l, VEC_CLASSID, 4);
27   PetscCall(VecScatterEnd(dd->gtol, g, l, mode, SCATTER_FORWARD));
28   PetscFunctionReturn(PETSC_SUCCESS);
29 }
30 
31 PetscErrorCode DMLocalToGlobalBegin_DA(DM da, Vec l, InsertMode mode, Vec g)
32 {
33   DM_DA *dd = (DM_DA *)da->data;
34 
35   PetscFunctionBegin;
36   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
37   PetscValidHeaderSpecific(l, VEC_CLASSID, 2);
38   PetscValidHeaderSpecific(g, VEC_CLASSID, 4);
39   if (mode == ADD_VALUES) {
40     PetscCall(VecScatterBegin(dd->gtol, l, g, ADD_VALUES, SCATTER_REVERSE));
41   } else if (mode == INSERT_VALUES) {
42     PetscCheck(dd->bx == DM_BOUNDARY_GHOSTED || dd->bx == DM_BOUNDARY_NONE || dd->s <= 0 || dd->m != 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Available only for boundary none or with parallelism in x direction");
43     PetscCheck(dd->bx == DM_BOUNDARY_GHOSTED || dd->by == DM_BOUNDARY_NONE || dd->s <= 0 || dd->n != 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Available only for boundary none or with parallelism in y direction");
44     PetscCheck(dd->bx == DM_BOUNDARY_GHOSTED || dd->bz == DM_BOUNDARY_NONE || dd->s <= 0 || dd->p != 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Available only for boundary none or with parallelism in z direction");
45     PetscCall(VecScatterBegin(dd->gtol, l, g, INSERT_VALUES, SCATTER_REVERSE_LOCAL));
46   } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented");
47   PetscFunctionReturn(PETSC_SUCCESS);
48 }
49 
50 PetscErrorCode DMLocalToGlobalEnd_DA(DM da, Vec l, InsertMode mode, Vec g)
51 {
52   DM_DA *dd = (DM_DA *)da->data;
53 
54   PetscFunctionBegin;
55   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
56   PetscValidHeaderSpecific(l, VEC_CLASSID, 2);
57   PetscValidHeaderSpecific(g, VEC_CLASSID, 4);
58   if (mode == ADD_VALUES) {
59     PetscCall(VecScatterEnd(dd->gtol, l, g, ADD_VALUES, SCATTER_REVERSE));
60   } else if (mode == INSERT_VALUES) {
61     PetscCall(VecScatterEnd(dd->gtol, l, g, INSERT_VALUES, SCATTER_REVERSE_LOCAL));
62   } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented");
63   PetscFunctionReturn(PETSC_SUCCESS);
64 }
65 
66 extern PetscErrorCode DMDAGetNatural_Private(DM, PetscInt *, IS *);
67 /*
68    DMDAGlobalToNatural_Create - Create the global to natural scatter object
69 
70    Collective on da
71 
72    Input Parameter:
73 .  da - the distributed array context
74 
75    Level: developer
76 
77    Note:
78     This is an internal routine called by `DMDAGlobalToNatural()` to
79      create the scatter context.
80 
81 .seealso: `DM`, `DMDA`, `DMDAGlobalToNaturalBegin()`, `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
82           `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
83 */
84 PetscErrorCode DMDAGlobalToNatural_Create(DM da)
85 {
86   PetscInt m, start, Nlocal;
87   IS       from, to;
88   Vec      global;
89   DM_DA   *dd = (DM_DA *)da->data;
90 
91   PetscFunctionBegin;
92   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
93   PetscCheck(dd->natural, PetscObjectComm((PetscObject)da), PETSC_ERR_ORDER, "Natural layout vector not yet created; cannot scatter into it");
94 
95   /* create the scatter context */
96   PetscCall(VecGetLocalSize(dd->natural, &m));
97   PetscCall(VecGetOwnershipRange(dd->natural, &start, NULL));
98 
99   PetscCall(DMDAGetNatural_Private(da, &Nlocal, &to));
100   PetscCheck(Nlocal == m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Internal error: Nlocal %" PetscInt_FMT " local vector size %" PetscInt_FMT, Nlocal, m);
101   PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da), m, start, 1, &from));
102   PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)da), dd->w, dd->Nlocal, PETSC_DETERMINE, NULL, &global));
103   PetscCall(VecScatterCreate(global, from, dd->natural, to, &dd->gton));
104   PetscCall(VecDestroy(&global));
105   PetscCall(ISDestroy(&from));
106   PetscCall(ISDestroy(&to));
107   PetscFunctionReturn(PETSC_SUCCESS);
108 }
109 
110 /*@
111    DMDAGlobalToNaturalBegin - Maps values from the global vector to a global vector
112    in the "natural" grid ordering. Must be followed by
113    `DMDAGlobalToNaturalEnd()` to complete the exchange.
114 
115    Neighbor-wise Collective on da
116 
117    Input Parameters:
118 +  da - the distributed array context
119 .  g - the global vector
120 -  mode - one of `INSERT_VALUES` or `ADD_VALUES`
121 
122    Output Parameter:
.  n - the natural ordering values
124 
125    Level: advanced
126 
127    Notes:
128    The global and natural vectors used here need not be the same as those
129    obtained from `DMCreateGlobalVector()` and `DMDACreateNaturalVector()`, BUT they
130    must have the same parallel data layout; they could, for example, be
131    obtained with `VecDuplicate()` from the `DMDA` originating vectors.
132 
133    You must call `DMDACreateNaturalVector()` before using this routine
134 
135 .seealso: `DM`, `DMDA`, `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
136           `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
137 @*/
138 PetscErrorCode DMDAGlobalToNaturalBegin(DM da, Vec g, InsertMode mode, Vec n)
139 {
140   DM_DA *dd = (DM_DA *)da->data;
141 
142   PetscFunctionBegin;
143   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
144   PetscValidHeaderSpecific(g, VEC_CLASSID, 2);
145   PetscValidHeaderSpecific(n, VEC_CLASSID, 4);
146   if (!dd->gton) {
147     /* create the scatter context */
148     PetscCall(DMDAGlobalToNatural_Create(da));
149   }
150   PetscCall(VecScatterBegin(dd->gton, g, n, mode, SCATTER_FORWARD));
151   PetscFunctionReturn(PETSC_SUCCESS);
152 }
153 
154 /*@
155    DMDAGlobalToNaturalEnd - Maps values from the global vector to a global vector
156    in the natural ordering. Must be preceded by `DMDAGlobalToNaturalBegin()`.
157 
158    Neighbor-wise Collective on da
159 
160    Input Parameters:
161 +  da - the distributed array context
162 .  g - the global vector
163 -  mode - one of `INSERT_VALUES` or `ADD_VALUES`
164 
165    Output Parameter:
.  n - the global values in the natural ordering
167 
168    Level: advanced
169 
170    Notes:
171    The global and local vectors used here need not be the same as those
172    obtained from `DMCreateGlobalVector()` and `DMDACreateNaturalVector()`, BUT they
173    must have the same parallel data layout; they could, for example, be
   obtained with `VecDuplicate()` from the `DMDA` originating vectors.
175 
176 .seealso: `DM`, `DMDA`, `DMDAGlobalToNaturalBegin()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
177           `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
178 @*/
179 PetscErrorCode DMDAGlobalToNaturalEnd(DM da, Vec g, InsertMode mode, Vec n)
180 {
181   DM_DA *dd = (DM_DA *)da->data;
182 
183   PetscFunctionBegin;
184   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
185   PetscValidHeaderSpecific(g, VEC_CLASSID, 2);
186   PetscValidHeaderSpecific(n, VEC_CLASSID, 4);
187   PetscCall(VecScatterEnd(dd->gton, g, n, mode, SCATTER_FORWARD));
188   PetscFunctionReturn(PETSC_SUCCESS);
189 }
190 
191 /*@
192    DMDANaturalToGlobalBegin - Maps values from a global vector in the "natural" ordering
193    to a global vector in the PETSc `DMDA` grid ordering. Must be followed by
194    `DMDANaturalToGlobalEnd()` to complete the exchange.
195 
196    Neighbor-wise Collective on da
197 
198    Input Parameters:
199 +  da - the distributed array context
.  n - the global vector in a natural ordering
201 -  mode - one of `INSERT_VALUES` or `ADD_VALUES`
202 
203    Output Parameter:
.  g - the values in the `DMDA` ordering
205 
206    Level: advanced
207 
208    Notes:
209    The global and natural vectors used here need not be the same as those
210    obtained from `DMCreateGlobalVector()` and `DMDACreateNaturalVector()`, BUT they
211    must have the same parallel data layout; they could, for example, be
212    obtained with `VecDuplicate()` from the `DMDA` originating vectors.
213 
214 .seealso: `DM`, `DMDA`, `DMDAGlobalToNaturalEnd()`, `DMDAGlobalToNaturalBegin()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
215           `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
216 @*/
217 PetscErrorCode DMDANaturalToGlobalBegin(DM da, Vec n, InsertMode mode, Vec g)
218 {
219   DM_DA *dd = (DM_DA *)da->data;
220 
221   PetscFunctionBegin;
222   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
223   PetscValidHeaderSpecific(n, VEC_CLASSID, 2);
224   PetscValidHeaderSpecific(g, VEC_CLASSID, 4);
225   if (!dd->gton) {
226     /* create the scatter context */
227     PetscCall(DMDAGlobalToNatural_Create(da));
228   }
229   PetscCall(VecScatterBegin(dd->gton, n, g, mode, SCATTER_REVERSE));
230   PetscFunctionReturn(PETSC_SUCCESS);
231 }
232 
233 /*@
234    DMDANaturalToGlobalEnd - Maps values from the natural ordering global vector
235    to a global vector in the PETSc `DMDA` ordering. Must be preceded by `DMDANaturalToGlobalBegin()`.
236 
237    Neighbor-wise Collective on da
238 
239    Input Parameters:
240 +  da - the distributed array context
.  n - the global vector in a natural ordering
242 -  mode - one of `INSERT_VALUES` or `ADD_VALUES`
243 
244    Output Parameter:
.  g - the global values in the PETSc `DMDA` ordering
246 
247    Level: advanced
248 
249    Notes:
250    The global and local vectors used here need not be the same as those
251    obtained from `DMCreateGlobalVector()` and `DMDACreateNaturalVector()`, BUT they
252    must have the same parallel data layout; they could, for example, be
253    obtained with `VecDuplicate()` from the `DMDA` originating vectors.
254 
255 .seealso: `DM`, `DMDA`, `DMDAGlobalToNaturalBegin()`, `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
256           `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
257 @*/
258 PetscErrorCode DMDANaturalToGlobalEnd(DM da, Vec n, InsertMode mode, Vec g)
259 {
260   DM_DA *dd = (DM_DA *)da->data;
261 
262   PetscFunctionBegin;
263   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
264   PetscValidHeaderSpecific(n, VEC_CLASSID, 2);
265   PetscValidHeaderSpecific(g, VEC_CLASSID, 4);
266   PetscCall(VecScatterEnd(dd->gton, n, g, mode, SCATTER_REVERSE));
267   PetscFunctionReturn(PETSC_SUCCESS);
268 }
269