/*
   Code for manipulating distributed regular 1d arrays in parallel.
   This file was created by Peter Mell   6/30/95
*/

#include <petsc/private/dmdaimpl.h> /*I  "petscdmda.h"   I*/

#include <petscdraw.h>
static PetscErrorCode DMView_DA_1d(DM da, PetscViewer viewer)
{
  PetscMPIInt rank;
  PetscBool   isascii, isdraw, isglvis, isbinary;
  DM_DA      *dd = (DM_DA *)da->data;
#if defined(PETSC_HAVE_MATLAB)
  PetscBool ismatlab;
#endif

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)da), &rank));

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERGLVIS, &isglvis));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
#if defined(PETSC_HAVE_MATLAB)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERMATLAB, &ismatlab));
#endif
  if (isascii) {
    PetscViewerFormat format;

    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_LOAD_BALANCE) {
      PetscInt      i, nmax = 0, nmin = PETSC_INT_MAX, navg = 0, *nz, nzlocal;
      DMDALocalInfo info;
      PetscMPIInt   size;
      PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)da), &size));
      PetscCall(DMDAGetLocalInfo(da, &info));
      nzlocal = info.xm;
      PetscCall(PetscMalloc1(size, &nz));
      PetscCallMPI(MPI_Allgather(&nzlocal, 1, MPIU_INT, nz, 1, MPIU_INT, PetscObjectComm((PetscObject)da)));
      for (i = 0; i < size; i++) {
        nmax = PetscMax(nmax, nz[i]);
        nmin = PetscMin(nmin, nz[i]);
        navg += nz[i];
      }
      PetscCall(PetscFree(nz));
      navg = navg / size;
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Load Balance - Grid Points: Min %" PetscInt_FMT "  avg %" PetscInt_FMT "  max %" PetscInt_FMT "\n", nmin, navg, nmax));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    if (format != PETSC_VIEWER_ASCII_GLVIS) {
      DMDALocalInfo info;
      PetscCall(DMDAGetLocalInfo(da, &info));
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Processor [%d] M %" PetscInt_FMT " m %" PetscInt_FMT " w %" PetscInt_FMT " s %" PetscInt_FMT "\n", rank, dd->M, dd->m, dd->w, dd->s));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "X range of indices: %" PetscInt_FMT " %" PetscInt_FMT "\n", info.xs, info.xs + info.xm));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    } else if (format == PETSC_VIEWER_ASCII_GLVIS) PetscCall(DMView_DA_GLVis(da, viewer));
  } else if (isdraw) {
    PetscDraw draw;
    double    ymin = -1, ymax = 1, xmin = -1, xmax = dd->M, x;
    PetscInt  base;
    char      node[10];
    PetscBool isnull;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawIsNull(draw, &isnull));
    if (isnull) PetscFunctionReturn(PETSC_SUCCESS);

    PetscCall(PetscDrawCheckResizedWindow(draw));
    PetscCall(PetscDrawClear(draw));
    PetscCall(PetscDrawSetCoordinates(draw, xmin, ymin, xmax, ymax));

    PetscDrawCollectiveBegin(draw);
    /* first processor draws all node lines */
    if (rank == 0) {
      PetscInt xmin_tmp;
      ymin = 0.0;
      ymax = 0.3;
      for (xmin_tmp = 0; xmin_tmp < dd->M; xmin_tmp++) PetscCall(PetscDrawLine(draw, (double)xmin_tmp, ymin, (double)xmin_tmp, ymax, PETSC_DRAW_BLACK));
      xmin = 0.0;
      xmax = dd->M - 1;
      PetscCall(PetscDrawLine(draw, xmin, ymin, xmax, ymin, PETSC_DRAW_BLACK));
      PetscCall(PetscDrawLine(draw, xmin, ymax, xmax, ymax, PETSC_DRAW_BLACK));
    }
    PetscDrawCollectiveEnd(draw);
    PetscCall(PetscDrawFlush(draw));
    PetscCall(PetscDrawPause(draw));

    PetscDrawCollectiveBegin(draw);
    /* draw my box */
    ymin = 0;
    ymax = 0.3;
    xmin = dd->xs / dd->w;
    xmax = (dd->xe / dd->w) - 1;
    PetscCall(PetscDrawLine(draw, xmin, ymin, xmax, ymin, PETSC_DRAW_RED));
    PetscCall(PetscDrawLine(draw, xmin, ymin, xmin, ymax, PETSC_DRAW_RED));
    PetscCall(PetscDrawLine(draw, xmin, ymax, xmax, ymax, PETSC_DRAW_RED));
    PetscCall(PetscDrawLine(draw, xmax, ymin, xmax, ymax, PETSC_DRAW_RED));
    /* Put in index numbers */
    base = dd->base / dd->w;
    for (x = xmin; x <= xmax; x++) {
      PetscCall(PetscSNPrintf(node, sizeof(node), "%" PetscInt_FMT, base++));
      PetscCall(PetscDrawString(draw, x, ymin, PETSC_DRAW_RED, node));
    }
    PetscDrawCollectiveEnd(draw);
    PetscCall(PetscDrawFlush(draw));
    PetscCall(PetscDrawPause(draw));
    PetscCall(PetscDrawSave(draw));
  } else if (isglvis) {
    PetscCall(DMView_DA_GLVis(da, viewer));
  } else if (isbinary) {
    PetscCall(DMView_DA_Binary(da, viewer));
#if defined(PETSC_HAVE_MATLAB)
  } else if (ismatlab) {
    PetscCall(DMView_DA_Matlab(da, viewer));
#endif
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMSetUp_DA_1D(DM da)
{
  DM_DA          *dd    = (DM_DA *)da->data;
  const PetscInt  M     = dd->M;
  const PetscInt  dof   = dd->w;
  const PetscInt  s     = dd->s;
  const PetscInt  sDist = s; /* stencil distance in points */
  const PetscInt *lx    = dd->lx;
  DMBoundaryType  bx    = dd->bx;
  MPI_Comm        comm;
  Vec             local, global;
  VecScatter      gtol;
  IS              to, from;
  PetscBool       flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
  PetscMPIInt     rank, size;
  PetscInt        i, *idx, nn, left, xs, xe, x, Xs, Xe, start, m, IXs, IXe;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  dd->p = 1;
  dd->n = 1;
  dd->m = size;
  m     = dd->m;

  if (s > 0) {
    /* if not communicating data then should be ok to have nothing on some processes */
    PetscCheck(M >= m, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "More processes than data points! %" PetscInt_FMT " %" PetscInt_FMT, m, M);
    PetscCheck((M - 1) >= s || size <= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Array is too small for stencil! %" PetscInt_FMT " %" PetscInt_FMT, M - 1, s);
  }

  /*
     Determine locally owned region
     xs is the first local node number, x is the number of local nodes
  */
  if (!lx) {
    PetscCall(PetscMalloc1(m, &dd->lx));
    PetscCall(PetscOptionsGetBool(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_partition_blockcomm", &flg1, NULL));
    PetscCall(PetscOptionsGetBool(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_partition_nodes_at_end", &flg2, NULL));
    if (flg1) { /* Block Comm type Distribution */
      xs = rank * M / m;
      x  = (rank + 1) * M / m - xs;
    } else if (flg2) { /* The odd nodes are evenly distributed across last nodes */
      x = (M + rank) / m;
      if (M / m == x) xs = rank * x;
      else xs = rank * (x - 1) + (M + rank) % (x * m);
    } else { /* The odd nodes are evenly distributed across the first k nodes */
      /* Regular PETSc Distribution */
      x = M / m + ((M % m) > rank);
      if (rank >= (M % m)) xs = (rank * (M / m) + M % m);
      else xs = rank * (M / m) + rank;
    }
    PetscCallMPI(MPI_Allgather(&xs, 1, MPIU_INT, dd->lx, 1, MPIU_INT, comm));
    for (i = 0; i < m - 1; i++) dd->lx[i] = dd->lx[i + 1] - dd->lx[i];
    dd->lx[m - 1] = M - dd->lx[m - 1];
  } else {
    x  = lx[rank];
    xs = 0;
    for (i = 0; i < rank; i++) xs += lx[i];
    /* verify that data user provided is consistent */
    left = xs;
    for (i = rank; i < size; i++) left += lx[i];
    PetscCheck(left == M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Sum of lx across processors not equal to M %" PetscInt_FMT " %" PetscInt_FMT, left, M);
  }
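  /* For illustration (a sketch of the default distribution above, assuming no
     -da_partition_* option and no user-provided lx): with M = 10 grid points on
     m = 3 processes, M/m = 3 and M%m = 1, so rank 0 owns x = 4 points starting
     at xs = 0 while ranks 1 and 2 own x = 3 points starting at xs = 4 and xs = 7 */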

  /*
   check if the scatter requires more than one process neighbor or wraps around
   the domain more than once
  */
  PetscCheck((x >= s) || ((M <= 1) && (bx != DM_BOUNDARY_PERIODIC)), PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local x-width of domain x %" PetscInt_FMT " is smaller than stencil width s %" PetscInt_FMT, x, s);

  xe = xs + x;

  /* determine ghost region (Xs) and region scattered into (IXs)  */
  if (xs - sDist > 0) {
    Xs  = xs - sDist;
    IXs = xs - sDist;
  } else {
    if (bx) Xs = xs - sDist;
    else Xs = 0;
    IXs = 0;
  }
  if (xe + sDist <= M) {
    Xe  = xe + sDist;
    IXe = xe + sDist;
  } else {
    if (bx) Xe = xe + sDist;
    else Xe = M;
    IXe = M;
  }

  if (bx == DM_BOUNDARY_PERIODIC || bx == DM_BOUNDARY_MIRROR) {
    Xs  = xs - sDist;
    Xe  = xe + sDist;
    IXs = xs - sDist;
    IXe = xe + sDist;
  }
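  /* For illustration (M = 10, s = sDist = 1): a rank owning xs = 4 .. xe = 7 gets
     Xs = IXs = 3 and Xe = IXe = 8.  Rank 0 with xs = 0 gets Xs = IXs = 0 for
     DM_BOUNDARY_NONE; any other boundary type reserves a ghost slot below the
     domain (Xs = -1), and for DM_BOUNDARY_PERIODIC or DM_BOUNDARY_MIRROR the
     override above also sets IXs = -1 because that slot is filled by the scatter
     from a wrapped or reflected interior point */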

  /* allocate the base parallel and sequential vectors */
  dd->Nlocal = dof * x;
  PetscCall(VecCreateMPIWithArray(comm, dof, dd->Nlocal, PETSC_DECIDE, NULL, &global));
  dd->nlocal = dof * (Xe - Xs);
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, dof, dd->nlocal, NULL, &local));

  PetscCall(VecGetOwnershipRange(global, &start, NULL));

  /* Create Global to Local Vector Scatter Context */
  /* global to local must retrieve ghost points */
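  /* "to" below indexes the destination (ghosted local) vector: dof*(IXe-IXs)
     consecutive entries starting at local position dof*(IXs-Xs), i.e. skipping any
     leading ghost slots that the scatter never fills (slots outside a non-periodic
     physical boundary) */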
  PetscCall(ISCreateStride(comm, dof * (IXe - IXs), dof * (IXs - Xs), 1, &to));

  PetscCall(PetscMalloc1(x + 2 * sDist, &idx));

  for (i = 0; i < IXs - Xs; i++) idx[i] = -1; /* prepend with -1s if needed for ghosted case */

  nn = IXs - Xs;
  if (bx == DM_BOUNDARY_PERIODIC) { /* Handle all cases with periodic first */
    for (i = 0; i < sDist; i++) {   /* Left ghost points */
      if ((xs - sDist + i) >= 0) idx[nn++] = xs - sDist + i;
      else idx[nn++] = M + (xs - sDist + i);
    }

    for (i = 0; i < x; i++) idx[nn++] = xs + i; /* Non-ghost points */

    for (i = 0; i < sDist; i++) { /* Right ghost points */
      if ((xe + i) < M) idx[nn++] = xe + i;
      else idx[nn++] = (xe + i) - M;
    }
  } else if (bx == DM_BOUNDARY_MIRROR) { /* Handle the mirror-boundary case */
    for (i = 0; i < (sDist); i++) {      /* Left ghost points */
      if ((xs - sDist + i) >= 0) idx[nn++] = xs - sDist + i;
      else idx[nn++] = sDist - i;
    }

    for (i = 0; i < x; i++) idx[nn++] = xs + i; /* Non-ghost points */

    for (i = 0; i < (sDist); i++) { /* Right ghost points */
      if ((xe + i) < M) idx[nn++] = xe + i;
      else idx[nn++] = M - (i + 2);
    }
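    /* i.e. the ghost point below the domain at -k is filled from interior point k,
       and the ghost above the domain at (M-1)+k from point (M-1)-k: a reflection
       about the first and last grid points */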
  } else { /* Now do all cases with no periodicity */
    if (0 <= xs - sDist) {
      for (i = 0; i < sDist; i++) idx[nn++] = xs - sDist + i;
    } else {
      for (i = 0; i < xs; i++) idx[nn++] = i;
    }

    for (i = 0; i < x; i++) idx[nn++] = xs + i;

    if ((xe + sDist) <= M) {
      for (i = 0; i < sDist; i++) idx[nn++] = xe + i;
    } else {
      for (i = xe; i < M; i++) idx[nn++] = i;
    }
  }

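  /* "from" below indexes the source (global) vector by global point numbers, blocked
     by dof; the leading IXs-Xs entries of idx (the -1 placeholders) are skipped */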
  PetscCall(ISCreateBlock(comm, dof, nn - IXs + Xs, &idx[IXs - Xs], PETSC_USE_POINTER, &from));
  PetscCall(VecScatterCreate(global, from, local, to, &gtol));
  PetscCall(ISDestroy(&to));
  PetscCall(ISDestroy(&from));
  PetscCall(VecDestroy(&local));
  PetscCall(VecDestroy(&global));

  dd->xs = dof * xs;
  dd->xe = dof * xe;
  dd->ys = 0;
  dd->ye = 1;
  dd->zs = 0;
  dd->ze = 1;
  dd->Xs = dof * Xs;
  dd->Xe = dof * Xe;
  dd->Ys = 0;
  dd->Ye = 1;
  dd->Zs = 0;
  dd->Ze = 1;

  dd->gtol      = gtol;
  dd->base      = dof * xs;
  da->ops->view = DMView_DA_1d;

  /*
     Set the local to global ordering in the global vector; this allows the use
     of VecSetValuesLocal().
  */
  for (i = 0; i < Xe - IXe; i++) idx[nn++] = -1; /* pad with -1s if needed for ghosted case */

  PetscCall(ISLocalToGlobalMappingCreate(comm, dof, nn, idx, PETSC_OWN_POINTER, &da->ltogmap));
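  /* For example (a sketch): on a vector obtained from DMCreateGlobalVector(da, &g),
     a call such as VecSetValuesLocal(g, 1, &li, &v, INSERT_VALUES), with li an
     illustrative local (ghosted) index and v a value, is translated through this
     mapping to the corresponding global vector entry */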
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMDACreate1d - Creates an object that will manage the communication of one-dimensional
  regular array data that is distributed across one or more MPI processes.

  Collective

  Input Parameters:
+ comm - MPI communicator
. bx   - type of ghost cells at the boundary the array should have, if any. Use
         `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, or `DM_BOUNDARY_PERIODIC`.
. M    - global dimension of the array (that is the number of grid points)
. dof  - number of degrees of freedom per node
. s    - stencil width
- lx   - array containing the number of nodes in the X direction on each processor,
         or `NULL`. If non-null, it must have a length equal to the number of processes in `comm`.
         The sum of these entries must equal `M`.

  Output Parameter:
. da - the resulting distributed array object

  Options Database Keys:
+ -dm_view          - Calls `DMView()` at the conclusion of `DMDACreate1d()`
. -da_grid_x <nx>   - number of grid points in x direction
. -da_refine_x <rx> - refinement factor
- -da_refine <n>    - refine the `DMDA` n times before creating it

  Level: beginner

  Notes:
  The array data itself is NOT stored in the `DMDA`; it is stored in `Vec` objects.
  The appropriate vector objects can be obtained with calls to `DMCreateGlobalVector()`
  and `DMCreateLocalVector()` and calls to `VecDuplicate()` if more are needed.

  You must call `DMSetUp()` after this call before using this `DM`.

  If you wish to use the options database to change values in the `DMDA` call `DMSetFromOptions()` after this call
  but before `DMSetUp()`.

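  Example:
  A minimal sketch of typical usage; the grid size, boundary type, number of degrees of freedom, and stencil width below are illustrative only.
.vb
  DM  da;
  Vec g;

  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, 10, 1, 1, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMCreateGlobalVector(da, &g));
  /* ... fill and use g ... */
  PetscCall(VecDestroy(&g));
  PetscCall(DMDestroy(&da));
.ve
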
.seealso: [](sec_struct), `DMDA`, `DM`, `DMDestroy()`, `DMView()`, `DMDACreate2d()`, `DMDACreate3d()`, `DMGlobalToLocalBegin()`, `DMDASetRefinementFactor()`,
          `DMGlobalToLocalEnd()`, `DMLocalToGlobalBegin()`, `DMLocalToLocalBegin()`, `DMLocalToLocalEnd()`, `DMDAGetRefinementFactor()`,
          `DMDAGetInfo()`, `DMCreateGlobalVector()`, `DMCreateLocalVector()`, `DMDACreateNaturalVector()`, `DMLoad()`, `DMDAGetOwnershipRanges()`,
          `DMStagCreate1d()`, `DMBoundaryType`
@*/
PetscErrorCode DMDACreate1d(MPI_Comm comm, DMBoundaryType bx, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DM *da)
{
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCall(DMDACreate(comm, da));
  PetscCall(DMSetDimension(*da, 1));
  PetscCall(DMDASetSizes(*da, M, 1, 1));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMDASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE));
  PetscCall(DMDASetBoundaryType(*da, bx, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE));
  PetscCall(DMDASetDof(*da, dof));
  PetscCall(DMDASetStencilWidth(*da, s));
  PetscCall(DMDASetOwnershipRanges(*da, lx, NULL, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}