
/*
   Code for manipulating distributed regular 1d arrays in parallel.
   This file was created by Peter Mell   6/30/95
*/

#include <petsc/private/dmdaimpl.h> /*I  "petscdmda.h"   I*/

#include <petscdraw.h>
static PetscErrorCode DMView_DA_1d(DM da, PetscViewer viewer)
{
  PetscMPIInt rank;
  PetscBool   iascii, isdraw, isglvis, isbinary;
  DM_DA      *dd = (DM_DA *)da->data;
#if defined(PETSC_HAVE_MATLAB)
  PetscBool ismatlab;
#endif

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)da), &rank));

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERGLVIS, &isglvis));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
#if defined(PETSC_HAVE_MATLAB)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERMATLAB, &ismatlab));
#endif
  if (iascii) {
    PetscViewerFormat format;

    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_LOAD_BALANCE) {
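      /* gather every rank's local point count and report min/avg/max, a quick way to spot load imbalance */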
      PetscInt      i, nmax = 0, nmin = PETSC_MAX_INT, navg = 0, *nz, nzlocal;
      DMDALocalInfo info;
      PetscMPIInt   size;
      PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)da), &size));
      PetscCall(DMDAGetLocalInfo(da, &info));
      nzlocal = info.xm;
      PetscCall(PetscMalloc1(size, &nz));
      PetscCallMPI(MPI_Allgather(&nzlocal, 1, MPIU_INT, nz, 1, MPIU_INT, PetscObjectComm((PetscObject)da)));
      for (i = 0; i < (PetscInt)size; i++) {
        nmax = PetscMax(nmax, nz[i]);
        nmin = PetscMin(nmin, nz[i]);
        navg += nz[i];
      }
      PetscCall(PetscFree(nz));
      navg = navg / size;
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Load Balance - Grid Points: Min %" PetscInt_FMT "  avg %" PetscInt_FMT "  max %" PetscInt_FMT "\n", nmin, navg, nmax));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    if (format != PETSC_VIEWER_ASCII_VTK_DEPRECATED && format != PETSC_VIEWER_ASCII_VTK_CELL_DEPRECATED && format != PETSC_VIEWER_ASCII_GLVIS) {
      DMDALocalInfo info;
      PetscCall(DMDAGetLocalInfo(da, &info));
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Processor [%d] M %" PetscInt_FMT " m %" PetscInt_FMT " w %" PetscInt_FMT " s %" PetscInt_FMT "\n", rank, dd->M, dd->m, dd->w, dd->s));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "X range of indices: %" PetscInt_FMT " %" PetscInt_FMT "\n", info.xs, info.xs + info.xm));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    } else if (format == PETSC_VIEWER_ASCII_GLVIS) PetscCall(DMView_DA_GLVis(da, viewer));
    else PetscCall(DMView_DA_VTK(da, viewer));
  } else if (isdraw) {
    PetscDraw draw;
    double    ymin = -1, ymax = 1, xmin = -1, xmax = dd->M, x;
    PetscInt  base;
    char      node[10];
    PetscBool isnull;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawIsNull(draw, &isnull));
    if (isnull) PetscFunctionReturn(PETSC_SUCCESS);

    PetscCall(PetscDrawCheckResizedWindow(draw));
    PetscCall(PetscDrawClear(draw));
    PetscCall(PetscDrawSetCoordinates(draw, xmin, ymin, xmax, ymax));

    PetscDrawCollectiveBegin(draw);
    /* first processor draws all node lines */
    if (rank == 0) {
      PetscInt xmin_tmp;
      ymin = 0.0;
      ymax = 0.3;
      for (xmin_tmp = 0; xmin_tmp < dd->M; xmin_tmp++) PetscCall(PetscDrawLine(draw, (double)xmin_tmp, ymin, (double)xmin_tmp, ymax, PETSC_DRAW_BLACK));
      xmin = 0.0;
      xmax = dd->M - 1;
      PetscCall(PetscDrawLine(draw, xmin, ymin, xmax, ymin, PETSC_DRAW_BLACK));
      PetscCall(PetscDrawLine(draw, xmin, ymax, xmax, ymax, PETSC_DRAW_BLACK));
    }
    PetscDrawCollectiveEnd(draw);
    PetscCall(PetscDrawFlush(draw));
    PetscCall(PetscDrawPause(draw));

    PetscDrawCollectiveBegin(draw);
    /* draw my box */
    ymin = 0;
    ymax = 0.3;
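    /* dd->xs and dd->xe are stored in dof units; divide by w (the dof) to recover node indices */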
    xmin = dd->xs / dd->w;
    xmax = (dd->xe / dd->w) - 1;
    PetscCall(PetscDrawLine(draw, xmin, ymin, xmax, ymin, PETSC_DRAW_RED));
    PetscCall(PetscDrawLine(draw, xmin, ymin, xmin, ymax, PETSC_DRAW_RED));
    PetscCall(PetscDrawLine(draw, xmin, ymax, xmax, ymax, PETSC_DRAW_RED));
    PetscCall(PetscDrawLine(draw, xmax, ymin, xmax, ymax, PETSC_DRAW_RED));
    /* Put in index numbers */
    base = dd->base / dd->w;
    for (x = xmin; x <= xmax; x++) {
      PetscCall(PetscSNPrintf(node, sizeof(node), "%d", (int)base++));
      PetscCall(PetscDrawString(draw, x, ymin, PETSC_DRAW_RED, node));
    }
    PetscDrawCollectiveEnd(draw);
    PetscCall(PetscDrawFlush(draw));
    PetscCall(PetscDrawPause(draw));
    PetscCall(PetscDrawSave(draw));
  } else if (isglvis) {
    PetscCall(DMView_DA_GLVis(da, viewer));
  } else if (isbinary) {
    PetscCall(DMView_DA_Binary(da, viewer));
#if defined(PETSC_HAVE_MATLAB)
  } else if (ismatlab) {
    PetscCall(DMView_DA_Matlab(da, viewer));
#endif
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMSetUp_DA_1D(DM da)
{
  DM_DA          *dd    = (DM_DA *)da->data;
  const PetscInt  M     = dd->M;
  const PetscInt  dof   = dd->w;
  const PetscInt  s     = dd->s;
  const PetscInt  sDist = s; /* stencil distance in points */
  const PetscInt *lx    = dd->lx;
  DMBoundaryType  bx    = dd->bx;
  MPI_Comm        comm;
  Vec             local, global;
  VecScatter      gtol;
  IS              to, from;
  PetscBool       flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
  PetscMPIInt     rank, size;
  PetscInt        i, *idx, nn, left, xs, xe, x, Xs, Xe, start, m, IXs, IXe;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  dd->p = 1;
  dd->n = 1;
  dd->m = size;
  m     = dd->m;

  if (s > 0) {
    /* when no data is communicated it is OK for some processes to own nothing, so these checks apply only when s > 0 */
    PetscCheck(M >= m, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "More processes than data points! %" PetscInt_FMT " %" PetscInt_FMT, m, M);
    PetscCheck((M - 1) >= s || size <= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Array is too small for stencil! %" PetscInt_FMT " %" PetscInt_FMT, M - 1, s);
  }

  /*
     Determine locally owned region
     xs is the first local node number, x is the number of local nodes
  */
  if (!lx) {
    PetscCall(PetscMalloc1(m, &dd->lx));
    PetscCall(PetscOptionsGetBool(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_partition_blockcomm", &flg1, NULL));
    PetscCall(PetscOptionsGetBool(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_partition_nodes_at_end", &flg2, NULL));
    if (flg1) { /* Block Comm type Distribution */
      xs = rank * M / m;
      x  = (rank + 1) * M / m - xs;
    } else if (flg2) { /* the remainder nodes are distributed across the last processes */
      x = (M + rank) / m;
      if (M / m == x) xs = rank * x;
      else xs = rank * (x - 1) + (M + rank) % (x * m);
    } else { /* the remainder nodes are distributed across the first M % m processes */
      /* Regular PETSc Distribution */
      x = M / m + ((M % m) > rank);
      if (rank >= (M % m)) xs = (rank * (PetscInt)(M / m) + M % m);
      else xs = rank * (PetscInt)(M / m) + rank;
    }
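    /* e.g. default distribution with M = 10, m = 3: local sizes x = 4,3,3 and starts xs = 0,4,7 */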
    PetscCallMPI(MPI_Allgather(&xs, 1, MPIU_INT, dd->lx, 1, MPIU_INT, comm));
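    /* dd->lx now holds every rank's starting index; difference against the next start to convert it to per-rank counts */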
    for (i = 0; i < m - 1; i++) dd->lx[i] = dd->lx[i + 1] - dd->lx[i];
    dd->lx[m - 1] = M - dd->lx[m - 1];
  } else {
    x  = lx[rank];
    xs = 0;
    for (i = 0; i < rank; i++) xs += lx[i];
    /* verify that the user-provided data is consistent */
    left = xs;
    for (i = rank; i < size; i++) left += lx[i];
    PetscCheck(left == M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Sum of lx across processors not equal to M %" PetscInt_FMT " %" PetscInt_FMT, left, M);
  }

  /*
   check that the scatter does not require more than one process neighbor and does not wrap around
   the domain more than once
  */
  PetscCheck((x >= s) || ((M <= 1) && (bx != DM_BOUNDARY_PERIODIC)), PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local x-width of domain x %" PetscInt_FMT " is smaller than stencil width s %" PetscInt_FMT, x, s);

  xe = xs + x;

  /* determine ghost region (Xs) and region scattered into (IXs) */
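  /*
     Xs,Xe delimit the allocated local (ghosted) region, while IXs,IXe delimit the part that the
     global-to-local scatter actually fills; the two differ only for DM_BOUNDARY_GHOSTED, where
     ghost slots beyond the physical domain are allocated but not scattered into
  */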
  if (xs - sDist > 0) {
    Xs  = xs - sDist;
    IXs = xs - sDist;
  } else {
    if (bx) Xs = xs - sDist;
    else Xs = 0;
    IXs = 0;
  }
  if (xe + sDist <= M) {
    Xe  = xe + sDist;
    IXe = xe + sDist;
  } else {
    if (bx) Xe = xe + sDist;
    else Xe = M;
    IXe = M;
  }

  if (bx == DM_BOUNDARY_PERIODIC || bx == DM_BOUNDARY_MIRROR) {
    Xs  = xs - sDist;
    Xe  = xe + sDist;
    IXs = xs - sDist;
    IXe = xe + sDist;
  }
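  /* e.g. M = 10, s = 2, DM_BOUNDARY_NONE, rank owning [0,5): Xs = IXs = 0 and Xe = IXe = 7 */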

  /* allocate the base parallel and sequential vectors */
  dd->Nlocal = dof * x;
  PetscCall(VecCreateMPIWithArray(comm, dof, dd->Nlocal, PETSC_DECIDE, NULL, &global));
  dd->nlocal = dof * (Xe - Xs);
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, dof, dd->nlocal, NULL, &local));

  PetscCall(VecGetOwnershipRange(global, &start, NULL));

  /* Create Global to Local Vector Scatter Context */
  /* global to local must retrieve ghost points */
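  /* 'to' indexes the local (ghosted) vector: dof * (IXe - IXs) consecutive entries starting at local position dof * (IXs - Xs) */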
  PetscCall(ISCreateStride(comm, dof * (IXe - IXs), dof * (IXs - Xs), 1, &to));

  PetscCall(PetscMalloc1(x + 2 * sDist, &idx));

  for (i = 0; i < IXs - Xs; i++) idx[i] = -1; /* prepend with -1s if needed for ghosted case */

  nn = IXs - Xs;
  if (bx == DM_BOUNDARY_PERIODIC) { /* Handle all cases with periodic first */
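    /* e.g. M = 10, sDist = 2, rank with xs = 0: left ghosts at -2,-1 wrap around to global points 8,9 */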
    for (i = 0; i < sDist; i++) {   /* Left ghost points */
      if ((xs - sDist + i) >= 0) idx[nn++] = xs - sDist + i;
      else idx[nn++] = M + (xs - sDist + i);
    }

    for (i = 0; i < x; i++) idx[nn++] = xs + i; /* Non-ghost points */

    for (i = 0; i < sDist; i++) { /* Right ghost points */
      if ((xe + i) < M) idx[nn++] = xe + i;
      else idx[nn++] = (xe + i) - M;
    }
  } else if (bx == DM_BOUNDARY_MIRROR) { /* Handle the mirror boundary case */
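    /* e.g. M = 10, sDist = 2, rank with xs = 0: left ghosts at -2,-1 reflect to global points 2,1 */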
    for (i = 0; i < sDist; i++) { /* Left ghost points */
      if ((xs - sDist + i) >= 0) idx[nn++] = xs - sDist + i;
      else idx[nn++] = sDist - i;
    }

    for (i = 0; i < x; i++) idx[nn++] = xs + i; /* Non-ghost points */

    for (i = 0; i < sDist; i++) { /* Right ghost points */
      if ((xe + i) < M) idx[nn++] = xe + i;
      else idx[nn++] = M - (i + 2);
    }
  } else { /* Now do all cases with no periodicity */
    if (0 <= xs - sDist) {
      for (i = 0; i < sDist; i++) idx[nn++] = xs - sDist + i;
    } else {
      for (i = 0; i < xs; i++) idx[nn++] = i;
    }

    for (i = 0; i < x; i++) idx[nn++] = xs + i;

    if ((xe + sDist) <= M) {
      for (i = 0; i < sDist; i++) idx[nn++] = xe + i;
    } else {
      for (i = xe; i < M; i++) idx[nn++] = i;
    }
  }

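  /*
     'from' lists the global block indices to gather (skipping any -1 pre-padding); PETSC_USE_POINTER
     avoids a copy since idx is later handed to the local-to-global mapping with PETSC_OWN_POINTER
  */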
  PetscCall(ISCreateBlock(comm, dof, nn - IXs + Xs, &idx[IXs - Xs], PETSC_USE_POINTER, &from));
  PetscCall(VecScatterCreate(global, from, local, to, &gtol));
  PetscCall(ISDestroy(&to));
  PetscCall(ISDestroy(&from));
  PetscCall(VecDestroy(&local));
  PetscCall(VecDestroy(&global));

  dd->xs = dof * xs;
  dd->xe = dof * xe;
  dd->ys = 0;
  dd->ye = 1;
  dd->zs = 0;
  dd->ze = 1;
  dd->Xs = dof * Xs;
  dd->Xe = dof * Xe;
  dd->Ys = 0;
  dd->Ye = 1;
  dd->Zs = 0;
  dd->Ze = 1;

  dd->gtol      = gtol;
  dd->base      = dof * xs;
  da->ops->view = DMView_DA_1d;

  /*
     Set the local-to-global ordering in the global vector; this allows the use
     of VecSetValuesLocal().
  */
  for (i = 0; i < Xe - IXe; i++) idx[nn++] = -1; /* pad with -1s if needed for ghosted case */
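  /* a -1 entry marks a ghost slot beyond a non-periodic physical boundary that has no global counterpart */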

  PetscCall(ISLocalToGlobalMappingCreate(comm, dof, nn, idx, PETSC_OWN_POINTER, &da->ltogmap));

  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
   DMDACreate1d - Creates an object that will manage the communication of one-dimensional
   regular array data that is distributed across one or more processes.

   Collective

   Input Parameters:
+  comm - MPI communicator
.  bx - type of ghost cells at the boundary the array should have, if any. Use
          `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, or `DM_BOUNDARY_PERIODIC`.
.  M - global dimension of the array (that is, the number of grid points), which may
          also be set from the command line with -da_grid_x <M>
.  dof - number of degrees of freedom per node
.  s - stencil width
-  lx - array containing the number of nodes in the X direction on each process,
        or NULL. If non-null, it must have the same length as the number of processes in comm,
        and the sum of its entries must equal M

   Output Parameter:
.  da - the resulting distributed array object

   Options Database Keys:
+  -dm_view - Calls `DMView()` at the conclusion of `DMDACreate1d()`
.  -da_grid_x <nx> - number of grid points in x direction
.  -da_refine_x <rx> - refinement factor
-  -da_refine <n> - refine the `DMDA` n times before creating it

   Level: beginner

   Notes:
   The array data itself is NOT stored in the `DMDA`; it is stored in `Vec` objects.
   The appropriate vector objects can be obtained with calls to `DMCreateGlobalVector()`
   and `DMCreateLocalVector()`, and with calls to `VecDuplicate()` if more are needed.

   You must call `DMSetUp()` after this call and before using this `DM`.

   If you wish to use the options database to change values in the `DMDA`, call `DMSetFromOptions()` after this call
   but before `DMSetUp()`.

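   Example Usage:
   A minimal sketch of a typical creation sequence (the grid size, dof, and stencil width below are arbitrary):
.vb
   DM da;
   PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, 128, 1, 1, NULL, &da));
   PetscCall(DMSetFromOptions(da));
   PetscCall(DMSetUp(da));
   /* ... use the DMDA, e.g. DMCreateGlobalVector() ... */
   PetscCall(DMDestroy(&da));
.ve
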
.seealso: `DMDA`, `DM`, `DMDestroy()`, `DMView()`, `DMDACreate2d()`, `DMDACreate3d()`, `DMGlobalToLocalBegin()`, `DMDASetRefinementFactor()`,
          `DMGlobalToLocalEnd()`, `DMLocalToGlobalBegin()`, `DMLocalToLocalBegin()`, `DMLocalToLocalEnd()`, `DMDAGetRefinementFactor()`,
          `DMDAGetInfo()`, `DMCreateGlobalVector()`, `DMCreateLocalVector()`, `DMDACreateNaturalVector()`, `DMLoad()`, `DMDAGetOwnershipRanges()`,
          `DMStagCreate1d()`
@*/
PetscErrorCode DMDACreate1d(MPI_Comm comm, DMBoundaryType bx, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DM *da)
{
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCall(DMDACreate(comm, da));
  PetscCall(DMSetDimension(*da, 1));
  PetscCall(DMDASetSizes(*da, M, 1, 1));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMDASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE));
  PetscCall(DMDASetBoundaryType(*da, bx, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE));
  PetscCall(DMDASetDof(*da, dof));
  PetscCall(DMDASetStencilWidth(*da, s));
  PetscCall(DMDASetOwnershipRanges(*da, lx, NULL, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}