xref: /petsc/src/dm/impls/da/da1.c (revision a16fd2c93c02146fccd68469496ac02ca99b9ebe)
1 
2 /*
3    Code for manipulating distributed regular 1d arrays in parallel.
4    This file was created by Peter Mell   6/30/95
5 */
6 
7 #include <petsc/private/dmdaimpl.h> /*I  "petscdmda.h"   I*/
8 
9 #include <petscdraw.h>
/*
  DMView_DA_1d - Viewer routine for a one-dimensional DMDA.

  Dispatches on the viewer type:
    - ASCII:  PETSC_VIEWER_LOAD_BALANCE prints min/avg/max of the per-rank
              owned point counts; the deprecated VTK formats and GLVis are
              forwarded to helper viewers; any other ASCII format prints each
              rank's sizes and owned index range.
    - Draw:   rank 0 first draws the full global node line, then every rank
              outlines its owned sub-interval in red and labels the nodes
              with their global point indices.
    - GLVis / binary / MATLAB (when PETSC_HAVE_MATLAB_ENGINE): forwarded to
      the dedicated DMView_DA_* helpers.

  Collective: uses MPI_Allgather, synchronized ASCII printing, and collective
  draw sections, so all ranks of the DM's communicator must participate.
*/
static PetscErrorCode DMView_DA_1d(DM da, PetscViewer viewer) {
  PetscMPIInt rank;
  PetscBool   iascii, isdraw, isglvis, isbinary;
  DM_DA      *dd = (DM_DA *)da->data;
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscBool ismatlab;
#endif

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)da), &rank));

  /* Identify which of the supported viewer implementations we were handed */
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERGLVIS, &isglvis));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERMATLAB, &ismatlab));
#endif
  if (iascii) {
    PetscViewerFormat format;

    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_LOAD_BALANCE) {
      /* Summarize how evenly the grid points are distributed across ranks */
      PetscInt      i, nmax = 0, nmin = PETSC_MAX_INT, navg = 0, *nz, nzlocal;
      DMDALocalInfo info;
      PetscMPIInt   size;
      PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)da), &size));
      PetscCall(DMDAGetLocalInfo(da, &info));
      nzlocal = info.xm; /* number of grid points owned by this rank */
      PetscCall(PetscMalloc1(size, &nz));
      /* nz[i] = number of points owned by rank i */
      PetscCallMPI(MPI_Allgather(&nzlocal, 1, MPIU_INT, nz, 1, MPIU_INT, PetscObjectComm((PetscObject)da)));
      for (i = 0; i < (PetscInt)size; i++) {
        nmax = PetscMax(nmax, nz[i]);
        nmin = PetscMin(nmin, nz[i]);
        navg += nz[i];
      }
      PetscCall(PetscFree(nz));
      navg = navg / size; /* integer average is adequate for a summary line */
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Load Balance - Grid Points: Min %" PetscInt_FMT "  avg %" PetscInt_FMT "  max %" PetscInt_FMT "\n", nmin, navg, nmax));
      PetscFunctionReturn(0);
    }
    if (format != PETSC_VIEWER_ASCII_VTK_DEPRECATED && format != PETSC_VIEWER_ASCII_VTK_CELL_DEPRECATED && format != PETSC_VIEWER_ASCII_GLVIS) {
      /* Default ASCII output: per-rank sizes and owned index range */
      DMDALocalInfo info;
      PetscCall(DMDAGetLocalInfo(da, &info));
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Processor [%d] M %" PetscInt_FMT " m %" PetscInt_FMT " w %" PetscInt_FMT " s %" PetscInt_FMT "\n", rank, dd->M, dd->m, dd->w, dd->s));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "X range of indices: %" PetscInt_FMT " %" PetscInt_FMT "\n", info.xs, info.xs + info.xm));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    } else if (format == PETSC_VIEWER_ASCII_GLVIS) PetscCall(DMView_DA_GLVis(da, viewer));
    else PetscCall(DMView_DA_VTK(da, viewer));
  } else if (isdraw) {
    PetscDraw draw;
    double    ymin = -1, ymax = 1, xmin = -1, xmax = dd->M, x;
    PetscInt  base;
    char      node[10];
    PetscBool isnull;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawIsNull(draw, &isnull));
    if (isnull) PetscFunctionReturn(0); /* nothing to draw (null device) */

    PetscCall(PetscDrawCheckResizedWindow(draw));
    PetscCall(PetscDrawClear(draw));
    PetscCall(PetscDrawSetCoordinates(draw, xmin, ymin, xmax, ymax));

    PetscDrawCollectiveBegin(draw);
    /* first processor draws all node lines */
    if (rank == 0) {
      PetscInt xmin_tmp;
      ymin = 0.0;
      ymax = 0.3;
      for (xmin_tmp = 0; xmin_tmp < dd->M; xmin_tmp++) PetscCall(PetscDrawLine(draw, (double)xmin_tmp, ymin, (double)xmin_tmp, ymax, PETSC_DRAW_BLACK));
      xmin = 0.0;
      xmax = dd->M - 1;
      PetscCall(PetscDrawLine(draw, xmin, ymin, xmax, ymin, PETSC_DRAW_BLACK));
      PetscCall(PetscDrawLine(draw, xmin, ymax, xmax, ymax, PETSC_DRAW_BLACK));
    }
    PetscDrawCollectiveEnd(draw);
    PetscCall(PetscDrawFlush(draw));
    PetscCall(PetscDrawPause(draw));

    PetscDrawCollectiveBegin(draw);
    /* draw my box */
    ymin = 0;
    ymax = 0.3;
    /* dd->xs/dd->xe are stored in dof units (see DMSetUp_DA_1D); divide by w
       to recover grid-point coordinates */
    xmin = dd->xs / dd->w;
    xmax = (dd->xe / dd->w) - 1;
    PetscCall(PetscDrawLine(draw, xmin, ymin, xmax, ymin, PETSC_DRAW_RED));
    PetscCall(PetscDrawLine(draw, xmin, ymin, xmin, ymax, PETSC_DRAW_RED));
    PetscCall(PetscDrawLine(draw, xmin, ymax, xmax, ymax, PETSC_DRAW_RED));
    PetscCall(PetscDrawLine(draw, xmax, ymin, xmax, ymax, PETSC_DRAW_RED));
    /* Put in index numbers */
    base = dd->base / dd->w; /* global index of this rank's first owned point */
    for (x = xmin; x <= xmax; x++) {
      PetscCall(PetscSNPrintf(node, sizeof(node), "%d", (int)base++));
      PetscCall(PetscDrawString(draw, x, ymin, PETSC_DRAW_RED, node));
    }
    PetscDrawCollectiveEnd(draw);
    PetscCall(PetscDrawFlush(draw));
    PetscCall(PetscDrawPause(draw));
    PetscCall(PetscDrawSave(draw));
  } else if (isglvis) {
    PetscCall(DMView_DA_GLVis(da, viewer));
  } else if (isbinary) {
    PetscCall(DMView_DA_Binary(da, viewer));
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  } else if (ismatlab) {
    PetscCall(DMView_DA_Matlab(da, viewer));
#endif
  }
  PetscFunctionReturn(0);
}
123 
/*
  DMSetUp_DA_1D - Completes setup of a one-dimensional DMDA: determines each
  rank's ownership range, creates template global (non-ghosted) and local
  (ghosted) vector layouts, the global-to-local scatter, and the
  local-to-global index mapping used by VecSetValuesLocal().

  Index naming used throughout (all counted in grid points, not dof):
    xs, xe, x  - first owned point, one past the last owned point, owned count
    Xs, Xe     - the ghosted region actually stored locally; for periodic or
                 mirror boundaries these extend past [0, M)
    IXs, IXe   - the sub-region that the global-to-local scatter fills; for
                 non-periodic boundaries it is clipped to [0, M]
*/
PetscErrorCode DMSetUp_DA_1D(DM da) {
  DM_DA          *dd    = (DM_DA *)da->data;
  const PetscInt  M     = dd->M;
  const PetscInt  dof   = dd->w;
  const PetscInt  s     = dd->s;
  const PetscInt  sDist = s; /* stencil distance in points */
  const PetscInt *lx    = dd->lx;
  DMBoundaryType  bx    = dd->bx;
  MPI_Comm        comm;
  Vec             local, global;
  VecScatter      gtol;
  IS              to, from;
  PetscBool       flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
  PetscMPIInt     rank, size;
  PetscInt        i, *idx, nn, left, xs, xe, x, Xs, Xe, start, m, IXs, IXe;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  /* a 1d DMDA is laid out as a size x 1 x 1 process grid */
  dd->p = 1;
  dd->n = 1;
  dd->m = size;
  m     = dd->m;

  if (s > 0) {
    /* if not communicating data then should be ok to have nothing on some processes */
    PetscCheck(M >= m, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "More processes than data points! %" PetscInt_FMT " %" PetscInt_FMT, m, M);
    PetscCheck((M - 1) >= s || size <= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Array is too small for stencil! %" PetscInt_FMT " %" PetscInt_FMT, M - 1, s);
  }

  /*
     Determine locally owned region
     xs is the first local node number, x is the number of local nodes
  */
  if (!lx) {
    /* No user-provided partition: pick one of three built-in distributions */
    PetscCall(PetscMalloc1(m, &dd->lx));
    PetscCall(PetscOptionsGetBool(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_partition_blockcomm", &flg1, NULL));
    PetscCall(PetscOptionsGetBool(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_partition_nodes_at_end", &flg2, NULL));
    if (flg1) { /* Block Comm type Distribution */
      xs = rank * M / m;
      x  = (rank + 1) * M / m - xs;
    } else if (flg2) { /* The odd nodes are evenly distributed across last nodes */
      x = (M + rank) / m;
      if (M / m == x) xs = rank * x;
      else xs = rank * (x - 1) + (M + rank) % (x * m);
    } else { /* The odd nodes are evenly distributed across the first k nodes */
      /* Regular PETSc Distribution */
      x = M / m + ((M % m) > rank);
      if (rank >= (M % m)) xs = (rank * (PetscInt)(M / m) + M % m);
      else xs = rank * (PetscInt)(M / m) + rank;
    }
    /* gather every rank's starting index, then convert in place to per-rank counts */
    PetscCallMPI(MPI_Allgather(&xs, 1, MPIU_INT, dd->lx, 1, MPIU_INT, comm));
    for (i = 0; i < m - 1; i++) dd->lx[i] = dd->lx[i + 1] - dd->lx[i];
    dd->lx[m - 1] = M - dd->lx[m - 1];
  } else {
    x  = lx[rank];
    xs = 0;
    for (i = 0; i < rank; i++) xs += lx[i];
    /* verify that data user provided is consistent */
    left = xs;
    for (i = rank; i < size; i++) left += lx[i];
    PetscCheck(left == M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Sum of lx across processors not equal to M %" PetscInt_FMT " %" PetscInt_FMT, left, M);
  }

  /*
   check if the scatter requires more than one process neighbor or wraps around
   the domain more than once
  */
  PetscCheck((x >= s) || ((M <= 1) && (bx != DM_BOUNDARY_PERIODIC)), PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local x-width of domain x %" PetscInt_FMT " is smaller than stencil width s %" PetscInt_FMT, x, s);

  xe = xs + x;

  /* determine ghost region (Xs) and region scattered into (IXs)  */
  if (xs - sDist > 0) {
    Xs  = xs - sDist;
    IXs = xs - sDist;
  } else {
    /* at the left physical boundary: keep ghost slots only when ghosted (bx nonzero) */
    if (bx) Xs = xs - sDist;
    else Xs = 0;
    IXs = 0;
  }
  if (xe + sDist <= M) {
    Xe  = xe + sDist;
    IXe = xe + sDist;
  } else {
    /* at the right physical boundary: analogous clipping */
    if (bx) Xe = xe + sDist;
    else Xe = M;
    IXe = M;
  }

  if (bx == DM_BOUNDARY_PERIODIC || bx == DM_BOUNDARY_MIRROR) {
    /* periodic/mirror: the full stencil is always filled, wrapping as needed */
    Xs  = xs - sDist;
    Xe  = xe + sDist;
    IXs = xs - sDist;
    IXe = xe + sDist;
  }

  /* allocate the base parallel and sequential vectors */
  dd->Nlocal = dof * x;
  PetscCall(VecCreateMPIWithArray(comm, dof, dd->Nlocal, PETSC_DECIDE, NULL, &global));
  dd->nlocal = dof * (Xe - Xs);
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, dof, dd->nlocal, NULL, &local));

  PetscCall(VecGetOwnershipRange(global, &start, NULL));

  /* Create Global to Local Vector Scatter Context */
  /* global to local must retrieve ghost points */
  /* destination: a stride of local positions, skipping dof*(IXs - Xs) unfilled
     left-ghost slots at the start of the local vector */
  PetscCall(ISCreateStride(comm, dof * (IXe - IXs), dof * (IXs - Xs), 1, &to));

  PetscCall(PetscMalloc1(x + 2 * sDist, &idx));
  PetscCall(PetscLogObjectMemory((PetscObject)da, (x + 2 * (sDist)) * sizeof(PetscInt)));

  for (i = 0; i < IXs - Xs; i++) idx[i] = -1; /* prepend with -1s if needed for ghosted case*/

  /* fill idx with the global point index behind each local (ghosted) slot */
  nn = IXs - Xs;
  if (bx == DM_BOUNDARY_PERIODIC) { /* Handle all cases with periodic first */
    for (i = 0; i < sDist; i++) {   /* Left ghost points */
      if ((xs - sDist + i) >= 0) idx[nn++] = xs - sDist + i;
      else idx[nn++] = M + (xs - sDist + i); /* wrap around to the right end */
    }

    for (i = 0; i < x; i++) idx[nn++] = xs + i; /* Non-ghost points */

    for (i = 0; i < sDist; i++) { /* Right ghost points */
      if ((xe + i) < M) idx[nn++] = xe + i;
      else idx[nn++] = (xe + i) - M; /* wrap around to the left end */
    }
  } else if (bx == DM_BOUNDARY_MIRROR) { /* Handle all cases with periodic first */
    for (i = 0; i < (sDist); i++) {      /* Left ghost points */
      if ((xs - sDist + i) >= 0) idx[nn++] = xs - sDist + i;
      else idx[nn++] = sDist - i; /* reflect across the left boundary */
    }

    for (i = 0; i < x; i++) idx[nn++] = xs + i; /* Non-ghost points */

    for (i = 0; i < (sDist); i++) { /* Right ghost points */
      if ((xe + i) < M) idx[nn++] = xe + i;
      else idx[nn++] = M - (i + 2); /* reflect across the right boundary */
    }
  } else { /* Now do all cases with no periodicity */
    if (0 <= xs - sDist) {
      for (i = 0; i < sDist; i++) idx[nn++] = xs - sDist + i;
    } else {
      for (i = 0; i < xs; i++) idx[nn++] = i; /* truncated left stencil near boundary */
    }

    for (i = 0; i < x; i++) idx[nn++] = xs + i;

    if ((xe + sDist) <= M) {
      for (i = 0; i < sDist; i++) idx[nn++] = xe + i;
    } else {
      for (i = xe; i < M; i++) idx[nn++] = i; /* truncated right stencil near boundary */
    }
  }

  /* 'from' only borrows idx (PETSC_USE_POINTER); ownership of idx is handed
     to the ISLocalToGlobalMapping created below with PETSC_OWN_POINTER */
  PetscCall(ISCreateBlock(comm, dof, nn - IXs + Xs, &idx[IXs - Xs], PETSC_USE_POINTER, &from));
  PetscCall(VecScatterCreate(global, from, local, to, &gtol));
  PetscCall(PetscLogObjectParent((PetscObject)da, (PetscObject)gtol));
  PetscCall(ISDestroy(&to));
  PetscCall(ISDestroy(&from));
  /* the template vectors were only needed to build the scatter */
  PetscCall(VecDestroy(&local));
  PetscCall(VecDestroy(&global));

  /* record the ownership/ghost ranges in dof units; y and z are degenerate in 1d */
  dd->xs = dof * xs;
  dd->xe = dof * xe;
  dd->ys = 0;
  dd->ye = 1;
  dd->zs = 0;
  dd->ze = 1;
  dd->Xs = dof * Xs;
  dd->Xe = dof * Xe;
  dd->Ys = 0;
  dd->Ye = 1;
  dd->Zs = 0;
  dd->Ze = 1;

  dd->gtol      = gtol;
  dd->base      = dof * xs;
  da->ops->view = DMView_DA_1d;

  /*
     Set the local to global ordering in the global vector, this allows use
     of VecSetValuesLocal().
  */
  for (i = 0; i < Xe - IXe; i++) idx[nn++] = -1; /* pad with -1s if needed for ghosted case*/

  PetscCall(ISLocalToGlobalMappingCreate(comm, dof, nn, idx, PETSC_OWN_POINTER, &da->ltogmap));
  PetscCall(PetscLogObjectParent((PetscObject)da, (PetscObject)da->ltogmap));

  PetscFunctionReturn(0);
}
317 
318 /*@C
319    DMDACreate1d - Creates an object that will manage the communication of  one-dimensional
320    regular array data that is distributed across some processors.
321 
322    Collective
323 
324    Input Parameters:
325 +  comm - MPI communicator
.  bx - type of ghost cells at the boundary the array should have, if any. Use
          DM_BOUNDARY_NONE, DM_BOUNDARY_GHOSTED, DM_BOUNDARY_MIRROR, or DM_BOUNDARY_PERIODIC.
.  M - global dimension of the array (that is the number of grid points);
            can be set from the command line with -da_grid_x <M>
330 .  dof - number of degrees of freedom per node
331 .  s - stencil width
332 -  lx - array containing number of nodes in the X direction on each processor,
        or NULL. If non-null, must be of length equal to the number of processes in the MPI_Comm.
334         The sum of these entries must equal M
335 
336    Output Parameter:
337 .  da - the resulting distributed array object
338 
   Options Database Keys:
340 +  -dm_view - Calls DMView() at the conclusion of DMDACreate1d()
341 .  -da_grid_x <nx> - number of grid points in x direction
342 .  -da_refine_x <rx> - refinement factor
343 -  -da_refine <n> - refine the DMDA n times before creating it
344 
345    Level: beginner
346 
347    Notes:
348    The array data itself is NOT stored in the DMDA, it is stored in Vec objects;
349    The appropriate vector objects can be obtained with calls to DMCreateGlobalVector()
350    and DMCreateLocalVector() and calls to VecDuplicate() if more are needed.
351 
352    You must call DMSetUp() after this call before using this DM.
353 
354    If you wish to use the options database to change values in the DMDA call DMSetFromOptions() after this call
355    but before DMSetUp().
356 
357 .seealso: `DMDestroy()`, `DMView()`, `DMDACreate2d()`, `DMDACreate3d()`, `DMGlobalToLocalBegin()`, `DMDASetRefinementFactor()`,
358           `DMGlobalToLocalEnd()`, `DMLocalToGlobalBegin()`, `DMLocalToLocalBegin()`, `DMLocalToLocalEnd()`, `DMDAGetRefinementFactor()`,
359           `DMDAGetInfo()`, `DMCreateGlobalVector()`, `DMCreateLocalVector()`, `DMDACreateNaturalVector()`, `DMLoad()`, `DMDAGetOwnershipRanges()`,
360           `DMStagCreate1d()`
361 
362 @*/
363 PetscErrorCode DMDACreate1d(MPI_Comm comm, DMBoundaryType bx, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DM *da) {
364   PetscMPIInt size;
365 
366   PetscFunctionBegin;
367   PetscCall(DMDACreate(comm, da));
368   PetscCall(DMSetDimension(*da, 1));
369   PetscCall(DMDASetSizes(*da, M, 1, 1));
370   PetscCallMPI(MPI_Comm_size(comm, &size));
371   PetscCall(DMDASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE));
372   PetscCall(DMDASetBoundaryType(*da, bx, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE));
373   PetscCall(DMDASetDof(*da, dof));
374   PetscCall(DMDASetStencilWidth(*da, s));
375   PetscCall(DMDASetOwnershipRanges(*da, lx, NULL, NULL));
376   PetscFunctionReturn(0);
377 }
378