xref: /petsc/src/dm/impls/da/da1.c (revision ebead697dbf761eb322f829370bbe90b3bd93fa3) !
1 
2 /*
3    Code for manipulating distributed regular 1d arrays in parallel.
4    This file was created by Peter Mell   6/30/95
5 */
6 
7 #include <petsc/private/dmdaimpl.h>     /*I  "petscdmda.h"   I*/
8 
9 #include <petscdraw.h>
/*
   DMView_DA_1d - Implements DMView() for a 1d DMDA.

   Dispatches on the viewer type:
     - ASCII:  per-process ownership ranges; a min/avg/max load-balance
               summary for PETSC_VIEWER_LOAD_BALANCE; or defers to the
               GLVis/VTK helpers for those ASCII formats
     - Draw:   rank 0 draws the global node line, then every rank draws
               its own owned box with the global indices of its nodes
     - GLVis, Binary, MATLAB: defers to the matching DMView_DA_* helper

   Collective on da (uses MPI collectives and synchronized printing, so
   all ranks of the DMDA's communicator must call it together).
*/
static PetscErrorCode DMView_DA_1d(DM da,PetscViewer viewer)
{
  PetscMPIInt    rank;
  PetscBool      iascii,isdraw,isglvis,isbinary;
  DM_DA          *dd = (DM_DA*)da->data;  /* private DMDA data: global size M, process count m, dof w, stencil s, ownership */
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscBool ismatlab;
#endif

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)da),&rank));

  /* identify which viewer implementation we were handed */
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERGLVIS,&isglvis));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary));
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab));
#endif
  if (iascii) {
    PetscViewerFormat format;

    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_LOAD_BALANCE) {
      /* gather every rank's local grid-point count and report min/avg/max */
      PetscInt      i,nmax = 0,nmin = PETSC_MAX_INT,navg = 0,*nz,nzlocal;
      DMDALocalInfo info;
      PetscMPIInt   size;
      PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)da),&size));
      PetscCall(DMDAGetLocalInfo(da,&info));
      nzlocal = info.xm;  /* number of locally owned grid points */
      PetscCall(PetscMalloc1(size,&nz));
      PetscCallMPI(MPI_Allgather(&nzlocal,1,MPIU_INT,nz,1,MPIU_INT,PetscObjectComm((PetscObject)da)));
      for (i=0; i<(PetscInt)size; i++) {
        nmax = PetscMax(nmax,nz[i]);
        nmin = PetscMin(nmin,nz[i]);
        navg += nz[i];
      }
      PetscCall(PetscFree(nz));
      navg = navg/size;  /* integer average is sufficient for a balance report */
      PetscCall(PetscViewerASCIIPrintf(viewer,"  Load Balance - Grid Points: Min %" PetscInt_FMT "  avg %" PetscInt_FMT "  max %" PetscInt_FMT "\n",nmin,navg,nmax));
      PetscFunctionReturn(0);
    }
    if (format != PETSC_VIEWER_ASCII_VTK_DEPRECATED && format != PETSC_VIEWER_ASCII_VTK_CELL_DEPRECATED && format != PETSC_VIEWER_ASCII_GLVIS) {
      /* default ASCII output: each rank prints its sizes and owned index range in rank order */
      DMDALocalInfo info;
      PetscCall(DMDAGetLocalInfo(da,&info));
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %" PetscInt_FMT " m %" PetscInt_FMT " w %" PetscInt_FMT " s %" PetscInt_FMT "\n",rank,dd->M,dd->m,dd->w,dd->s));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %" PetscInt_FMT " %" PetscInt_FMT "\n",info.xs,info.xs+info.xm));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    } else if (format == PETSC_VIEWER_ASCII_GLVIS) PetscCall(DMView_DA_GLVis(da,viewer));
    else PetscCall(DMView_DA_VTK(da, viewer));
  } else if (isdraw) {
    PetscDraw      draw;
    double         ymin = -1,ymax = 1,xmin = -1,xmax = dd->M,x;
    PetscInt       base;
    char           node[10];
    PetscBool      isnull;

    PetscCall(PetscViewerDrawGetDraw(viewer,0,&draw));
    PetscCall(PetscDrawIsNull(draw,&isnull));
    if (isnull) PetscFunctionReturn(0);  /* null draw: nothing to render */

    PetscCall(PetscDrawCheckResizedWindow(draw));
    PetscCall(PetscDrawClear(draw));
    PetscCall(PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax));

    /* drawing inside a CollectiveBegin/End pair is serialized across ranks */
    PetscDrawCollectiveBegin(draw);
    /* first processor draws all node lines */
    if (rank == 0) {
      PetscInt xmin_tmp;
      ymin = 0.0; ymax = 0.3;
      for (xmin_tmp=0; xmin_tmp < dd->M; xmin_tmp++) {
        PetscCall(PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK));
      }
      xmin = 0.0; xmax = dd->M - 1;
      PetscCall(PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK));
      PetscCall(PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK));
    }
    PetscDrawCollectiveEnd(draw);
    PetscCall(PetscDrawFlush(draw));
    PetscCall(PetscDrawPause(draw));

    PetscDrawCollectiveBegin(draw);
    /* draw my box: owned range in grid points (dd->xs/xe are in dof units, hence / dd->w) */
    ymin = 0; ymax = 0.3; xmin = dd->xs / dd->w; xmax = (dd->xe / dd->w)  - 1;
    PetscCall(PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED));
    PetscCall(PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED));
    PetscCall(PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED));
    PetscCall(PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED));
    /* Put in index numbers: label each owned node with its global grid index */
    base = dd->base / dd->w;
    for (x=xmin; x<=xmax; x++) {
      PetscCall(PetscSNPrintf(node,sizeof(node),"%d",(int)base++));
      PetscCall(PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node));
    }
    PetscDrawCollectiveEnd(draw);
    PetscCall(PetscDrawFlush(draw));
    PetscCall(PetscDrawPause(draw));
    PetscCall(PetscDrawSave(draw));
  } else if (isglvis) {
    PetscCall(DMView_DA_GLVis(da,viewer));
  } else if (isbinary) {
    PetscCall(DMView_DA_Binary(da,viewer));
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  } else if (ismatlab) {
    PetscCall(DMView_DA_Matlab(da,viewer));
#endif
  }
  PetscFunctionReturn(0);
}
121 
/*
   DMSetUp_DA_1D - Implements DMSetUp() for a one-dimensional DMDA.

   Determines the locally owned range on each process, builds the
   global-to-local scatter that fills ghost points, and creates the
   local-to-global mapping used by VecSetValuesLocal().

   Index naming used below (all in grid points, not dof-scaled):
     xs..xe   - locally owned range (x = xe - xs points)
     Xs..Xe   - ghosted range represented in the local vector
     IXs..IXe - part of the ghosted range actually filled by the scatter;
                it differs from Xs..Xe only at physical domain ends for
                ghosted (non-periodic, non-mirror) boundaries, where the
                corresponding slots are left unfilled (indices set to -1)
*/
PetscErrorCode  DMSetUp_DA_1D(DM da)
{
  DM_DA            *dd   = (DM_DA*)da->data;
  const PetscInt   M     = dd->M;   /* global number of grid points */
  const PetscInt   dof   = dd->w;   /* degrees of freedom per grid point */
  const PetscInt   s     = dd->s;   /* stencil width */
  const PetscInt   sDist = s;  /* stencil distance in points */
  const PetscInt   *lx   = dd->lx;  /* optional user-provided ownership counts */
  DMBoundaryType   bx    = dd->bx;
  MPI_Comm         comm;
  Vec              local, global;
  VecScatter       gtol;
  IS               to, from;
  PetscBool        flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
  PetscMPIInt      rank, size;
  PetscInt         i,*idx,nn,left,xs,xe,x,Xs,Xe,start,m,IXs,IXe;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject) da, &comm));
  PetscCallMPI(MPI_Comm_size(comm,&size));
  PetscCallMPI(MPI_Comm_rank(comm,&rank));

  /* a 1d DMDA uses a degenerate process grid: size x 1 x 1 */
  dd->p = 1;
  dd->n = 1;
  dd->m = size;
  m     = dd->m;

  if (s > 0) {
    /* if not communicating data then should be ok to have nothing on some processes */
    PetscCheck(M >= m,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"More processes than data points! %" PetscInt_FMT " %" PetscInt_FMT,m,M);
    PetscCheck((M-1) >= s || size <= 1,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %" PetscInt_FMT " %" PetscInt_FMT,M-1,s);
  }

  /*
     Determine locally owned region
     xs is the first local node number, x is the number of local nodes
  */
  if (!lx) {
    /* no user-provided partition: compute one and record it in dd->lx */
    PetscCall(PetscMalloc1(m, &dd->lx));
    PetscCall(PetscOptionsGetBool(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_partition_blockcomm",&flg1,NULL));
    PetscCall(PetscOptionsGetBool(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_partition_nodes_at_end",&flg2,NULL));
    if (flg1) {      /* Block Comm type Distribution */
      xs = rank*M/m;
      x  = (rank + 1)*M/m - xs;
    } else if (flg2) { /* The odd nodes are evenly distributed across last nodes */
      x = (M + rank)/m;
      if (M/m == x) xs = rank*x;
      else          xs = rank*(x-1) + (M+rank)%(x*m);
    } else { /* The odd nodes are evenly distributed across the first k nodes */
      /* Regular PETSc Distribution */
      x = M/m + ((M % m) > rank);  /* first (M % m) ranks get one extra point */
      if (rank >= (M % m)) xs = (rank * (PetscInt)(M/m) + M % m);
      else                 xs = rank * (PetscInt)(M/m) + rank;
    }
    /* gather all starting indices, then convert differences into per-rank counts */
    PetscCallMPI(MPI_Allgather(&xs,1,MPIU_INT,dd->lx,1,MPIU_INT,comm));
    for (i=0; i<m-1; i++) dd->lx[i] = dd->lx[i+1] - dd->lx[i];
    dd->lx[m-1] = M - dd->lx[m-1];
  } else {
    x  = lx[rank];
    xs = 0;
    for (i=0; i<rank; i++) xs += lx[i];
    /* verify that data user provided is consistent */
    left = xs;
    for (i=rank; i<size; i++) left += lx[i];
    PetscCheck(left == M,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M %" PetscInt_FMT " %" PetscInt_FMT,left,M);
  }

  /*
   check if the scatter requires more than one process neighbor or wraps around
   the domain more than once
  */
  PetscCheck((x >= s) || ((M <= 1) && (bx != DM_BOUNDARY_PERIODIC)),PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %" PetscInt_FMT " is smaller than stencil width s %" PetscInt_FMT,x,s);

  xe  = xs + x;

  /* determine ghost region (Xs) and region scattered into (IXs)  */
  if (xs-sDist > 0) {
    Xs  = xs - sDist;
    IXs = xs - sDist;
  } else {
    /* at the left physical boundary: ghosted boundaries keep the ghost slots
       (Xs < 0) but nothing is scattered into them (IXs clamped to 0) */
    if (bx) Xs = xs - sDist;
    else Xs = 0;
    IXs = 0;
  }
  if (xe+sDist <= M) {
    Xe  = xe + sDist;
    IXe = xe + sDist;
  } else {
    /* at the right physical boundary: same clamping on the high side */
    if (bx) Xe = xe + sDist;
    else Xe = M;
    IXe = M;
  }

  if (bx == DM_BOUNDARY_PERIODIC || bx == DM_BOUNDARY_MIRROR) {
    /* periodic/mirror boundaries: every ghost slot is filled by the scatter */
    Xs  = xs - sDist;
    Xe  = xe + sDist;
    IXs = xs - sDist;
    IXe = xe + sDist;
  }

  /* allocate the base parallel and sequential vectors */
  dd->Nlocal = dof*x;
  PetscCall(VecCreateMPIWithArray(comm,dof,dd->Nlocal,PETSC_DECIDE,NULL,&global));
  dd->nlocal = dof*(Xe-Xs);
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,dof,dd->nlocal,NULL,&local));

  PetscCall(VecGetOwnershipRange(global,&start,NULL));

  /* Create Global to Local Vector Scatter Context */
  /* global to local must retrieve ghost points */
  PetscCall(ISCreateStride(comm,dof*(IXe-IXs),dof*(IXs-Xs),1,&to));

  PetscCall(PetscMalloc1(x+2*sDist,&idx));
  PetscCall(PetscLogObjectMemory((PetscObject)da,(x+2*(sDist))*sizeof(PetscInt)));

  for (i=0; i<IXs-Xs; i++) idx[i] = -1; /* prepend with -1s if needed for ghosted case*/

  /* build the list of global grid indices the local (ghosted) region maps to */
  nn = IXs-Xs;
  if (bx == DM_BOUNDARY_PERIODIC) { /* Handle all cases with periodic first */
    for (i=0; i<sDist; i++) {  /* Left ghost points: wrap around to the right end */
      if ((xs-sDist+i)>=0) idx[nn++] = xs-sDist+i;
      else                 idx[nn++] = M+(xs-sDist+i);
    }

    for (i=0; i<x; i++) idx [nn++] = xs + i;  /* Non-ghost points */

    for (i=0; i<sDist; i++) { /* Right ghost points: wrap around to the left end */
      if ((xe+i)<M) idx [nn++] =  xe+i;
      else          idx [nn++] = (xe+i) - M;
    }
  } else if (bx == DM_BOUNDARY_MIRROR) { /* Handle the mirror-boundary cases */
    for (i=0; i<(sDist); i++) {  /* Left ghost points: reflect about index 0 */
      if ((xs-sDist+i)>=0) idx[nn++] = xs-sDist+i;
      else                 idx[nn++] = sDist - i;
    }

    for (i=0; i<x; i++) idx [nn++] = xs + i;  /* Non-ghost points */

    for (i=0; i<(sDist); i++) { /* Right ghost points: reflect about index M-1 */
      if ((xe+i)<M) idx[nn++] =  xe+i;
      else          idx[nn++] = M - (i + 2);
    }
  } else {      /* Now do all cases with no periodicity */
    if (0 <= xs-sDist) {
      for (i=0; i<sDist; i++) idx[nn++] = xs - sDist + i;
    } else {
      for (i=0; i<xs; i++) idx[nn++] = i;  /* truncated ghost region at the left end */
    }

    for (i=0; i<x; i++) idx [nn++] = xs + i;

    if ((xe+sDist)<=M) {
      for (i=0; i<sDist; i++) idx[nn++]=xe+i;
    } else {
      for (i=xe; i<M; i++) idx[nn++]=i;  /* truncated ghost region at the right end */
    }
  }

  /* scatter source: the filled portion of idx (skipping any leading -1 padding) */
  PetscCall(ISCreateBlock(comm,dof,nn-IXs+Xs,&idx[IXs-Xs],PETSC_USE_POINTER,&from));
  PetscCall(VecScatterCreate(global,from,local,to,&gtol));
  PetscCall(PetscLogObjectParent((PetscObject)da,(PetscObject)gtol));
  PetscCall(ISDestroy(&to));
  PetscCall(ISDestroy(&from));
  PetscCall(VecDestroy(&local));
  PetscCall(VecDestroy(&global));

  /* record ranges in dof-scaled units; y and z are degenerate for 1d */
  dd->xs = dof*xs; dd->xe = dof*xe; dd->ys = 0; dd->ye = 1; dd->zs = 0; dd->ze = 1;
  dd->Xs = dof*Xs; dd->Xe = dof*Xe; dd->Ys = 0; dd->Ye = 1; dd->Zs = 0; dd->Ze = 1;

  dd->gtol      = gtol;
  dd->base      = dof*xs;
  da->ops->view = DMView_DA_1d;

  /*
     Set the local to global ordering in the global vector, this allows use
     of VecSetValuesLocal().
  */
  for (i=0; i<Xe-IXe; i++) idx[nn++] = -1; /* pad with -1s if needed for ghosted case*/

  /* idx ownership transfers to the mapping (PETSC_OWN_POINTER) */
  PetscCall(ISLocalToGlobalMappingCreate(comm,dof,nn,idx,PETSC_OWN_POINTER,&da->ltogmap));
  PetscCall(PetscLogObjectParent((PetscObject)da,(PetscObject)da->ltogmap));

  PetscFunctionReturn(0);
}
306 
307 /*@C
308    DMDACreate1d - Creates an object that will manage the communication of  one-dimensional
309    regular array data that is distributed across some processors.
310 
311    Collective
312 
313    Input Parameters:
314 +  comm - MPI communicator
.  bx - type of ghost cells at the boundary the array should have, if any. Use
          DM_BOUNDARY_NONE, DM_BOUNDARY_GHOSTED, DM_BOUNDARY_MIRROR, or DM_BOUNDARY_PERIODIC.
.  M - global dimension of the array (that is the number of grid points;
            it can also be set from the command line with -da_grid_x <M>)
319 .  dof - number of degrees of freedom per node
320 .  s - stencil width
-  lx - array containing the number of nodes in the X direction on each processor,
        or NULL. If non-null, it must have length equal to the number of processes in the MPI_Comm,
        and the sum of its entries must equal M
324 
325    Output Parameter:
326 .  da - the resulting distributed array object
327 
   Options Database Keys:
329 +  -dm_view - Calls DMView() at the conclusion of DMDACreate1d()
330 .  -da_grid_x <nx> - number of grid points in x direction
331 .  -da_refine_x <rx> - refinement factor
332 -  -da_refine <n> - refine the DMDA n times before creating it
333 
334    Level: beginner
335 
336    Notes:
337    The array data itself is NOT stored in the DMDA, it is stored in Vec objects;
338    The appropriate vector objects can be obtained with calls to DMCreateGlobalVector()
339    and DMCreateLocalVector() and calls to VecDuplicate() if more are needed.
340 
341    You must call DMSetUp() after this call before using this DM.
342 
343    If you wish to use the options database to change values in the DMDA call DMSetFromOptions() after this call
344    but before DMSetUp().
345 
346 .seealso: `DMDestroy()`, `DMView()`, `DMDACreate2d()`, `DMDACreate3d()`, `DMGlobalToLocalBegin()`, `DMDASetRefinementFactor()`,
347           `DMGlobalToLocalEnd()`, `DMLocalToGlobalBegin()`, `DMLocalToLocalBegin()`, `DMLocalToLocalEnd()`, `DMDAGetRefinementFactor()`,
348           `DMDAGetInfo()`, `DMCreateGlobalVector()`, `DMCreateLocalVector()`, `DMDACreateNaturalVector()`, `DMLoad()`, `DMDAGetOwnershipRanges()`,
349           `DMStagCreate1d()`
350 
351 @*/
352 PetscErrorCode  DMDACreate1d(MPI_Comm comm, DMBoundaryType bx, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DM *da)
353 {
354   PetscMPIInt    size;
355 
356   PetscFunctionBegin;
357   PetscCall(DMDACreate(comm, da));
358   PetscCall(DMSetDimension(*da, 1));
359   PetscCall(DMDASetSizes(*da, M, 1, 1));
360   PetscCallMPI(MPI_Comm_size(comm, &size));
361   PetscCall(DMDASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE));
362   PetscCall(DMDASetBoundaryType(*da, bx, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE));
363   PetscCall(DMDASetDof(*da, dof));
364   PetscCall(DMDASetStencilWidth(*da, s));
365   PetscCall(DMDASetOwnershipRanges(*da, lx, NULL, NULL));
366   PetscFunctionReturn(0);
367 }
368