xref: /petsc/src/dm/impls/da/da1.c (revision 2fa40bb9206b96114faa7cb222621ec184d31cd2)
1 
2 /*
3    Code for manipulating distributed regular 1d arrays in parallel.
4    This file was created by Peter Mell   6/30/95
5 */
6 
7 #include <petsc/private/dmdaimpl.h>     /*I  "petscdmda.h"   I*/
8 
9 #include <petscdraw.h>
/*
  DMView_DA_1d - Viewer routine for one-dimensional DMDA objects.

  Dispatches on the concrete viewer type: ASCII (default, load-balance,
  deprecated-VTK, and GLVis formats), draw, GLVis, binary, and - when PETSc
  was configured with the MATLAB engine - MATLAB.  Collective on da.
*/
static PetscErrorCode DMView_DA_1d(DM da,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscBool      iascii,isdraw,isglvis,isbinary;
  DM_DA          *dd = (DM_DA*)da->data;
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscBool ismatlab;
#endif

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)da),&rank);CHKERRMPI(ierr);

  /* Identify which viewer implementation we were handed */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERGLVIS,&isglvis);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);CHKERRQ(ierr);
#endif
  if (iascii) {
    PetscViewerFormat format;

    ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_LOAD_BALANCE) {
      /* Gather every rank's local grid-point count and report min/avg/max */
      PetscInt      i,nmax = 0,nmin = PETSC_MAX_INT,navg = 0,*nz,nzlocal;
      DMDALocalInfo info;
      PetscMPIInt   size;
      ierr = MPI_Comm_size(PetscObjectComm((PetscObject)da),&size);CHKERRMPI(ierr);
      ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
      nzlocal = info.xm;
      ierr = PetscMalloc1(size,&nz);CHKERRQ(ierr);
      ierr = MPI_Allgather(&nzlocal,1,MPIU_INT,nz,1,MPIU_INT,PetscObjectComm((PetscObject)da));CHKERRMPI(ierr);
      for (i=0; i<(PetscInt)size; i++) {
        nmax = PetscMax(nmax,nz[i]);
        nmin = PetscMin(nmin,nz[i]);
        navg += nz[i];
      }
      ierr = PetscFree(nz);CHKERRQ(ierr);
      navg = navg/size; /* integer average is sufficient for a balance report */
      ierr = PetscViewerASCIIPrintf(viewer,"  Load Balance - Grid Points: Min %D  avg %D  max %D\n",nmin,navg,nmax);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    if (format != PETSC_VIEWER_ASCII_VTK_DEPRECATED && format != PETSC_VIEWER_ASCII_VTK_CELL_DEPRECATED && format != PETSC_VIEWER_ASCII_GLVIS) {
      /* Default ASCII output: each rank prints its sizes and owned index range, in rank order */
      DMDALocalInfo info;
      ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D m %D w %D s %D\n",rank,dd->M,dd->m,dd->w,dd->s);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D\n",info.xs,info.xs+info.xm);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    } else if (format == PETSC_VIEWER_ASCII_GLVIS) {
      ierr = DMView_DA_GLVis(da,viewer);CHKERRQ(ierr);
    } else {
      /* remaining ASCII formats here are the deprecated VTK ones */
      ierr = DMView_DA_VTK(da, viewer);CHKERRQ(ierr);
    }
  } else if (isdraw) {
    PetscDraw draw;
    double    ymin = -1,ymax = 1,xmin = -1,xmax = dd->M,x;
    PetscInt  base;
    char      node[10]; /* buffer for one node-index label */
    PetscBool isnull;

    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr);
    if (isnull) PetscFunctionReturn(0);

    ierr = PetscDrawCheckResizedWindow(draw);CHKERRQ(ierr);
    ierr = PetscDrawClear(draw);CHKERRQ(ierr);
    ierr = PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);CHKERRQ(ierr);

    ierr = PetscDrawCollectiveBegin(draw);CHKERRQ(ierr);
    /* first processor draws all node lines */
    if (rank == 0) {
      PetscInt xmin_tmp;
      ymin = 0.0; ymax = 0.3;
      /* one vertical tick per global grid node */
      for (xmin_tmp=0; xmin_tmp < dd->M; xmin_tmp++) {
        ierr = PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr);
      }
      /* bottom and top horizontal rails spanning the whole domain */
      xmin = 0.0; xmax = dd->M - 1;
      ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);CHKERRQ(ierr);
      ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr);
    }
    ierr = PetscDrawCollectiveEnd(draw);CHKERRQ(ierr);
    ierr = PetscDrawFlush(draw);CHKERRQ(ierr);
    ierr = PetscDrawPause(draw);CHKERRQ(ierr);

    ierr = PetscDrawCollectiveBegin(draw);CHKERRQ(ierr);
    /* draw my box: dd->xs/xe are dof-scaled, so divide by dd->w for node indices */
    ymin = 0; ymax = 0.3; xmin = dd->xs / dd->w; xmax = (dd->xe / dd->w)  - 1;
    ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
    /* Put in index numbers: label each owned node with its global node index */
    base = dd->base / dd->w;
    for (x=xmin; x<=xmax; x++) {
      ierr = PetscSNPrintf(node,sizeof(node),"%d",(int)base++);CHKERRQ(ierr);
      ierr = PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node);CHKERRQ(ierr);
    }
    ierr = PetscDrawCollectiveEnd(draw);CHKERRQ(ierr);
    ierr = PetscDrawFlush(draw);CHKERRQ(ierr);
    ierr = PetscDrawPause(draw);CHKERRQ(ierr);
    ierr = PetscDrawSave(draw);CHKERRQ(ierr);
  } else if (isglvis) {
    ierr = DMView_DA_GLVis(da,viewer);CHKERRQ(ierr);
  } else if (isbinary) {
    ierr = DMView_DA_Binary(da,viewer);CHKERRQ(ierr);
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  } else if (ismatlab) {
    ierr = DMView_DA_Matlab(da,viewer);CHKERRQ(ierr);
#endif
  }
  PetscFunctionReturn(0);
}
125 
/*
  DMSetUp_DA_1D - Sets up the data structures for a one-dimensional DMDA:
  decides the ownership range of each rank, builds the global and local
  (ghosted) vector layouts, the global-to-local scatter, and the
  local-to-global index mapping.

  Collective on da.  Must be called after the sizes, dof, stencil width,
  boundary type, and (optionally) ownership ranges have been set.
*/
PetscErrorCode  DMSetUp_DA_1D(DM da)
{
  DM_DA            *dd   = (DM_DA*)da->data;
  const PetscInt   M     = dd->M;
  const PetscInt   dof   = dd->w;
  const PetscInt   s     = dd->s;
  const PetscInt   sDist = s;  /* stencil distance in points */
  const PetscInt   *lx   = dd->lx;
  DMBoundaryType   bx    = dd->bx;
  MPI_Comm         comm;
  Vec              local, global;
  VecScatter       gtol;
  IS               to, from;
  PetscBool        flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
  PetscMPIInt      rank, size;
  PetscInt         i,*idx,nn,left,xs,xe,x,Xs,Xe,start,m,IXs,IXe;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) da, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);

  /* 1d DMDA: all processes are laid out along x */
  dd->p = 1;
  dd->n = 1;
  dd->m = size;
  m     = dd->m;

  if (s > 0) {
    /* if not communicating data then should be ok to have nothing on some processes */
    if (M < m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"More processes than data points! %D %D",m,M);
    if ((M-1) < s && size > 1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %D %D",M-1,s);
  }

  /*
     Determine locally owned region
     xs is the first local node number, x is the number of local nodes
  */
  if (!lx) {
    /* No user-supplied ownership ranges: compute xs/x from one of three
       partitioning strategies, then gather all starting indices and
       convert dd->lx in place to per-rank counts */
    ierr = PetscMalloc1(m, &dd->lx);CHKERRQ(ierr);
    ierr = PetscOptionsGetBool(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_partition_blockcomm",&flg1,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsGetBool(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_partition_nodes_at_end",&flg2,NULL);CHKERRQ(ierr);
    if (flg1) {      /* Block Comm type Distribution */
      xs = rank*M/m;
      x  = (rank + 1)*M/m - xs;
    } else if (flg2) { /* The odd nodes are evenly distributed across last nodes */
      x = (M + rank)/m;
      if (M/m == x) xs = rank*x;
      else          xs = rank*(x-1) + (M+rank)%(x*m);
    } else { /* The odd nodes are evenly distributed across the first k nodes */
      /* Regular PETSc Distribution */
      x = M/m + ((M % m) > rank);
      if (rank >= (M % m)) xs = (rank * (PetscInt)(M/m) + M % m);
      else                 xs = rank * (PetscInt)(M/m) + rank;
    }
    ierr = MPI_Allgather(&xs,1,MPIU_INT,dd->lx,1,MPIU_INT,comm);CHKERRMPI(ierr);
    /* dd->lx currently holds start indices; difference them into counts */
    for (i=0; i<m-1; i++) dd->lx[i] = dd->lx[i+1] - dd->lx[i];
    dd->lx[m-1] = M - dd->lx[m-1];
  } else {
    /* User-supplied per-rank counts: derive xs by prefix sum */
    x  = lx[rank];
    xs = 0;
    for (i=0; i<rank; i++) xs += lx[i];
    /* verify that data user provided is consistent */
    left = xs;
    for (i=rank; i<size; i++) left += lx[i];
    if (left != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M %D %D",left,M);
  }

  /*
   check if the scatter requires more than one process neighbor or wraps around
   the domain more than once
  */
  if ((x < s) & ((M > 1) | (bx == DM_BOUNDARY_PERIODIC))) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %D is smaller than stencil width s %D",x,s);

  xe  = xs + x;

  /* determine ghost region (Xs) and region scattered into (IXs)  */
  if (xs-sDist > 0) {
    Xs  = xs - sDist;
    IXs = xs - sDist;
  } else {
    /* at the left physical boundary: any boundary type other than
       DM_BOUNDARY_NONE keeps room for ghost slots past the boundary,
       but the scatter only fills from node 0 onward */
    if (bx) Xs = xs - sDist;
    else Xs = 0;
    IXs = 0;
  }
  if (xe+sDist <= M) {
    Xe  = xe + sDist;
    IXe = xe + sDist;
  } else {
    /* same treatment at the right physical boundary */
    if (bx) Xe = xe + sDist;
    else Xe = M;
    IXe = M;
  }

  if (bx == DM_BOUNDARY_PERIODIC || bx == DM_BOUNDARY_MIRROR) {
    /* periodic/mirror: the full ghost width is both allocated and scattered into */
    Xs  = xs - sDist;
    Xe  = xe + sDist;
    IXs = xs - sDist;
    IXe = xe + sDist;
  }

  /* allocate the base parallel and sequential vectors */
  dd->Nlocal = dof*x;
  ierr       = VecCreateMPIWithArray(comm,dof,dd->Nlocal,PETSC_DECIDE,NULL,&global);CHKERRQ(ierr);
  dd->nlocal = dof*(Xe-Xs);
  ierr       = VecCreateSeqWithArray(PETSC_COMM_SELF,dof,dd->nlocal,NULL,&local);CHKERRQ(ierr);

  ierr = VecGetOwnershipRange(global,&start,NULL);CHKERRQ(ierr);

  /* Create Global to Local Vector Scatter Context */
  /* global to local must retrieve ghost points */
  /* 'to' indexes positions inside the ghosted local vector, offset past any
     unfilled ghost slots at the left (dof*(IXs-Xs)) */
  ierr = ISCreateStride(comm,dof*(IXe-IXs),dof*(IXs-Xs),1,&to);CHKERRQ(ierr);

  /* idx will hold the global node index of every slot of the ghosted region */
  ierr = PetscMalloc1(x+2*sDist,&idx);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)da,(x+2*(sDist))*sizeof(PetscInt));CHKERRQ(ierr);

  for (i=0; i<IXs-Xs; i++) idx[i] = -1; /* prepend with -1s if needed for ghosted case*/

  nn = IXs-Xs;
  if (bx == DM_BOUNDARY_PERIODIC) { /* Handle all cases with periodic first */
    for (i=0; i<sDist; i++) {  /* Left ghost points: wrap below 0 to the right end */
      if ((xs-sDist+i)>=0) idx[nn++] = xs-sDist+i;
      else                 idx[nn++] = M+(xs-sDist+i);
    }

    for (i=0; i<x; i++) idx [nn++] = xs + i;  /* Non-ghost points */

    for (i=0; i<sDist; i++) { /* Right ghost points: wrap past M to the left end */
      if ((xe+i)<M) idx [nn++] =  xe+i;
      else          idx [nn++] = (xe+i) - M;
    }
  } else if (bx == DM_BOUNDARY_MIRROR) { /* Handle all cases with periodic first */
    for (i=0; i<(sDist); i++) {  /* Left ghost points: reflect across the first node */
      if ((xs-sDist+i)>=0) idx[nn++] = xs-sDist+i;
      else                 idx[nn++] = sDist - i;
    }

    for (i=0; i<x; i++) idx [nn++] = xs + i;  /* Non-ghost points */

    for (i=0; i<(sDist); i++) { /* Right ghost points: reflect across the last node */
      if ((xe+i)<M) idx[nn++] =  xe+i;
      else          idx[nn++] = M - (i + 2);
    }
  } else {      /* Now do all cases with no periodicity */
    if (0 <= xs-sDist) {
      for (i=0; i<sDist; i++) idx[nn++] = xs - sDist + i;
    } else {
      /* truncated left ghost region at the physical boundary */
      for (i=0; i<xs; i++) idx[nn++] = i;
    }

    for (i=0; i<x; i++) idx [nn++] = xs + i;

    if ((xe+sDist)<=M) {
      for (i=0; i<sDist; i++) idx[nn++]=xe+i;
    } else {
      /* truncated right ghost region at the physical boundary */
      for (i=xe; i<M; i++) idx[nn++]=i;
    }
  }

  /* 'from' indexes the global vector by block (dof) at the nodes computed above;
     PETSC_USE_POINTER avoids a copy because idx outlives the IS */
  ierr = ISCreateBlock(comm,dof,nn-IXs+Xs,&idx[IXs-Xs],PETSC_USE_POINTER,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,local,to,&gtol);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)da,(PetscObject)gtol);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = VecDestroy(&local);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);

  /* record owned and ghosted ranges in dof-scaled units; y and z are trivial in 1d */
  dd->xs = dof*xs; dd->xe = dof*xe; dd->ys = 0; dd->ye = 1; dd->zs = 0; dd->ze = 1;
  dd->Xs = dof*Xs; dd->Xe = dof*Xe; dd->Ys = 0; dd->Ye = 1; dd->Zs = 0; dd->Ze = 1;

  dd->gtol      = gtol;
  dd->base      = dof*xs;
  da->ops->view = DMView_DA_1d;

  /*
     Set the local to global ordering in the global vector, this allows use
     of VecSetValuesLocal().
  */
  for (i=0; i<Xe-IXe; i++) idx[nn++] = -1; /* pad with -1s if needed for ghosted case*/

  /* the mapping takes ownership of idx (PETSC_OWN_POINTER) and frees it later */
  ierr = ISLocalToGlobalMappingCreate(comm,dof,nn,idx,PETSC_OWN_POINTER,&da->ltogmap);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)da,(PetscObject)da->ltogmap);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
311 
312 /*@C
313    DMDACreate1d - Creates an object that will manage the communication of  one-dimensional
314    regular array data that is distributed across some processors.
315 
316    Collective
317 
318    Input Parameters:
319 +  comm - MPI communicator
.  bx - type of ghost cells at the boundary the array should have, if any. Use
          DM_BOUNDARY_NONE, DM_BOUNDARY_GHOSTED, DM_BOUNDARY_MIRROR, or DM_BOUNDARY_PERIODIC.
.  M - global dimension of the array (that is the number of grid points;
            can be set from the command line with -da_grid_x <M>)
324 .  dof - number of degrees of freedom per node
325 .  s - stencil width
326 -  lx - array containing number of nodes in the X direction on each processor,
        or NULL. If non-null, it must have the same length as the number of processes in the MPI_Comm.
328         The sum of these entries must equal M
329 
330    Output Parameter:
331 .  da - the resulting distributed array object
332 
   Options Database Keys:
334 +  -dm_view - Calls DMView() at the conclusion of DMDACreate1d()
335 .  -da_grid_x <nx> - number of grid points in x direction
336 .  -da_refine_x <rx> - refinement factor
337 -  -da_refine <n> - refine the DMDA n times before creating it
338 
339    Level: beginner
340 
341    Notes:
342    The array data itself is NOT stored in the DMDA, it is stored in Vec objects;
343    The appropriate vector objects can be obtained with calls to DMCreateGlobalVector()
344    and DMCreateLocalVector() and calls to VecDuplicate() if more are needed.
345 
346    You must call DMSetUp() after this call before using this DM.
347 
348    If you wish to use the options database to change values in the DMDA call DMSetFromOptions() after this call
349    but before DMSetUp().
350 
351 .seealso: DMDestroy(), DMView(), DMDACreate2d(), DMDACreate3d(), DMGlobalToLocalBegin(), DMDASetRefinementFactor(),
352           DMGlobalToLocalEnd(), DMLocalToGlobalBegin(), DMLocalToLocalBegin(), DMLocalToLocalEnd(), DMDAGetRefinementFactor(),
353           DMDAGetInfo(), DMCreateGlobalVector(), DMCreateLocalVector(), DMDACreateNaturalVector(), DMLoad(), DMDAGetOwnershipRanges(),
354           DMStagCreate1d()
355 
356 @*/
357 PetscErrorCode  DMDACreate1d(MPI_Comm comm, DMBoundaryType bx, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DM *da)
358 {
359   PetscErrorCode ierr;
360   PetscMPIInt    size;
361 
362   PetscFunctionBegin;
363   ierr = DMDACreate(comm, da);CHKERRQ(ierr);
364   ierr = DMSetDimension(*da, 1);CHKERRQ(ierr);
365   ierr = DMDASetSizes(*da, M, 1, 1);CHKERRQ(ierr);
366   ierr = MPI_Comm_size(comm, &size);CHKERRMPI(ierr);
367   ierr = DMDASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE);CHKERRQ(ierr);
368   ierr = DMDASetBoundaryType(*da, bx, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE);CHKERRQ(ierr);
369   ierr = DMDASetDof(*da, dof);CHKERRQ(ierr);
370   ierr = DMDASetStencilWidth(*da, s);CHKERRQ(ierr);
371   ierr = DMDASetOwnershipRanges(*da, lx, NULL, NULL);CHKERRQ(ierr);
372   PetscFunctionReturn(0);
373 }
374