xref: /petsc/src/dm/impls/da/da1.c (revision 63f3c55c12ae2f190c391f6df6c540efe018a44a)
1 
2 /*
3    Code for manipulating distributed regular 1d arrays in parallel.
4    This file was created by Peter Mell   6/30/95
5 */
6 
7 #include <petsc/private/dmdaimpl.h>     /*I  "petscdmda.h"   I*/
8 
9 #include <petscdraw.h>
/*
   DMView_DA_1d - Views a 1d DMDA, dispatching on the viewer type:
   ASCII (including load-balance, VTK, and GLVis formats), draw,
   GLVis, binary, and (when PETSc was built with it) MATLAB.
*/
static PetscErrorCode DMView_DA_1d(DM da,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscBool      iascii,isdraw,isglvis,isbinary;
  DM_DA          *dd = (DM_DA*)da->data;
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscBool ismatlab;
#endif

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)da),&rank);CHKERRQ(ierr);

  /* Identify which concrete viewer implementation we were handed */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERGLVIS,&isglvis);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);CHKERRQ(ierr);
#endif
  if (iascii) {
    PetscViewerFormat format;

    ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_LOAD_BALANCE) {
      /* Report the min/avg/max number of locally owned grid points over all ranks */
      PetscInt      i,nmax = 0,nmin = PETSC_MAX_INT,navg = 0,*nz,nzlocal;
      DMDALocalInfo info;
      PetscMPIInt   size;
      ierr = MPI_Comm_size(PetscObjectComm((PetscObject)da),&size);CHKERRQ(ierr);
      ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
      nzlocal = info.xm;
      /* collect every rank's local width so each rank can compute the statistics */
      ierr = PetscMalloc1(size,&nz);CHKERRQ(ierr);
      ierr = MPI_Allgather(&nzlocal,1,MPIU_INT,nz,1,MPIU_INT,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
      for (i=0; i<(PetscInt)size; i++) {
        nmax = PetscMax(nmax,nz[i]);
        nmin = PetscMin(nmin,nz[i]);
        navg += nz[i];
      }
      ierr = PetscFree(nz);CHKERRQ(ierr);
      navg = navg/size;
      ierr = PetscViewerASCIIPrintf(viewer,"  Load Balance - Grid Points: Min %D  avg %D  max %D\n",nmin,navg,nmax);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    if (format != PETSC_VIEWER_ASCII_VTK && format != PETSC_VIEWER_ASCII_VTK_CELL && format != PETSC_VIEWER_ASCII_GLVIS) {
      /* default ASCII output: one synchronized line per rank with the local layout */
      DMDALocalInfo info;
      ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D m %D w %D s %D\n",rank,dd->M,dd->m,dd->w,dd->s);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D\n",info.xs,info.xs+info.xm);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    } else if (format == PETSC_VIEWER_ASCII_GLVIS) {
      ierr = DMView_DA_GLVis(da,viewer);CHKERRQ(ierr);
    } else {
      ierr = DMView_DA_VTK(da, viewer);CHKERRQ(ierr);
    }
  } else if (isdraw) {
    PetscDraw draw;
    double    ymin = -1,ymax = 1,xmin = -1,xmax = dd->M,x;
    PetscInt  base;
    char      node[10];
    PetscBool isnull;

    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr);
    if (isnull) PetscFunctionReturn(0);

    ierr = PetscDrawCheckResizedWindow(draw);CHKERRQ(ierr);
    ierr = PetscDrawClear(draw);CHKERRQ(ierr);
    ierr = PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);CHKERRQ(ierr);

    ierr = PetscDrawCollectiveBegin(draw);CHKERRQ(ierr);
    /* first processor draws all node lines */
    if (!rank) {
      PetscInt xmin_tmp;
      ymin = 0.0; ymax = 0.3;
      for (xmin_tmp=0; xmin_tmp < dd->M; xmin_tmp++) {
        ierr = PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr);
      }
      xmin = 0.0; xmax = dd->M - 1;
      ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);CHKERRQ(ierr);
      ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr);
    }
    ierr = PetscDrawCollectiveEnd(draw);CHKERRQ(ierr);
    ierr = PetscDrawFlush(draw);CHKERRQ(ierr);
    ierr = PetscDrawPause(draw);CHKERRQ(ierr);

    ierr = PetscDrawCollectiveBegin(draw);CHKERRQ(ierr);
    /* draw my box */
    ymin = 0; ymax = 0.3; xmin = dd->xs / dd->w; xmax = (dd->xe / dd->w)  - 1;
    ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
    /* Put in index numbers; x is a double because the draw API works in real coordinates,
       but it steps through integer-valued node positions */
    base = dd->base / dd->w;
    for (x=xmin; x<=xmax; x++) {
      ierr = PetscSNPrintf(node,sizeof(node),"%d",(int)base++);CHKERRQ(ierr);
      ierr = PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node);CHKERRQ(ierr);
    }
    ierr = PetscDrawCollectiveEnd(draw);CHKERRQ(ierr);
    ierr = PetscDrawFlush(draw);CHKERRQ(ierr);
    ierr = PetscDrawPause(draw);CHKERRQ(ierr);
    ierr = PetscDrawSave(draw);CHKERRQ(ierr);
  } else if (isglvis) {
    ierr = DMView_DA_GLVis(da,viewer);CHKERRQ(ierr);
  } else if (isbinary) {
    ierr = DMView_DA_Binary(da,viewer);CHKERRQ(ierr);
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  } else if (ismatlab) {
    ierr = DMView_DA_Matlab(da,viewer);CHKERRQ(ierr);
#endif
  }
  PetscFunctionReturn(0);
}
125 
126 
127 PetscErrorCode  DMSetUp_DA_1D(DM da)
128 {
129   DM_DA            *dd   = (DM_DA*)da->data;
130   const PetscInt   M     = dd->M;
131   const PetscInt   dof   = dd->w;
132   const PetscInt   s     = dd->s;
133   const PetscInt   sDist = s;  /* stencil distance in points */
134   const PetscInt   *lx   = dd->lx;
135   DMBoundaryType   bx    = dd->bx;
136   MPI_Comm         comm;
137   Vec              local, global;
138   VecScatter       gtol;
139   IS               to, from;
140   PetscBool        flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
141   PetscMPIInt      rank, size;
142   PetscInt         i,*idx,nn,left,xs,xe,x,Xs,Xe,start,m,IXs,IXe;
143   PetscErrorCode   ierr;
144 
145   PetscFunctionBegin;
146   ierr = PetscObjectGetComm((PetscObject) da, &comm);CHKERRQ(ierr);
147   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
148   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
149 
150   dd->p = 1;
151   dd->n = 1;
152   dd->m = size;
153   m     = dd->m;
154 
155   if (s > 0) {
156     /* if not communicating data then should be ok to have nothing on some processes */
157     if (M < m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"More processes than data points! %D %D",m,M);
158     if ((M-1) < s && size > 1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %D %D",M-1,s);
159   }
160 
161   /*
162      Determine locally owned region
163      xs is the first local node number, x is the number of local nodes
164   */
165   if (!lx) {
166     ierr = PetscMalloc1(m, &dd->lx);CHKERRQ(ierr);
167     ierr = PetscOptionsGetBool(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_partition_blockcomm",&flg1,NULL);CHKERRQ(ierr);
168     ierr = PetscOptionsGetBool(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_partition_nodes_at_end",&flg2,NULL);CHKERRQ(ierr);
169     if (flg1) {      /* Block Comm type Distribution */
170       xs = rank*M/m;
171       x  = (rank + 1)*M/m - xs;
172     } else if (flg2) { /* The odd nodes are evenly distributed across last nodes */
173       x = (M + rank)/m;
174       if (M/m == x) xs = rank*x;
175       else          xs = rank*(x-1) + (M+rank)%(x*m);
176     } else { /* The odd nodes are evenly distributed across the first k nodes */
177       /* Regular PETSc Distribution */
178       x = M/m + ((M % m) > rank);
179       if (rank >= (M % m)) xs = (rank * (PetscInt)(M/m) + M % m);
180       else                 xs = rank * (PetscInt)(M/m) + rank;
181     }
182     ierr = MPI_Allgather(&xs,1,MPIU_INT,dd->lx,1,MPIU_INT,comm);CHKERRQ(ierr);
183     for (i=0; i<m-1; i++) dd->lx[i] = dd->lx[i+1] - dd->lx[i];
184     dd->lx[m-1] = M - dd->lx[m-1];
185   } else {
186     x  = lx[rank];
187     xs = 0;
188     for (i=0; i<rank; i++) xs += lx[i];
189     /* verify that data user provided is consistent */
190     left = xs;
191     for (i=rank; i<size; i++) left += lx[i];
192     if (left != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M %D %D",left,M);
193   }
194 
195   /*
196    check if the scatter requires more than one process neighbor or wraps around
197    the domain more than once
198   */
199   if ((x < s) & ((M > 1) | (bx == DM_BOUNDARY_PERIODIC))) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %D is smaller than stencil width s %D",x,s);
200 
201   xe  = xs + x;
202 
203   /* determine ghost region (Xs) and region scattered into (IXs)  */
204   if (xs-sDist > 0) {
205     Xs  = xs - sDist;
206     IXs = xs - sDist;
207   } else {
208     if (bx) Xs = xs - sDist;
209     else Xs = 0;
210     IXs = 0;
211   }
212   if (xe+sDist <= M) {
213     Xe  = xe + sDist;
214     IXe = xe + sDist;
215   } else {
216     if (bx) Xe = xe + sDist;
217     else Xe = M;
218     IXe = M;
219   }
220 
221   if (bx == DM_BOUNDARY_PERIODIC || bx == DM_BOUNDARY_MIRROR) {
222     Xs  = xs - sDist;
223     Xe  = xe + sDist;
224     IXs = xs - sDist;
225     IXe = xe + sDist;
226   }
227 
228   /* allocate the base parallel and sequential vectors */
229   dd->Nlocal = dof*x;
230   ierr       = VecCreateMPIWithArray(comm,dof,dd->Nlocal,PETSC_DECIDE,NULL,&global);CHKERRQ(ierr);
231   dd->nlocal = dof*(Xe-Xs);
232   ierr       = VecCreateSeqWithArray(PETSC_COMM_SELF,dof,dd->nlocal,NULL,&local);CHKERRQ(ierr);
233 
234   ierr = VecGetOwnershipRange(global,&start,NULL);CHKERRQ(ierr);
235 
236   /* Create Global to Local Vector Scatter Context */
237   /* global to local must retrieve ghost points */
238   ierr = ISCreateStride(comm,dof*(IXe-IXs),dof*(IXs-Xs),1,&to);CHKERRQ(ierr);
239 
240   ierr = PetscMalloc1(x+2*sDist,&idx);CHKERRQ(ierr);
241   ierr = PetscLogObjectMemory((PetscObject)da,(x+2*(sDist))*sizeof(PetscInt));CHKERRQ(ierr);
242 
243   for (i=0; i<IXs-Xs; i++) idx[i] = -1; /* prepend with -1s if needed for ghosted case*/
244 
245   nn = IXs-Xs;
246   if (bx == DM_BOUNDARY_PERIODIC) { /* Handle all cases with periodic first */
247     for (i=0; i<sDist; i++) {  /* Left ghost points */
248       if ((xs-sDist+i)>=0) idx[nn++] = xs-sDist+i;
249       else                 idx[nn++] = M+(xs-sDist+i);
250     }
251 
252     for (i=0; i<x; i++) idx [nn++] = xs + i;  /* Non-ghost points */
253 
254     for (i=0; i<sDist; i++) { /* Right ghost points */
255       if ((xe+i)<M) idx [nn++] =  xe+i;
256       else          idx [nn++] = (xe+i) - M;
257     }
258   } else if (bx == DM_BOUNDARY_MIRROR) { /* Handle all cases with periodic first */
259     for (i=0; i<(sDist); i++) {  /* Left ghost points */
260       if ((xs-sDist+i)>=0) idx[nn++] = xs-sDist+i;
261       else                 idx[nn++] = sDist - i;
262     }
263 
264     for (i=0; i<x; i++) idx [nn++] = xs + i;  /* Non-ghost points */
265 
266     for (i=0; i<(sDist); i++) { /* Right ghost points */
267       if ((xe+i)<M) idx[nn++] =  xe+i;
268       else          idx[nn++] = M - (i + 2);
269     }
270   } else {      /* Now do all cases with no periodicity */
271     if (0 <= xs-sDist) {
272       for (i=0; i<sDist; i++) idx[nn++] = xs - sDist + i;
273     } else {
274       for (i=0; i<xs; i++) idx[nn++] = i;
275     }
276 
277     for (i=0; i<x; i++) idx [nn++] = xs + i;
278 
279     if ((xe+sDist)<=M) {
280       for (i=0; i<sDist; i++) idx[nn++]=xe+i;
281     } else {
282       for (i=xe; i<M; i++) idx[nn++]=i;
283     }
284   }
285 
286   ierr = ISCreateBlock(comm,dof,nn-IXs+Xs,&idx[IXs-Xs],PETSC_USE_POINTER,&from);CHKERRQ(ierr);
287   ierr = VecScatterCreate(global,from,local,to,&gtol);CHKERRQ(ierr);
288   ierr = PetscLogObjectParent((PetscObject)da,(PetscObject)gtol);CHKERRQ(ierr);
289   ierr = ISDestroy(&to);CHKERRQ(ierr);
290   ierr = ISDestroy(&from);CHKERRQ(ierr);
291   ierr = VecDestroy(&local);CHKERRQ(ierr);
292   ierr = VecDestroy(&global);CHKERRQ(ierr);
293 
294   dd->xs = dof*xs; dd->xe = dof*xe; dd->ys = 0; dd->ye = 1; dd->zs = 0; dd->ze = 1;
295   dd->Xs = dof*Xs; dd->Xe = dof*Xe; dd->Ys = 0; dd->Ye = 1; dd->Zs = 0; dd->Ze = 1;
296 
297   dd->gtol      = gtol;
298   dd->base      = dof*xs;
299   da->ops->view = DMView_DA_1d;
300 
301   /*
302      Set the local to global ordering in the global vector, this allows use
303      of VecSetValuesLocal().
304   */
305   for (i=0; i<Xe-IXe; i++) idx[nn++] = -1; /* pad with -1s if needed for ghosted case*/
306 
307   ierr = ISLocalToGlobalMappingCreate(comm,dof,nn,idx,PETSC_OWN_POINTER,&da->ltogmap);CHKERRQ(ierr);
308   ierr = PetscLogObjectParent((PetscObject)da,(PetscObject)da->ltogmap);CHKERRQ(ierr);
309 
310   PetscFunctionReturn(0);
311 }
312 
313 
314 /*@C
315    DMDACreate1d - Creates an object that will manage the communication of  one-dimensional
316    regular array data that is distributed across some processors.
317 
318    Collective
319 
320    Input Parameters:
321 +  comm - MPI communicator
.  bx - type of ghost cells at the boundary the array should have, if any. Use
          DM_BOUNDARY_NONE, DM_BOUNDARY_GHOSTED, DM_BOUNDARY_MIRROR, or DM_BOUNDARY_PERIODIC.
.  M - global dimension of the array (that is the number of grid points); can also be set
            from the command line with -da_grid_x <M>
.  dof - number of degrees of freedom per node
.  s - stencil width
-  lx - array containing number of nodes in the X direction on each processor,
        or NULL. If non-null, must be of the same length as the number of processes in the MPI_Comm.
        The sum of these entries must equal M
331 
332    Output Parameter:
333 .  da - the resulting distributed array object
334 
335    Options Database Key:
336 +  -dm_view - Calls DMView() at the conclusion of DMDACreate1d()
337 .  -da_grid_x <nx> - number of grid points in x direction
338 .  -da_refine_x <rx> - refinement factor
339 -  -da_refine <n> - refine the DMDA n times before creating it
340 
341    Level: beginner
342 
343    Notes:
344    The array data itself is NOT stored in the DMDA, it is stored in Vec objects;
345    The appropriate vector objects can be obtained with calls to DMCreateGlobalVector()
346    and DMCreateLocalVector() and calls to VecDuplicate() if more are needed.
347 
348    You must call DMSetUp() after this call before using this DM.
349 
350    If you wish to use the options database to change values in the DMDA call DMSetFromOptions() after this call
351    but before DMSetUp().
352 
353 .seealso: DMDestroy(), DMView(), DMDACreate2d(), DMDACreate3d(), DMGlobalToLocalBegin(), DMDASetRefinementFactor(),
354           DMGlobalToLocalEnd(), DMLocalToGlobalBegin(), DMLocalToLocalBegin(), DMLocalToLocalEnd(), DMDAGetRefinementFactor(),
355           DMDAGetInfo(), DMCreateGlobalVector(), DMCreateLocalVector(), DMDACreateNaturalVector(), DMLoad(), DMDAGetOwnershipRanges(),
356           DMStagCreate1d()
357 
358 @*/
359 PetscErrorCode  DMDACreate1d(MPI_Comm comm, DMBoundaryType bx, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DM *da)
360 {
361   PetscErrorCode ierr;
362   PetscMPIInt    size;
363 
364   PetscFunctionBegin;
365   ierr = DMDACreate(comm, da);CHKERRQ(ierr);
366   ierr = DMSetDimension(*da, 1);CHKERRQ(ierr);
367   ierr = DMDASetSizes(*da, M, 1, 1);CHKERRQ(ierr);
368   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
369   ierr = DMDASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE);CHKERRQ(ierr);
370   ierr = DMDASetBoundaryType(*da, bx, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE);CHKERRQ(ierr);
371   ierr = DMDASetDof(*da, dof);CHKERRQ(ierr);
372   ierr = DMDASetStencilWidth(*da, s);CHKERRQ(ierr);
373   ierr = DMDASetOwnershipRanges(*da, lx, NULL, NULL);CHKERRQ(ierr);
374   PetscFunctionReturn(0);
375 }
376