xref: /petsc/src/dm/impls/da/da1.c (revision 2205254efee3a00a594e5e2a3a70f74dcb40bc03)

/*
   Code for manipulating distributed regular 1d arrays in parallel.
   This file was created by Peter Mell   6/30/95
*/

#include <petsc-private/daimpl.h>     /*I  "petscdmda.h"   I*/

const char *const DMDABoundaryTypes[] = {"BOUNDARY_NONE","BOUNDARY_GHOSTED","BOUNDARY_PERIODIC","DMDA_",0};

#undef __FUNCT__
#define __FUNCT__ "DMView_DA_1d"
PetscErrorCode DMView_DA_1d(DM da,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscBool      iascii,isdraw,isbinary;
  DM_DA          *dd = (DM_DA*)da->data;
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscBool ismatlab;
#endif

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)da)->comm,&rank);CHKERRQ(ierr);

  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);CHKERRQ(ierr);
#endif
  if (iascii) {
    PetscViewerFormat format;

    ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
    if (format != PETSC_VIEWER_ASCII_VTK && format != PETSC_VIEWER_ASCII_VTK_CELL) {
      DMDALocalInfo info;
      ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D m %D w %D s %D\n",rank,dd->M,dd->m,dd->w,dd->s);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D\n",info.xs,info.xs+info.xm);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
    } else {
      ierr = DMView_DA_VTK(da, viewer);CHKERRQ(ierr);
    }
  } else if (isdraw) {
    PetscDraw draw;
    double    ymin = -1,ymax = 1,xmin = -1,xmax = dd->M,x;
    PetscInt  base;
    char      node[10];
    PetscBool isnull;

    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);

    ierr = PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);CHKERRQ(ierr);
    ierr = PetscDrawSynchronizedClear(draw);CHKERRQ(ierr);

    /* first processor draws all node lines */
    if (!rank) {
      PetscInt xmin_tmp;
      ymin = 0.0; ymax = 0.3;

      for (xmin_tmp=0; xmin_tmp < dd->M; xmin_tmp++) {
        ierr = PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr);
      }

      xmin = 0.0; xmax = dd->M - 1;
      ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);CHKERRQ(ierr);
      ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr);
    }

    ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
    ierr = PetscDrawPause(draw);CHKERRQ(ierr);

    /* draw my box */
    ymin = 0; ymax = 0.3; xmin = dd->xs / dd->w; xmax = (dd->xe / dd->w) - 1;
    ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);

    /* Put in index numbers */
    base = dd->base / dd->w;
    for (x=xmin; x<=xmax; x++) {
      sprintf(node,"%d",(int)base++);
      ierr = PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node);CHKERRQ(ierr);
    }

    ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
    ierr = PetscDrawPause(draw);CHKERRQ(ierr);
  } else if (isbinary) {
    ierr = DMView_DA_Binary(da,viewer);CHKERRQ(ierr);
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  } else if (ismatlab) {
    ierr = DMView_DA_Matlab(da,viewer);CHKERRQ(ierr);
#endif
  }
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMSetUp_DA_1D"
PetscErrorCode  DMSetUp_DA_1D(DM da)
{
  DM_DA            *dd   = (DM_DA*)da->data;
  const PetscInt   M     = dd->M;
  const PetscInt   dof   = dd->w;
  const PetscInt   s     = dd->s;
  const PetscInt   sDist = s*dof;  /* absolute stencil distance */
  const PetscInt   *lx   = dd->lx;
  DMDABoundaryType bx    = dd->bx;
  MPI_Comm         comm;
  Vec              local, global;
  VecScatter       ltog, gtol;
  IS               to, from;
  PetscBool        flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
  PetscMPIInt      rank, size;
  PetscInt         i,j,*idx,nn,left,xs,xe,x,Xs,Xe,start,end,m,IXs,IXe;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) da, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  dd->m = size;
  m     = dd->m;

  if (s > 0) {
    /* if no data is communicated it is acceptable to have nothing on some processes */
    if (M < m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"More processes than data points! %D %D",m,M);
    if ((M-1) < s) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %D %D",M-1,s);
  }

  /*
     Determine the locally owned region:
     xs is the first local node number, x is the number of local nodes
  */
  if (!lx) {
    ierr = PetscMalloc(m*sizeof(PetscInt), &dd->lx);CHKERRQ(ierr);
    ierr = PetscOptionsGetBool(PETSC_NULL,"-da_partition_blockcomm",&flg1,PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsGetBool(PETSC_NULL,"-da_partition_nodes_at_end",&flg2,PETSC_NULL);CHKERRQ(ierr);
    if (flg1) {      /* Block Comm type Distribution */
      xs = rank*M/m;
      x  = (rank + 1)*M/m - xs;
    } else if (flg2) { /* the leftover nodes are evenly distributed across the last processes */
      x = (M + rank)/m;
      if (M/m == x) xs = rank*x;
      else          xs = rank*(x-1) + (M+rank)%(x*m);
    } else { /* the leftover nodes are evenly distributed across the first M%m processes */
      /* Regular PETSc Distribution */
      x = M/m + ((M % m) > rank);
      if (rank >= (M % m)) xs = (rank * (PetscInt)(M/m) + M % m);
      else                 xs = rank * (PetscInt)(M/m) + rank;
    }
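    /* Worked example for the default distribution (illustrative values): M=10
       nodes on m=3 processes gives x = 4,3,3 and xs = 0,4,7 on ranks 0,1,2 */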
    ierr = MPI_Allgather(&xs,1,MPIU_INT,dd->lx,1,MPIU_INT,comm);CHKERRQ(ierr);
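    /* dd->lx now holds each rank's starting index; convert the starts to
       per-rank counts by differencing (the last count is M minus the last start) */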
    for (i=0; i<m-1; i++) dd->lx[i] = dd->lx[i+1] - dd->lx[i];
    dd->lx[m-1] = M - dd->lx[m-1];
  } else {
    x  = lx[rank];
    xs = 0;
    for (i=0; i<rank; i++) xs += lx[i];
    /* verify that the data the user provided is consistent */
    left = xs;
    for (i=rank; i<size; i++) left += lx[i];
    if (left != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M %D %D",left,M);
  }

  /*
     check if the scatter requires more than one process neighbor or wraps around
     the domain more than once
  */
  if ((x < s) && ((M > 1) || (bx == DMDA_BOUNDARY_PERIODIC))) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %D is smaller than stencil width s %D",x,s);

  /* From now on x,xs,xe,Xs,Xe are the exact locations in the array */
  x  *= dof;
  xs *= dof;
  xe  = xs + x;

  /* determine ghost region (Xs,Xe) and region scattered into (IXs,IXe) */
  if (xs-sDist > 0) {
    Xs  = xs - sDist;
    IXs = xs - sDist;
  } else {
    if (bx) Xs = xs - sDist;
    else Xs = 0;
    IXs = 0;
  }
  if (xe+sDist <= M*dof) {
    Xe  = xe + sDist;
    IXe = xe + sDist;
  } else {
    if (bx) Xe = xe + sDist;
    else Xe = M*dof;
    IXe = M*dof;
  }

  if (bx == DMDA_BOUNDARY_PERIODIC || bx == DMDA_BOUNDARY_MIRROR) {
    Xs  = xs - sDist;
    Xe  = xe + sDist;
    IXs = xs - sDist;
    IXe = xe + sDist;
  }
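  /* Worked example (illustrative values): M=10, dof=1, s=2, with rank 1 owning
     xs=5..xe=10.  Then xe+sDist=12 > M*dof=10, so with DMDA_BOUNDARY_NONE
     Xe=IXe=10 (no ghosts past the physical boundary); with DMDA_BOUNDARY_GHOSTED
     Xe=12 (ghost slots are allocated) while IXe=10 (nothing is scattered into
     the slots beyond the boundary) */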

  /* allocate the base parallel and sequential vectors */
  dd->Nlocal = x;
  ierr       = VecCreateMPIWithArray(comm,dof,dd->Nlocal,PETSC_DECIDE,0,&global);CHKERRQ(ierr);
  dd->nlocal = (Xe-Xs);
  ierr       = VecCreateSeqWithArray(PETSC_COMM_SELF,dof,dd->nlocal,0,&local);CHKERRQ(ierr);

  /* Create Local to Global Vector Scatter Context */
  /* local to global inserts non-ghost point region into global */
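  /* e.g. this rank's global entries [start,start+x) are filled from local
     slots [xs-Xs,xs-Xs+x) of the ghosted local vector */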
  ierr = VecGetOwnershipRange(global,&start,&end);CHKERRQ(ierr);
  ierr = ISCreateStride(comm,x,start,1,&to);CHKERRQ(ierr);
  ierr = ISCreateStride(comm,x,xs-Xs,1,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(local,from,global,to,&ltog);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(da,ltog);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);

  /* Create Global to Local Vector Scatter Context */
  /* global to local must retrieve ghost points */
  ierr = ISCreateStride(comm,(IXe-IXs),IXs-Xs,1,&to);CHKERRQ(ierr);

  ierr = PetscMalloc((x+2*sDist)*sizeof(PetscInt),&idx);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(da,(x+2*sDist)*sizeof(PetscInt));CHKERRQ(ierr);

  for (i=0; i<IXs-Xs; i++) idx[i] = -1; /* prepend with -1s if needed for ghosted case */

  nn = IXs-Xs;
  if (bx == DMDA_BOUNDARY_PERIODIC) { /* handle the periodic case first */
    for (i=0; i<sDist; i++) {  /* left ghost points */
      if ((xs-sDist+i)>=0) idx[nn++] = xs-sDist+i;
      else                 idx[nn++] = M*dof+(xs-sDist+i);
    }

    for (i=0; i<x; i++) idx[nn++] = xs + i;  /* non-ghost points */

    for (i=0; i<sDist; i++) { /* right ghost points */
      if ((xe+i)<M*dof) idx[nn++] = xe+i;
      else              idx[nn++] = (xe+i) - M*dof;
    }
  } else if (bx == DMDA_BOUNDARY_MIRROR) { /* handle the mirror case */
    for (i=0; i<sDist/dof; i++) {  /* left ghost points */
      for (j=0; j<dof; j++) {
        if ((xs-sDist+i*dof+j)>=0) idx[nn++] = xs-sDist+i*dof+j;
        else                       idx[nn++] = sDist - dof*i + j;
      }
    }

    for (i=0; i<x; i++) idx[nn++] = xs + i;  /* non-ghost points */

    for (i=0; i<sDist/dof; i++) { /* right ghost points */
      for (j=0; j<dof; j++) {
        if ((xe+i*dof+j)<M*dof) idx[nn++] = xe+i*dof+j;
        else                    idx[nn++] = M*dof - dof*(i+2) + j;
      }
    }
  } else {      /* now do all cases with no periodicity */
    if (0 <= xs-sDist) {
      for (i=0; i<sDist; i++) idx[nn++] = xs - sDist + i;
    } else {
      for (i=0; i<xs; i++) idx[nn++] = i;
    }

    for (i=0; i<x; i++) idx[nn++] = xs + i;

    if ((xe+sDist)<=M*dof) {
      for (i=0; i<sDist; i++) idx[nn++] = xe+i;
    } else {
      for (i=xe; i<(M*dof); i++) idx[nn++] = i;
    }
  }
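  /* Worked example (illustrative values): periodic boundary, M=6, dof=1, s=1,
     rank 0 owning nodes 0..2: idx = {5, 0,1,2, 3}, i.e. the left ghost wraps
     around to node 5 and the right ghost is the neighbor's first node */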

  ierr = ISCreateGeneral(comm,nn-IXs+Xs,&idx[IXs-Xs],PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,local,to,&gtol);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(da,to);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(da,from);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(da,gtol);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = VecDestroy(&local);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);

  dd->xs = xs; dd->xe = xe; dd->ys = 0; dd->ye = 1; dd->zs = 0; dd->ze = 1;
  dd->Xs = Xs; dd->Xe = Xe; dd->Ys = 0; dd->Ye = 1; dd->Zs = 0; dd->Ze = 1;

  dd->gtol      = gtol;
  dd->ltog      = ltog;
  dd->base      = xs;
  da->ops->view = DMView_DA_1d;

  /*
     Set the local to global ordering in the global vector, this allows use
     of VecSetValuesLocal().
  */
  for (i=0; i<Xe-IXe; i++) idx[nn++] = -1; /* pad with -1s if needed for ghosted case */

  ierr = ISLocalToGlobalMappingCreate(comm,nn,idx,PETSC_COPY_VALUES,&da->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingBlock(da->ltogmap,dd->w,&da->ltogmapb);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(da,da->ltogmap);CHKERRQ(ierr);

  dd->idx = idx;
  dd->Nl  = nn;
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMDACreate1d"
/*@C
   DMDACreate1d - Creates an object that will manage the communication of one-dimensional
   regular array data that is distributed across some processors.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  bx - type of ghost cells at the boundary the array should have, if any. Use
          DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_GHOSTED, or DMDA_BOUNDARY_PERIODIC.
.  M - global dimension of the array (use -M to indicate that it may be set to a different value
            from the command line with -da_grid_x <M>)
.  dof - number of degrees of freedom per node
.  s - stencil width
-  lx - array containing the number of nodes in the X direction on each processor,
        or PETSC_NULL. If non-null, it must have length equal to the number of processes in the MPI_Comm.

   Output Parameter:
.  da - the resulting distributed array object

   Options Database Keys:
+  -dm_view - Calls DMView() at the conclusion of DMDACreate1d()
.  -da_grid_x <nx> - number of grid points in x direction; can be set if M < 0
.  -da_refine_x <rx> - refinement factor
-  -da_refine <n> - refine the DMDA n times before creating it, if M < 0

   Level: beginner

   Notes:
   The array data itself is NOT stored in the DMDA; it is stored in Vec objects.
   The appropriate vector objects can be obtained with calls to DMCreateGlobalVector()
   and DMCreateLocalVector(), and calls to VecDuplicate() if more are needed.
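
   Example:
   A minimal sketch (the grid size 8, dof 1, and stencil width 1 are illustrative values):
.vb
      DM da;
      ierr = DMDACreate1d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,8,1,1,PETSC_NULL,&da);CHKERRQ(ierr);
      /* ... use the DMDA, e.g. via DMCreateGlobalVector() ... */
      ierr = DMDestroy(&da);CHKERRQ(ierr);
.ve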

.keywords: distributed array, create, one-dimensional

.seealso: DMDestroy(), DMView(), DMDACreate2d(), DMDACreate3d(), DMGlobalToLocalBegin(), DMDASetRefinementFactor(),
          DMGlobalToLocalEnd(), DMLocalToGlobalBegin(), DMDALocalToLocalBegin(), DMDALocalToLocalEnd(), DMDAGetRefinementFactor(),
          DMDAGetInfo(), DMCreateGlobalVector(), DMCreateLocalVector(), DMDACreateNaturalVector(), DMLoad(), DMDAGetOwnershipRanges()

@*/
PetscErrorCode  DMDACreate1d(MPI_Comm comm, DMDABoundaryType bx, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DM *da)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = DMDACreate(comm, da);CHKERRQ(ierr);
  ierr = DMDASetDim(*da, 1);CHKERRQ(ierr);
  ierr = DMDASetSizes(*da, M, 1, 1);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(*da, bx, DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_NONE);CHKERRQ(ierr);
  ierr = DMDASetDof(*da, dof);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(*da, s);CHKERRQ(ierr);
  ierr = DMDASetOwnershipRanges(*da, lx, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
  /* This violates the behavior for other classes, but right now users expect negative dimensions to be handled this way */
  ierr = DMSetFromOptions(*da);CHKERRQ(ierr);
  ierr = DMSetUp(*da);CHKERRQ(ierr);
  ierr = DMViewFromOptions(*da,"-dm_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}