xref: /petsc/src/dm/impls/da/da1.c (revision f9426fe092dba0ba2fdf65dfec8d938c4b10a31c)

/*
   Code for manipulating distributed regular 1d arrays in parallel.
   This file was created by Peter Mell   6/30/95
*/

#include <petsc-private/dmdaimpl.h>     /*I  "petscdmda.h"   I*/

const char *const DMDABoundaryTypes[] = {"NONE","GHOSTED","MIRROR","PERIODIC","DMDA_BOUNDARY_",0};

#include <petscdraw.h>
#undef __FUNCT__
#define __FUNCT__ "DMView_DA_1d"
PetscErrorCode DMView_DA_1d(DM da,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscBool      iascii,isdraw,isbinary;
  DM_DA          *dd = (DM_DA*)da->data;
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscBool ismatlab;
#endif

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)da),&rank);CHKERRQ(ierr);

  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);CHKERRQ(ierr);
#endif
  if (iascii) {
    PetscViewerFormat format;

    ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
    if (format != PETSC_VIEWER_ASCII_VTK && format != PETSC_VIEWER_ASCII_VTK_CELL) {
      DMDALocalInfo info;
      ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D m %D w %D s %D\n",rank,dd->M,dd->m,dd->w,dd->s);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D\n",info.xs,info.xs+info.xm);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
    } else {
      ierr = DMView_DA_VTK(da, viewer);CHKERRQ(ierr);
    }
  } else if (isdraw) {
    PetscDraw draw;
    double    ymin = -1,ymax = 1,xmin = -1,xmax = dd->M,x;
    PetscInt  base;
    char      node[10];
    PetscBool isnull;

    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);

    ierr = PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);CHKERRQ(ierr);
    ierr = PetscDrawSynchronizedClear(draw);CHKERRQ(ierr);

    /* first processor draws all node lines */
    if (!rank) {
      PetscInt xmin_tmp;
      ymin = 0.0; ymax = 0.3;

      for (xmin_tmp=0; xmin_tmp < dd->M; xmin_tmp++) {
        ierr = PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr);
      }

      xmin = 0.0; xmax = dd->M - 1;
      ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);CHKERRQ(ierr);
      ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr);
    }

    ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
    ierr = PetscDrawPause(draw);CHKERRQ(ierr);

    /* draw my box */
    ymin = 0; ymax = 0.3; xmin = dd->xs / dd->w; xmax = (dd->xe / dd->w) - 1;
    ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);

    /* Put in index numbers */
    base = dd->base / dd->w;
    for (x=xmin; x<=xmax; x++) {
      sprintf(node,"%d",(int)base++);
      ierr = PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node);CHKERRQ(ierr);
    }

    ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
    ierr = PetscDrawPause(draw);CHKERRQ(ierr);
  } else if (isbinary) {
    ierr = DMView_DA_Binary(da,viewer);CHKERRQ(ierr);
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  } else if (ismatlab) {
    ierr = DMView_DA_Matlab(da,viewer);CHKERRQ(ierr);
#endif
  }
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMSetUp_DA_1D"
PetscErrorCode  DMSetUp_DA_1D(DM da)
{
  DM_DA            *dd   = (DM_DA*)da->data;
  const PetscInt   M     = dd->M;
  const PetscInt   dof   = dd->w;
  const PetscInt   s     = dd->s;
  const PetscInt   sDist = s*dof;  /* absolute stencil distance */
  const PetscInt   *lx   = dd->lx;
  DMDABoundaryType bx    = dd->bx;
  MPI_Comm         comm;
  Vec              local, global;
  VecScatter       ltog, gtol;
  IS               to, from;
  PetscBool        flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
  PetscMPIInt      rank, size;
  PetscInt         i,j,*idx,nn,left,xs,xe,x,Xs,Xe,start,end,m,IXs,IXe;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) da, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  dd->m = size;
  m     = dd->m;

  if (s > 0) {
    /* if s == 0 no data is communicated, so it is fine for some processes to own no points; skip these checks */
    if (M < m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"More processes than data points! %D %D",m,M);
    if ((M-1) < s) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %D %D",M-1,s);
  }

  /*
     Determine locally owned region
     xs is the first local node number, x is the number of local nodes
  */
  if (!lx) {
    ierr = PetscMalloc(m*sizeof(PetscInt), &dd->lx);CHKERRQ(ierr);
    ierr = PetscOptionsGetBool(NULL,"-da_partition_blockcomm",&flg1,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsGetBool(NULL,"-da_partition_nodes_at_end",&flg2,NULL);CHKERRQ(ierr);
    if (flg1) {      /* Block Comm type Distribution */
      xs = rank*M/m;
      x  = (rank + 1)*M/m - xs;
    } else if (flg2) { /* the leftover nodes are distributed evenly across the last processes */
      x = (M + rank)/m;
      if (M/m == x) xs = rank*x;
      else          xs = rank*(x-1) + (M+rank)%(x*m);
    } else { /* the leftover nodes are distributed evenly across the first M % m processes */
      /* Regular PETSc Distribution */
      x = M/m + ((M % m) > rank);
      if (rank >= (M % m)) xs = (rank * (PetscInt)(M/m) + M % m);
      else                 xs = rank * (PetscInt)(M/m) + rank;
    }
    ierr = MPI_Allgather(&xs,1,MPIU_INT,dd->lx,1,MPIU_INT,comm);CHKERRQ(ierr);
    for (i=0; i<m-1; i++) dd->lx[i] = dd->lx[i+1] - dd->lx[i];
    dd->lx[m-1] = M - dd->lx[m-1];
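    /*
       Worked example (illustrative): M = 10 points on m = 3 ranks with the regular
       distribution gives x = {4,3,3} and xs = {0,4,7}; the Allgather collects the xs
       values and the two lines above convert them in place to the counts lx = {4,3,3}.
    */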
  } else {
    x  = lx[rank];
    xs = 0;
    for (i=0; i<rank; i++) xs += lx[i];
    /* verify that the user-provided data is consistent */
    left = xs;
    for (i=rank; i<size; i++) left += lx[i];
    if (left != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M %D %D",left,M);
  }

  /*
   check if the scatter requires more than one process neighbor or wraps around
   the domain more than once
  */
  if ((x < s) && ((M > 1) || (bx == DMDA_BOUNDARY_PERIODIC))) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %D is smaller than stencil width s %D",x,s);

  /* From now on x,xs,xe,Xs,Xe are the exact location in the array */
  x  *= dof;
  xs *= dof;
  xe  = xs + x;

  /* determine ghost region (Xs) and region scattered into (IXs) */
  if (xs-sDist > 0) {
    Xs  = xs - sDist;
    IXs = xs - sDist;
  } else {
    if (bx) Xs = xs - sDist;
    else Xs = 0;
    IXs = 0;
  }
  if (xe+sDist <= M*dof) {
    Xe  = xe + sDist;
    IXe = xe + sDist;
  } else {
    if (bx) Xe = xe + sDist;
    else Xe = M*dof;
    IXe = M*dof;
  }

  if (bx == DMDA_BOUNDARY_PERIODIC || bx == DMDA_BOUNDARY_MIRROR) {
    Xs  = xs - sDist;
    Xe  = xe + sDist;
    IXs = xs - sDist;
    IXe = xe + sDist;
  }
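  /*
     Worked example (illustrative): with M = 10, dof = 1, s = 1 and an interior rank
     owning xs = 4, xe = 7, this gives Xs = IXs = 3 and Xe = IXe = 8.  On the rank
     holding the left end (xs = 0) with bx == DMDA_BOUNDARY_GHOSTED the ghost slot
     exists locally (Xs = -1) but nothing is scattered into it (IXs = 0); with
     bx == DMDA_BOUNDARY_NONE both Xs and IXs are 0.
  */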

  /* allocate the base parallel and sequential vectors */
  dd->Nlocal = x;
  ierr       = VecCreateMPIWithArray(comm,dof,dd->Nlocal,PETSC_DECIDE,0,&global);CHKERRQ(ierr);
  dd->nlocal = (Xe-Xs);
  ierr       = VecCreateSeqWithArray(PETSC_COMM_SELF,dof,dd->nlocal,0,&local);CHKERRQ(ierr);

  /* Create Local to Global Vector Scatter Context */
  /* local to global inserts non-ghost point region into global */
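  /* the owned points begin at offset xs-Xs in the local (ghosted) vector, so a stride IS
     of length x starting there maps onto the owned range [start,end) of the global vector */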
  ierr = VecGetOwnershipRange(global,&start,&end);CHKERRQ(ierr);
  ierr = ISCreateStride(comm,x,start,1,&to);CHKERRQ(ierr);
  ierr = ISCreateStride(comm,x,xs-Xs,1,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(local,from,global,to,&ltog);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)da,(PetscObject)ltog);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);

  /* Create Global to Local Vector Scatter Context */
  /* global to local must retrieve ghost points */
  ierr = ISCreateStride(comm,(IXe-IXs),IXs-Xs,1,&to);CHKERRQ(ierr);

  ierr = PetscMalloc((x+2*(sDist))*sizeof(PetscInt),&idx);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)da,(x+2*(sDist))*sizeof(PetscInt));CHKERRQ(ierr);

  for (i=0; i<IXs-Xs; i++) idx[i] = -1; /* prepend with -1s if needed for ghosted case */

  nn = IXs-Xs;
  if (bx == DMDA_BOUNDARY_PERIODIC) { /* Handle all cases with periodic first */
    for (i=0; i<sDist; i++) {  /* Left ghost points */
      if ((xs-sDist+i)>=0) idx[nn++] = xs-sDist+i;
      else                 idx[nn++] = M*dof+(xs-sDist+i);
    }

    for (i=0; i<x; i++) idx[nn++] = xs + i;  /* Non-ghost points */

    for (i=0; i<sDist; i++) { /* Right ghost points */
      if ((xe+i)<M*dof) idx[nn++] =  xe+i;
      else              idx[nn++] = (xe+i) - M*dof;
    }
  } else if (bx == DMDA_BOUNDARY_MIRROR) { /* Handle the mirror boundary case */
    for (i=0; i<(sDist)/dof; i++) {  /* Left ghost points */
      for (j=0; j<dof; j++) {
        if ((xs-sDist+i*dof+j)>=0) idx[nn++] = xs-sDist+i*dof+j;
        else                       idx[nn++] = sDist - dof*(i) + j;
      }
    }

    for (i=0; i<x; i++) idx[nn++] = xs + i;  /* Non-ghost points */

    for (i=0; i<(sDist)/dof; i++) { /* Right ghost points */
      for (j=0; j<dof; j++) {
        if ((xe+i*dof+j)<M*dof) idx[nn++] = xe+i*dof+j;
        else                    idx[nn++] = M*dof - dof*(i + 2) + j;
      }
    }
  } else {      /* Now do all cases with no periodicity */
    if (0 <= xs-sDist) {
      for (i=0; i<sDist; i++) idx[nn++] = xs - sDist + i;
    } else {
      for (i=0; i<xs; i++) idx[nn++] = i;
    }

    for (i=0; i<x; i++) idx[nn++] = xs + i;

    if ((xe+sDist)<=M*dof) {
      for (i=0; i<sDist; i++) idx[nn++]=xe+i;
    } else {
      for (i=xe; i<(M*dof); i++) idx[nn++]=i;
    }
  }
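  /*
     Worked example of the periodic branch above (illustrative): with M = 10, dof = 1,
     sDist = 2 and the rank owning xs = 0, the left ghost indices -2,-1 wrap to 8,9;
     on the rank with xe = 10 the right ghost indices 10,11 wrap to 0,1.
  */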

  ierr = ISCreateGeneral(comm,nn-IXs+Xs,&idx[IXs-Xs],PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,local,to,&gtol);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)da,(PetscObject)to);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)da,(PetscObject)from);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)da,(PetscObject)gtol);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = VecDestroy(&local);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);

  dd->xs = xs; dd->xe = xe; dd->ys = 0; dd->ye = 1; dd->zs = 0; dd->ze = 1;
  dd->Xs = Xs; dd->Xe = Xe; dd->Ys = 0; dd->Ye = 1; dd->Zs = 0; dd->Ze = 1;

  dd->gtol      = gtol;
  dd->ltog      = ltog;
  dd->base      = xs;
  da->ops->view = DMView_DA_1d;

  /*
     Set the local to global ordering in the global vector; this allows the use
     of VecSetValuesLocal().
  */
  for (i=0; i<Xe-IXe; i++) idx[nn++] = -1; /* pad with -1s if needed for ghosted case */
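  /*
     After the two padding loops idx has one entry per local (ghosted) point:
       [ (IXs-Xs) entries of -1 | ghost and owned global indices | (Xe-IXe) entries of -1 ]
     where the -1 entries mark boundary ghost slots with no global counterpart.
  */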

  ierr = ISLocalToGlobalMappingCreate(comm,nn,idx,PETSC_COPY_VALUES,&da->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingBlock(da->ltogmap,dd->w,&da->ltogmapb);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)da,(PetscObject)da->ltogmap);CHKERRQ(ierr);

  dd->idx = idx;
  dd->Nl  = nn;
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMDACreate1d"
/*@C
   DMDACreate1d - Creates an object that will manage the communication of one-dimensional
   regular array data that is distributed across some processors.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  bx - type of ghost cells at the boundary the array should have, if any. Use
          DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_GHOSTED, DMDA_BOUNDARY_MIRROR, or DMDA_BOUNDARY_PERIODIC.
.  M - global dimension of the array (use -M to indicate that it may be set to a different value
            from the command line with -da_grid_x <M>)
.  dof - number of degrees of freedom per node
.  s - stencil width
-  lx - array containing the number of nodes in the X direction on each processor,
        or NULL. If non-null, it must have the same length as the number of processes in comm.

   Output Parameter:
.  da - the resulting distributed array object

   Options Database Keys:
+  -dm_view - Calls DMView() at the conclusion of DMDACreate1d()
.  -da_grid_x <nx> - number of grid points in x direction; can set if M < 0
.  -da_refine_x <rx> - refinement factor
-  -da_refine <n> - refine the DMDA n times before creating it, if M < 0

   Level: beginner

   Notes:
   The array data itself is NOT stored in the DMDA; it is stored in Vec objects.
   The appropriate vector objects can be obtained with calls to DMCreateGlobalVector()
   and DMCreateLocalVector(), and with calls to VecDuplicate() if more are needed.
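
   A minimal usage sketch (the values here are illustrative): create a periodic 1d DMDA
   with 128 grid points, one degree of freedom per node and stencil width 1, then obtain
   the associated work vectors.
.vb
   DM  da;
   Vec global,local;
   DMDACreate1d(PETSC_COMM_WORLD,DMDA_BOUNDARY_PERIODIC,128,1,1,NULL,&da);
   DMCreateGlobalVector(da,&global);
   DMCreateLocalVector(da,&local);
.ve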

.keywords: distributed array, create, one-dimensional

.seealso: DMDestroy(), DMView(), DMDACreate2d(), DMDACreate3d(), DMGlobalToLocalBegin(), DMDASetRefinementFactor(),
          DMGlobalToLocalEnd(), DMLocalToGlobalBegin(), DMLocalToLocalBegin(), DMLocalToLocalEnd(), DMDAGetRefinementFactor(),
          DMDAGetInfo(), DMCreateGlobalVector(), DMCreateLocalVector(), DMDACreateNaturalVector(), DMLoad(), DMDAGetOwnershipRanges()

@*/
PetscErrorCode  DMDACreate1d(MPI_Comm comm, DMDABoundaryType bx, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DM *da)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = DMDACreate(comm, da);CHKERRQ(ierr);
  ierr = DMDASetDim(*da, 1);CHKERRQ(ierr);
  ierr = DMDASetSizes(*da, M, 1, 1);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(*da, bx, DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_NONE);CHKERRQ(ierr);
  ierr = DMDASetDof(*da, dof);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(*da, s);CHKERRQ(ierr);
  ierr = DMDASetOwnershipRanges(*da, lx, NULL, NULL);CHKERRQ(ierr);
  /* This violates the behavior for other classes, but right now users expect negative dimensions to be handled this way */
  ierr = DMSetFromOptions(*da);CHKERRQ(ierr);
  ierr = DMSetUp(*da);CHKERRQ(ierr);
  ierr = DMViewFromOptions(*da,NULL,"-dm_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
376