
/*
   Code for manipulating distributed regular 1d arrays in parallel.
   This file was created by Peter Mell   6/30/95
*/

#include <private/daimpl.h>     /*I  "petscdmda.h"   I*/

const char *DMDABoundaryTypes[] = {"BOUNDARY_NONE","BOUNDARY_GHOSTED","BOUNDARY_PERIODIC","DMDA_",0};

#undef __FUNCT__
#define __FUNCT__ "DMView_DA_1d"
PetscErrorCode DMView_DA_1d(DM da,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscBool      iascii,isdraw,isbinary;
  DM_DA          *dd = (DM_DA*)da->data;
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscBool      ismatlab;
#endif

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)da)->comm,&rank);CHKERRQ(ierr);

  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);CHKERRQ(ierr);
#endif
  if (iascii) {
    PetscViewerFormat format;

    ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
    if (format != PETSC_VIEWER_ASCII_VTK && format != PETSC_VIEWER_ASCII_VTK_CELL) {
      DMDALocalInfo info;
      ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D m %D w %D s %D\n",rank,dd->M,dd->m,dd->w,dd->s);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D\n",info.xs,info.xs+info.xm);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
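      /*
         Sample output (hypothetical sizes) of the two synchronized prints
         above, for a 10-node grid with one dof split across two processes:

             Processor [0] M 10 m 2 w 1 s 1
             X range of indices: 0 5
      */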
    } else {
      ierr = DMView_DA_VTK(da, viewer);CHKERRQ(ierr);
    }
  } else if (isdraw) {
    PetscDraw  draw;
    double     ymin = -1,ymax = 1,xmin = -1,xmax = dd->M,x;
    PetscInt   base;
    char       node[10];
    PetscBool  isnull;

    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);

    ierr = PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);CHKERRQ(ierr);
    ierr = PetscDrawSynchronizedClear(draw);CHKERRQ(ierr);

    /* first processor draws all node lines */
    if (!rank) {
      PetscInt xmin_tmp;
      ymin = 0.0; ymax = 0.3;

      /* ADIC doesn't like doubles in a for loop */
      for (xmin_tmp=0; xmin_tmp<dd->M; xmin_tmp++) {
        ierr = PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr);
      }

      xmin = 0.0; xmax = dd->M - 1;
      ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);CHKERRQ(ierr);
      ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr);
    }

    ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
    ierr = PetscDrawPause(draw);CHKERRQ(ierr);

    /* draw my box */
    ymin = 0; ymax = 0.3; xmin = dd->xs / dd->w; xmax = (dd->xe / dd->w) - 1;
    ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);

    /* Put in index numbers */
    base = dd->base / dd->w;
    for (x=xmin; x<=xmax; x++) {
      sprintf(node,"%d",(int)base++);
      ierr = PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node);CHKERRQ(ierr);
    }

    ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
    ierr = PetscDrawPause(draw);CHKERRQ(ierr);
  } else if (isbinary) {
    ierr = DMView_DA_Binary(da,viewer);CHKERRQ(ierr);
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  } else if (ismatlab) {
    ierr = DMView_DA_Matlab(da,viewer);CHKERRQ(ierr);
#endif
  } else SETERRQ1(((PetscObject)da)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for DMDA 1d",((PetscObject)viewer)->type_name);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMView_DA_Private"
/*
   Processes command line options to determine if/how a DMDA
   is to be viewed. Called by DMDACreateXX()
*/
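/*
   For example, an application that creates a DMDA (hypothetical executable
   name) can be run as

       mpiexec -n 2 ./ex1 -da_view -da_view_draw

   to print the distribution to stdout and also draw it in an X window.
*/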
PetscErrorCode DMView_DA_Private(DM da)
{
  PetscErrorCode ierr;
  PetscBool      flg1 = PETSC_FALSE;
  PetscViewer    view;

  PetscFunctionBegin;
  ierr = PetscOptionsBegin(((PetscObject)da)->comm,((PetscObject)da)->prefix,"DMDA viewing options","DMDA");CHKERRQ(ierr);
    ierr = PetscOptionsBool("-da_view","Print information about the DMDA's distribution","DMView",PETSC_FALSE,&flg1,PETSC_NULL);CHKERRQ(ierr);
    if (flg1) {
      ierr = PetscViewerASCIIGetStdout(((PetscObject)da)->comm,&view);CHKERRQ(ierr);
      ierr = DMView(da,view);CHKERRQ(ierr);
    }
    flg1 = PETSC_FALSE;
    ierr = PetscOptionsBool("-da_view_draw","Draw how the DMDA is distributed","DMView",PETSC_FALSE,&flg1,PETSC_NULL);CHKERRQ(ierr);
    if (flg1) {ierr = DMView(da,PETSC_VIEWER_DRAW_(((PetscObject)da)->comm));CHKERRQ(ierr);}
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetUp_DA_1D"
PetscErrorCode  DMSetUp_DA_1D(DM da)
{
  DM_DA                  *dd    = (DM_DA*)da->data;
  const PetscInt         M     = dd->M;
  const PetscInt         dof   = dd->w;
  const PetscInt         s     = dd->s;
  const PetscInt         sDist = s*dof;  /* absolute stencil distance, in degrees of freedom */
  const PetscInt         *lx   = dd->lx;
  const DMDABoundaryType bx    = dd->bx;
  MPI_Comm               comm;
  Vec                    local, global;
  VecScatter             ltog, gtol;
  IS                     to, from;
  PetscBool              flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
  PetscMPIInt            rank, size;
  PetscInt               i,*idx,nn,left,xs,xe,x,Xs,Xe,start,end,m,IXs,IXe;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  if (dof < 1) SETERRQ1(((PetscObject)da)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have 1 or more degrees of freedom per node: %D",dof);
  if (s < 0)   SETERRQ1(((PetscObject)da)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Stencil width cannot be negative: %D",s);

  dd->dim = 1;
  ierr = PetscMalloc(dof*sizeof(char*),&dd->fieldname);CHKERRQ(ierr);
  ierr = PetscMemzero(dd->fieldname,dof*sizeof(char*));CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject) da, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  dd->m = size;
  m     = dd->m;

  if (s > 0) {
    /* if no data is communicated it is acceptable for some processes to own no nodes, so only check when s > 0 */
    if (M < m)     SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"More processes than data points! %D %D",m,M);
    if ((M-1) < s) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %D %D",M-1,s);
  }

  /*
     Determine the locally owned region:
     xs is the first local node number, x is the number of local nodes
  */
  if (!lx) {
    ierr = PetscOptionsGetBool(PETSC_NULL,"-da_partition_blockcomm",&flg1,PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsGetBool(PETSC_NULL,"-da_partition_nodes_at_end",&flg2,PETSC_NULL);CHKERRQ(ierr);
    if (flg1) {        /* block comm type distribution */
      xs = rank*M/m;
      x  = (rank + 1)*M/m - xs;
    } else if (flg2) { /* the leftover nodes are evenly distributed across the last processes */
      x = (M + rank)/m;
      if (M/m == x) { xs = rank*x; }
      else          { xs = rank*(x-1) + (M+rank)%(x*m); }
    } else {           /* regular PETSc distribution: the leftover nodes go to the first M % m processes */
      x = M/m + ((M % m) > rank);
      if (rank >= (M % m)) {xs = (rank * (PetscInt)(M/m) + M % m);}
      else                 {xs = rank * (PetscInt)(M/m) + rank;}
    }
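    /*
       Worked example with hypothetical sizes M = 10, m = 3: the regular
       distribution gives x = 4,3,3 and xs = 0,4,7 on ranks 0,1,2 (the
       M % m = 1 leftover node goes to rank 0), while -da_partition_blockcomm
       gives x = 3,3,4 and xs = 0,3,6.
    */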
  } else {
    x  = lx[rank];
    xs = 0;
    for (i=0; i<rank; i++) {
      xs += lx[i];
    }
    /* verify that the user-provided data is consistent */
    left = xs;
    for (i=rank; i<size; i++) {
      left += lx[i];
    }
    if (left != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M %D %D",left,M);
  }

  /*
     check if the scatter requires more than one process neighbor or wraps around
     the domain more than once
  */
  if ((x < s) && ((M > 1) || (bx == DMDA_BOUNDARY_PERIODIC))) {
    SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %D is smaller than stencil width s %D",x,s);
  }

  /* From now on x,xs,xe,Xs,Xe are exact locations in the array, measured in degrees of freedom rather than nodes */
  x  *= dof;
  xs *= dof;
  xe  = xs + x;

  /* determine the ghost region: Xs,Xe bound the local portion including ghost points */
  if (bx) {
    Xs = xs - sDist;
    Xe = xe + sDist;
  } else {
    if ((xs-sDist) >= 0)     Xs = xs-sDist;  else Xs = 0;
    if ((xe+sDist) <= M*dof) Xe = xe+sDist;  else Xe = M*dof;
  }

  /* IXs,IXe bound the part of the ghost region that corresponds to actual global entries; for DMDA_BOUNDARY_GHOSTED the boundary ghosts lie outside */
  if (bx == DMDA_BOUNDARY_PERIODIC) {
    IXs = xs - sDist;
    IXe = xe + sDist;
  } else {
    if ((xs-sDist) >= 0)     IXs = xs-sDist;  else IXs = 0;
    if ((xe+sDist) <= M*dof) IXe = xe+sDist;  else IXe = M*dof;
  }
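  /*
     Example with hypothetical values dof = 1, s = 1, M = 10, non-periodic,
     xs = 4, xe = 7: the ghosted region is Xs = 3, Xe = 8, i.e. this process
     stores nodes 3..7 locally, of which nodes 4..6 are owned.
  */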

  /* allocate the base parallel and sequential vectors */
  dd->Nlocal = x;
  ierr = VecCreateMPIWithArray(comm,dd->Nlocal,PETSC_DECIDE,0,&global);CHKERRQ(ierr);
  ierr = VecSetBlockSize(global,dof);CHKERRQ(ierr);
  dd->nlocal = (Xe-Xs);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dd->nlocal,0,&local);CHKERRQ(ierr);
  ierr = VecSetBlockSize(local,dof);CHKERRQ(ierr);

  /* Create Local to Global Vector Scatter Context */
  /* local to global inserts non-ghost point region into global */
  ierr = VecGetOwnershipRange(global,&start,&end);CHKERRQ(ierr);
  ierr = ISCreateStride(comm,x,start,1,&to);CHKERRQ(ierr);
  ierr = ISCreateStride(comm,x,xs-Xs,1,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(local,from,global,to,&ltog);CHKERRQ(ierr);
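  /*
     With the hypothetical values above (xs = 4, Xs = 3, x = 3) the owned
     nodes occupy positions 1..3 of the local (ghosted) vector and are
     copied into global positions start..start+2.
  */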
  ierr = PetscLogObjectParent(da,ltog);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);

  /* Create Global to Local Vector Scatter Context */
  /* global to local must retrieve ghost points */
  ierr = ISCreateStride(comm,(IXe-IXs),IXs-Xs,1,&to);CHKERRQ(ierr);

  ierr = PetscMalloc((x+2*sDist)*sizeof(PetscInt),&idx);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(da,(x+2*sDist)*sizeof(PetscInt));CHKERRQ(ierr);

  for (i=0; i<IXs-Xs; i++) {idx[i] = -1;} /* prepend with -1s if needed for the ghosted case */

  nn = IXs-Xs;
  if (bx == DMDA_BOUNDARY_PERIODIC) { /* handle all cases with wraparound first */
    for (i=0; i<sDist; i++) {  /* left ghost points */
      if ((xs-sDist+i) >= 0) { idx[nn++] = xs-sDist+i;}
      else                   { idx[nn++] = M*dof+(xs-sDist+i);}
    }

    for (i=0; i<x; i++) { idx[nn++] = xs + i;}  /* non-ghost points */

    for (i=0; i<sDist; i++) { /* right ghost points */
      if ((xe+i) < M*dof) { idx[nn++] = xe+i; }
      else                { idx[nn++] = (xe+i) - M*dof;}
    }
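    /*
       Example with hypothetical values M*dof = 12, sDist = 2, rank 0 with
       xs = 0: the two left ghost points wrap around and map to the global
       indices 10 and 11.
    */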
  } else { /* now handle all cases with no wrapping */
    if (sDist <= xs) {for (i=0; i<sDist; i++) {idx[nn++] = xs - sDist + i;}}
    else             {for (i=0; i<xs;    i++) {idx[nn++] = i;}}

    for (i=0; i<x; i++) { idx[nn++] = xs + i;}

    if ((xe+sDist) <= M*dof) {for (i=0;  i<sDist;   i++) {idx[nn++] = xe+i;}}
    else                     {for (i=xe; i<(M*dof); i++) {idx[nn++] = i;}}
  }

  ierr = ISCreateGeneral(comm,nn-IXs+Xs,&idx[IXs-Xs],PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,local,to,&gtol);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(da,to);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(da,from);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(da,gtol);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = VecDestroy(&local);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);

  dd->xs = xs; dd->xe = xe; dd->ys = 0; dd->ye = 1; dd->zs = 0; dd->ze = 1;
  dd->Xs = Xs; dd->Xe = Xe; dd->Ys = 0; dd->Ye = 1; dd->Zs = 0; dd->Ze = 1;

  dd->gtol      = gtol;
  dd->ltog      = ltog;
  dd->base      = xs;
  da->ops->view = DMView_DA_1d;

  /*
     Set the local to global ordering in the global vector; this allows the
     use of VecSetValuesLocal().
  */
  for (i=0; i<Xe-IXe; i++) {idx[nn++] = -1;} /* pad with -1s if needed for the ghosted case */

  ierr = ISLocalToGlobalMappingCreate(comm,nn,idx,PETSC_COPY_VALUES,&da->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingBlock(da->ltogmap,dd->w,&da->ltogmapb);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(da,da->ltogmap);CHKERRQ(ierr);
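  /*
     A minimal usage sketch (hypothetical variable names): with this mapping
     attached, values can be set in a global vector g obtained from
     DMCreateGlobalVector() through indices of the local ghosted numbering,
     for example

         PetscInt    li  = 1;
         PetscScalar val = 1.0;
         ierr = VecSetValuesLocal(g,1,&li,&val,INSERT_VALUES);CHKERRQ(ierr);
  */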

  dd->idx = idx;
  dd->Nl  = nn;

  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMDACreate1d"
/*@C
   DMDACreate1d - Creates an object that will manage the communication of one-dimensional
   regular array data that is distributed across some processors.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  bx - type of ghost cells at the boundary the array should have, if any. Use
          DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_GHOSTED, or DMDA_BOUNDARY_PERIODIC.
.  M - global dimension of the array (use -M to indicate that it may be set to a different value
            from the command line with -da_grid_x <M>)
.  dof - number of degrees of freedom per node
.  s - stencil width
-  lx - array containing the number of nodes in the X direction on each processor,
        or PETSC_NULL. If non-null, it must have the same length as the number of processes in the MPI_Comm.

   Output Parameter:
.  da - the resulting distributed array object

   Options Database Keys:
+  -da_view - Calls DMView() at the conclusion of DMDACreate1d()
.  -da_grid_x <nx> - number of grid points in x direction; can be set if M < 0
-  -da_refine_x - refinement factor

   Level: beginner

   Notes:
   The array data itself is NOT stored in the DMDA; it is stored in Vec objects.
   The appropriate vector objects can be obtained with calls to DMCreateGlobalVector()
   and DMCreateLocalVector(), and with calls to VecDuplicate() if more are needed.

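   Example:
   A minimal sketch (hypothetical sizes; error checking with CHKERRQ assumed)
   that creates a periodic grid of 64 nodes with two degrees of freedom per
   node and stencil width one:
.vb
      DM  da;
      Vec g;
      ierr = DMDACreate1d(PETSC_COMM_WORLD,DMDA_BOUNDARY_PERIODIC,64,2,1,PETSC_NULL,&da);CHKERRQ(ierr);
      ierr = DMCreateGlobalVector(da,&g);CHKERRQ(ierr);
      /* ... use the DMDA and the vector ... */
      ierr = VecDestroy(&g);CHKERRQ(ierr);
      ierr = DMDestroy(&da);CHKERRQ(ierr);
.ve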

.keywords: distributed array, create, one-dimensional

.seealso: DMDestroy(), DMView(), DMDACreate2d(), DMDACreate3d(), DMGlobalToLocalBegin(), DMDASetRefinementFactor(),
          DMGlobalToLocalEnd(), DMLocalToGlobalBegin(), DMDALocalToLocalBegin(), DMDALocalToLocalEnd(), DMDAGetRefinementFactor(),
          DMDAGetInfo(), DMCreateGlobalVector(), DMCreateLocalVector(), DMDACreateNaturalVector(), DMDALoad(), DMDAGetOwnershipRanges()

@*/
PetscErrorCode  DMDACreate1d(MPI_Comm comm, DMDABoundaryType bx, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DM *da)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = DMDACreate(comm, da);CHKERRQ(ierr);
  ierr = DMDASetDim(*da, 1);CHKERRQ(ierr);
  ierr = DMDASetSizes(*da, M, 1, 1);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(*da, bx, DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_NONE);CHKERRQ(ierr);
  ierr = DMDASetDof(*da, dof);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(*da, s);CHKERRQ(ierr);
  ierr = DMDASetOwnershipRanges(*da, lx, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
  /* This violates the behavior for other classes, but right now users expect negative dimensions to be handled this way */
  ierr = DMSetFromOptions(*da);CHKERRQ(ierr);
  ierr = DMSetUp(*da);CHKERRQ(ierr);
  ierr = DMView_DA_Private(*da);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}