xref: /petsc/src/dm/impls/da/da1.c (revision efe48dd8cf8b935accbbb9f4bcb20bc83865fa4d)
1 #define PETSCDM_DLL
2 /*
3    Code for manipulating distributed regular 1d arrays in parallel.
4    This file was created by Peter Mell   6/30/95
5 */
6 
7 #include "private/daimpl.h"     /*I  "petscdm.h"   I*/
8 
/* String names for the DMDAPeriodicType enum, indexed by enum value.
   Follows the standard PETSc enum-string-array convention: the value names come
   first, followed by the enum type name ("DMDAPeriodicType"), the option prefix
   ("DMDA_"), and a NULL sentinel, so the array can be passed to routines such as
   PetscOptionsEnum()/PetscEnumFind-style helpers. */
const char *DMDAPeriodicTypes[] = {"NONPERIODIC","XPERIODIC","YPERIODIC","XYPERIODIC",
                                   "XYZPERIODIC","XZPERIODIC","YZPERIODIC","ZPERIODIC","XYZGHOSTED","DMDAPeriodicType","DMDA_",0};
11 
12 #undef __FUNCT__
13 #define __FUNCT__ "DMView_DA_1d"
14 PetscErrorCode DMView_DA_1d(DM da,PetscViewer viewer)
15 {
16   PetscErrorCode ierr;
17   PetscMPIInt    rank;
18   PetscBool      iascii,isdraw,isbinary;
19   DM_DA          *dd = (DM_DA*)da->data;
20 #if defined(PETSC_HAVE_MATLAB_ENGINE)
21   PetscBool      ismatlab;
22 #endif
23 
24   PetscFunctionBegin;
25   ierr = MPI_Comm_rank(((PetscObject)da)->comm,&rank);CHKERRQ(ierr);
26 
27   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
28   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
29   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
30 #if defined(PETSC_HAVE_MATLAB_ENGINE)
31   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);CHKERRQ(ierr);
32 #endif
33   if (iascii) {
34     PetscViewerFormat format;
35 
36     ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
37     if (format != PETSC_VIEWER_ASCII_VTK && format != PETSC_VIEWER_ASCII_VTK_CELL) {
38       DMDALocalInfo info;
39       ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
40       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D m %D w %D s %D\n",rank,dd->M,dd->m,dd->w,dd->s);CHKERRQ(ierr);
41       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D\n",info.xs,info.xs+info.xm);CHKERRQ(ierr);
42       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
43     } else {
44       ierr = DMView_DA_VTK(da, viewer);CHKERRQ(ierr);
45     }
46   } else if (isdraw) {
47     PetscDraw  draw;
48     double     ymin = -1,ymax = 1,xmin = -1,xmax = dd->M,x;
49     PetscInt   base;
50     char       node[10];
51     PetscBool  isnull;
52 
53     ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
54     ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);
55 
56     ierr = PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);CHKERRQ(ierr);
57     ierr = PetscDrawSynchronizedClear(draw);CHKERRQ(ierr);
58 
59     /* first processor draws all node lines */
60     if (!rank) {
61       PetscInt xmin_tmp;
62       ymin = 0.0; ymax = 0.3;
63 
64       /* ADIC doesn't like doubles in a for loop */
65       for (xmin_tmp =0; xmin_tmp < dd->M; xmin_tmp++) {
66          ierr = PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr);
67       }
68 
69       xmin = 0.0; xmax = dd->M - 1;
70       ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);CHKERRQ(ierr);
71       ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr);
72     }
73 
74     ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
75     ierr = PetscDrawPause(draw);CHKERRQ(ierr);
76 
77     /* draw my box */
78     ymin = 0; ymax = 0.3; xmin = dd->xs / dd->w; xmax = (dd->xe / dd->w)  - 1;
79     ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);CHKERRQ(ierr);
80     ierr = PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
81     ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
82     ierr = PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
83 
84     /* Put in index numbers */
85     base = dd->base / dd->w;
86     for (x=xmin; x<=xmax; x++) {
87       sprintf(node,"%d",(int)base++);
88       ierr = PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node);CHKERRQ(ierr);
89     }
90 
91     ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
92     ierr = PetscDrawPause(draw);CHKERRQ(ierr);
93   } else if (isbinary){
94     ierr = DMView_DA_Binary(da,viewer);CHKERRQ(ierr);
95 #if defined(PETSC_HAVE_MATLAB_ENGINE)
96   } else if (ismatlab) {
97     ierr = DMView_DA_Matlab(da,viewer);CHKERRQ(ierr);
98 #endif
99   } else SETERRQ1(((PetscObject)da)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for DMDA 1d",((PetscObject)viewer)->type_name);
100   PetscFunctionReturn(0);
101 }
102 
103 #undef __FUNCT__
104 #define __FUNCT__ "DMView_DA_Private"
105 /*
106     Processes command line options to determine if/how a DMDA
107   is to be viewed. Called by DMDACreateXX()
108 */
109 PetscErrorCode DMView_DA_Private(DM da)
110 {
111   PetscErrorCode ierr;
112   PetscBool      flg1 = PETSC_FALSE;
113   PetscViewer    view;
114 
115   PetscFunctionBegin;
116   ierr = PetscOptionsBegin(((PetscObject)da)->comm,((PetscObject)da)->prefix,"DMDA viewing options","DMDA");CHKERRQ(ierr);
117     ierr = PetscOptionsBool("-da_view","Print information about the DMDA's distribution","DMView",PETSC_FALSE,&flg1,PETSC_NULL);CHKERRQ(ierr);
118     if (flg1) {
119       ierr = PetscViewerASCIIGetStdout(((PetscObject)da)->comm,&view);CHKERRQ(ierr);
120       ierr = DMView(da,view);CHKERRQ(ierr);
121     }
122     flg1 = PETSC_FALSE;
123     ierr = PetscOptionsBool("-da_view_draw","Draw how the DMDA is distributed","DMView",PETSC_FALSE,&flg1,PETSC_NULL);CHKERRQ(ierr);
124     if (flg1) {ierr = DMView(da,PETSC_VIEWER_DRAW_(((PetscObject)da)->comm));CHKERRQ(ierr);}
125   ierr = PetscOptionsEnd();CHKERRQ(ierr);
126   PetscFunctionReturn(0);
127 }
128 
#undef __FUNCT__
#define __FUNCT__ "DMSetUp_DA_1D"
/*
   DMSetUp_DA_1D - Builds the parallel data structures for a one-dimensional DMDA:
   the ownership partition, the ghost region, the local-to-global (ltog) and
   global-to-local (gtol) vector scatters, and the local-to-global index mapping.

   All sizes below are measured two ways: in "nodes" (grid points) and in array
   entries (nodes * dof); the comment at each step says which is in play.
*/
PetscErrorCode PETSCDM_DLLEXPORT DMSetUp_DA_1D(DM da)
{
  DM_DA                  *dd = (DM_DA*)da->data;
  const PetscInt         M     = dd->M;    /* global number of grid nodes */
  const PetscInt         dof   = dd->w;    /* degrees of freedom per node */
  const PetscInt         s     = dd->s;    /* stencil width in nodes */
  const PetscInt         sDist = s*dof;  /* absolute stencil distance (in array entries) */
  const PetscInt         *lx    = dd->lx;  /* optional user-provided per-rank node counts */
  const DMDAPeriodicType wrap  = dd->wrap;
  MPI_Comm               comm;
  Vec                    local, global;
  VecScatter             ltog, gtol;
  IS                     to, from;
  PetscBool              flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
  PetscMPIInt            rank, size;
  PetscInt               i,*idx,nn,left,xs,xe,x,Xs,Xe,start,end,m;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  if (dof < 1) SETERRQ1(((PetscObject)da)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have 1 or more degrees of freedom per node: %D",dof);
  if (s < 0) SETERRQ1(((PetscObject)da)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Stencil width cannot be negative: %D",s);

  dd->dim = 1;
  /* per-field name slots, filled in later by DMDASetFieldName() (NULL = unnamed) */
  ierr = PetscMalloc(dof*sizeof(char*),&dd->fieldname);CHKERRQ(ierr);
  ierr = PetscMemzero(dd->fieldname,dof*sizeof(char*));CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject) da, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  /* in 1d every rank gets a slab, so the process grid is just the comm size */
  dd->m = size;
  m     = dd->m;

  if (s > 0) {
    /* if not communicating data then should be ok to have nothing on some processes */
    if (M < m)     SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"More processes than data points! %D %D",m,M);
    if ((M-1) < s) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %D %D",M-1,s);
  }

  /*
     Determine locally owned region
     xs is the first local node number, x is the number of local nodes
  */
  if (!lx) {
    /* no user-provided partition: pick one of three built-in strategies */
    ierr = PetscOptionsGetBool(PETSC_NULL,"-da_partition_blockcomm",&flg1,PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsGetBool(PETSC_NULL,"-da_partition_nodes_at_end",&flg2,PETSC_NULL);CHKERRQ(ierr);
    if (flg1) {      /* Block Comm type Distribution */
      xs = rank*M/m;
      x  = (rank + 1)*M/m - xs;
    } else if (flg2) { /* The odd nodes are evenly distributed across last nodes */
      x = (M + rank)/m;
      if (M/m == x) { xs = rank*x; }
      else          { xs = rank*(x-1) + (M+rank)%(x*m); }
    } else { /* The odd nodes are evenly distributed across the first k nodes */
      /* Regular PETSc Distribution */
      x = M/m + ((M % m) > rank);
      if (rank >= (M % m)) {xs = (rank * (PetscInt)(M/m) + M % m);}
      else                 {xs = rank * (PetscInt)(M/m) + rank;}
    }
  } else {
    /* user-provided partition: my count is lx[rank], my start is the prefix sum */
    x  = lx[rank];
    xs = 0;
    for (i=0; i<rank; i++) {
      xs += lx[i];
    }
    /* verify that data user provided is consistent */
    left = xs;
    for (i=rank; i<size; i++) {
      left += lx[i];
    }
    if (left != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M %D %D",left,M);
  }

  /* From now on x,xs,xe,Xs,Xe are the exact location in the array */
  x  *= dof;
  xs *= dof;
  xe  = xs + x;

  /* determine ghost region (Xs..Xe includes the stencil halo) */
  if (wrap == DMDA_XPERIODIC || wrap == DMDA_XYZGHOSTED) {
    /* periodic/ghosted: halo always extends the full stencil distance */
    Xs = xs - sDist;
    Xe = xe + sDist;
  } else {
    /* non-periodic: halo is clipped at the physical array boundaries */
    if ((xs-sDist) >= 0)     Xs = xs-sDist;  else Xs = 0;
    if ((xe+sDist) <= M*dof) Xe = xe+sDist;  else Xe = M*dof;
  }

  /* allocate the base parallel and sequential vectors */
  /* these are array-less templates (array pointer 0) used only to build the scatters */
  dd->Nlocal = x;
  ierr = VecCreateMPIWithArray(comm,dd->Nlocal,PETSC_DECIDE,0,&global);CHKERRQ(ierr);
  ierr = VecSetBlockSize(global,dof);CHKERRQ(ierr);
  dd->nlocal = (Xe-Xs);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dd->nlocal,0,&local);CHKERRQ(ierr);
  ierr = VecSetBlockSize(local,dof);CHKERRQ(ierr);

  /* Create Local to Global Vector Scatter Context */
  /* local to global inserts non-ghost point region into global */
  ierr = VecGetOwnershipRange(global,&start,&end);CHKERRQ(ierr);
  ierr = ISCreateStride(comm,x,start,1,&to);CHKERRQ(ierr);
  /* xs-Xs is the offset of the owned region inside the local (ghosted) vector */
  ierr = ISCreateStride(comm,x,xs-Xs,1,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(local,from,global,to,&ltog);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(da,ltog);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);

  /* Create Global to Local Vector Scatter Context */
  /* global to local must retrieve ghost points */
  if  (wrap == DMDA_XYZGHOSTED) {
    /* XYZGHOSTED: boundary ghost slots have no global source, so the
       destination stride skips/starts past slots that stay untouched */
    if (size == 1) {
      ierr = ISCreateStride(comm,(xe-xs),sDist,1,&to);CHKERRQ(ierr);
    } else if (!rank) {
      ierr = ISCreateStride(comm,(Xe-xs),sDist,1,&to);CHKERRQ(ierr);
    } else if (rank == size-1) {
      ierr = ISCreateStride(comm,(xe-Xs),0,1,&to);CHKERRQ(ierr);
    } else {
      ierr = ISCreateStride(comm,(Xe-Xs),0,1,&to);CHKERRQ(ierr);
    }
  } else {
    ierr = ISCreateStride(comm,(Xe-Xs),0,1,&to);CHKERRQ(ierr);
  }

  /* global indices (in array entries) of every local slot, ghosts included */
  ierr = PetscMalloc((x+2*sDist)*sizeof(PetscInt),&idx);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(da,(x+2*sDist)*sizeof(PetscInt));CHKERRQ(ierr);

  nn = 0;
  if (wrap == DMDA_XPERIODIC) {    /* Handle all cases with wrap first */

    for (i=0; i<sDist; i++) {  /* Left ghost points */
      if ((xs-sDist+i)>=0) { idx[nn++] = xs-sDist+i;}
      else                 { idx[nn++] = M*dof+(xs-sDist+i);} /* wrap to the right end */
    }

    for (i=0; i<x; i++) { idx [nn++] = xs + i;}  /* Non-ghost points */

    for (i=0; i<sDist; i++) { /* Right ghost points */
      if ((xe+i)<M*dof) { idx [nn++] =  xe+i; }
      else              { idx [nn++] = (xe+i) - M*dof;} /* wrap to the left end */
    }
  } else if (wrap == DMDA_XYZGHOSTED) {

    /* ghosts beyond the physical boundary get no entry here; placeholders
       (-1) are prepended/appended to the mapping further below */
    if (sDist <= xs) {for (i=0; i<sDist; i++) {idx[nn++] = xs - sDist + i;}}

    for (i=0; i<x; i++) { idx [nn++] = xs + i;}

    if ((xe+sDist)<=M*dof) {for (i=0;  i<sDist;     i++) {idx[nn++]=xe+i;}}

  } else {      /* Now do all cases with no wrapping */

    if (sDist <= xs) {for (i=0; i<sDist; i++) {idx[nn++] = xs - sDist + i;}}
    else             {for (i=0; i<xs;    i++) {idx[nn++] = i;}} /* clipped at the left boundary */

    for (i=0; i<x; i++) { idx [nn++] = xs + i;}

    if ((xe+sDist)<=M*dof) {for (i=0;  i<sDist;   i++) {idx[nn++]=xe+i;}}
    else                   {for (i=xe; i<(M*dof); i++) {idx[nn++]=i;}} /* clipped at the right boundary */
  }

  ierr = ISCreateGeneral(comm,nn,idx,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,local,to,&gtol);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(da,to);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(da,from);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(da,gtol);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);
  /* the template vectors were only needed to build the scatters */
  ierr = VecDestroy(local);CHKERRQ(ierr);
  ierr = VecDestroy(global);CHKERRQ(ierr);

  /* 1d: the y and z extents are degenerate [0,1) ranges */
  dd->xs = xs; dd->xe = xe; dd->ys = 0; dd->ye = 1; dd->zs = 0; dd->ze = 1;
  dd->Xs = Xs; dd->Xe = Xe; dd->Ys = 0; dd->Ye = 1; dd->Zs = 0; dd->Ze = 1;

  dd->gtol      = gtol;
  dd->ltog      = ltog;
  dd->base      = xs;
  da->ops->view = DMView_DA_1d;

  /*
     Set the local to global ordering in the global vector, this allows use
     of VecSetValuesLocal().
  */
  if (wrap == DMDA_XYZGHOSTED) {
    /* pad the mapping with -1 for boundary ghost slots that have no global value */
    PetscInt *tmpidx;
    if (size == 1) {
      ierr = PetscMalloc((nn+2*sDist)*sizeof(PetscInt),&tmpidx);CHKERRQ(ierr);
      for (i=0; i<sDist; i++) tmpidx[i] = -1;
      ierr = PetscMemcpy(tmpidx+sDist,idx,nn*sizeof(PetscInt));CHKERRQ(ierr);
      for (i=nn+sDist; i<nn+2*sDist; i++) tmpidx[i] = -1;
      ierr = PetscFree(idx);CHKERRQ(ierr);
      idx  = tmpidx;
      nn  += 2*sDist;
    } else if (!rank) { /* must preprend -1 marker for ghost location that have no global value */
      ierr = PetscMalloc((nn+sDist)*sizeof(PetscInt),&tmpidx);CHKERRQ(ierr);
      for (i=0; i<sDist; i++) tmpidx[i] = -1;
      ierr = PetscMemcpy(tmpidx+sDist,idx,nn*sizeof(PetscInt));CHKERRQ(ierr);
      ierr = PetscFree(idx);CHKERRQ(ierr);
      idx  = tmpidx;
      nn  += sDist;
    } else if (rank  == size-1) { /* must postpend -1 marker for ghost location that have no global value */
      ierr = PetscMalloc((nn+sDist)*sizeof(PetscInt),&tmpidx);CHKERRQ(ierr);
      ierr = PetscMemcpy(tmpidx,idx,nn*sizeof(PetscInt));CHKERRQ(ierr);
      for (i=nn; i<nn+sDist; i++) tmpidx[i] = -1;
      ierr = PetscFree(idx);CHKERRQ(ierr);
      idx  = tmpidx;
      nn  += sDist;
    }
  }
  /* the mapping takes ownership of idx (PETSC_OWN_POINTER); dd->idx below
     keeps a borrowed reference for the DMDA's own use */
  ierr = ISLocalToGlobalMappingCreate(comm,nn,idx,PETSC_OWN_POINTER,&dd->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingBlock(dd->ltogmap,dd->w,&dd->ltogmapb);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(da,dd->ltogmap);CHKERRQ(ierr);

  dd->idx = idx;
  dd->Nl  = nn;

  PetscFunctionReturn(0);
}
344 
345 
346 #undef __FUNCT__
347 #define __FUNCT__ "DMDACreate1d"
348 /*@C
349    DMDACreate1d - Creates an object that will manage the communication of  one-dimensional
350    regular array data that is distributed across some processors.
351 
352    Collective on MPI_Comm
353 
354    Input Parameters:
355 +  comm - MPI communicator
356 .  wrap - type of periodicity should the array have, if any. Use
357           either DMDA_NONPERIODIC or DMDA_XPERIODIC
358 .  M - global dimension of the array (use -M to indicate that it may be set to a different value
359             from the command line with -da_grid_x <M>)
360 .  dof - number of degrees of freedom per node
361 .  s - stencil width
362 -  lx - array containing number of nodes in the X direction on each processor,
363         or PETSC_NULL. If non-null, must be of length as m.
364 
365    Output Parameter:
366 .  da - the resulting distributed array object
367 
368    Options Database Key:
369 +  -da_view - Calls DMView() at the conclusion of DMDACreate1d()
370 .  -da_grid_x <nx> - number of grid points in x direction; can set if M < 0
371 -  -da_refine_x - refinement factor
372 
373    Level: beginner
374 
375    Notes:
376    The array data itself is NOT stored in the DMDA, it is stored in Vec objects;
377    The appropriate vector objects can be obtained with calls to DMCreateGlobalVector()
378    and DMCreateLocalVector() and calls to VecDuplicate() if more are needed.
379 
380 
381 .keywords: distributed array, create, one-dimensional
382 
383 .seealso: DMDestroy(), DMView(), DMDACreate2d(), DMDACreate3d(), DMGlobalToLocalBegin(), DMDASetRefinementFactor(),
384           DMGlobalToLocalEnd(), DMLocalToGlobalBegin(), DMDALocalToLocalBegin(), DMDALocalToLocalEnd(), DMDAGetRefinementFactor(),
385           DMDAGetInfo(), DMCreateGlobalVector(), DMCreateLocalVector(), DMDACreateNaturalVector(), DMDALoad(), DMDAGetOwnershipRanges()
386 
387 @*/
388 PetscErrorCode PETSCDM_DLLEXPORT DMDACreate1d(MPI_Comm comm, DMDAPeriodicType wrap, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DM *da)
389 {
390   PetscErrorCode ierr;
391   PetscMPIInt    size;
392 
393   PetscFunctionBegin;
394   ierr = DMDACreate(comm, da);CHKERRQ(ierr);
395   ierr = DMDASetDim(*da, 1);CHKERRQ(ierr);
396   ierr = DMDASetSizes(*da, M, 1, 1);CHKERRQ(ierr);
397   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
398   ierr = DMDASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE);CHKERRQ(ierr);
399   ierr = DMDASetPeriodicity(*da, wrap);CHKERRQ(ierr);
400   ierr = DMDASetDof(*da, dof);CHKERRQ(ierr);
401   ierr = DMDASetStencilWidth(*da, s);CHKERRQ(ierr);
402   ierr = DMDASetOwnershipRanges(*da, lx, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
403   /* This violates the behavior for other classes, but right now users expect negative dimensions to be handled this way */
404   ierr = DMSetFromOptions(*da);CHKERRQ(ierr);
405   ierr = DMSetUp(*da);CHKERRQ(ierr);
406   ierr = DMView_DA_Private(*da);CHKERRQ(ierr);
407   PetscFunctionReturn(0);
408 }
409