xref: /petsc/src/dm/impls/da/da1.c (revision d7bf68aed858a02b69c3ac329d35f78f24359a6b)
1 #define PETSCDM_DLL
2 /*
3    Code for manipulating distributed regular 1d arrays in parallel.
4    This file was created by Peter Mell   6/30/95
5 */
6 
7 #include "private/daimpl.h"     /*I  "petscda.h"   I*/
8 
/* Printable names for the DAPeriodicType enum values; the trailing entries
   ("DAPeriodicType","DA_",0) are the metadata expected by PETSc's enum
   option/printing helpers (enum name, value prefix, terminator). */
const char *DAPeriodicTypes[] = {"NONPERIODIC","XPERIODIC","YPERIODIC","XYPERIODIC",
                                 "XYZPERIODIC","XZPERIODIC","YZPERIODIC","ZPERIODIC","XYZGHOSTED","DAPeriodicType","DA_",0};
11 
12 #undef __FUNCT__
13 #define __FUNCT__ "DAView_1d"
14 PetscErrorCode DAView_1d(DA da,PetscViewer viewer)
15 {
16   PetscErrorCode ierr;
17   PetscMPIInt    rank;
18   PetscBool      iascii,isdraw;
19   DM_DA          *dd = (DM_DA*)da->data;
20 
21   PetscFunctionBegin;
22   ierr = MPI_Comm_rank(((PetscObject)da)->comm,&rank);CHKERRQ(ierr);
23 
24   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
25   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
26   if (iascii) {
27     PetscViewerFormat format;
28 
29     ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
30     if (format != PETSC_VIEWER_ASCII_VTK && format != PETSC_VIEWER_ASCII_VTK_CELL) {
31       DALocalInfo info;
32       ierr = DAGetLocalInfo(da,&info);CHKERRQ(ierr);
33       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D m %D w %D s %D\n",rank,dd->M,dd->m,dd->w,dd->s);CHKERRQ(ierr);
34       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D\n",info.xs,info.xs+info.xm);CHKERRQ(ierr);
35       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
36     }
37   } else if (isdraw) {
38     PetscDraw  draw;
39     double     ymin = -1,ymax = 1,xmin = -1,xmax = dd->M,x;
40     PetscInt   base;
41     char       node[10];
42     PetscBool  isnull;
43 
44     ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
45     ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);
46 
47     ierr = PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);CHKERRQ(ierr);
48     ierr = PetscDrawSynchronizedClear(draw);CHKERRQ(ierr);
49 
50     /* first processor draws all node lines */
51     if (!rank) {
52       PetscInt xmin_tmp;
53       ymin = 0.0; ymax = 0.3;
54 
55       /* ADIC doesn't like doubles in a for loop */
56       for (xmin_tmp =0; xmin_tmp < dd->M; xmin_tmp++) {
57          ierr = PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr);
58       }
59 
60       xmin = 0.0; xmax = dd->M - 1;
61       ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);CHKERRQ(ierr);
62       ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr);
63     }
64 
65     ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
66     ierr = PetscDrawPause(draw);CHKERRQ(ierr);
67 
68     /* draw my box */
69     ymin = 0; ymax = 0.3; xmin = dd->xs / dd->w; xmax = (dd->xe / dd->w)  - 1;
70     ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);CHKERRQ(ierr);
71     ierr = PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
72     ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
73     ierr = PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
74 
75     /* Put in index numbers */
76     base = dd->base / dd->w;
77     for (x=xmin; x<=xmax; x++) {
78       sprintf(node,"%d",(int)base++);
79       ierr = PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node);CHKERRQ(ierr);
80     }
81 
82     ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
83     ierr = PetscDrawPause(draw);CHKERRQ(ierr);
84   } else {
85     SETERRQ1(((PetscObject)da)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for DA 1d",((PetscObject)viewer)->type_name);
86   }
87   PetscFunctionReturn(0);
88 }
89 
90 #undef __FUNCT__
91 #define __FUNCT__ "DAView_Private"
92 /*
93     Processes command line options to determine if/how a DA
94   is to be viewed. Called by DACreateXX()
95 */
96 PetscErrorCode DAView_Private(DA da)
97 {
98   PetscErrorCode ierr;
99   PetscBool      flg1 = PETSC_FALSE;
100   PetscViewer    view;
101 
102   PetscFunctionBegin;
103   ierr = PetscOptionsBegin(((PetscObject)da)->comm,((PetscObject)da)->prefix,"DA viewing options","DA");CHKERRQ(ierr);
104     ierr = PetscOptionsTruth("-da_view","Print information about the DA's distribution","DAView",PETSC_FALSE,&flg1,PETSC_NULL);CHKERRQ(ierr);
105     if (flg1) {
106       ierr = PetscViewerASCIIGetStdout(((PetscObject)da)->comm,&view);CHKERRQ(ierr);
107       ierr = DAView(da,view);CHKERRQ(ierr);
108     }
109     flg1 = PETSC_FALSE;
110     ierr = PetscOptionsTruth("-da_view_draw","Draw how the DA is distributed","DAView",PETSC_FALSE,&flg1,PETSC_NULL);CHKERRQ(ierr);
111     if (flg1) {ierr = DAView(da,PETSC_VIEWER_DRAW_(((PetscObject)da)->comm));CHKERRQ(ierr);}
112   ierr = PetscOptionsEnd();CHKERRQ(ierr);
113   PetscFunctionReturn(0);
114 }
115 
#undef __FUNCT__
#define __FUNCT__ "DASetUp_1D"
/*
   DASetUp_1D - Performs the parallel setup of a one-dimensional DA.

   Determines each process's locally owned node range and its ghost region,
   creates the work vectors and the local-to-global / global-to-local
   scatter contexts, and builds the local-to-global index mapping used by
   VecSetValuesLocal(). All results are stored in the DM_DA data structure.
*/
PetscErrorCode PETSCDM_DLLEXPORT DASetUp_1D(DA da)
{
  DM_DA                *dd = (DM_DA*)da->data;
  const PetscInt       M     = dd->M;           /* global number of grid nodes */
  const PetscInt       dof   = dd->w;           /* degrees of freedom per node */
  const PetscInt       s     = dd->s;           /* stencil width, in nodes */
  const PetscInt       sDist = s*dof;  /* absolute stencil distance */
  const PetscInt      *lx    = dd->lx;          /* optional user-supplied nodes-per-process */
  const DAPeriodicType wrap  = dd->wrap;
  MPI_Comm             comm;
  Vec                  local, global;
  VecScatter           ltog, gtol;
  IS                   to, from;
  PetscBool            flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
  PetscMPIInt          rank, size;
  PetscInt             i,*idx,nn,left,xs,xe,x,Xs,Xe,start,end,m;
  PetscErrorCode       ierr;

  PetscFunctionBegin;
  if (dof < 1) SETERRQ1(((PetscObject)da)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have 1 or more degrees of freedom per node: %D",dof);
  if (s < 0) SETERRQ1(((PetscObject)da)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Stencil width cannot be negative: %D",s);

  dd->dim = 1;
  /* field-name slots are allocated empty here; names are set later (e.g. DASetFieldName()) */
  ierr = PetscMalloc(dof*sizeof(char*),&dd->fieldname);CHKERRQ(ierr);
  ierr = PetscMemzero(dd->fieldname,dof*sizeof(char*));CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject) da, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  /* in 1d every process owns one contiguous slab */
  dd->m = size;
  m     = dd->m;

  if (s > 0) {
    /* if not communicating data then should be ok to have nothing on some processes */
    if (M < m)     SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"More processes than data points! %D %D",m,M);
    if ((M-1) < s) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %D %D",M-1,s);
  }

  /*
     Determine locally owned region
     xs is the first local node number, x is the number of local nodes
  */
  if (!lx) {
    /* no user layout given: choose among three built-in partitioning schemes */
    ierr = PetscOptionsGetTruth(PETSC_NULL,"-da_partition_blockcomm",&flg1,PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsGetTruth(PETSC_NULL,"-da_partition_nodes_at_end",&flg2,PETSC_NULL);CHKERRQ(ierr);
    if (flg1) {      /* Block Comm type Distribution */
      xs = rank*M/m;
      x  = (rank + 1)*M/m - xs;
    } else if (flg2) { /* The odd nodes are evenly distributed across last nodes */
      x = (M + rank)/m;
      if (M/m == x) { xs = rank*x; }
      else          { xs = rank*(x-1) + (M+rank)%(x*m); }
    } else { /* The odd nodes are evenly distributed across the first k nodes */
      /* Regular PETSc Distribution */
      x = M/m + ((M % m) > rank);
      if (rank >= (M % m)) {xs = (rank * (PetscInt)(M/m) + M % m);}
      else                 {xs = rank * (PetscInt)(M/m) + rank;}
    }
  } else {
    /* user-supplied layout: my count is lx[rank], my start is the sum of earlier counts */
    x  = lx[rank];
    xs = 0;
    for (i=0; i<rank; i++) {
      xs += lx[i];
    }
    /* verify that data user provided is consistent */
    left = xs;
    for (i=rank; i<size; i++) {
      left += lx[i];
    }
    if (left != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M %D %D",left,M);
  }

  /* From now on x,xs,xe,Xs,Xe are the exact location in the array */
  x  *= dof;
  xs *= dof;
  xe  = xs + x;

  /* determine ghost region */
  if (wrap == DA_XPERIODIC || wrap == DA_XYZGHOSTED) {
    /* periodic/ghosted: ghost region always extends the full stencil distance */
    Xs = xs - sDist;
    Xe = xe + sDist;
  } else {
    /* non-periodic: ghost region is clipped at the physical boundaries */
    if ((xs-sDist) >= 0)     Xs = xs-sDist;  else Xs = 0;
    if ((xe+sDist) <= M*dof) Xe = xe+sDist;  else Xe = M*dof;
  }

  /* allocate the base parallel and sequential vectors */
  dd->Nlocal = x;
  ierr = VecCreateMPIWithArray(comm,dd->Nlocal,PETSC_DECIDE,0,&global);CHKERRQ(ierr);
  ierr = VecSetBlockSize(global,dof);CHKERRQ(ierr);
  dd->nlocal = (Xe-Xs);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dd->nlocal,0,&local);CHKERRQ(ierr);
  ierr = VecSetBlockSize(local,dof);CHKERRQ(ierr);

  /* Create Local to Global Vector Scatter Context */
  /* local to global inserts non-ghost point region into global */
  ierr = VecGetOwnershipRange(global,&start,&end);CHKERRQ(ierr);
  ierr = ISCreateStride(comm,x,start,1,&to);CHKERRQ(ierr);
  ierr = ISCreateStride(comm,x,xs-Xs,1,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(local,from,global,to,&ltog);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(da,ltog);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);

  /* Create Global to Local Vector Scatter Context */
  /* global to local must retrieve ghost points */
  if  (wrap == DA_XYZGHOSTED) {
    /* ghosted boundaries beyond the physical domain receive no global data,
       so the destination stride skips them at the ends of the domain */
    if (size == 1) {
      ierr = ISCreateStride(comm,(xe-xs),sDist,1,&to);CHKERRQ(ierr);
    } else if (!rank) {
      ierr = ISCreateStride(comm,(Xe-xs),sDist,1,&to);CHKERRQ(ierr);
    } else if (rank == size-1) {
      ierr = ISCreateStride(comm,(xe-Xs),0,1,&to);CHKERRQ(ierr);
    } else {
      ierr = ISCreateStride(comm,(Xe-Xs),0,1,&to);CHKERRQ(ierr);
    }
  } else {
    ierr = ISCreateStride(comm,(Xe-Xs),0,1,&to);CHKERRQ(ierr);
  }

  /* build the list of global indices that fill the local (ghosted) vector */
  ierr = PetscMalloc((x+2*sDist)*sizeof(PetscInt),&idx);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(da,(x+2*sDist)*sizeof(PetscInt));CHKERRQ(ierr);

  nn = 0;
  if (wrap == DA_XPERIODIC) {    /* Handle all cases with wrap first */

    for (i=0; i<sDist; i++) {  /* Left ghost points */
      if ((xs-sDist+i)>=0) { idx[nn++] = xs-sDist+i;}
      else                 { idx[nn++] = M*dof+(xs-sDist+i);}  /* wrap around to the right end */
    }

    for (i=0; i<x; i++) { idx [nn++] = xs + i;}  /* Non-ghost points */

    for (i=0; i<sDist; i++) { /* Right ghost points */
      if ((xe+i)<M*dof) { idx [nn++] =  xe+i; }
      else              { idx [nn++] = (xe+i) - M*dof;}  /* wrap around to the left end */
    }
  } else if (wrap == DA_XYZGHOSTED) {

    /* interior ghosts only; boundary ghosts have no global source and are omitted */
    if (sDist <= xs) {for (i=0; i<sDist; i++) {idx[nn++] = xs - sDist + i;}}

    for (i=0; i<x; i++) { idx [nn++] = xs + i;}

    if ((xe+sDist)<=M*dof) {for (i=0;  i<sDist;     i++) {idx[nn++]=xe+i;}}

  } else {      /* Now do all cases with no wrapping */

    if (sDist <= xs) {for (i=0; i<sDist; i++) {idx[nn++] = xs - sDist + i;}}
    else             {for (i=0; i<xs;    i++) {idx[nn++] = i;}}  /* truncated at the left boundary */

    for (i=0; i<x; i++) { idx [nn++] = xs + i;}

    if ((xe+sDist)<=M*dof) {for (i=0;  i<sDist;   i++) {idx[nn++]=xe+i;}}
    else                   {for (i=xe; i<(M*dof); i++) {idx[nn++]=i;}}  /* truncated at the right boundary */
  }

  ierr = ISCreateGeneral(comm,nn,idx,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,local,to,&gtol);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(da,to);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(da,from);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(da,gtol);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);
  ierr = VecDestroy(local);CHKERRQ(ierr);
  ierr = VecDestroy(global);CHKERRQ(ierr);

  /* record owned and ghosted index ranges (y and z are degenerate in 1d) */
  dd->xs = xs; dd->xe = xe; dd->ys = 0; dd->ye = 1; dd->zs = 0; dd->ze = 1;
  dd->Xs = Xs; dd->Xe = Xe; dd->Ys = 0; dd->Ye = 1; dd->Zs = 0; dd->Ze = 1;

  dd->gtol      = gtol;
  dd->ltog      = ltog;
  dd->base      = xs;
  da->ops->view = DAView_1d;

  /*
     Set the local to global ordering in the global vector, this allows use
     of VecSetValuesLocal().
  */
  if (wrap == DA_XYZGHOSTED) {
    /* pad the mapping with -1 entries for ghost locations outside the
       physical domain, which have no corresponding global value */
    PetscInt *tmpidx;
    if (size == 1) {
      ierr = PetscMalloc((nn+2*sDist)*sizeof(PetscInt),&tmpidx);CHKERRQ(ierr);
      for (i=0; i<sDist; i++) tmpidx[i] = -1;
      ierr = PetscMemcpy(tmpidx+sDist,idx,nn*sizeof(PetscInt));CHKERRQ(ierr);
      for (i=nn+sDist; i<nn+2*sDist; i++) tmpidx[i] = -1;
      ierr = PetscFree(idx);CHKERRQ(ierr);
      idx  = tmpidx;
      nn  += 2*sDist;
    } else if (!rank) { /* must preprend -1 marker for ghost location that have no global value */
      ierr = PetscMalloc((nn+sDist)*sizeof(PetscInt),&tmpidx);CHKERRQ(ierr);
      for (i=0; i<sDist; i++) tmpidx[i] = -1;
      ierr = PetscMemcpy(tmpidx+sDist,idx,nn*sizeof(PetscInt));CHKERRQ(ierr);
      ierr = PetscFree(idx);CHKERRQ(ierr);
      idx  = tmpidx;
      nn  += sDist;
    } else if (rank  == size-1) { /* must postpend -1 marker for ghost location that have no global value */
      ierr = PetscMalloc((nn+sDist)*sizeof(PetscInt),&tmpidx);CHKERRQ(ierr);
      ierr = PetscMemcpy(tmpidx,idx,nn*sizeof(PetscInt));CHKERRQ(ierr);
      for (i=nn; i<nn+sDist; i++) tmpidx[i] = -1;
      ierr = PetscFree(idx);CHKERRQ(ierr);
      idx  = tmpidx;
      nn  += sDist;
    }
  }
  /* NOTE(review): idx is handed off with PETSC_OWN_POINTER (the mapping now
     owns it) yet is also stored in dd->idx below -- presumably the DA and the
     mapping coordinate so the array is freed exactly once; verify against the
     destroy paths before changing either side. */
  ierr = ISLocalToGlobalMappingCreate(comm,nn,idx,PETSC_OWN_POINTER,&dd->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingBlock(dd->ltogmap,dd->w,&dd->ltogmapb);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(da,dd->ltogmap);CHKERRQ(ierr);

  dd->idx = idx;
  dd->Nl  = nn;

  PetscFunctionReturn(0);
}
331 
332 
333 #undef __FUNCT__
334 #define __FUNCT__ "DACreate1d"
335 /*@C
336    DACreate1d - Creates an object that will manage the communication of  one-dimensional
337    regular array data that is distributed across some processors.
338 
339    Collective on MPI_Comm
340 
341    Input Parameters:
342 +  comm - MPI communicator
343 .  wrap - type of periodicity should the array have, if any. Use
344           either DA_NONPERIODIC or DA_XPERIODIC
345 .  M - global dimension of the array (use -M to indicate that it may be set to a different value
346             from the command line with -da_grid_x <M>)
347 .  dof - number of degrees of freedom per node
348 .  s - stencil width
349 -  lx - array containing number of nodes in the X direction on each processor,
350         or PETSC_NULL. If non-null, must be of length as m.
351 
352    Output Parameter:
353 .  da - the resulting distributed array object
354 
355    Options Database Key:
356 +  -da_view - Calls DAView() at the conclusion of DACreate1d()
357 .  -da_grid_x <nx> - number of grid points in x direction; can set if M < 0
358 -  -da_refine_x - refinement factor
359 
360    Level: beginner
361 
362    Notes:
363    The array data itself is NOT stored in the DA, it is stored in Vec objects;
364    The appropriate vector objects can be obtained with calls to DACreateGlobalVector()
365    and DACreateLocalVector() and calls to VecDuplicate() if more are needed.
366 
367 
368 .keywords: distributed array, create, one-dimensional
369 
370 .seealso: DADestroy(), DAView(), DACreate2d(), DACreate3d(), DAGlobalToLocalBegin(), DASetRefinementFactor(),
371           DAGlobalToLocalEnd(), DALocalToGlobal(), DALocalToLocalBegin(), DALocalToLocalEnd(), DAGetRefinementFactor(),
372           DAGetInfo(), DACreateGlobalVector(), DACreateLocalVector(), DACreateNaturalVector(), DALoad(), DAView(), DAGetOwnershipRanges()
373 
374 @*/
375 PetscErrorCode PETSCDM_DLLEXPORT DACreate1d(MPI_Comm comm, DAPeriodicType wrap, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DA *da)
376 {
377   PetscErrorCode ierr;
378   PetscMPIInt    size;
379 
380   PetscFunctionBegin;
381   ierr = DACreate(comm, da);CHKERRQ(ierr);
382   ierr = DASetDim(*da, 1);CHKERRQ(ierr);
383   ierr = DASetSizes(*da, M, 1, 1);CHKERRQ(ierr);
384   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
385   ierr = DASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE);CHKERRQ(ierr);
386   ierr = DASetPeriodicity(*da, wrap);CHKERRQ(ierr);
387   ierr = DASetDof(*da, dof);CHKERRQ(ierr);
388   ierr = DASetStencilWidth(*da, s);CHKERRQ(ierr);
389   ierr = DASetOwnershipRanges(*da, lx, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
390   /* This violates the behavior for other classes, but right now users expect negative dimensions to be handled this way */
391   ierr = DASetFromOptions(*da);CHKERRQ(ierr);
392   ierr = DASetUp(*da);CHKERRQ(ierr);
393   PetscFunctionReturn(0);
394 }
395