xref: /petsc/src/dm/impls/da/da1.c (revision 40badf4fbc550ac1f60bd080eaff6de6d55b946d)
1 
2 /*
3    Code for manipulating distributed regular 1d arrays in parallel.
4    This file was created by Peter Mell   6/30/95
5 */
6 
7 #include <petsc/private/dmdaimpl.h>     /*I  "petscdmda.h"   I*/
8 
9 #include <petscdraw.h>
/*
   DMView_DA_1d - Views a one-dimensional DMDA.

   Dispatches on the type of the supplied viewer: ASCII (plain info,
   load-balance summary, deprecated VTK formats, or GLVis), draw
   (graphical sketch of the node layout and this rank's ownership box),
   GLVis, binary, and - when PETSc was built with the MATLAB engine -
   MATLAB.  Unrecognized viewer types are silently ignored.

   Collective on da.
*/
static PetscErrorCode DMView_DA_1d(DM da,PetscViewer viewer)
{
  PetscMPIInt    rank;
  PetscBool      iascii,isdraw,isglvis,isbinary;
  DM_DA          *dd = (DM_DA*)da->data;
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscBool ismatlab;
#endif

  PetscFunctionBegin;
  CHKERRMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)da),&rank));

  /* Identify which viewer implementation we were handed */
  CHKERRQ(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
  CHKERRQ(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw));
  CHKERRQ(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERGLVIS,&isglvis));
  CHKERRQ(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary));
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  CHKERRQ(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab));
#endif
  if (iascii) {
    PetscViewerFormat format;

    CHKERRQ(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_LOAD_BALANCE) {
      /* Gather every rank's count of locally owned grid points and
         report min/avg/max so load imbalance is easy to spot */
      PetscInt      i,nmax = 0,nmin = PETSC_MAX_INT,navg = 0,*nz,nzlocal;
      DMDALocalInfo info;
      PetscMPIInt   size;
      CHKERRMPI(MPI_Comm_size(PetscObjectComm((PetscObject)da),&size));
      CHKERRQ(DMDAGetLocalInfo(da,&info));
      nzlocal = info.xm;
      CHKERRQ(PetscMalloc1(size,&nz));
      CHKERRMPI(MPI_Allgather(&nzlocal,1,MPIU_INT,nz,1,MPIU_INT,PetscObjectComm((PetscObject)da)));
      for (i=0; i<(PetscInt)size; i++) {
        nmax = PetscMax(nmax,nz[i]);
        nmin = PetscMin(nmin,nz[i]);
        navg += nz[i];
      }
      CHKERRQ(PetscFree(nz));
      navg = navg/size;
      CHKERRQ(PetscViewerASCIIPrintf(viewer,"  Load Balance - Grid Points: Min %D  avg %D  max %D\n",nmin,navg,nmax));
      PetscFunctionReturn(0);
    }
    if (format != PETSC_VIEWER_ASCII_VTK_DEPRECATED && format != PETSC_VIEWER_ASCII_VTK_CELL_DEPRECATED && format != PETSC_VIEWER_ASCII_GLVIS) {
      /* Default ASCII output: each rank prints its global sizes and
         its owned index range, in rank order */
      DMDALocalInfo info;
      CHKERRQ(DMDAGetLocalInfo(da,&info));
      CHKERRQ(PetscViewerASCIIPushSynchronized(viewer));
      CHKERRQ(PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D m %D w %D s %D\n",rank,dd->M,dd->m,dd->w,dd->s));
      CHKERRQ(PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D\n",info.xs,info.xs+info.xm));
      CHKERRQ(PetscViewerFlush(viewer));
      CHKERRQ(PetscViewerASCIIPopSynchronized(viewer));
    } else if (format == PETSC_VIEWER_ASCII_GLVIS) {
      CHKERRQ(DMView_DA_GLVis(da,viewer));
    } else {
      /* One of the deprecated VTK ASCII formats */
      CHKERRQ(DMView_DA_VTK(da, viewer));
    }
  } else if (isdraw) {
    PetscDraw      draw;
    double         ymin = -1,ymax = 1,xmin = -1,xmax = dd->M,x;
    PetscInt       base;
    char           node[10];
    PetscBool      isnull;
    PetscErrorCode ierr;

    CHKERRQ(PetscViewerDrawGetDraw(viewer,0,&draw));
    CHKERRQ(PetscDrawIsNull(draw,&isnull));
    if (isnull) PetscFunctionReturn(0);

    CHKERRQ(PetscDrawCheckResizedWindow(draw));
    CHKERRQ(PetscDrawClear(draw));
    CHKERRQ(PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax));

    /* NOTE(review): the ierr = ...;CHKERRQ(ierr); form is deliberately kept
       for the collective begin/end pairs below; confirm they are plain
       functions (not multi-statement macros) in this PETSc revision before
       folding them into CHKERRQ() like the rest of the file */
    ierr = PetscDrawCollectiveBegin(draw);CHKERRQ(ierr);
    /* first processor draws all node lines */
    if (rank == 0) {
      PetscInt xmin_tmp;
      ymin = 0.0; ymax = 0.3;
      for (xmin_tmp=0; xmin_tmp < dd->M; xmin_tmp++) {
        CHKERRQ(PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK));
      }
      xmin = 0.0; xmax = dd->M - 1;
      CHKERRQ(PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK));
      CHKERRQ(PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK));
    }
    ierr = PetscDrawCollectiveEnd(draw);CHKERRQ(ierr);
    CHKERRQ(PetscDrawFlush(draw));
    CHKERRQ(PetscDrawPause(draw));

    ierr = PetscDrawCollectiveBegin(draw);CHKERRQ(ierr);
    /* draw my box: owned point range converted from dof-scaled dd->xs/xe
       back to grid-point coordinates */
    ymin = 0; ymax = 0.3; xmin = dd->xs / dd->w; xmax = (dd->xe / dd->w)  - 1;
    CHKERRQ(PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED));
    CHKERRQ(PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED));
    CHKERRQ(PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED));
    CHKERRQ(PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED));
    /* Put in index numbers */
    base = dd->base / dd->w; /* first global grid-point index owned here */
    for (x=xmin; x<=xmax; x++) {
      CHKERRQ(PetscSNPrintf(node,sizeof(node),"%d",(int)base++));
      CHKERRQ(PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node));
    }
    ierr = PetscDrawCollectiveEnd(draw);CHKERRQ(ierr);
    CHKERRQ(PetscDrawFlush(draw));
    CHKERRQ(PetscDrawPause(draw));
    CHKERRQ(PetscDrawSave(draw));
  } else if (isglvis) {
    CHKERRQ(DMView_DA_GLVis(da,viewer));
  } else if (isbinary) {
    CHKERRQ(DMView_DA_Binary(da,viewer));
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  } else if (ismatlab) {
    CHKERRQ(DMView_DA_Matlab(da,viewer));
#endif
  }
  PetscFunctionReturn(0);
}
125 
/*
   DMSetUp_DA_1D - Performs the setup phase of a one-dimensional DMDA:
   determines the ownership range of each rank, builds the global-to-local
   scatter that fills ghost points, and creates the local-to-global mapping
   used by VecSetValuesLocal().

   On exit dd->xs/xe hold the owned index range and dd->Xs/Xe the ghosted
   range, both scaled by dof; dd->gtol holds the ghost-point scatter.
*/
PetscErrorCode  DMSetUp_DA_1D(DM da)
{
  DM_DA            *dd   = (DM_DA*)da->data;
  const PetscInt   M     = dd->M;
  const PetscInt   dof   = dd->w;
  const PetscInt   s     = dd->s;
  const PetscInt   sDist = s;  /* stencil distance in points */
  const PetscInt   *lx   = dd->lx;
  DMBoundaryType   bx    = dd->bx;
  MPI_Comm         comm;
  Vec              local, global;
  VecScatter       gtol;
  IS               to, from;
  PetscBool        flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
  PetscMPIInt      rank, size;
  PetscInt         i,*idx,nn,left,xs,xe,x,Xs,Xe,start,m,IXs,IXe;

  PetscFunctionBegin;
  CHKERRQ(PetscObjectGetComm((PetscObject) da, &comm));
  CHKERRMPI(MPI_Comm_size(comm,&size));
  CHKERRMPI(MPI_Comm_rank(comm,&rank));

  /* A 1d DA is a degenerate 3d DA: one "plane", one "row", size columns */
  dd->p = 1;
  dd->n = 1;
  dd->m = size;
  m     = dd->m;

  if (s > 0) {
    /* if not communicating data then should be ok to have nothing on some processes */
    PetscCheck(M >= m,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"More processes than data points! %D %D",m,M);
    PetscCheck((M-1) >= s || size <= 1,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %D %D",M-1,s);
  }

  /*
     Determine locally owned region
     xs is the first local node number, x is the number of local nodes
  */
  if (!lx) {
    /* No user-supplied ownership ranges: pick a partitioning strategy,
       optionally overridden from the options database */
    CHKERRQ(PetscMalloc1(m, &dd->lx));
    CHKERRQ(PetscOptionsGetBool(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_partition_blockcomm",&flg1,NULL));
    CHKERRQ(PetscOptionsGetBool(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_partition_nodes_at_end",&flg2,NULL));
    if (flg1) {      /* Block Comm type Distribution */
      xs = rank*M/m;
      x  = (rank + 1)*M/m - xs;
    } else if (flg2) { /* The odd nodes are evenly distributed across last nodes */
      x = (M + rank)/m;
      if (M/m == x) xs = rank*x;
      else          xs = rank*(x-1) + (M+rank)%(x*m);
    } else { /* The odd nodes are evenly distributed across the first k nodes */
      /* Regular PETSc Distribution */
      x = M/m + ((M % m) > rank);
      if (rank >= (M % m)) xs = (rank * (PetscInt)(M/m) + M % m);
      else                 xs = rank * (PetscInt)(M/m) + rank;
    }
    /* Gather every rank's starting index, then difference consecutive
       entries in place so dd->lx ends up holding per-rank point counts */
    CHKERRMPI(MPI_Allgather(&xs,1,MPIU_INT,dd->lx,1,MPIU_INT,comm));
    for (i=0; i<m-1; i++) dd->lx[i] = dd->lx[i+1] - dd->lx[i];
    dd->lx[m-1] = M - dd->lx[m-1];
  } else {
    /* User-supplied per-rank counts: derive my start by summing the
       counts of all lower ranks */
    x  = lx[rank];
    xs = 0;
    for (i=0; i<rank; i++) xs += lx[i];
    /* verify that data user provided is consistent */
    left = xs;
    for (i=rank; i<size; i++) left += lx[i];
    PetscCheck(left == M,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M %D %D",left,M);
  }

  /*
   check if the scatter requires more than one process neighbor or wraps around
   the domain more than once
  */
  PetscCheck((x >= s) || ((M <= 1) && (bx != DM_BOUNDARY_PERIODIC)),PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %D is smaller than stencil width s %D",x,s);

  xe  = xs + x;

  /* determine ghost region (Xs) and region scattered into (IXs)  */
  /* Xs..Xe is the extent of the local (ghosted) storage; IXs..IXe is the
     subrange actually filled by the scatter.  The two differ only at a
     non-ghosted physical boundary, where IXs/IXe clip at 0/M while a
     ghosted boundary (bx != DM_BOUNDARY_NONE) still allocates the space */
  if (xs-sDist > 0) {
    Xs  = xs - sDist;
    IXs = xs - sDist;
  } else {
    if (bx) Xs = xs - sDist;
    else Xs = 0;
    IXs = 0;
  }
  if (xe+sDist <= M) {
    Xe  = xe + sDist;
    IXe = xe + sDist;
  } else {
    if (bx) Xe = xe + sDist;
    else Xe = M;
    IXe = M;
  }

  /* periodic and mirror boundaries always have a full set of ghost points
     on both sides, all of which are filled by the scatter */
  if (bx == DM_BOUNDARY_PERIODIC || bx == DM_BOUNDARY_MIRROR) {
    Xs  = xs - sDist;
    Xe  = xe + sDist;
    IXs = xs - sDist;
    IXe = xe + sDist;
  }

  /* allocate the base parallel and sequential vectors */
  /* these carry no array storage (NULL); they only serve as templates for
     building the scatter and are destroyed below */
  dd->Nlocal = dof*x;
  CHKERRQ(VecCreateMPIWithArray(comm,dof,dd->Nlocal,PETSC_DECIDE,NULL,&global));
  dd->nlocal = dof*(Xe-Xs);
  CHKERRQ(VecCreateSeqWithArray(PETSC_COMM_SELF,dof,dd->nlocal,NULL,&local));

  CHKERRQ(VecGetOwnershipRange(global,&start,NULL));

  /* Create Global to Local Vector Scatter Context */
  /* global to local must retrieve ghost points */
  CHKERRQ(ISCreateStride(comm,dof*(IXe-IXs),dof*(IXs-Xs),1,&to));

  /* idx holds one global point index per local (ghosted) point; entries
     outside the scattered-into range stay -1 */
  CHKERRQ(PetscMalloc1(x+2*sDist,&idx));
  CHKERRQ(PetscLogObjectMemory((PetscObject)da,(x+2*(sDist))*sizeof(PetscInt)));

  for (i=0; i<IXs-Xs; i++) idx[i] = -1; /* prepend with -1s if needed for ghosted case*/

  nn = IXs-Xs;
  if (bx == DM_BOUNDARY_PERIODIC) { /* Handle all cases with periodic first */
    for (i=0; i<sDist; i++) {  /* Left ghost points */
      if ((xs-sDist+i)>=0) idx[nn++] = xs-sDist+i;
      else                 idx[nn++] = M+(xs-sDist+i); /* wrap around the right end */
    }

    for (i=0; i<x; i++) idx [nn++] = xs + i;  /* Non-ghost points */

    for (i=0; i<sDist; i++) { /* Right ghost points */
      if ((xe+i)<M) idx [nn++] =  xe+i;
      else          idx [nn++] = (xe+i) - M; /* wrap around the left end */
    }
  } else if (bx == DM_BOUNDARY_MIRROR) { /* Handle all cases with periodic first */
    for (i=0; i<(sDist); i++) {  /* Left ghost points */
      if ((xs-sDist+i)>=0) idx[nn++] = xs-sDist+i;
      else                 idx[nn++] = sDist - i; /* reflect about point 0 */
    }

    for (i=0; i<x; i++) idx [nn++] = xs + i;  /* Non-ghost points */

    for (i=0; i<(sDist); i++) { /* Right ghost points */
      if ((xe+i)<M) idx[nn++] =  xe+i;
      else          idx[nn++] = M - (i + 2); /* reflect about point M-1 */
    }
  } else {      /* Now do all cases with no periodicity */
    if (0 <= xs-sDist) {
      for (i=0; i<sDist; i++) idx[nn++] = xs - sDist + i;
    } else {
      for (i=0; i<xs; i++) idx[nn++] = i; /* clipped at the left boundary */
    }

    for (i=0; i<x; i++) idx [nn++] = xs + i;

    if ((xe+sDist)<=M) {
      for (i=0; i<sDist; i++) idx[nn++]=xe+i;
    } else {
      for (i=xe; i<M; i++) idx[nn++]=i; /* clipped at the right boundary */
    }
  }

  /* PETSC_USE_POINTER: the IS references idx without copying; idx must
     outlive "from", which it does since ownership passes to ltogmap below */
  CHKERRQ(ISCreateBlock(comm,dof,nn-IXs+Xs,&idx[IXs-Xs],PETSC_USE_POINTER,&from));
  CHKERRQ(VecScatterCreate(global,from,local,to,&gtol));
  CHKERRQ(PetscLogObjectParent((PetscObject)da,(PetscObject)gtol));
  CHKERRQ(ISDestroy(&to));
  CHKERRQ(ISDestroy(&from));
  CHKERRQ(VecDestroy(&local));
  CHKERRQ(VecDestroy(&global));

  /* record owned and ghosted ranges, scaled by dof; y/z are degenerate */
  dd->xs = dof*xs; dd->xe = dof*xe; dd->ys = 0; dd->ye = 1; dd->zs = 0; dd->ze = 1;
  dd->Xs = dof*Xs; dd->Xe = dof*Xe; dd->Ys = 0; dd->Ye = 1; dd->Zs = 0; dd->Ze = 1;

  dd->gtol      = gtol;
  dd->base      = dof*xs;
  da->ops->view = DMView_DA_1d;

  /*
     Set the local to global ordering in the global vector, this allows use
     of VecSetValuesLocal().
  */
  for (i=0; i<Xe-IXe; i++) idx[nn++] = -1; /* pad with -1s if needed for ghosted case*/

  /* PETSC_OWN_POINTER: idx is handed off to the mapping and is freed when
     the mapping is destroyed, so it is deliberately not freed here */
  CHKERRQ(ISLocalToGlobalMappingCreate(comm,dof,nn,idx,PETSC_OWN_POINTER,&da->ltogmap));
  CHKERRQ(PetscLogObjectParent((PetscObject)da,(PetscObject)da->ltogmap));

  PetscFunctionReturn(0);
}
310 
311 /*@C
312    DMDACreate1d - Creates an object that will manage the communication of  one-dimensional
313    regular array data that is distributed across some processors.
314 
315    Collective
316 
317    Input Parameters:
318 +  comm - MPI communicator
.  bx - type of ghost cells at the boundary the array should have, if any. Use
          DM_BOUNDARY_NONE, DM_BOUNDARY_GHOSTED, DM_BOUNDARY_MIRROR, or DM_BOUNDARY_PERIODIC.
.  M - global dimension of the array (that is the number of grid points;
            it can also be set from the command line with -da_grid_x <M>)
323 .  dof - number of degrees of freedom per node
324 .  s - stencil width
325 -  lx - array containing number of nodes in the X direction on each processor,
        or NULL. If non-null, it must have a length equal to the number of processes in the MPI_Comm.
327         The sum of these entries must equal M
328 
329    Output Parameter:
330 .  da - the resulting distributed array object
331 
   Options Database Keys:
333 +  -dm_view - Calls DMView() at the conclusion of DMDACreate1d()
334 .  -da_grid_x <nx> - number of grid points in x direction
335 .  -da_refine_x <rx> - refinement factor
336 -  -da_refine <n> - refine the DMDA n times before creating it
337 
338    Level: beginner
339 
340    Notes:
341    The array data itself is NOT stored in the DMDA, it is stored in Vec objects;
342    The appropriate vector objects can be obtained with calls to DMCreateGlobalVector()
343    and DMCreateLocalVector() and calls to VecDuplicate() if more are needed.
344 
345    You must call DMSetUp() after this call before using this DM.
346 
347    If you wish to use the options database to change values in the DMDA call DMSetFromOptions() after this call
348    but before DMSetUp().
349 
350 .seealso: DMDestroy(), DMView(), DMDACreate2d(), DMDACreate3d(), DMGlobalToLocalBegin(), DMDASetRefinementFactor(),
351           DMGlobalToLocalEnd(), DMLocalToGlobalBegin(), DMLocalToLocalBegin(), DMLocalToLocalEnd(), DMDAGetRefinementFactor(),
352           DMDAGetInfo(), DMCreateGlobalVector(), DMCreateLocalVector(), DMDACreateNaturalVector(), DMLoad(), DMDAGetOwnershipRanges(),
353           DMStagCreate1d()
354 
355 @*/
356 PetscErrorCode  DMDACreate1d(MPI_Comm comm, DMBoundaryType bx, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DM *da)
357 {
358   PetscMPIInt    size;
359 
360   PetscFunctionBegin;
361   CHKERRQ(DMDACreate(comm, da));
362   CHKERRQ(DMSetDimension(*da, 1));
363   CHKERRQ(DMDASetSizes(*da, M, 1, 1));
364   CHKERRMPI(MPI_Comm_size(comm, &size));
365   CHKERRQ(DMDASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE));
366   CHKERRQ(DMDASetBoundaryType(*da, bx, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE));
367   CHKERRQ(DMDASetDof(*da, dof));
368   CHKERRQ(DMDASetStencilWidth(*da, s));
369   CHKERRQ(DMDASetOwnershipRanges(*da, lx, NULL, NULL));
370   PetscFunctionReturn(0);
371 }
372