/*
   Code for manipulating distributed regular 1d arrays in parallel.
   This file was created by Peter Mell  6/30/95
*/

#include <petsc/private/dmdaimpl.h>     /*I  "petscdmda.h"  I*/

#include <petscdraw.h>
static PetscErrorCode DMView_DA_1d(DM da,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscBool      iascii,isdraw,isglvis,isbinary;
  DM_DA          *dd = (DM_DA*)da->data;
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscBool ismatlab;
#endif

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)da),&rank);CHKERRQ(ierr);

  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERGLVIS,&isglvis);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);CHKERRQ(ierr);
#endif
  if (iascii) {
    PetscViewerFormat format;

    ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
    if (format != PETSC_VIEWER_ASCII_VTK && format != PETSC_VIEWER_ASCII_VTK_CELL && format != PETSC_VIEWER_ASCII_GLVIS) {
      DMDALocalInfo info;
      ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D m %D w %D s %D\n",rank,dd->M,dd->m,dd->w,dd->s);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D\n",info.xs,info.xs+info.xm);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    } else if (format == PETSC_VIEWER_ASCII_GLVIS) {
      ierr = DMView_DA_GLVis(da,viewer);CHKERRQ(ierr);
    } else {
      ierr = DMView_DA_VTK(da, viewer);CHKERRQ(ierr);
    }
  } else if (isdraw) {
    PetscDraw draw;
    double    ymin = -1,ymax = 1,xmin = -1,xmax = dd->M,x;
    PetscInt  base;
    char      node[10];
    PetscBool isnull;

    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr);
    if (isnull) PetscFunctionReturn(0);

    ierr = PetscDrawCheckResizedWindow(draw);CHKERRQ(ierr);
    ierr = PetscDrawClear(draw);CHKERRQ(ierr);
    ierr = PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);CHKERRQ(ierr);

    ierr = PetscDrawCollectiveBegin(draw);CHKERRQ(ierr);
    /* first processor draws all node lines */
    if (!rank) {
      PetscInt xmin_tmp;
      ymin = 0.0; ymax = 0.3;
      for (xmin_tmp=0; xmin_tmp < dd->M; xmin_tmp++) {
        ierr = PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr);
      }
      xmin = 0.0; xmax = dd->M - 1;
      ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);CHKERRQ(ierr);
      ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr);
    }
    ierr = PetscDrawCollectiveEnd(draw);CHKERRQ(ierr);
    ierr = PetscDrawFlush(draw);CHKERRQ(ierr);
    ierr = PetscDrawPause(draw);CHKERRQ(ierr);

    ierr = PetscDrawCollectiveBegin(draw);CHKERRQ(ierr);
    /* draw my box */
    ymin = 0; ymax = 0.3; xmin = dd->xs / dd->w; xmax = (dd->xe / dd->w) - 1;
    ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
    /* Put in index numbers */
    base = dd->base / dd->w;
    for (x=xmin; x<=xmax; x++) {
      ierr = PetscSNPrintf(node,sizeof(node),"%d",(int)base++);CHKERRQ(ierr);
      ierr = PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node);CHKERRQ(ierr);
    }
    ierr = PetscDrawCollectiveEnd(draw);CHKERRQ(ierr);
    ierr = PetscDrawFlush(draw);CHKERRQ(ierr);
    ierr = PetscDrawPause(draw);CHKERRQ(ierr);
    ierr = PetscDrawSave(draw);CHKERRQ(ierr);
  } else if (isglvis) {
    ierr = DMView_DA_GLVis(da,viewer);CHKERRQ(ierr);
  } else if (isbinary) {
    ierr = DMView_DA_Binary(da,viewer);CHKERRQ(ierr);
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  } else if (ismatlab) {
    ierr = DMView_DA_Matlab(da,viewer);CHKERRQ(ierr);
#endif
  }
  PetscFunctionReturn(0);
}


PetscErrorCode DMSetUp_DA_1D(DM da)
{
  DM_DA           *dd    = (DM_DA*)da->data;
  const PetscInt  M      = dd->M;
  const PetscInt  dof    = dd->w;
  const PetscInt  s      = dd->s;
  const PetscInt  sDist  = s;  /* stencil distance in points */
  const PetscInt  *lx    = dd->lx;
  DMBoundaryType  bx     = dd->bx;
  MPI_Comm        comm;
  Vec             local, global;
  VecScatter      gtol;
  IS              to, from;
  PetscBool       flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
  PetscMPIInt     rank, size;
  PetscInt        i,*idx,nn,left,xs,xe,x,Xs,Xe,start,m,IXs,IXe;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) da, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  dd->p = 1;
  dd->n = 1;
  dd->m = size;
  m     = dd->m;

  if (s > 0) {
    /* if not communicating data then should be ok to have nothing on some processes */
    if (M < m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"More processes than data points! %D %D",m,M);
    if ((M-1) < s && size > 1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %D %D",M-1,s);
  }

  /*
     Determine locally owned region
     xs is the first local node number, x is the number of local nodes
  */
  if (!lx) {
    ierr = PetscMalloc1(m, &dd->lx);CHKERRQ(ierr);
    ierr = PetscOptionsGetBool(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_partition_blockcomm",&flg1,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsGetBool(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_partition_nodes_at_end",&flg2,NULL);CHKERRQ(ierr);
    if (flg1) {        /* Block Comm type Distribution */
      xs = rank*M/m;
      x  = (rank + 1)*M/m - xs;
    } else if (flg2) { /* The odd nodes are evenly distributed across last nodes */
      x = (M + rank)/m;
      if (M/m == x) xs = rank*x;
      else          xs = rank*(x-1) + (M+rank)%(x*m);
    } else {           /* The odd nodes are evenly distributed across the first k nodes */
      /* Regular PETSc Distribution */
      x = M/m + ((M % m) > rank);
      if (rank >= (M % m)) xs = (rank * (PetscInt)(M/m) + M % m);
      else                 xs = rank * (PetscInt)(M/m) + rank;
    }
    ierr = MPI_Allgather(&xs,1,MPIU_INT,dd->lx,1,MPIU_INT,comm);CHKERRQ(ierr);
    for (i=0; i<m-1; i++) dd->lx[i] = dd->lx[i+1] - dd->lx[i];
    dd->lx[m-1] = M - dd->lx[m-1];
  } else {
    x  = lx[rank];
    xs = 0;
    for (i=0; i<rank; i++) xs += lx[i];
    /* verify that data user provided is consistent */
    left = xs;
    for (i=rank; i<size; i++) left += lx[i];
    if (left != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M %D %D",left,M);
  }
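
  /*
     Example of the default distribution above: M = 11 nodes on m = 3 ranks gives
     local sizes x = 4, 4, 3 and starting indices xs = 0, 4, 8, i.e. the M % m
     leftover nodes go to the lowest-numbered ranks.  After the MPI_Allgather()
     and the differencing loop, dd->lx holds the per-rank counts {4, 4, 3}.
  */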

  /*
     check if the scatter requires more than one process neighbor or wraps around
     the domain more than once
  */
  if ((x < s) && ((M > 1) || (bx == DM_BOUNDARY_PERIODIC))) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %D is smaller than stencil width s %D",x,s);

  xe = xs + x;

  /* determine ghost region (Xs) and region scattered into (IXs) */
  if (xs-sDist > 0) {
    Xs  = xs - sDist;
    IXs = xs - sDist;
  } else {
    if (bx) Xs = xs - sDist;
    else    Xs = 0;
    IXs = 0;
  }
  if (xe+sDist <= M) {
    Xe  = xe + sDist;
    IXe = xe + sDist;
  } else {
    if (bx) Xe = xe + sDist;
    else    Xe = M;
    IXe = M;
  }

  if (bx == DM_BOUNDARY_PERIODIC || bx == DM_BOUNDARY_MIRROR) {
    Xs  = xs - sDist;
    Xe  = xe + sDist;
    IXs = xs - sDist;
    IXe = xe + sDist;
  }
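
  /*
     For example, with M = 10, s = 2 and a rank owning xs = 0 up to xe = 5:
     with bx == DM_BOUNDARY_NONE the ghost region is clipped at the physical
     boundary, so Xs = IXs = 0 and Xe = IXe = 7; with bx == DM_BOUNDARY_GHOSTED
     the ghosted region is extended to Xs = -2 to provide the extra boundary
     ghost slots, while IXs stays 0 since nothing is scattered into those slots.
  */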

  /* allocate the base parallel and sequential vectors */
  dd->Nlocal = dof*x;
  ierr       = VecCreateMPIWithArray(comm,dof,dd->Nlocal,PETSC_DECIDE,NULL,&global);CHKERRQ(ierr);
  dd->nlocal = dof*(Xe-Xs);
  ierr       = VecCreateSeqWithArray(PETSC_COMM_SELF,dof,dd->nlocal,NULL,&local);CHKERRQ(ierr);

  ierr = VecGetOwnershipRange(global,&start,NULL);CHKERRQ(ierr);

  /* Create Global to Local Vector Scatter Context */
  /* global to local must retrieve ghost points */
  ierr = ISCreateStride(comm,dof*(IXe-IXs),dof*(IXs-Xs),1,&to);CHKERRQ(ierr);

  ierr = PetscMalloc1(x+2*sDist,&idx);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)da,(x+2*(sDist))*sizeof(PetscInt));CHKERRQ(ierr);

  for (i=0; i<IXs-Xs; i++) idx[i] = -1; /* prepend with -1s if needed for ghosted case */

  nn = IXs-Xs;
  if (bx == DM_BOUNDARY_PERIODIC) { /* Handle all cases with periodic first */
    for (i=0; i<sDist; i++) {       /* Left ghost points */
      if ((xs-sDist+i)>=0) idx[nn++] = xs-sDist+i;
      else                 idx[nn++] = M+(xs-sDist+i);
    }

    for (i=0; i<x; i++) idx[nn++] = xs + i;  /* Non-ghost points */

    for (i=0; i<sDist; i++) {       /* Right ghost points */
      if ((xe+i)<M) idx[nn++] = xe+i;
      else          idx[nn++] = (xe+i) - M;
    }
  } else if (bx == DM_BOUNDARY_MIRROR) { /* Handle the mirror boundary case */
    for (i=0; i<sDist; i++) {            /* Left ghost points */
      if ((xs-sDist+i)>=0) idx[nn++] = xs-sDist+i;
      else                 idx[nn++] = sDist - i;
    }

    for (i=0; i<x; i++) idx[nn++] = xs + i;  /* Non-ghost points */

    for (i=0; i<sDist; i++) {            /* Right ghost points */
      if ((xe+i)<M) idx[nn++] = xe+i;
      else          idx[nn++] = M - (i + 2);
    }
  } else { /* Now do all cases with no periodicity */
    if (0 <= xs-sDist) {
      for (i=0; i<sDist; i++) idx[nn++] = xs - sDist + i;
    } else {
      for (i=0; i<xs; i++) idx[nn++] = i;
    }

    for (i=0; i<x; i++) idx[nn++] = xs + i;

    if ((xe+sDist)<=M) {
      for (i=0; i<sDist; i++) idx[nn++] = xe+i;
    } else {
      for (i=xe; i<M; i++) idx[nn++] = i;
    }
  }

  ierr = ISCreateBlock(comm,dof,nn-IXs+Xs,&idx[IXs-Xs],PETSC_USE_POINTER,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,local,to,&gtol);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)da,(PetscObject)gtol);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = VecDestroy(&local);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);

  dd->xs = dof*xs; dd->xe = dof*xe; dd->ys = 0; dd->ye = 1; dd->zs = 0; dd->ze = 1;
  dd->Xs = dof*Xs; dd->Xe = dof*Xe; dd->Ys = 0; dd->Ye = 1; dd->Zs = 0; dd->Ze = 1;

  dd->gtol      = gtol;
  dd->base      = dof*xs;
  da->ops->view = DMView_DA_1d;

  /*
     Set the local to global ordering in the global vector, this allows use
     of VecSetValuesLocal().
  */
  for (i=0; i<Xe-IXe; i++) idx[nn++] = -1; /* pad with -1s if needed for ghosted case */

  ierr = ISLocalToGlobalMappingCreate(comm,dof,nn,idx,PETSC_OWN_POINTER,&da->ltogmap);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)da,(PetscObject)da->ltogmap);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}


/*@C
   DMDACreate1d - Creates an object that will manage the communication of one-dimensional
   regular array data that is distributed across some processors.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  bx - type of ghost cells at the boundary the array should have, if any. Use
        DM_BOUNDARY_NONE, DM_BOUNDARY_GHOSTED, or DM_BOUNDARY_PERIODIC.
.  M - global dimension of the array (use a negative value to indicate that it may be set
       from the command line with -da_grid_x <M>)
.  dof - number of degrees of freedom per node
.  s - stencil width
-  lx - array containing the number of nodes in the X direction on each processor,
        or NULL. If non-null, it must have length equal to the number of processes in the MPI_Comm.

   Output Parameter:
.  da - the resulting distributed array object

   Options Database Key:
+  -dm_view - Calls DMView() at the conclusion of DMDACreate1d()
.  -da_grid_x <nx> - number of grid points in x direction; can set if M < 0
.  -da_refine_x <rx> - refinement factor
-  -da_refine <n> - refine the DMDA n times before creating it, if M < 0

   Level: beginner

   Notes:
   The array data itself is NOT stored in the DMDA, it is stored in Vec objects;
   the appropriate vector objects can be obtained with calls to DMCreateGlobalVector()
   and DMCreateLocalVector() and calls to VecDuplicate() if more are needed.

   You must call DMSetUp() after this call before using this DM.

   If you wish to use the options database to change values in the DMDA, call DMSetFromOptions() after this call
   but before DMSetUp().

.keywords: distributed array, create, one-dimensional

.seealso: DMDestroy(), DMView(), DMDACreate2d(), DMDACreate3d(), DMGlobalToLocalBegin(), DMDASetRefinementFactor(),
          DMGlobalToLocalEnd(), DMLocalToGlobalBegin(), DMLocalToLocalBegin(), DMLocalToLocalEnd(), DMDAGetRefinementFactor(),
          DMDAGetInfo(), DMCreateGlobalVector(), DMCreateLocalVector(), DMDACreateNaturalVector(), DMLoad(), DMDAGetOwnershipRanges()

@*/
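/*
   Minimal usage sketch (illustrative, not part of the man page above): create a
   periodic 1d DMDA with 128 grid points, 2 degrees of freedom per node and a
   stencil width of 1, then obtain a global vector from it.  Error checking is
   abbreviated, and DMSetFromOptions() may be omitted if no options-database
   customization is needed.

     DM  da;
     Vec g;
     ierr = DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,128,2,1,NULL,&da);CHKERRQ(ierr);
     ierr = DMSetFromOptions(da);CHKERRQ(ierr);
     ierr = DMSetUp(da);CHKERRQ(ierr);
     ierr = DMCreateGlobalVector(da,&g);CHKERRQ(ierr);
*/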
PetscErrorCode DMDACreate1d(MPI_Comm comm, DMBoundaryType bx, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DM *da)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = DMDACreate(comm, da);CHKERRQ(ierr);
  ierr = DMSetDimension(*da, 1);CHKERRQ(ierr);
  ierr = DMDASetSizes(*da, M, 1, 1);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(*da, bx, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE);CHKERRQ(ierr);
  ierr = DMDASetDof(*da, dof);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(*da, s);CHKERRQ(ierr);
  ierr = DMDASetOwnershipRanges(*da, lx, NULL, NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}