/*
  Code for manipulating distributed regular 1d arrays in parallel.
  This file was created by Peter Mell 6/30/95
*/

#include <petsc/private/dmdaimpl.h>     /*I  "petscdmda.h"   I*/

#include <petscdraw.h>
static PetscErrorCode DMView_DA_1d(DM da,PetscViewer viewer)
{
  PetscMPIInt rank;
  PetscBool   iascii,isdraw,isglvis,isbinary;
  DM_DA       *dd = (DM_DA*)da->data;
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscBool ismatlab;
#endif

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)da),&rank));

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERGLVIS,&isglvis));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary));
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab));
#endif
  if (iascii) {
    PetscViewerFormat format;

    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_LOAD_BALANCE) {
      PetscInt      i,nmax = 0,nmin = PETSC_MAX_INT,navg = 0,*nz,nzlocal;
      DMDALocalInfo info;
      PetscMPIInt   size;
      PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)da),&size));
      PetscCall(DMDAGetLocalInfo(da,&info));
      nzlocal = info.xm;
      PetscCall(PetscMalloc1(size,&nz));
      PetscCallMPI(MPI_Allgather(&nzlocal,1,MPIU_INT,nz,1,MPIU_INT,PetscObjectComm((PetscObject)da)));
      for (i=0; i<(PetscInt)size; i++) {
        nmax  = PetscMax(nmax,nz[i]);
        nmin  = PetscMin(nmin,nz[i]);
        navg += nz[i];
      }
      PetscCall(PetscFree(nz));
      navg = navg/size;
      PetscCall(PetscViewerASCIIPrintf(viewer,"  Load Balance - Grid Points: Min %" PetscInt_FMT "  avg %" PetscInt_FMT "  max %" PetscInt_FMT "\n",nmin,navg,nmax));
      PetscFunctionReturn(0);
    }
    if (format != PETSC_VIEWER_ASCII_VTK_DEPRECATED && format != PETSC_VIEWER_ASCII_VTK_CELL_DEPRECATED && format != PETSC_VIEWER_ASCII_GLVIS) {
      DMDALocalInfo info;
      PetscCall(DMDAGetLocalInfo(da,&info));
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %" PetscInt_FMT " m %" PetscInt_FMT " w %" PetscInt_FMT " s %" PetscInt_FMT "\n",rank,dd->M,dd->m,dd->w,dd->s));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %" PetscInt_FMT " %" PetscInt_FMT "\n",info.xs,info.xs+info.xm));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    } else if (format == PETSC_VIEWER_ASCII_GLVIS) {
      PetscCall(DMView_DA_GLVis(da,viewer));
    } else {
      PetscCall(DMView_DA_VTK(da, viewer));
    }
  } else if (isdraw) {
    PetscDraw draw;
    double    ymin = -1,ymax = 1,xmin = -1,xmax = dd->M,x;
    PetscInt  base;
    char      node[10];
    PetscBool isnull;

    PetscCall(PetscViewerDrawGetDraw(viewer,0,&draw));
    PetscCall(PetscDrawIsNull(draw,&isnull));
    if (isnull) PetscFunctionReturn(0);

    PetscCall(PetscDrawCheckResizedWindow(draw));
    PetscCall(PetscDrawClear(draw));
    PetscCall(PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax));

    PetscDrawCollectiveBegin(draw);
    /* first processor draws all node lines */
    if (rank == 0) {
      PetscInt xmin_tmp;
      ymin = 0.0; ymax = 0.3;
      for (xmin_tmp=0; xmin_tmp < dd->M; xmin_tmp++) {
        PetscCall(PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK));
      }
      xmin = 0.0; xmax = dd->M - 1;
      PetscCall(PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK));
      PetscCall(PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK));
    }
    PetscDrawCollectiveEnd(draw);
    PetscCall(PetscDrawFlush(draw));
    PetscCall(PetscDrawPause(draw));

    PetscDrawCollectiveBegin(draw);
    /* draw my box */
    ymin = 0; ymax = 0.3; xmin = dd->xs / dd->w; xmax = (dd->xe / dd->w) - 1;
    PetscCall(PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED));
    PetscCall(PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED));
    PetscCall(PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED));
    PetscCall(PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED));
    /* Put in index numbers */
    base = dd->base / dd->w;
    for (x=xmin; x<=xmax; x++) {
      PetscCall(PetscSNPrintf(node,sizeof(node),"%d",(int)base++));
      PetscCall(PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node));
    }
    PetscDrawCollectiveEnd(draw);
    PetscCall(PetscDrawFlush(draw));
    PetscCall(PetscDrawPause(draw));
    PetscCall(PetscDrawSave(draw));
  } else if (isglvis) {
    PetscCall(DMView_DA_GLVis(da,viewer));
  } else if (isbinary) {
    PetscCall(DMView_DA_Binary(da,viewer));
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  } else if (ismatlab) {
    PetscCall(DMView_DA_Matlab(da,viewer));
#endif
  }
  PetscFunctionReturn(0);
}

/*
   DMSetUp_DA_1D - Sets up a one-dimensional DMDA: determines the locally owned and
   ghosted regions, creates the global-to-local scatter, and builds the local-to-global
   mapping used by VecSetValuesLocal().
*/
PetscErrorCode DMSetUp_DA_1D(DM da)
{
  DM_DA            *dd   = (DM_DA*)da->data;
  const PetscInt   M     = dd->M;
  const PetscInt   dof   = dd->w;
  const PetscInt   s     = dd->s;
  const PetscInt   sDist = s;  /* stencil distance in points */
  const PetscInt   *lx   = dd->lx;
  DMBoundaryType   bx    = dd->bx;
  MPI_Comm         comm;
  Vec              local, global;
  VecScatter       gtol;
  IS               to, from;
  PetscBool        flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
  PetscMPIInt      rank, size;
  PetscInt         i,*idx,nn,left,xs,xe,x,Xs,Xe,start,m,IXs,IXe;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject) da, &comm));
  PetscCallMPI(MPI_Comm_size(comm,&size));
  PetscCallMPI(MPI_Comm_rank(comm,&rank));

  dd->p = 1;
  dd->n = 1;
  dd->m = size;
  m     = dd->m;

  if (s > 0) {
    /* if not communicating data then should be ok to have nothing on some processes */
    PetscCheck(M >= m,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"More processes than data points! %" PetscInt_FMT " %" PetscInt_FMT,m,M);
    PetscCheck((M-1) >= s || size <= 1,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %" PetscInt_FMT " %" PetscInt_FMT,M-1,s);
  }

  /*
     Determine locally owned region
     xs is the first local node number, x is the number of local nodes
  */
  if (!lx) {
    PetscCall(PetscMalloc1(m, &dd->lx));
    PetscCall(PetscOptionsGetBool(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_partition_blockcomm",&flg1,NULL));
    PetscCall(PetscOptionsGetBool(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_partition_nodes_at_end",&flg2,NULL));
    if (flg1) {        /* Block Comm type Distribution */
      xs = rank*M/m;
      x  = (rank + 1)*M/m - xs;
    } else if (flg2) { /* The extra nodes are evenly distributed across the last processes */
      x = (M + rank)/m;
      if (M/m == x) xs = rank*x;
      else          xs = rank*(x-1) + (M+rank)%(x*m);
    } else {           /* The extra nodes are evenly distributed across the first (M % m) processes */
      /* Regular PETSc Distribution */
      x = M/m + ((M % m) > rank);
      if (rank >= (M % m)) xs = (rank * (PetscInt)(M/m) + M % m);
      else                 xs = rank * (PetscInt)(M/m) + rank;
    }
    PetscCallMPI(MPI_Allgather(&xs,1,MPIU_INT,dd->lx,1,MPIU_INT,comm));
    for (i=0; i<m-1; i++) dd->lx[i] = dd->lx[i+1] - dd->lx[i];
    dd->lx[m-1] = M - dd->lx[m-1];
  } else {
    x  = lx[rank];
    xs = 0;
    for (i=0; i<rank; i++) xs += lx[i];
    /* verify that data user provided is consistent */
    left = xs;
    for (i=rank; i<size; i++) left += lx[i];
    PetscCheck(left == M,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M %" PetscInt_FMT " %" PetscInt_FMT,left,M);
  }

  /*
     check if the scatter requires more than one process neighbor or wraps around
     the domain more than once
  */
  PetscCheck((x >= s) || ((M <= 1) && (bx != DM_BOUNDARY_PERIODIC)),PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %" PetscInt_FMT " is smaller than stencil width s %" PetscInt_FMT,x,s);

  xe = xs + x;

  /* determine ghost region (Xs) and region scattered into (IXs) */
  if (xs-sDist > 0) {
    Xs  = xs - sDist;
    IXs = xs - sDist;
  } else {
    if (bx) Xs = xs - sDist;
    else    Xs = 0;
    IXs = 0;
  }
  if (xe+sDist <= M) {
    Xe  = xe + sDist;
    IXe = xe + sDist;
  } else {
    if (bx) Xe = xe + sDist;
    else    Xe = M;
    IXe = M;
  }

  if (bx == DM_BOUNDARY_PERIODIC || bx == DM_BOUNDARY_MIRROR) {
    Xs  = xs - sDist;
    Xe  = xe + sDist;
    IXs = xs - sDist;
    IXe = xe + sDist;
  }

  /* allocate the base parallel and sequential vectors */
  dd->Nlocal = dof*x;
  PetscCall(VecCreateMPIWithArray(comm,dof,dd->Nlocal,PETSC_DECIDE,NULL,&global));
  dd->nlocal = dof*(Xe-Xs);
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,dof,dd->nlocal,NULL,&local));

  PetscCall(VecGetOwnershipRange(global,&start,NULL));

  /* Create Global to Local Vector Scatter Context */
  /* global to local must retrieve ghost points */
  PetscCall(ISCreateStride(comm,dof*(IXe-IXs),dof*(IXs-Xs),1,&to));

  PetscCall(PetscMalloc1(x+2*sDist,&idx));
  PetscCall(PetscLogObjectMemory((PetscObject)da,(x+2*(sDist))*sizeof(PetscInt)));

  for (i=0; i<IXs-Xs; i++) idx[i] = -1; /* prepend with -1s if needed for ghosted case */

  nn = IXs-Xs;
  if (bx == DM_BOUNDARY_PERIODIC) { /* Handle all cases with periodic first */
    for (i=0; i<sDist; i++) {  /* Left ghost points */
      if ((xs-sDist+i)>=0) idx[nn++] = xs-sDist+i;
      else                 idx[nn++] = M+(xs-sDist+i);
    }

    for (i=0; i<x; i++) idx[nn++] = xs + i;  /* Non-ghost points */

    for (i=0; i<sDist; i++) { /* Right ghost points */
      if ((xe+i)<M) idx[nn++] = xe+i;
      else          idx[nn++] = (xe+i) - M;
    }
  } else if (bx == DM_BOUNDARY_MIRROR) { /* Handle the mirror boundary case */
    for (i=0; i<sDist; i++) {  /* Left ghost points */
      if ((xs-sDist+i)>=0) idx[nn++] = xs-sDist+i;
      else                 idx[nn++] = sDist - i;
    }

    for (i=0; i<x; i++) idx[nn++] = xs + i;  /* Non-ghost points */

    for (i=0; i<sDist; i++) { /* Right ghost points */
      if ((xe+i)<M) idx[nn++] = xe+i;
      else          idx[nn++] = M - (i + 2);
    }
  } else {      /* Now do all cases with no periodicity */
    if (0 <= xs-sDist) {
      for (i=0; i<sDist; i++) idx[nn++] = xs - sDist + i;
    } else {
      for (i=0; i<xs; i++) idx[nn++] = i;
    }

    for (i=0; i<x; i++) idx[nn++] = xs + i;

    if ((xe+sDist)<=M) {
      for (i=0; i<sDist; i++) idx[nn++] = xe+i;
    } else {
      for (i=xe; i<M; i++) idx[nn++] = i;
    }
  }

  PetscCall(ISCreateBlock(comm,dof,nn-IXs+Xs,&idx[IXs-Xs],PETSC_USE_POINTER,&from));
  PetscCall(VecScatterCreate(global,from,local,to,&gtol));
  PetscCall(PetscLogObjectParent((PetscObject)da,(PetscObject)gtol));
  PetscCall(ISDestroy(&to));
  PetscCall(ISDestroy(&from));
  PetscCall(VecDestroy(&local));
  PetscCall(VecDestroy(&global));

  dd->xs = dof*xs; dd->xe = dof*xe; dd->ys = 0; dd->ye = 1; dd->zs = 0; dd->ze = 1;
  dd->Xs = dof*Xs; dd->Xe = dof*Xe; dd->Ys = 0; dd->Ye = 1; dd->Zs = 0; dd->Ze = 1;

  dd->gtol      = gtol;
  dd->base      = dof*xs;
  da->ops->view = DMView_DA_1d;

  /*
     Set the local to global ordering in the global vector, this allows use
     of VecSetValuesLocal().
  */
  for (i=0; i<Xe-IXe; i++) idx[nn++] = -1; /* pad with -1s if needed for ghosted case */

  PetscCall(ISLocalToGlobalMappingCreate(comm,dof,nn,idx,PETSC_OWN_POINTER,&da->ltogmap));
  PetscCall(PetscLogObjectParent((PetscObject)da,(PetscObject)da->ltogmap));

  PetscFunctionReturn(0);
}

/*@C
   DMDACreate1d - Creates an object that will manage the communication of one-dimensional
   regular array data that is distributed across some processors.

   Collective

   Input Parameters:
+  comm - MPI communicator
.  bx - type of ghost cells at the boundary the array should have, if any. Use
        DM_BOUNDARY_NONE, DM_BOUNDARY_GHOSTED, or DM_BOUNDARY_PERIODIC.
.  M - global dimension of the array (that is, the number of grid points); may also be set
       from the command line with -da_grid_x <M>
.  dof - number of degrees of freedom per node
.  s - stencil width
-  lx - array containing the number of nodes in the X direction on each processor,
        or NULL. If non-null, it must have the same length as the number of processes in the MPI_Comm,
        and the sum of its entries must equal M

   Output Parameter:
.  da - the resulting distributed array object

   Options Database Keys:
+  -dm_view - Calls DMView() at the conclusion of DMDACreate1d()
.  -da_grid_x <nx> - number of grid points in x direction
.  -da_refine_x <rx> - refinement factor
-  -da_refine <n> - refine the DMDA n times before creating it

   Level: beginner

   Notes:
   The array data itself is NOT stored in the DMDA, it is stored in Vec objects;
   The appropriate vector objects can be obtained with calls to DMCreateGlobalVector()
   and DMCreateLocalVector() and calls to VecDuplicate() if more are needed.

   You must call DMSetUp() after this call before using this DM.
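
   A minimal call sequence is sketched below; the grid size, dof, stencil width, and
   boundary type are illustrative choices, not defaults:
.vb
   DM da;
   PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, 128, 1, 1, NULL, &da));
   PetscCall(DMSetFromOptions(da));
   PetscCall(DMSetUp(da));
   ... use the DM, for example via DMCreateGlobalVector() and DMCreateLocalVector() ...
   PetscCall(DMDestroy(&da));
.ve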

   If you wish to use the options database to change values in the DMDA, call DMSetFromOptions() after this call
   but before DMSetUp().

.seealso: DMDestroy(), DMView(), DMDACreate2d(), DMDACreate3d(), DMGlobalToLocalBegin(), DMDASetRefinementFactor(),
          DMGlobalToLocalEnd(), DMLocalToGlobalBegin(), DMLocalToLocalBegin(), DMLocalToLocalEnd(), DMDAGetRefinementFactor(),
          DMDAGetInfo(), DMCreateGlobalVector(), DMCreateLocalVector(), DMDACreateNaturalVector(), DMLoad(), DMDAGetOwnershipRanges(),
          DMStagCreate1d()

@*/
PetscErrorCode DMDACreate1d(MPI_Comm comm, DMBoundaryType bx, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DM *da)
{
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCall(DMDACreate(comm, da));
  PetscCall(DMSetDimension(*da, 1));
  PetscCall(DMDASetSizes(*da, M, 1, 1));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMDASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE));
  PetscCall(DMDASetBoundaryType(*da, bx, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE));
  PetscCall(DMDASetDof(*da, dof));
  PetscCall(DMDASetStencilWidth(*da, s));
  PetscCall(DMDASetOwnershipRanges(*da, lx, NULL, NULL));
  PetscFunctionReturn(0);
}