/*
  Code for manipulating distributed regular arrays in parallel.
*/

#include <petsc-private/daimpl.h>    /*I   "petscdmda.h"   I*/

#undef __FUNCT__
#define __FUNCT__ "DMCreateCoordinateDM_DA"
/*
  DMCreateCoordinateDM_DA - Builds the DMDA that will hold the coordinate vectors for dm.

  The coordinate DM reproduces dm's parallel layout (per-rank ownership widths)
  but carries dim degrees of freedom per node, one per coordinate direction.
  Collective: performs MPI_Allgather on dm's communicator.
*/
PetscErrorCode DMCreateCoordinateDM_DA(DM dm, DM *cdm)
{
  DM_DA          *da = (DM_DA*) dm->data;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject) dm)->comm, &size);CHKERRQ(ierr);
  if (da->dim == 1) {
    PetscInt         s,m,*lc,l;
    DMDABoundaryType bx;

    ierr = DMDAGetInfo(dm,0,&m,0,0,0,0,0,0,&s,&bx,0,0,0);CHKERRQ(ierr);
    ierr = DMDAGetCorners(dm,0,0,0,&l,0,0);CHKERRQ(ierr);
    /* gather every rank's local width so the coordinate DM gets the same layout */
    ierr = PetscMalloc(size * sizeof(PetscInt), &lc);CHKERRQ(ierr);
    ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)dm)->comm);CHKERRQ(ierr);
    ierr = DMDACreate1d(((PetscObject)dm)->comm,bx,m,1,s,lc,cdm);CHKERRQ(ierr);
    ierr = PetscFree(lc);CHKERRQ(ierr);
  } else if (da->dim == 2) {
    PetscInt         i,s,m,*lc,*ld,l,k,n,M,N;
    DMDABoundaryType bx,by;

    ierr = DMDAGetInfo(dm,0,&m,&n,0,&M,&N,0,0,&s,&bx,&by,0,0);CHKERRQ(ierr);
    ierr = DMDAGetCorners(dm,0,0,0,&l,&k,0);CHKERRQ(ierr);
    ierr = PetscMalloc2(size,PetscInt,&lc,size,PetscInt,&ld);CHKERRQ(ierr);
    /* only first M values in lc matter: ranks are ordered x-fastest, so ranks
       0..M-1 supply the x ownership widths */
    ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)dm)->comm);CHKERRQ(ierr);
    /* every Mth value in ld matters; compact them to the front (safe in place:
       the read index M*i never falls behind the write index i) */
    ierr = MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,((PetscObject)dm)->comm);CHKERRQ(ierr);
    for (i = 0; i < N; ++i) ld[i] = ld[M*i];
    if (bx == DMDA_BOUNDARY_MIRROR || by == DMDA_BOUNDARY_MIRROR) {
      /* NOTE(review): mirror boundaries get a STAR stencil while every other
         boundary type gets BOX -- presumably a limitation of mirror ghost
         updates; confirm before changing */
      ierr = DMDACreate2d(((PetscObject)dm)->comm,bx,by,DMDA_STENCIL_STAR,m,n,M,N,2,s,lc,ld,cdm);CHKERRQ(ierr);
    } else {
      ierr = DMDACreate2d(((PetscObject)dm)->comm,bx,by,DMDA_STENCIL_BOX,m,n,M,N,2,s,lc,ld,cdm);CHKERRQ(ierr);
    }
    ierr = PetscFree2(lc,ld);CHKERRQ(ierr);
  } else if (da->dim == 3) {
    PetscInt         i,s,m,*lc,*ld,*le,l,k,q,n,M,N,P,p;
    DMDABoundaryType bx,by,bz;

    ierr = DMDAGetInfo(dm,0,&m,&n,&p,&M,&N,&P,0,&s,&bx,&by,&bz,0);CHKERRQ(ierr);
    ierr = DMDAGetCorners(dm,0,0,0,&l,&k,&q);CHKERRQ(ierr);
    ierr = PetscMalloc3(size,PetscInt,&lc,size,PetscInt,&ld,size,PetscInt,&le);CHKERRQ(ierr);
    /* only first M values in lc matter */
    ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)dm)->comm);CHKERRQ(ierr);
    /* every Mth value in ld matters */
    ierr = MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,((PetscObject)dm)->comm);CHKERRQ(ierr);
    for (i = 0; i < N; ++i) ld[i] = ld[M*i];
    /* every (M*N)th value in le matters */
    ierr = MPI_Allgather(&q,1,MPIU_INT,le,1,MPIU_INT,((PetscObject)dm)->comm);CHKERRQ(ierr);
    for (i = 0; i < P; ++i) le[i] = le[M*N*i];
    ierr = DMDACreate3d(((PetscObject)dm)->comm,bx,by,bz,DMDA_STENCIL_BOX,m,n,p,M,N,P,3,s,lc,ld,le,cdm);CHKERRQ(ierr);
    ierr = PetscFree3(lc,ld,le);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDASetFieldName"
/*@C
   DMDASetFieldName - Sets the names of individual field components in multicomponent
   vectors associated with a DMDA.

   Not Collective

   Input Parameters:
+  da - the distributed array
.  nf - field number for the DMDA (0, 1, ...
dof-1), where dof indicates the 77 number of degrees of freedom per node within the DMDA 78 - names - the name of the field (component) 79 80 Level: intermediate 81 82 .keywords: distributed array, get, component name 83 84 .seealso: DMDAGetFieldName(), DMDASetCoordinateName(), DMDAGetCoordinateName() 85 @*/ 86 PetscErrorCode DMDASetFieldName(DM da,PetscInt nf,const char name[]) 87 { 88 PetscErrorCode ierr; 89 DM_DA *dd = (DM_DA*)da->data; 90 91 PetscFunctionBegin; 92 PetscValidHeaderSpecific(da,DM_CLASSID,1); 93 if (nf < 0 || nf >= dd->w) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf); 94 ierr = PetscFree(dd->fieldname[nf]);CHKERRQ(ierr); 95 ierr = PetscStrallocpy(name,&dd->fieldname[nf]);CHKERRQ(ierr); 96 PetscFunctionReturn(0); 97 } 98 99 #undef __FUNCT__ 100 #define __FUNCT__ "DMDAGetFieldName" 101 /*@C 102 DMDAGetFieldName - Gets the names of individual field components in multicomponent 103 vectors associated with a DMDA. 104 105 Not Collective 106 107 Input Parameter: 108 + da - the distributed array 109 - nf - field number for the DMDA (0, 1, ... dof-1), where dof indicates the 110 number of degrees of freedom per node within the DMDA 111 112 Output Parameter: 113 . 
names - the name of the field (component) 114 115 Level: intermediate 116 117 .keywords: distributed array, get, component name 118 119 .seealso: DMDASetFieldName(), DMDASetCoordinateName(), DMDAGetCoordinateName() 120 @*/ 121 PetscErrorCode DMDAGetFieldName(DM da,PetscInt nf,const char **name) 122 { 123 DM_DA *dd = (DM_DA*)da->data; 124 125 PetscFunctionBegin; 126 PetscValidHeaderSpecific(da,DM_CLASSID,1); 127 PetscValidPointer(name,3); 128 if (nf < 0 || nf >= dd->w) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf); 129 *name = dd->fieldname[nf]; 130 PetscFunctionReturn(0); 131 } 132 133 #undef __FUNCT__ 134 #define __FUNCT__ "DMDASetCoordinateName" 135 /*@C 136 DMDASetCoordinateName - Sets the name of the coordinate directions associated with a DMDA, for example "x" or "y" 137 138 Not Collective 139 140 Input Parameters: 141 + da - the distributed array 142 . nf - coordinate number for the DMDA (0, 1, ... dim-1), 143 - name - the name of the coordinate 144 145 Level: intermediate 146 147 .keywords: distributed array, get, component name 148 149 .seealso: DMDAGetCoordinateName(), DMDASetFieldName(), DMDAGetFieldName() 150 @*/ 151 PetscErrorCode DMDASetCoordinateName(DM da,PetscInt nf,const char name[]) 152 { 153 PetscErrorCode ierr; 154 DM_DA *dd = (DM_DA*)da->data; 155 156 PetscFunctionBegin; 157 PetscValidHeaderSpecific(da,DM_CLASSID,1); 158 if (nf < 0 || nf >= dd->dim) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid coordinate number: %D",nf); 159 ierr = PetscFree(dd->coordinatename[nf]);CHKERRQ(ierr); 160 ierr = PetscStrallocpy(name,&dd->coordinatename[nf]);CHKERRQ(ierr); 161 PetscFunctionReturn(0); 162 } 163 164 #undef __FUNCT__ 165 #define __FUNCT__ "DMDAGetCoordinateName" 166 /*@C 167 DMDAGetCoordinateName - Gets the name of a coodinate direction associated with a DMDA. 168 169 Not Collective 170 171 Input Parameter: 172 + da - the distributed array 173 - nf - number for the DMDA (0, 1, ... 
dim-1) 174 175 Output Parameter: 176 . names - the name of the coordinate direction 177 178 Level: intermediate 179 180 .keywords: distributed array, get, component name 181 182 .seealso: DMDASetCoordinateName(), DMDASetFieldName(), DMDAGetFieldName() 183 @*/ 184 PetscErrorCode DMDAGetCoordinateName(DM da,PetscInt nf,const char **name) 185 { 186 DM_DA *dd = (DM_DA*)da->data; 187 188 PetscFunctionBegin; 189 PetscValidHeaderSpecific(da,DM_CLASSID,1); 190 PetscValidPointer(name,3); 191 if (nf < 0 || nf >= dd->dim) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid coordinate number: %D",nf); 192 *name = dd->coordinatename[nf]; 193 PetscFunctionReturn(0); 194 } 195 196 #undef __FUNCT__ 197 #define __FUNCT__ "DMDAGetCorners" 198 /*@ 199 DMDAGetCorners - Returns the global (x,y,z) indices of the lower left 200 corner of the local region, excluding ghost points. 201 202 Not Collective 203 204 Input Parameter: 205 . da - the distributed array 206 207 Output Parameters: 208 + x,y,z - the corner indices (where y and z are optional; these are used 209 for 2D and 3D problems) 210 - m,n,p - widths in the corresponding directions (where n and p are optional; 211 these are used for 2D and 3D problems) 212 213 Note: 214 The corner information is independent of the number of degrees of 215 freedom per node set with the DMDACreateXX() routine. Thus the x, y, z, and 216 m, n, p can be thought of as coordinates on a logical grid, where each 217 grid point has (potentially) several degrees of freedom. 218 Any of y, z, n, and p can be passed in as PETSC_NULL if not needed. 
219 220 Level: beginner 221 222 .keywords: distributed array, get, corners, nodes, local indices 223 224 .seealso: DMDAGetGhostCorners(), DMDAGetOwnershipRanges() 225 @*/ 226 PetscErrorCode DMDAGetCorners(DM da,PetscInt *x,PetscInt *y,PetscInt *z,PetscInt *m,PetscInt *n,PetscInt *p) 227 { 228 PetscInt w; 229 DM_DA *dd = (DM_DA*)da->data; 230 231 PetscFunctionBegin; 232 PetscValidHeaderSpecific(da,DM_CLASSID,1); 233 /* since the xs, xe ... have all been multiplied by the number of degrees 234 of freedom per cell, w = dd->w, we divide that out before returning.*/ 235 w = dd->w; 236 if (x) *x = dd->xs/w + dd->xo; if (m) *m = (dd->xe - dd->xs)/w; 237 /* the y and z have NOT been multiplied by w */ 238 if (y) *y = dd->ys + dd->yo; if (n) *n = (dd->ye - dd->ys); 239 if (z) *z = dd->zs + dd->zo; if (p) *p = (dd->ze - dd->zs); 240 PetscFunctionReturn(0); 241 } 242 243 #undef __FUNCT__ 244 #define __FUNCT__ "DMDAGetLocalBoundingBox" 245 /*@ 246 DMDAGetLocalBoundingBox - Returns the local bounding box for the DMDA. 247 248 Not Collective 249 250 Input Parameter: 251 . 
da - the distributed array 252 253 Output Parameters: 254 + lmin - local minimum coordinates (length dim, optional) 255 - lmax - local maximim coordinates (length dim, optional) 256 257 Level: beginner 258 259 .keywords: distributed array, get, coordinates 260 261 .seealso: DMDAGetCoordinateDA(), DMGetCoordinates(), DMDAGetBoundingBox() 262 @*/ 263 PetscErrorCode DMDAGetLocalBoundingBox(DM da,PetscReal lmin[],PetscReal lmax[]) 264 { 265 PetscErrorCode ierr; 266 Vec coords = PETSC_NULL; 267 PetscInt dim,i,j; 268 const PetscScalar *local_coords; 269 PetscReal min[3]={PETSC_MAX_REAL,PETSC_MAX_REAL,PETSC_MAX_REAL},max[3]={PETSC_MIN_REAL,PETSC_MIN_REAL,PETSC_MIN_REAL}; 270 PetscInt N,Ni; 271 DM_DA *dd = (DM_DA*)da->data; 272 273 PetscFunctionBegin; 274 PetscValidHeaderSpecific(da,DM_CLASSID,1); 275 dim = dd->dim; 276 ierr = DMGetCoordinates(da,&coords);CHKERRQ(ierr); 277 if (coords) { 278 ierr = VecGetArrayRead(coords,&local_coords);CHKERRQ(ierr); 279 ierr = VecGetLocalSize(coords,&N);CHKERRQ(ierr); 280 Ni = N/dim; 281 for (i=0; i<Ni; i++) { 282 for (j=0; j<3; j++) { 283 min[j] = j < dim ? PetscMin(min[j],PetscRealPart(local_coords[i*dim+j])) : 0; 284 max[j] = j < dim ? PetscMax(max[j],PetscRealPart(local_coords[i*dim+j])) : 0; 285 } 286 } 287 ierr = VecRestoreArrayRead(coords,&local_coords);CHKERRQ(ierr); 288 } else { /* Just use grid indices */ 289 DMDALocalInfo info; 290 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 291 min[0] = info.xs; 292 min[1] = info.ys; 293 min[2] = info.zs; 294 max[0] = info.xs + info.xm-1; 295 max[1] = info.ys + info.ym-1; 296 max[2] = info.zs + info.zm-1; 297 } 298 if (lmin) {ierr = PetscMemcpy(lmin,min,dim*sizeof(PetscReal));CHKERRQ(ierr);} 299 if (lmax) {ierr = PetscMemcpy(lmax,max,dim*sizeof(PetscReal));CHKERRQ(ierr);} 300 PetscFunctionReturn(0); 301 } 302 303 #undef __FUNCT__ 304 #define __FUNCT__ "DMDAGetBoundingBox" 305 /*@ 306 DMDAGetBoundingBox - Returns the global bounding box for the DMDA. 
307 308 Collective on DMDA 309 310 Input Parameter: 311 . da - the distributed array 312 313 Output Parameters: 314 + gmin - global minimum coordinates (length dim, optional) 315 - gmax - global maximim coordinates (length dim, optional) 316 317 Level: beginner 318 319 .keywords: distributed array, get, coordinates 320 321 .seealso: DMDAGetCoordinateDA(), DMGetCoordinates(), DMDAGetLocalBoundingBox() 322 @*/ 323 PetscErrorCode DMDAGetBoundingBox(DM da,PetscReal gmin[],PetscReal gmax[]) 324 { 325 PetscErrorCode ierr; 326 PetscMPIInt count; 327 PetscReal lmin[3],lmax[3]; 328 DM_DA *dd = (DM_DA*)da->data; 329 330 PetscFunctionBegin; 331 PetscValidHeaderSpecific(da,DM_CLASSID,1); 332 ierr = PetscMPIIntCast(dd->dim,&count);CHKERRQ(ierr); 333 ierr = DMDAGetLocalBoundingBox(da,lmin,lmax);CHKERRQ(ierr); 334 if (gmin) {ierr = MPI_Allreduce(lmin,gmin,count,MPIU_REAL,MPIU_MIN,((PetscObject)da)->comm);CHKERRQ(ierr);} 335 if (gmax) {ierr = MPI_Allreduce(lmax,gmax,count,MPIU_REAL,MPIU_MAX,((PetscObject)da)->comm);CHKERRQ(ierr);} 336 PetscFunctionReturn(0); 337 } 338 339 #undef __FUNCT__ 340 #define __FUNCT__ "DMDAGetReducedDMDA" 341 /*@ 342 DMDAGetReducedDMDA - Gets the DMDA with the same layout but with fewer or more fields 343 344 Collective on DMDA 345 346 Input Parameter: 347 + da - the distributed array 348 . nfields - number of fields in new DMDA 349 350 Output Parameter: 351 . 
nda - the new DMDA

   Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DMDAGetGhostCorners(), DMSetCoordinates(), DMDASetUniformCoordinates(), DMGetCoordinates(), DMDAGetGhostedCoordinates()
@*/
PetscErrorCode DMDAGetReducedDMDA(DM da,PetscInt nfields,DM *nda)
{
  PetscErrorCode   ierr;
  DM_DA            *dd = (DM_DA*)da->data;
  PetscInt         s,m,n,p,M,N,P,dim,Mo,No,Po;
  const PetscInt   *lx,*ly,*lz;
  DMDABoundaryType bx,by,bz;
  DMDAStencilType  stencil_type;
  PetscInt         ox,oy,oz;
  PetscInt         cl,rl;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(nda,3);
  /* mirror every layout parameter of the input DMDA so the new one matches */
  dim          = dd->dim;
  M            = dd->M;
  N            = dd->N;
  P            = dd->P;
  m            = dd->m;
  n            = dd->n;
  p            = dd->p;
  s            = dd->s;
  bx           = dd->bx;
  by           = dd->by;
  bz           = dd->bz;
  stencil_type = dd->stencil_type;

  ierr = DMDAGetOwnershipRanges(da,&lx,&ly,&lz);CHKERRQ(ierr);
  if (dim == 1) {
    /* use lx from DMDAGetOwnershipRanges() (the same array as dd->lx) for
       consistency with the 2d and 3d branches below */
    ierr = DMDACreate1d(((PetscObject)da)->comm,bx,M,nfields,s,lx,nda);CHKERRQ(ierr);
  } else if (dim == 2) {
    ierr = DMDACreate2d(((PetscObject)da)->comm,bx,by,stencil_type,M,N,m,n,nfields,s,lx,ly,nda);CHKERRQ(ierr);
  } else if (dim == 3) {
    ierr = DMDACreate3d(((PetscObject)da)->comm,bx,by,bz,stencil_type,M,N,P,m,n,p,nfields,s,lx,ly,lz,nda);CHKERRQ(ierr);
  } else SETERRQ1(((PetscObject)da)->comm,PETSC_ERR_SUP,"Cannot create reduced DMDA for dimension %D",dim);

  /* the reduced DMDA shares (references) the original coordinate vector */
  if (da->coordinates) {
    ierr = PetscObjectReference((PetscObject)da->coordinates);CHKERRQ(ierr);

    (*nda)->coordinates = da->coordinates;
  }

  /* allow for getting a reduced DA corresponding to a domain decomposition */
  ierr = DMDAGetOffset(da,&ox,&oy,&oz,&Mo,&No,&Po);CHKERRQ(ierr);
  ierr = DMDASetOffset(*nda,ox,oy,oz,Mo,No,Po);CHKERRQ(ierr);

  /* allow for getting a reduced DA corresponding to a coarsened DA */
  ierr = DMGetCoarsenLevel(da,&cl);CHKERRQ(ierr);
  ierr = DMGetRefineLevel(da,&rl);CHKERRQ(ierr);

  (*nda)->levelup   = rl;
  (*nda)->leveldown = cl;
  PetscFunctionReturn(0);
}