/*
  Code for manipulating distributed regular arrays in parallel.
*/

#include <petsc-private/daimpl.h>    /*I   "petscdmda.h"   I*/

#undef __FUNCT__
#define __FUNCT__ "DMDASetCoordinates"
/*@
   DMDASetCoordinates - Sets into the DMDA a vector that indicates the
      coordinates of the local nodes (NOT including ghost nodes).

   Collective on DMDA

   Input Parameters:
+  da - the distributed array
-  c - coordinate vector

   Notes:
    The coordinates should NOT include those for all ghost points.

    The DMDA takes its own reference to c, so the caller keeps ownership of
    its reference. Any previously set coordinate vector is destroyed, and any
    cached ghosted coordinates are invalidated (rebuilt on demand by
    DMDAGetGhostedCoordinates()).

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DMDASetGhostedCoordinates(), DMDAGetGhostCorners(), DMDAGetCoordinates(), DMDASetUniformCoordinates(), DMDAGetGhostedCoordinates(), DMDAGetCoordinateDA()
@*/
PetscErrorCode  DMDASetCoordinates(DM da,Vec c)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;
  PetscInt       bs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  ierr = VecGetBlockSize(c,&bs);CHKERRQ(ierr);
  /* I am relaxing this requirement, so that we can use a PetscSection */
  /* if (bs != dd->dim) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ARG_INCOMP,"Block size of vector must match dimension of DMDA"); */
  /* take our reference BEFORE destroying the old vector, in case c is the same Vec */
  ierr = PetscObjectReference((PetscObject)c);CHKERRQ(ierr);
  ierr = VecDestroy(&dd->coordinates);CHKERRQ(ierr);
  dd->coordinates = c;
  /* the cached ghosted coordinates are now stale; drop them so they get rebuilt */
  ierr = VecDestroy(&dd->ghosted_coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDASetGhostedCoordinates"
/*@
   DMDASetGhostedCoordinates - Sets into the DMDA a vector that indicates the
      coordinates of the local nodes, including ghost nodes.
   Collective on DMDA

   Input Parameters:
+  da - the distributed array
-  c - coordinate vector

   Notes:
    The coordinates of interior ghost points can be set using DMDASetCoordinates()
    followed by DMDAGetGhostedCoordinates(). This is intended to enable the setting
    of ghost coordinates outside of the domain.

    Non-ghosted coordinates, if set, are assumed still valid.

    The DMDA takes its own reference to c; the caller keeps ownership of its
    reference.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DMDASetCoordinates(), DMDAGetGhostCorners(), DMDAGetCoordinates(), DMDASetUniformCoordinates(), DMDAGetGhostedCoordinates(), DMDAGetCoordinateDA()
@*/
PetscErrorCode  DMDASetGhostedCoordinates(DM da,Vec c)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;
  PetscInt       bs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  ierr = VecGetBlockSize(c,&bs);CHKERRQ(ierr);
  /* unlike DMDASetCoordinates(), here the block size MUST equal the spatial dimension */
  if (bs != dd->dim) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ARG_INCOMP,"Block size of vector must match dimension of DMDA");
  /* take our reference BEFORE destroying the old vector, in case c is the same Vec */
  ierr = PetscObjectReference((PetscObject)c);CHKERRQ(ierr);
  ierr = VecDestroy(&dd->ghosted_coordinates);CHKERRQ(ierr);
  dd->ghosted_coordinates = c;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAGetCoordinates"
/*@
   DMDAGetCoordinates - Gets the node coordinates associated with a DMDA.

   Not Collective

   Input Parameter:
.  da - the distributed array

   Output Parameter:
.  c - coordinate vector

   Note:
    Each process has only the coordinates for its local nodes (does NOT have the
    coordinates for the ghost nodes).

    For two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
    and (x_0,y_0,z_0,x_1,y_1,z_1...)
  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetGhostedCoordinates(), DMDAGetCoordinateDA()
@*/
PetscErrorCode  DMDAGetCoordinates(DM da,Vec *c)
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* borrowed reference: the caller must NOT destroy the returned vector;
     NULL if DMDASetCoordinates()/DMDASetUniformCoordinates() was never called */
  *c = dd->coordinates;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAGetCoordinateDA"
/*@
   DMDAGetCoordinateDA - Gets the DMDA that scatters between global and local DMDA coordinates

   Collective on DMDA

   Input Parameter:
.  da - the distributed array

   Output Parameter:
.  dac - coordinate DMDA

   Note:
    The coordinate DMDA is created lazily on the first call and cached on da;
    it is a borrowed reference that the caller must NOT destroy.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetCoordinates(), DMDAGetGhostedCoordinates()
@*/
PetscErrorCode  DMDAGetCoordinateDA(DM da,DM *cda)
{
  PetscMPIInt    size;
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  if (!dd->da_coordinates) {
    /* build a DA with the same grid, process layout and stencil width as da,
       but with dim dof per node (one per coordinate direction) */
    ierr = MPI_Comm_size(((PetscObject)da)->comm,&size);CHKERRQ(ierr);
    if (dd->dim == 1) {
      PetscInt         s,m,*lc,l;
      DMDABoundaryType bx;
      ierr = DMDAGetInfo(da,0,&m,0,0,0,0,0,0,&s,&bx,0,0,0);CHKERRQ(ierr);
      ierr = DMDAGetCorners(da,0,0,0,&l,0,0);CHKERRQ(ierr);
      ierr = PetscMalloc(size*sizeof(PetscInt),&lc);CHKERRQ(ierr);
      /* gather every rank's local width so the ownership ranges match da exactly */
      ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
      ierr = DMDACreate1d(((PetscObject)da)->comm,bx,m,1,s,lc,&dd->da_coordinates);CHKERRQ(ierr);
      ierr = PetscFree(lc);CHKERRQ(ierr);
    } else if (dd->dim == 2) {
      PetscInt         i,s,m,*lc,*ld,l,k,n,M,N;
      DMDABoundaryType bx,by;
      ierr = 
DMDAGetInfo(da,0,&m,&n,0,&M,&N,0,0,&s,&bx,&by,0,0);CHKERRQ(ierr);
      ierr = DMDAGetCorners(da,0,0,0,&l,&k,0);CHKERRQ(ierr);
      ierr = PetscMalloc2(size,PetscInt,&lc,size,PetscInt,&ld);CHKERRQ(ierr);
      /* only first M values in lc matter: ranks are ordered x-fastest, so
         ranks 0..M-1 cover one full row and give the x ownership widths */
      ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
      /* every Mth value in ld matters: rank j*M is the first of process row j,
         so compact those entries into ld[0..N-1] for DMDACreate2d() */
      ierr = MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
      for ( i=0; i<N; i++) {
        ld[i] = ld[M*i];
      }
      /* 2 dof per node: interlaced (x,y) coordinates */
      ierr = DMDACreate2d(((PetscObject)da)->comm,bx,by,DMDA_STENCIL_BOX,m,n,M,N,2,s,lc,ld,&dd->da_coordinates);CHKERRQ(ierr);
      ierr = PetscFree2(lc,ld);CHKERRQ(ierr);
    } else if (dd->dim == 3) {
      PetscInt         i,s,m,*lc,*ld,*le,l,k,q,n,M,N,P,p;
      DMDABoundaryType bx,by,bz;
      ierr = DMDAGetInfo(da,0,&m,&n,&p,&M,&N,&P,0,&s,&bx,&by,&bz,0);CHKERRQ(ierr);
      ierr = DMDAGetCorners(da,0,0,0,&l,&k,&q);CHKERRQ(ierr);
      ierr = PetscMalloc3(size,PetscInt,&lc,size,PetscInt,&ld,size,PetscInt,&le);CHKERRQ(ierr);
      /* only first M values in lc matter (see 2d case) */
      ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
      /* every Mth value in ld matters: rank j*M holds process row j's y width */
      ierr = MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
      for ( i=0; i<N; i++) {
        ld[i] = ld[M*i];
      }
      /* every (M*N)th value in le matters: rank k*M*N holds plane k's z width */
      ierr = MPI_Allgather(&q,1,MPIU_INT,le,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
      for ( i=0; i<P; i++) {
        le[i] = le[M*N*i];
      }
      /* 3 dof per node: interlaced (x,y,z) coordinates */
      ierr = DMDACreate3d(((PetscObject)da)->comm,bx,by,bz,DMDA_STENCIL_BOX,m,n,p,M,N,P,3,s,lc,ld,le,&dd->da_coordinates);CHKERRQ(ierr);
      ierr = PetscFree3(lc,ld,le);CHKERRQ(ierr);
    }
  }
  /* borrowed reference: da keeps ownership of the cached coordinate DA */
  *cda = dd->da_coordinates;
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMDAGetGhostedCoordinates"
/*@
   DMDAGetGhostedCoordinates - Gets the node coordinates associated with a DMDA.

   Collective on DMDA

   Input Parameter:
.
da - the distributed array

   Output Parameter:
.  c - coordinate vector

   Note:
    Each process has only the coordinates for its local AND ghost nodes.

    For two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
    and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetCoordinates(), DMDAGetCoordinateDA()
@*/
PetscErrorCode  DMDAGetGhostedCoordinates(DM da,Vec *c)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(c,2);
  if (!dd->coordinates) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ORDER,"You must call DMDASetCoordinates() before this call");
  /* lazily build the ghosted vector by scattering the global coordinates
     into a local (ghosted) vector of the coordinate DA; cached afterwards */
  if (!dd->ghosted_coordinates) {
    DM dac;
    ierr = DMDAGetCoordinateDA(da,&dac);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(dac,&dd->ghosted_coordinates);CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(dac,dd->coordinates,INSERT_VALUES,dd->ghosted_coordinates);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(dac,dd->coordinates,INSERT_VALUES,dd->ghosted_coordinates);CHKERRQ(ierr);
  }
  /* borrowed reference: the caller must NOT destroy the returned vector */
  *c = dd->ghosted_coordinates;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDASetFieldName"
/*@C
   DMDASetFieldName - Sets the names of individual field components in multicomponent
   vectors associated with a DMDA.

   Not Collective

   Input Parameters:
+  da - the distributed array
.  nf - field number for the DMDA (0, 1, ...
dof-1), where dof indicates the
        number of degrees of freedom per node within the DMDA
-  name - the name of the field (component); the DMDA stores its own copy

  Level: intermediate

.keywords: distributed array, get, component name

.seealso: DMDAGetFieldName()
@*/
PetscErrorCode  DMDASetFieldName(DM da,PetscInt nf,const char name[])
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  if (nf < 0 || nf >= dd->w) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf);
  /* free any previously set name, then store a private copy of the new one */
  ierr = PetscFree(dd->fieldname[nf]);CHKERRQ(ierr);
  ierr = PetscStrallocpy(name,&dd->fieldname[nf]);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAGetFieldName"
/*@C
   DMDAGetFieldName - Gets the names of individual field components in multicomponent
   vectors associated with a DMDA.

   Not Collective

   Input Parameters:
+  da - the distributed array
-  nf - field number for the DMDA (0, 1, ... dof-1), where dof indicates the
        number of degrees of freedom per node within the DMDA

   Output Parameter:
.  name - the name of the field (component); borrowed pointer, do not free

   Level: intermediate

.keywords: distributed array, get, component name

.seealso: DMDASetFieldName()
@*/
PetscErrorCode  DMDAGetFieldName(DM da,PetscInt nf,const char **name)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(name,3);
  if (nf < 0 || nf >= dd->w) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf);
  *name = dd->fieldname[nf];
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAGetCorners"
/*@
   DMDAGetCorners - Returns the global (x,y,z) indices of the lower left
   corner of the local region, excluding ghost points.
321 322 Not Collective 323 324 Input Parameter: 325 . da - the distributed array 326 327 Output Parameters: 328 + x,y,z - the corner indices (where y and z are optional; these are used 329 for 2D and 3D problems) 330 - m,n,p - widths in the corresponding directions (where n and p are optional; 331 these are used for 2D and 3D problems) 332 333 Note: 334 The corner information is independent of the number of degrees of 335 freedom per node set with the DMDACreateXX() routine. Thus the x, y, z, and 336 m, n, p can be thought of as coordinates on a logical grid, where each 337 grid point has (potentially) several degrees of freedom. 338 Any of y, z, n, and p can be passed in as PETSC_NULL if not needed. 339 340 Level: beginner 341 342 .keywords: distributed array, get, corners, nodes, local indices 343 344 .seealso: DMDAGetGhostCorners(), DMDAGetOwnershipRanges() 345 @*/ 346 PetscErrorCode DMDAGetCorners(DM da,PetscInt *x,PetscInt *y,PetscInt *z,PetscInt *m,PetscInt *n,PetscInt *p) 347 { 348 PetscInt w; 349 DM_DA *dd = (DM_DA*)da->data; 350 351 PetscFunctionBegin; 352 PetscValidHeaderSpecific(da,DM_CLASSID,1); 353 /* since the xs, xe ... have all been multiplied by the number of degrees 354 of freedom per cell, w = dd->w, we divide that out before returning.*/ 355 w = dd->w; 356 if (x) *x = dd->xs/w; if(m) *m = (dd->xe - dd->xs)/w; 357 /* the y and z have NOT been multiplied by w */ 358 if (y) *y = dd->ys; if (n) *n = (dd->ye - dd->ys); 359 if (z) *z = dd->zs; if (p) *p = (dd->ze - dd->zs); 360 PetscFunctionReturn(0); 361 } 362 363 #undef __FUNCT__ 364 #define __FUNCT__ "DMDAGetLocalBoundingBox" 365 /*@ 366 DMDAGetLocalBoundingBox - Returns the local bounding box for the DMDA. 367 368 Not Collective 369 370 Input Parameter: 371 . 
da - the distributed array 372 373 Output Parameters: 374 + lmin - local minimum coordinates (length dim, optional) 375 - lmax - local maximim coordinates (length dim, optional) 376 377 Level: beginner 378 379 .keywords: distributed array, get, coordinates 380 381 .seealso: DMDAGetCoordinateDA(), DMDAGetCoordinates(), DMDAGetBoundingBox() 382 @*/ 383 PetscErrorCode DMDAGetLocalBoundingBox(DM da,PetscReal lmin[],PetscReal lmax[]) 384 { 385 PetscErrorCode ierr; 386 Vec coords = PETSC_NULL; 387 PetscInt dim,i,j; 388 const PetscScalar *local_coords; 389 PetscReal min[3]={PETSC_MAX_REAL,PETSC_MAX_REAL,PETSC_MAX_REAL},max[3]={PETSC_MIN_REAL,PETSC_MIN_REAL,PETSC_MIN_REAL}; 390 PetscInt N,Ni; 391 DM_DA *dd = (DM_DA*)da->data; 392 393 PetscFunctionBegin; 394 PetscValidHeaderSpecific(da,DM_CLASSID,1); 395 dim = dd->dim; 396 ierr = DMDAGetCoordinates(da,&coords);CHKERRQ(ierr); 397 if (coords) { 398 ierr = VecGetArrayRead(coords,&local_coords);CHKERRQ(ierr); 399 ierr = VecGetLocalSize(coords,&N);CHKERRQ(ierr); 400 Ni = N/dim; 401 for (i=0; i<Ni; i++) { 402 for (j=0; j<3; j++) { 403 min[j] = j < dim ? PetscMin(min[j],PetscRealPart(local_coords[i*dim+j])) : 0; 404 max[j] = j < dim ? PetscMax(min[j],PetscRealPart(local_coords[i*dim+j])) : 0; 405 } 406 } 407 ierr = VecRestoreArrayRead(coords,&local_coords);CHKERRQ(ierr); 408 } else { /* Just use grid indices */ 409 DMDALocalInfo info; 410 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 411 min[0] = info.xs; 412 min[1] = info.ys; 413 min[2] = info.zs; 414 max[0] = info.xs + info.xm-1; 415 max[1] = info.ys + info.ym-1; 416 max[2] = info.zs + info.zm-1; 417 } 418 if (lmin) {ierr = PetscMemcpy(lmin,min,dim*sizeof(PetscReal));CHKERRQ(ierr);} 419 if (lmax) {ierr = PetscMemcpy(lmax,max,dim*sizeof(PetscReal));CHKERRQ(ierr);} 420 PetscFunctionReturn(0); 421 } 422 423 #undef __FUNCT__ 424 #define __FUNCT__ "DMDAGetBoundingBox" 425 /*@ 426 DMDAGetBoundingBox - Returns the global bounding box for the DMDA. 
   Collective on DMDA

   Input Parameter:
.  da - the distributed array

   Output Parameters:
+  gmin - global minimum coordinates (length dim, optional)
-  gmax - global maximum coordinates (length dim, optional)

   Level: beginner

.keywords: distributed array, get, coordinates

.seealso: DMDAGetCoordinateDA(), DMDAGetCoordinates(), DMDAGetLocalBoundingBox()
@*/
PetscErrorCode  DMDAGetBoundingBox(DM da,PetscReal gmin[],PetscReal gmax[])
{
  PetscErrorCode ierr;
  PetscMPIInt    count;
  PetscReal      lmin[3],lmax[3];
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  count = PetscMPIIntCast(dd->dim);
  ierr  = DMDAGetLocalBoundingBox(da,lmin,lmax);CHKERRQ(ierr);
  /* reduce the per-process boxes over the DMDA's communicator;
     each output array is optional */
  if (gmin) {ierr = MPI_Allreduce(lmin,gmin,count,MPIU_REAL,MPIU_MIN,((PetscObject)da)->comm);CHKERRQ(ierr);}
  if (gmax) {ierr = MPI_Allreduce(lmax,gmax,count,MPIU_REAL,MPIU_MAX,((PetscObject)da)->comm);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAGetReducedDA"
/*@
   DMDAGetReducedDA - Gets the DMDA with the same layout but with fewer or more fields

   Collective on DMDA

   Input Parameters:
+  da - the distributed array
-  nfields - number of fields in new DMDA

   Output Parameter:
.
nda - the new DMDA

   Note:
    The new DMDA shares (references) the coordinate vector of da, if one has
    been set. The caller is responsible for destroying nda.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetCoordinates(), DMDAGetGhostedCoordinates()
@*/
PetscErrorCode  DMDAGetReducedDA(DM da,PetscInt nfields,DM *nda)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscInt         s,m,n,p,M,N,P,dim;
  const PetscInt   *lx,*ly,*lz;
  DMDABoundaryType bx,by,bz;
  DMDAStencilType  stencil_type;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(nda,3);
  /* clone da's layout exactly, changing only the number of dof per node */
  ierr = DMDAGetInfo(da,&dim,&M,&N,&P,&m,&n,&p,0,&s,&bx,&by,&bz,&stencil_type);CHKERRQ(ierr);
  ierr = DMDAGetOwnershipRanges(da,&lx,&ly,&lz);CHKERRQ(ierr);
  if (dim == 1) {
    /* use lx from DMDAGetOwnershipRanges() for consistency with the 2d/3d
       branches (previously reached into dd->lx directly) */
    ierr = DMDACreate1d(((PetscObject)da)->comm,bx,M,nfields,s,lx,nda);CHKERRQ(ierr);
  } else if (dim == 2) {
    ierr = DMDACreate2d(((PetscObject)da)->comm,bx,by,stencil_type,M,N,m,n,nfields,s,lx,ly,nda);CHKERRQ(ierr);
  } else if (dim == 3) {
    ierr = DMDACreate3d(((PetscObject)da)->comm,bx,by,bz,stencil_type,M,N,P,m,n,p,nfields,s,lx,ly,lz,nda);CHKERRQ(ierr);
  } else SETERRQ1(((PetscObject)da)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Unsupported DMDA dimension: %D",dim);
  /* previously an unsupported dim fell through and left *nda uninitialized */
  if (dd->coordinates) {
    DM_DA *ndd = (DM_DA*)(*nda)->data;
    /* share the (non-ghosted) coordinate vector: take a reference for nda */
    ierr = PetscObjectReference((PetscObject)dd->coordinates);CHKERRQ(ierr);
    ndd->coordinates = dd->coordinates;
  }
  PetscFunctionReturn(0);
}