1 2 /* 3 Code for manipulating distributed regular arrays in parallel. 4 */ 5 6 #include <petsc-private/daimpl.h> /*I "petscdmda.h" I*/ 7 8 #undef __FUNCT__ 9 #define __FUNCT__ "DMDASetCoordinates" 10 /*@ 11 DMDASetCoordinates - Sets into the DMDA a vector that indicates the 12 coordinates of the local nodes (NOT including ghost nodes). 13 14 Collective on DMDA 15 16 Input Parameter: 17 + da - the distributed array 18 - c - coordinate vector 19 20 Note: 21 The coordinates should NOT include those for all ghost points 22 23 Level: intermediate 24 25 .keywords: distributed array, get, corners, nodes, local indices, coordinates 26 27 .seealso: DMDASetGhostCoordinates(), DMDAGetGhostCorners(), DMDAGetCoordinates(), DMDASetUniformCoordinates(). DMDAGetGhostedCoordinates(), DMDAGetCoordinateDA() 28 @*/ 29 PetscErrorCode DMDASetCoordinates(DM da,Vec c) 30 { 31 PetscErrorCode ierr; 32 DM_DA *dd = (DM_DA*)da->data; 33 PetscInt bs; 34 35 PetscFunctionBegin; 36 PetscValidHeaderSpecific(da,DM_CLASSID,1); 37 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 38 ierr = VecGetBlockSize(c,&bs);CHKERRQ(ierr); 39 if (bs != dd->dim) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ARG_INCOMP,"Block size of vector must match dimension of DMDA"); 40 ierr = PetscObjectReference((PetscObject)c);CHKERRQ(ierr); 41 ierr = VecDestroy(&dd->coordinates);CHKERRQ(ierr); 42 dd->coordinates = c; 43 ierr = VecDestroy(&dd->ghosted_coordinates);CHKERRQ(ierr); 44 PetscFunctionReturn(0); 45 } 46 47 #undef __FUNCT__ 48 #define __FUNCT__ "DMDASetGhostedCoordinates" 49 /*@ 50 DMDASetGhostedCoordinates - Sets into the DMDA a vector that indicates the 51 coordinates of the local nodes, including ghost nodes. 52 53 Collective on DMDA 54 55 Input Parameter: 56 + da - the distributed array 57 - c - coordinate vector 58 59 Note: 60 The coordinates of interior ghost points can be set using DMDASetCoordinates() 61 followed by DMDAGetGhostedCoordinates(). 
This is intended to enable the setting 62 of ghost coordinates outside of the domain. 63 64 Non-ghosted coordinates, if set, are assumed still valid. 65 66 Level: intermediate 67 68 .keywords: distributed array, get, corners, nodes, local indices, coordinates 69 70 .seealso: DMDASetCoordinates(), DMDAGetGhostCorners(), DMDAGetCoordinates(), DMDASetUniformCoordinates(). DMDAGetGhostedCoordinates(), DMDAGetCoordinateDA() 71 @*/ 72 PetscErrorCode DMDASetGhostedCoordinates(DM da,Vec c) 73 { 74 PetscErrorCode ierr; 75 DM_DA *dd = (DM_DA*)da->data; 76 PetscInt bs; 77 78 PetscFunctionBegin; 79 PetscValidHeaderSpecific(da,DM_CLASSID,1); 80 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 81 ierr = VecGetBlockSize(c,&bs);CHKERRQ(ierr); 82 if (bs != dd->dim) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ARG_INCOMP,"Block size of vector must match dimension of DMDA"); 83 ierr = PetscObjectReference((PetscObject)c);CHKERRQ(ierr); 84 ierr = VecDestroy(&dd->ghosted_coordinates);CHKERRQ(ierr); 85 dd->ghosted_coordinates = c; 86 PetscFunctionReturn(0); 87 } 88 89 #undef __FUNCT__ 90 #define __FUNCT__ "DMDAGetCoordinates" 91 /*@ 92 DMDAGetCoordinates - Gets the node coordinates associated with a DMDA. 93 94 Not Collective 95 96 Input Parameter: 97 . da - the distributed array 98 99 Output Parameter: 100 . c - coordinate vector 101 102 Note: 103 Each process has only the coordinates for its local nodes (does NOT have the 104 coordinates for the ghost nodes). 105 106 For two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 107 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetGhostedCoordinates(), DMDAGetCoordinateDA()
@*/
PetscErrorCode DMDAGetCoordinates(DM da,Vec *c)
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* borrowed reference: the caller must not destroy it; may be null if
     coordinates were never set */
  *c = dd->coordinates;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAGetCoordinateDA"
/*@
   DMDAGetCoordinateDA - Gets the DMDA that scatters between global and local DMDA coordinates

   Collective on DMDA

   Input Parameter:
.  da - the distributed array

   Output Parameter:
.  dac - coordinate DMDA

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetCoordinates(), DMDAGetGhostedCoordinates()
@*/
PetscErrorCode DMDAGetCoordinateDA(DM da,DM *cda)
{
  PetscMPIInt    size;
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  /* the coordinate DMDA is created lazily on first request and cached;
     it mirrors da's layout but with dof == dim (one coordinate per direction) */
  if (!dd->da_coordinates) {
    ierr = MPI_Comm_size(((PetscObject)da)->comm,&size);CHKERRQ(ierr);
    if (dd->dim == 1) {
      PetscInt         s,m,*lc,l;
      DMDABoundaryType bx;
      ierr = DMDAGetInfo(da,0,&m,0,0,0,0,0,0,&s,&bx,0,0,0);CHKERRQ(ierr);
      ierr = DMDAGetCorners(da,0,0,0,&l,0,0);CHKERRQ(ierr);
      /* gather every rank's local width so the coordinate DMDA uses the
         identical ownership partition */
      ierr = PetscMalloc(size*sizeof(PetscInt),&lc);CHKERRQ(ierr);
      ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
      ierr = DMDACreate1d(((PetscObject)da)->comm,bx,m,1,s,lc,&dd->da_coordinates);CHKERRQ(ierr);
      ierr = PetscFree(lc);CHKERRQ(ierr);
    } else if (dd->dim == 2) {
      PetscInt         i,s,m,*lc,*ld,l,k,n,M,N;
      DMDABoundaryType bx,by;
      ierr = DMDAGetInfo(da,0,&m,&n,0,&M,&N,0,0,&s,&bx,&by,0,0);CHKERRQ(ierr);
      ierr = DMDAGetCorners(da,0,0,0,&l,&k,0);CHKERRQ(ierr);
      ierr = PetscMalloc2(size,PetscInt,&lc,size,PetscInt,&ld);CHKERRQ(ierr);
      /* ranks are ordered x-fastest on the M x N process grid:
         only first M values in lc matter */
      ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
      /* every Mth value in ld matters: compact the y-extent of process row i
         (held by that row's first rank, at index M*i) to the front of ld */
      ierr = MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
      for ( i=0; i<N; i++) {
        ld[i] = ld[M*i];
      }
      ierr = DMDACreate2d(((PetscObject)da)->comm,bx,by,DMDA_STENCIL_BOX,m,n,M,N,2,s,lc,ld,&dd->da_coordinates);CHKERRQ(ierr);
      ierr = PetscFree2(lc,ld);CHKERRQ(ierr);
    } else if (dd->dim == 3) {
      PetscInt         i,s,m,*lc,*ld,*le,l,k,q,n,M,N,P,p;
      DMDABoundaryType bx,by,bz;
      ierr = DMDAGetInfo(da,0,&m,&n,&p,&M,&N,&P,0,&s,&bx,&by,&bz,0);CHKERRQ(ierr);
      ierr = DMDAGetCorners(da,0,0,0,&l,&k,&q);CHKERRQ(ierr);
      ierr = PetscMalloc3(size,PetscInt,&lc,size,PetscInt,&ld,size,PetscInt,&le);CHKERRQ(ierr);
      /* only first M values in lc matter (x-extents of the first process row) */
      ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
      /* every Mth value in ld matters (y-extents, one per process row) */
      ierr = MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
      for ( i=0; i<N; i++) {
        ld[i] = ld[M*i];
      }
      /* every (M*N)th value in le matters (z-extents, one per process plane) */
      ierr = MPI_Allgather(&q,1,MPIU_INT,le,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
      for ( i=0; i<P; i++) {
        le[i] = le[M*N*i];
      }
      ierr = DMDACreate3d(((PetscObject)da)->comm,bx,by,bz,DMDA_STENCIL_BOX,m,n,p,M,N,P,3,s,lc,ld,le,&dd->da_coordinates);CHKERRQ(ierr);
      ierr = PetscFree3(lc,ld,le);CHKERRQ(ierr);
    }
  }
  /* borrowed reference to the cached coordinate DMDA */
  *cda = dd->da_coordinates;
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMDAGetGhostedCoordinates"
/*@
   DMDAGetGhostedCoordinates - Gets the node coordinates associated with a DMDA.

   Collective on DMDA

   Input Parameter:
.
da - the distributed array 212 213 Output Parameter: 214 . c - coordinate vector 215 216 Note: 217 Each process has only the coordinates for its local AND ghost nodes 218 219 For two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 220 and (x_0,y_0,z_0,x_1,y_1,z_1...) 221 222 Level: intermediate 223 224 .keywords: distributed array, get, corners, nodes, local indices, coordinates 225 226 .seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetCoordinates(), DMDAGetCoordinateDA() 227 @*/ 228 PetscErrorCode DMDAGetGhostedCoordinates(DM da,Vec *c) 229 { 230 PetscErrorCode ierr; 231 DM_DA *dd = (DM_DA*)da->data; 232 233 PetscFunctionBegin; 234 PetscValidHeaderSpecific(da,DM_CLASSID,1); 235 PetscValidPointer(c,2); 236 if (!dd->coordinates) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ORDER,"You must call DMDASetCoordinates() before this call"); 237 if (!dd->ghosted_coordinates) { 238 DM dac; 239 ierr = DMDAGetCoordinateDA(da,&dac);CHKERRQ(ierr); 240 ierr = DMCreateLocalVector(dac,&dd->ghosted_coordinates);CHKERRQ(ierr); 241 ierr = DMGlobalToLocalBegin(dac,dd->coordinates,INSERT_VALUES,dd->ghosted_coordinates);CHKERRQ(ierr); 242 ierr = DMGlobalToLocalEnd(dac,dd->coordinates,INSERT_VALUES,dd->ghosted_coordinates);CHKERRQ(ierr); 243 } 244 *c = dd->ghosted_coordinates; 245 PetscFunctionReturn(0); 246 } 247 248 #undef __FUNCT__ 249 #define __FUNCT__ "DMDASetFieldName" 250 /*@C 251 DMDASetFieldName - Sets the names of individual field components in multicomponent 252 vectors associated with a DMDA. 253 254 Not Collective 255 256 Input Parameters: 257 + da - the distributed array 258 . nf - field number for the DMDA (0, 1, ... 
dof-1), where dof indicates the 259 number of degrees of freedom per node within the DMDA 260 - names - the name of the field (component) 261 262 Level: intermediate 263 264 .keywords: distributed array, get, component name 265 266 .seealso: DMDAGetFieldName() 267 @*/ 268 PetscErrorCode DMDASetFieldName(DM da,PetscInt nf,const char name[]) 269 { 270 PetscErrorCode ierr; 271 DM_DA *dd = (DM_DA*)da->data; 272 273 PetscFunctionBegin; 274 PetscValidHeaderSpecific(da,DM_CLASSID,1); 275 if (nf < 0 || nf >= dd->w) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf); 276 ierr = PetscFree(dd->fieldname[nf]);CHKERRQ(ierr); 277 ierr = PetscStrallocpy(name,&dd->fieldname[nf]);CHKERRQ(ierr); 278 PetscFunctionReturn(0); 279 } 280 281 #undef __FUNCT__ 282 #define __FUNCT__ "DMDAGetFieldName" 283 /*@C 284 DMDAGetFieldName - Gets the names of individual field components in multicomponent 285 vectors associated with a DMDA. 286 287 Not Collective 288 289 Input Parameter: 290 + da - the distributed array 291 - nf - field number for the DMDA (0, 1, ... dof-1), where dof indicates the 292 number of degrees of freedom per node within the DMDA 293 294 Output Parameter: 295 . names - the name of the field (component) 296 297 Level: intermediate 298 299 .keywords: distributed array, get, component name 300 301 .seealso: DMDASetFieldName() 302 @*/ 303 PetscErrorCode DMDAGetFieldName(DM da,PetscInt nf,const char **name) 304 { 305 DM_DA *dd = (DM_DA*)da->data; 306 307 PetscFunctionBegin; 308 PetscValidHeaderSpecific(da,DM_CLASSID,1); 309 PetscValidPointer(name,3); 310 if (nf < 0 || nf >= dd->w) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf); 311 *name = dd->fieldname[nf]; 312 PetscFunctionReturn(0); 313 } 314 315 #undef __FUNCT__ 316 #define __FUNCT__ "DMDAGetCorners" 317 /*@ 318 DMDAGetCorners - Returns the global (x,y,z) indices of the lower left 319 corner of the local region, excluding ghost points. 
320 321 Not Collective 322 323 Input Parameter: 324 . da - the distributed array 325 326 Output Parameters: 327 + x,y,z - the corner indices (where y and z are optional; these are used 328 for 2D and 3D problems) 329 - m,n,p - widths in the corresponding directions (where n and p are optional; 330 these are used for 2D and 3D problems) 331 332 Note: 333 The corner information is independent of the number of degrees of 334 freedom per node set with the DMDACreateXX() routine. Thus the x, y, z, and 335 m, n, p can be thought of as coordinates on a logical grid, where each 336 grid point has (potentially) several degrees of freedom. 337 Any of y, z, n, and p can be passed in as PETSC_NULL if not needed. 338 339 Level: beginner 340 341 .keywords: distributed array, get, corners, nodes, local indices 342 343 .seealso: DMDAGetGhostCorners(), DMDAGetOwnershipRanges() 344 @*/ 345 PetscErrorCode DMDAGetCorners(DM da,PetscInt *x,PetscInt *y,PetscInt *z,PetscInt *m,PetscInt *n,PetscInt *p) 346 { 347 PetscInt w; 348 DM_DA *dd = (DM_DA*)da->data; 349 350 PetscFunctionBegin; 351 PetscValidHeaderSpecific(da,DM_CLASSID,1); 352 /* since the xs, xe ... have all been multiplied by the number of degrees 353 of freedom per cell, w = dd->w, we divide that out before returning.*/ 354 w = dd->w; 355 if (x) *x = dd->xs/w; if(m) *m = (dd->xe - dd->xs)/w; 356 /* the y and z have NOT been multiplied by w */ 357 if (y) *y = dd->ys; if (n) *n = (dd->ye - dd->ys); 358 if (z) *z = dd->zs; if (p) *p = (dd->ze - dd->zs); 359 PetscFunctionReturn(0); 360 } 361 362 #undef __FUNCT__ 363 #define __FUNCT__ "DMDAGetLocalBoundingBox" 364 /*@ 365 DMDAGetLocalBoundingBox - Returns the local bounding box for the DMDA. 366 367 Not Collective 368 369 Input Parameter: 370 . 
da - the distributed array 371 372 Output Parameters: 373 + lmin - local minimum coordinates (length dim, optional) 374 - lmax - local maximim coordinates (length dim, optional) 375 376 Level: beginner 377 378 .keywords: distributed array, get, coordinates 379 380 .seealso: DMDAGetCoordinateDA(), DMDAGetCoordinates(), DMDAGetBoundingBox() 381 @*/ 382 PetscErrorCode DMDAGetLocalBoundingBox(DM da,PetscReal lmin[],PetscReal lmax[]) 383 { 384 PetscErrorCode ierr; 385 Vec coords = PETSC_NULL; 386 PetscInt dim,i,j; 387 const PetscScalar *local_coords; 388 PetscReal min[3]={PETSC_MAX_REAL,PETSC_MAX_REAL,PETSC_MAX_REAL},max[3]={PETSC_MIN_REAL,PETSC_MIN_REAL,PETSC_MIN_REAL}; 389 PetscInt N,Ni; 390 DM_DA *dd = (DM_DA*)da->data; 391 392 PetscFunctionBegin; 393 PetscValidHeaderSpecific(da,DM_CLASSID,1); 394 dim = dd->dim; 395 ierr = DMDAGetCoordinates(da,&coords);CHKERRQ(ierr); 396 if (coords) { 397 ierr = VecGetArrayRead(coords,&local_coords);CHKERRQ(ierr); 398 ierr = VecGetLocalSize(coords,&N);CHKERRQ(ierr); 399 Ni = N/dim; 400 for (i=0; i<Ni; i++) { 401 for (j=0; j<3; j++) { 402 min[j] = j < dim ? PetscMin(min[j],PetscRealPart(local_coords[i*dim+j])) : 0; 403 max[j] = j < dim ? PetscMax(min[j],PetscRealPart(local_coords[i*dim+j])) : 0; 404 } 405 } 406 ierr = VecRestoreArrayRead(coords,&local_coords);CHKERRQ(ierr); 407 } else { /* Just use grid indices */ 408 DMDALocalInfo info; 409 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 410 min[0] = info.xs; 411 min[1] = info.ys; 412 min[2] = info.zs; 413 max[0] = info.xs + info.xm-1; 414 max[1] = info.ys + info.ym-1; 415 max[2] = info.zs + info.zm-1; 416 } 417 if (lmin) {ierr = PetscMemcpy(lmin,min,dim*sizeof(PetscReal));CHKERRQ(ierr);} 418 if (lmax) {ierr = PetscMemcpy(lmax,max,dim*sizeof(PetscReal));CHKERRQ(ierr);} 419 PetscFunctionReturn(0); 420 } 421 422 #undef __FUNCT__ 423 #define __FUNCT__ "DMDAGetBoundingBox" 424 /*@ 425 DMDAGetBoundingBox - Returns the global bounding box for the DMDA. 
426 427 Collective on DMDA 428 429 Input Parameter: 430 . da - the distributed array 431 432 Output Parameters: 433 + gmin - global minimum coordinates (length dim, optional) 434 - gmax - global maximim coordinates (length dim, optional) 435 436 Level: beginner 437 438 .keywords: distributed array, get, coordinates 439 440 .seealso: DMDAGetCoordinateDA(), DMDAGetCoordinates(), DMDAGetLocalBoundingBox() 441 @*/ 442 PetscErrorCode DMDAGetBoundingBox(DM da,PetscReal gmin[],PetscReal gmax[]) 443 { 444 PetscErrorCode ierr; 445 PetscMPIInt count; 446 PetscReal lmin[3],lmax[3]; 447 DM_DA *dd = (DM_DA*)da->data; 448 449 PetscFunctionBegin; 450 PetscValidHeaderSpecific(da,DM_CLASSID,1); 451 count = PetscMPIIntCast(dd->dim); 452 ierr = DMDAGetLocalBoundingBox(da,lmin,lmax);CHKERRQ(ierr); 453 if (gmin) {ierr = MPI_Allreduce(lmin,gmin,count,MPIU_REAL,MPIU_MIN,((PetscObject)da)->comm);CHKERRQ(ierr);} 454 if (gmax) {ierr = MPI_Allreduce(lmax,gmax,count,MPIU_REAL,MPIU_MAX,((PetscObject)da)->comm);CHKERRQ(ierr);} 455 PetscFunctionReturn(0); 456 } 457 458 #undef __FUNCT__ 459 #define __FUNCT__ "DMDAGetReducedDA" 460 /*@ 461 DMDAGetReducedDA - Gets the DMDA with the same layout but with fewer or more fields 462 463 Collective on DMDA 464 465 Input Parameter: 466 + da - the distributed array 467 . nfields - number of fields in new DMDA 468 469 Output Parameter: 470 . 
nda - the new DMDA 471 472 Level: intermediate 473 474 .keywords: distributed array, get, corners, nodes, local indices, coordinates 475 476 .seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetCoordinates(), DMDAGetGhostedCoordinates() 477 @*/ 478 PetscErrorCode DMDAGetReducedDA(DM da,PetscInt nfields,DM *nda) 479 { 480 PetscErrorCode ierr; 481 DM_DA *dd = (DM_DA*)da->data; 482 483 PetscInt s,m,n,p,M,N,P,dim; 484 const PetscInt *lx,*ly,*lz; 485 DMDABoundaryType bx,by,bz; 486 DMDAStencilType stencil_type; 487 488 PetscFunctionBegin; 489 ierr = DMDAGetInfo(da,&dim,&M,&N,&P,&m,&n,&p,0,&s,&bx,&by,&bz,&stencil_type);CHKERRQ(ierr); 490 ierr = DMDAGetOwnershipRanges(da,&lx,&ly,&lz);CHKERRQ(ierr); 491 if (dim == 1) { 492 ierr = DMDACreate1d(((PetscObject)da)->comm,bx,M,nfields,s,dd->lx,nda);CHKERRQ(ierr); 493 } else if (dim == 2) { 494 ierr = DMDACreate2d(((PetscObject)da)->comm,bx,by,stencil_type,M,N,m,n,nfields,s,lx,ly,nda);CHKERRQ(ierr); 495 } else if (dim == 3) { 496 ierr = DMDACreate3d(((PetscObject)da)->comm,bx,by,bz,stencil_type,M,N,P,m,n,p,nfields,s,lx,ly,lz,nda);CHKERRQ(ierr); 497 } 498 if (dd->coordinates) { 499 DM_DA *ndd = (DM_DA*)(*nda)->data; 500 ierr = PetscObjectReference((PetscObject)dd->coordinates);CHKERRQ(ierr); 501 ndd->coordinates = dd->coordinates; 502 } 503 PetscFunctionReturn(0); 504 } 505 506