
/*
  Plots vectors obtained with DMDACreate2d()
*/

#include <petsc/private/dmdaimpl.h>      /*I  "petscdmda.h"   I*/
#include <petsc/private/vecimpl.h>
#include <petscdraw.h>
#include <petscviewerhdf5.h>

/*
  The data that is passed into the graphics callback
*/
typedef struct {
  PetscMPIInt       rank;                         /* MPI rank; only rank 0 draws the text labels */
  PetscInt          m,n,dof,k;                    /* local (ghosted) grid sizes, # of fields, field currently drawn */
  PetscReal         xmin,xmax,ymin,ymax,min,max;  /* coordinate range and value range of the plot */
  const PetscScalar *xy,*v;                       /* interleaved (x,y) node coordinates and interleaved field values */
  PetscBool         showgrid;                     /* also outline each grid cell in black */
  const char        *name0,*name1;                /* axis labels; may be NULL */
} ZoomCtx;

/*
  This does the drawing for one particular field
  in one particular set of coordinates. It is a callback
  called from PetscDrawZoom()
*/
#undef __FUNCT__
#define __FUNCT__ "VecView_MPI_Draw_DA2d_Zoom"
PetscErrorCode VecView_MPI_Draw_DA2d_Zoom(PetscDraw draw,void *ctx)
{
  ZoomCtx           *zctx = (ZoomCtx*)ctx;
  PetscErrorCode    ierr;
  PetscInt          m,n,i,j,k,dof,id,c1,c2,c3,c4;
  /* y_1 rather than y1: some math.h headers declare a Bessel function y1() — TODO confirm that is the reason */
  PetscReal         min,max,x1,x2,x3,x4,y_1,y2,y3,y4;
  const PetscScalar *xy,*v;

  PetscFunctionBegin;
  m   = zctx->m;
  n   = zctx->n;
  dof = zctx->dof;
  k   = zctx->k;
  xy  = zctx->xy;
  v   = zctx->v;
  min = zctx->min;
  max = zctx->max;

  /* PetscDraw the contour plot patch: each grid cell is rendered as two
     triangles whose vertex colors are mapped from the field values */
  ierr = PetscDrawCollectiveBegin(draw);CHKERRQ(ierr);
  for (j=0; j<n-1; j++) {
    for (i=0; i<m-1; i++) {
      /* lower-left corner of cell (i,j) */
      id  = i+j*m;
      x1  = PetscRealPart(xy[2*id]);
      y_1 = PetscRealPart(xy[2*id+1]);
      c1  = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      /* lower-right corner */
      id = i+j*m+1;
      x2 = PetscRealPart(xy[2*id]);
      y2 = PetscRealPart(xy[2*id+1]);
      c2 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      /* upper-right corner */
      id = i+j*m+1+m;
      x3 = PetscRealPart(xy[2*id]);
      y3 = PetscRealPart(xy[2*id+1]);
      c3 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      /* upper-left corner */
      id = i+j*m+m;
      x4 = PetscRealPart(xy[2*id]);
      y4 = PetscRealPart(xy[2*id+1]);
      c4 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      /* split the quadrilateral along the (1)-(3) diagonal */
      ierr = PetscDrawTriangle(draw,x1,y_1,x2,y2,x3,y3,c1,c2,c3);CHKERRQ(ierr);
      ierr = PetscDrawTriangle(draw,x1,y_1,x3,y3,x4,y4,c1,c3,c4);CHKERRQ(ierr);
      if (zctx->showgrid) {
        ierr = PetscDrawLine(draw,x1,y_1,x2,y2,PETSC_DRAW_BLACK);CHKERRQ(ierr);
        ierr = PetscDrawLine(draw,x2,y2,x3,y3,PETSC_DRAW_BLACK);CHKERRQ(ierr);
        ierr = PetscDrawLine(draw,x3,y3,x4,y4,PETSC_DRAW_BLACK);CHKERRQ(ierr);
        ierr = PetscDrawLine(draw,x4,y4,x1,y_1,PETSC_DRAW_BLACK);CHKERRQ(ierr);
      }
    }
  }
  /* only rank 0 draws the axis names and the coordinate-limit labels */
  if (!zctx->rank) {
    if (zctx->name0 || zctx->name1) {
      PetscReal xl,yl,xr,yr,x,y;
      ierr = PetscDrawGetCoordinates(draw,&xl,&yl,&xr,&yr);CHKERRQ(ierr);
      x  = xl + .30*(xr - xl);
      xl = xl + .01*(xr - xl);
      y  = yr - .30*(yr - yl);
      yl = yl + .01*(yr - yl);
      if (zctx->name0) {ierr = PetscDrawString(draw,x,yl,PETSC_DRAW_BLACK,zctx->name0);CHKERRQ(ierr);}
      if (zctx->name1) {ierr = PetscDrawStringVertical(draw,xl,y,PETSC_DRAW_BLACK,zctx->name1);CHKERRQ(ierr);}
    }
    /*
       Ideally we would use the PetscDrawAxis object to manage displaying the coordinate limits
       but that may require some refactoring.
    */
    {
      PetscReal xmin = zctx->xmin, ymin = zctx->ymin;
      PetscReal xmax = zctx->xmax, ymax = zctx->ymax;
      char      value[16]; size_t len; PetscReal w;
      ierr = PetscSNPrintf(value,16,"%f",xmin);CHKERRQ(ierr);
      ierr = PetscDrawString(draw,xmin,ymin - .05*(ymax - ymin),PETSC_DRAW_BLACK,value);CHKERRQ(ierr);
      ierr = PetscSNPrintf(value,16,"%f",xmax);CHKERRQ(ierr);
      ierr = PetscStrlen(value,&len);CHKERRQ(ierr);
      ierr = PetscDrawStringGetSize(draw,&w,NULL);CHKERRQ(ierr);
      /* right-align the xmax label so its last character ends at xmax */
      ierr = PetscDrawString(draw,xmax - len*w,ymin - .05*(ymax - ymin),PETSC_DRAW_BLACK,value);CHKERRQ(ierr);
      ierr = PetscSNPrintf(value,16,"%f",ymin);CHKERRQ(ierr);
      ierr = PetscDrawString(draw,xmin - .05*(xmax - xmin),ymin,PETSC_DRAW_BLACK,value);CHKERRQ(ierr);
      ierr = PetscSNPrintf(value,16,"%f",ymax);CHKERRQ(ierr);
      ierr = PetscDrawString(draw,xmin - .05*(xmax - xmin),ymax,PETSC_DRAW_BLACK,value);CHKERRQ(ierr);
    }
  }
  ierr = PetscDrawCollectiveEnd(draw);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
  Contour plot of a DMDA-2d vector using the draw viewer: collects ghosted
  values and coordinates, then calls PetscDrawZoom() once per selected field.
*/
#undef __FUNCT__
#define __FUNCT__ "VecView_MPI_Draw_DA2d"
PetscErrorCode VecView_MPI_Draw_DA2d(Vec xin,PetscViewer viewer)
{
  DM                 da,dac,dag;
  PetscErrorCode     ierr;
  PetscInt           N,s,M,w,ncoors = 4;
  const PetscInt     *lx,*ly;
  PetscReal          coors[4];
  PetscDraw          draw,popup;
  PetscBool          isnull,useports = PETSC_FALSE;
  MPI_Comm           comm;
  Vec                xlocal,xcoor,xcoorl;
  DMBoundaryType     bx,by;
  DMDAStencilType    st;
  ZoomCtx            zctx;
  PetscDrawViewPorts *ports = NULL;
  PetscViewerFormat  format;
  PetscInt           *displayfields;
  PetscInt           ndisplayfields,i,nbounds;
  const PetscReal    *bounds;

  PetscFunctionBegin;
  zctx.showgrid = PETSC_FALSE;

  ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
  ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr);
  if (isnull) PetscFunctionReturn(0);

  ierr = PetscViewerDrawGetBounds(viewer,&nbounds,&bounds);CHKERRQ(ierr);

  ierr =
VecGetDM(xin,&da);CHKERRQ(ierr);
  if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");

  ierr = PetscObjectGetComm((PetscObject)xin,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&zctx.rank);CHKERRQ(ierr);

  ierr = DMDAGetInfo(da,0,&M,&N,0,&zctx.m,&zctx.n,0,&w,&s,&bx,&by,0,&st);CHKERRQ(ierr);
  ierr = DMDAGetOwnershipRanges(da,&lx,&ly,NULL);CHKERRQ(ierr);

  /*
     Obtain a sequential vector that is going to contain the local values plus ONE layer of
     ghosted values to draw the graphics from. We also need its corresponding DMDA (dac) that will
     update the local values plus ONE layer of ghost values.
  */
  ierr = PetscObjectQuery((PetscObject)da,"GraphicsGhosted",(PetscObject*)&xlocal);CHKERRQ(ierr);
  if (!xlocal) {
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      /*
         if original da is not of stencil width one, or periodic or not a box stencil then
         create a special DMDA to handle one level of ghost points for graphics
      */
      ierr = DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,w,1,lx,ly,&dac);CHKERRQ(ierr);
      ierr = PetscInfo(da,"Creating auxilary DMDA for managing graphics ghost points\n");CHKERRQ(ierr);
    } else {
      /* otherwise we can use the da we already have */
      dac = da;
    }
    /* create local vector for holding ghosted values used in graphics */
    ierr = DMCreateLocalVector(dac,&xlocal);CHKERRQ(ierr);
    if (dac != da) {
      /* don't keep any public reference of this DMDA, it is only available through xlocal */
      ierr = PetscObjectDereference((PetscObject)dac);CHKERRQ(ierr);
    } else {
      /* remove association between xlocal and da, because below we compose in the opposite
         direction and if we left this connect we'd get a loop, so the objects could
         never be destroyed */
      ierr = PetscObjectRemoveReference((PetscObject)xlocal,"__PETSc_dm");CHKERRQ(ierr);
    }
    /* cache xlocal on da so subsequent views reuse it; da holds the only reference */
    ierr = PetscObjectCompose((PetscObject)da,"GraphicsGhosted",(PetscObject)xlocal);CHKERRQ(ierr);
    ierr = PetscObjectDereference((PetscObject)xlocal);CHKERRQ(ierr);
  } else {
    /* cached vector found: recover the DMDA that owns it (or reuse da directly) */
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      ierr = VecGetDM(xlocal, &dac);CHKERRQ(ierr);
    } else {
      dac = da;
    }
  }

  /*
     Get local (ghosted) values of vector
  */
  ierr = DMGlobalToLocalBegin(dac,xin,INSERT_VALUES,xlocal);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(dac,xin,INSERT_VALUES,xlocal);CHKERRQ(ierr);
  ierr = VecGetArrayRead(xlocal,&zctx.v);CHKERRQ(ierr);

  /*
     Get coordinates of nodes; create default uniform coordinates on [0,1]x[0,1] if none set
  */
  ierr = DMGetCoordinates(da,&xcoor);CHKERRQ(ierr);
  if (!xcoor) {
    ierr = DMDASetUniformCoordinates(da,0.0,1.0,0.0,1.0,0.0,0.0);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da,&xcoor);CHKERRQ(ierr);
  }

  /*
     Determine the min and max coordinates in plot
  */
  ierr = VecStrideMin(xcoor,0,NULL,&zctx.xmin);CHKERRQ(ierr);
  ierr = VecStrideMax(xcoor,0,NULL,&zctx.xmax);CHKERRQ(ierr);
  ierr = VecStrideMin(xcoor,1,NULL,&zctx.ymin);CHKERRQ(ierr);
  ierr = VecStrideMax(xcoor,1,NULL,&zctx.ymax);CHKERRQ(ierr);
  /* pad the plot box by 5% on every side; user may override with -draw_coordinates */
  coors[0] = zctx.xmin - .05*(zctx.xmax- zctx.xmin); coors[2] = zctx.xmax + .05*(zctx.xmax - zctx.xmin);
  coors[1] = zctx.ymin - .05*(zctx.ymax- zctx.ymin); coors[3] = zctx.ymax + .05*(zctx.ymax - zctx.ymin);
  ierr = PetscOptionsGetRealArray(NULL,NULL,"-draw_coordinates",coors,&ncoors,NULL);CHKERRQ(ierr);
  ierr = PetscInfo4(da,"Preparing DMDA 2d contour plot coordinates %g %g %g %g\n",(double)coors[0],(double)coors[1],(double)coors[2],(double)coors[3]);CHKERRQ(ierr);

  /*
     Get local ghosted version of coordinates (cached on da, same scheme as GraphicsGhosted above)
  */
  ierr = PetscObjectQuery((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject*)&xcoorl);CHKERRQ(ierr);
  if (!xcoorl) {
    /* create DMDA to get local version of graphics; dof=2 holds the interleaved (x,y) pairs */
    ierr = DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,2,1,lx,ly,&dag);CHKERRQ(ierr);
    ierr = PetscInfo(dag,"Creating auxilary DMDA for managing graphics coordinates ghost points\n");CHKERRQ(ierr);
    ierr = DMCreateLocalVector(dag,&xcoorl);CHKERRQ(ierr);
    ierr = PetscObjectCompose((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject)xcoorl);CHKERRQ(ierr);
    ierr = PetscObjectDereference((PetscObject)dag);CHKERRQ(ierr);
    ierr = PetscObjectDereference((PetscObject)xcoorl);CHKERRQ(ierr);
  } else {
    ierr = VecGetDM(xcoorl,&dag);CHKERRQ(ierr);
  }
  ierr = DMGlobalToLocalBegin(dag,xcoor,INSERT_VALUES,xcoorl);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(dag,xcoor,INSERT_VALUES,xcoorl);CHKERRQ(ierr);
  ierr = VecGetArrayRead(xcoorl,&zctx.xy);CHKERRQ(ierr);
  ierr = DMDAGetCoordinateName(da,0,&zctx.name0);CHKERRQ(ierr);
  ierr = DMDAGetCoordinateName(da,1,&zctx.name1);CHKERRQ(ierr);

  /*
     Get information about size of area each processor must do graphics for
  */
  ierr = DMDAGetInfo(dac,NULL,&M,&N,NULL,NULL,NULL,NULL,&zctx.dof,NULL,&bx,&by,NULL,NULL);CHKERRQ(ierr);
  ierr = DMDAGetGhostCorners(dac,NULL,NULL,NULL,&zctx.m,&zctx.n,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetBool(NULL,NULL,"-draw_contour_grid",&zctx.showgrid,NULL);CHKERRQ(ierr);

  ierr = DMDASelectFields(da,&ndisplayfields,&displayfields);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  ierr = PetscOptionsGetBool(NULL,NULL,"-draw_ports",&useports,NULL);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_DRAW_PORTS) useports = PETSC_TRUE;
  if (useports) {
    /* all fields share one window, each in its own viewport */
    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawCheckResizedWindow(draw);CHKERRQ(ierr);
    ierr = PetscDrawClear(draw);CHKERRQ(ierr);
    ierr = PetscDrawViewPortsCreate(draw,ndisplayfields,&ports);CHKERRQ(ierr);
  }

  /*
     Loop over each field; drawing each in a different window
  */
  for (i=0; i<ndisplayfields; i++) {
    zctx.k = displayfields[i];

    /* determine the min and max value in plot */
    ierr = VecStrideMin(xin,zctx.k,NULL,&zctx.min);CHKERRQ(ierr);
    ierr = VecStrideMax(xin,zctx.k,NULL,&zctx.max);CHKERRQ(ierr);
    if (zctx.k < nbounds) {
      /* bounds supplied via the viewer override the computed range */
      zctx.min = bounds[2*zctx.k];
      zctx.max = bounds[2*zctx.k+1];
    }
    if (zctx.min == zctx.max) {
      /* widen a degenerate range so the color scale is well defined for constant fields */
      zctx.min -= 1.e-12;
      zctx.max += 1.e-12;
    }
    ierr = PetscInfo2(da,"DMDA 2d contour plot min %g max %g\n",(double)zctx.min,(double)zctx.max);CHKERRQ(ierr);

    if (useports) {
      ierr = PetscDrawViewPortsSet(ports,i);CHKERRQ(ierr);
    } else {
      /* one window per field, titled with the field name if available */
      const char *title;
      ierr = PetscViewerDrawGetDraw(viewer,i,&draw);CHKERRQ(ierr);
      ierr = DMDAGetFieldName(da,zctx.k,&title);CHKERRQ(ierr);
      if (title) {ierr = PetscDrawSetTitle(draw,title);CHKERRQ(ierr);}
    }

    ierr = PetscDrawGetPopup(draw,&popup);CHKERRQ(ierr);
    if (popup) {ierr = PetscDrawScalePopup(popup,zctx.min,zctx.max);CHKERRQ(ierr);}
    ierr = PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);CHKERRQ(ierr);
    ierr = PetscDrawZoom(draw,VecView_MPI_Draw_DA2d_Zoom,&zctx);CHKERRQ(ierr);
    if (!useports) {ierr = PetscDrawSave(draw);CHKERRQ(ierr);}
  }
  if (useports) {
    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawSave(draw);CHKERRQ(ierr);
  }

  ierr = PetscDrawViewPortsDestroy(ports);CHKERRQ(ierr);
  ierr = PetscFree(displayfields);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(xcoorl,&zctx.xy);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(xlocal,&zctx.v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_HDF5)
#undef __FUNCT__
#define __FUNCT__ "VecGetHDF5ChunkSize"
/*
  Compute HDF5 chunk dimensions (in chunkDims[]) for writing the vector,
  respecting HDF5's limits on the number of chunks and the maximum chunk size.
*/
static PetscErrorCode VecGetHDF5ChunkSize(DM_DA *da, Vec xin, PetscInt dimension, PetscInt timestep, hsize_t *chunkDims)
{
  PetscMPIInt    comm_size;
  PetscErrorCode ierr;
316 hsize_t chunk_size, target_size, dim; 317 hsize_t vec_size = sizeof(PetscScalar)*da->M*da->N*da->P*da->w; 318 hsize_t avg_local_vec_size,KiB = 1024,MiB = KiB*KiB,GiB = MiB*KiB,min_size = MiB; 319 hsize_t max_chunks = 64*KiB; /* HDF5 internal limitation */ 320 hsize_t max_chunk_size = 4*GiB; /* HDF5 internal limitation */ 321 PetscInt zslices=da->p, yslices=da->n, xslices=da->m; 322 323 PetscFunctionBegin; 324 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)xin), &comm_size);CHKERRQ(ierr); 325 avg_local_vec_size = (hsize_t) ceil(vec_size*1.0/comm_size); /* we will attempt to use this as the chunk size */ 326 327 target_size = (hsize_t) ceil(PetscMin(vec_size,PetscMin(max_chunk_size,PetscMax(avg_local_vec_size,PetscMax(ceil(vec_size*1.0/max_chunks),min_size))))); 328 /* following line uses sizeof(PetscReal) instead of sizeof(PetscScalar) because the last dimension of chunkDims[] captures the 2* when complex numbers are being used */ 329 chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(PetscReal); 330 331 /* 332 if size/rank > max_chunk_size, we need radical measures: even going down to 333 avg_local_vec_size is not enough, so we simply use chunk size of 4 GiB no matter 334 what, composed in the most efficient way possible. 335 N.B. this minimises the number of chunks, which may or may not be the optimal 336 solution. In a BG, for example, the optimal solution is probably to make # chunks = # 337 IO nodes involved, but this author has no access to a BG to figure out how to 338 reliably find the right number. And even then it may or may not be enough. 339 */ 340 if (avg_local_vec_size > max_chunk_size) { 341 /* check if we can just split local z-axis: is that enough? 
*/ 342 zslices = (PetscInt)ceil(vec_size*1.0/(da->p*max_chunk_size))*zslices; 343 if (zslices > da->P) { 344 /* lattice is too large in xy-directions, splitting z only is not enough */ 345 zslices = da->P; 346 yslices= (PetscInt)ceil(vec_size*1.0/(zslices*da->n*max_chunk_size))*yslices; 347 if (yslices > da->N) { 348 /* lattice is too large in x-direction, splitting along z, y is not enough */ 349 yslices = da->N; 350 xslices= (PetscInt)ceil(vec_size*1.0/(zslices*yslices*da->m*max_chunk_size))*xslices; 351 } 352 } 353 dim = 0; 354 if (timestep >= 0) { 355 ++dim; 356 } 357 /* prefer to split z-axis, even down to planar slices */ 358 if (dimension == 3) { 359 chunkDims[dim++] = (hsize_t) da->P/zslices; 360 chunkDims[dim++] = (hsize_t) da->N/yslices; 361 chunkDims[dim++] = (hsize_t) da->M/xslices; 362 } else { 363 /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */ 364 chunkDims[dim++] = (hsize_t) da->N/yslices; 365 chunkDims[dim++] = (hsize_t) da->M/xslices; 366 } 367 chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(double); 368 } else { 369 if (target_size < chunk_size) { 370 /* only change the defaults if target_size < chunk_size */ 371 dim = 0; 372 if (timestep >= 0) { 373 ++dim; 374 } 375 /* prefer to split z-axis, even down to planar slices */ 376 if (dimension == 3) { 377 /* try splitting the z-axis to core-size bits, i.e. 
divide chunk size by # comm_size in z-direction */ 378 if (target_size >= chunk_size/da->p) { 379 /* just make chunks the size of <local_z>x<whole_world_y>x<whole_world_x>x<dof> */ 380 chunkDims[dim] = (hsize_t) ceil(da->P*1.0/da->p); 381 } else { 382 /* oops, just splitting the z-axis is NOT ENOUGH, need to split more; let's be 383 radical and let everyone write all they've got */ 384 chunkDims[dim++] = (hsize_t) ceil(da->P*1.0/da->p); 385 chunkDims[dim++] = (hsize_t) ceil(da->N*1.0/da->n); 386 chunkDims[dim++] = (hsize_t) ceil(da->M*1.0/da->m); 387 } 388 } else { 389 /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */ 390 if (target_size >= chunk_size/da->n) { 391 /* just make chunks the size of <local_z>x<whole_world_y>x<whole_world_x>x<dof> */ 392 chunkDims[dim] = (hsize_t) ceil(da->N*1.0/da->n); 393 } else { 394 /* oops, just splitting the z-axis is NOT ENOUGH, need to split more; let's be 395 radical and let everyone write all they've got */ 396 chunkDims[dim++] = (hsize_t) ceil(da->N*1.0/da->n); 397 chunkDims[dim++] = (hsize_t) ceil(da->M*1.0/da->m); 398 } 399 400 } 401 chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(double); 402 } else { 403 /* precomputed chunks are fine, we don't need to do anything */ 404 } 405 } 406 PetscFunctionReturn(0); 407 } 408 #endif 409 410 #if defined(PETSC_HAVE_HDF5) 411 #undef __FUNCT__ 412 #define __FUNCT__ "VecView_MPI_HDF5_DA" 413 PetscErrorCode VecView_MPI_HDF5_DA(Vec xin,PetscViewer viewer) 414 { 415 DM dm; 416 DM_DA *da; 417 hid_t filespace; /* file dataspace identifier */ 418 hid_t chunkspace; /* chunk dataset property identifier */ 419 hid_t plist_id; /* property list identifier */ 420 hid_t dset_id; /* dataset identifier */ 421 hid_t memspace; /* memory dataspace identifier */ 422 hid_t file_id; 423 hid_t group; 424 hid_t memscalartype; /* scalar type 
for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hid_t             filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hsize_t           dim;
  hsize_t           maxDims[6]={0}, dims[6]={0}, chunkDims[6]={0}, count[6]={0}, offset[6]={0}; /* we depend on these being sane later on */
  PetscInt          timestep, dimension;
  const PetscScalar *x;
  const char        *vecname;
  PetscErrorCode    ierr;
  PetscBool         dim2;
  PetscBool         spoutput;

  PetscFunctionBegin;
  ierr = PetscViewerHDF5OpenGroup(viewer, &file_id, &group);CHKERRQ(ierr);
  ierr = PetscViewerHDF5GetTimestep(viewer, &timestep);CHKERRQ(ierr);
  ierr = PetscViewerHDF5GetBaseDimension2(viewer,&dim2);CHKERRQ(ierr);
  ierr = PetscViewerHDF5GetSPOutput(viewer,&spoutput);CHKERRQ(ierr);

  ierr = VecGetDM(xin,&dm);CHKERRQ(ierr);
  if (!dm) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");
  da = (DM_DA*)dm->data;
  ierr = DMGetDimension(dm, &dimension);CHKERRQ(ierr);

  /* Create the dataspace for the dataset.
   *
   * dims - holds the current dimensions of the dataset
   *
   * maxDims - holds the maximum dimensions of the dataset (unlimited
   * for the number of time steps with the current dimensions for the
   * other dimensions; so only additional time steps can be added).
   *
   * chunkDims - holds the size of a single time step (required to
   * permit extending dataset).
   */
  dim = 0;
  if (timestep >= 0) {
    /* leading (slowest) dimension indexes the time steps */
    dims[dim]      = timestep+1;
    maxDims[dim]   = H5S_UNLIMITED;
    chunkDims[dim] = 1;
    ++dim;
  }
  if (dimension == 3) {
    ierr = PetscHDF5IntCast(da->P,dims+dim);CHKERRQ(ierr);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  if (dimension > 1) {
    ierr = PetscHDF5IntCast(da->N,dims+dim);CHKERRQ(ierr);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  ierr = PetscHDF5IntCast(da->M,dims+dim);CHKERRQ(ierr);
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
  if (da->w > 1 || dim2) {
    /* trailing dimension for the dof components (always present when dim2 requested) */
    ierr = PetscHDF5IntCast(da->w,dims+dim);CHKERRQ(ierr);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  /* complex scalars are stored as an extra innermost dimension of length 2 (re,im) */
  dims[dim]      = 2;
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
#endif

  ierr = VecGetHDF5ChunkSize(da, xin, dimension, timestep, chunkDims); CHKERRQ(ierr);

  PetscStackCallHDF5Return(filespace,H5Screate_simple,(dim, dims, maxDims));

#if defined(PETSC_USE_REAL_SINGLE)
  memscalartype  = H5T_NATIVE_FLOAT;
  filescalartype = H5T_NATIVE_FLOAT;
#elif defined(PETSC_USE_REAL___FLOAT128)
#error "HDF5 output with 128 bit floats not supported."
#else
  memscalartype = H5T_NATIVE_DOUBLE;
  /* optionally down-convert to single precision in the file */
  if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
  else filescalartype = H5T_NATIVE_DOUBLE;
#endif

  /* Create the dataset with default properties and close filespace */
  ierr = PetscObjectGetName((PetscObject)xin,&vecname);CHKERRQ(ierr);
  if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
    /* Create chunk */
    PetscStackCallHDF5Return(chunkspace,H5Pcreate,(H5P_DATASET_CREATE));
    PetscStackCallHDF5(H5Pset_chunk,(chunkspace, dim, chunkDims));

#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
    PetscStackCallHDF5Return(dset_id,H5Dcreate2,(group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
#else
    PetscStackCallHDF5Return(dset_id,H5Dcreate,(group, vecname, filescalartype, filespace, H5P_DEFAULT));
#endif
  } else {
    /* dataset already exists (later timestep): reopen and grow it */
    PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
    PetscStackCallHDF5(H5Dset_extent,(dset_id, dims));
  }
  PetscStackCallHDF5(H5Sclose,(filespace));

  /* Each process defines a dataset and writes it to the hyperslab in the file */
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  if (dimension == 3) {ierr = PetscHDF5IntCast(da->zs,offset + dim++);CHKERRQ(ierr);}
  if (dimension > 1)  {ierr = PetscHDF5IntCast(da->ys,offset + dim++);CHKERRQ(ierr);}
  /* da->xs counts unknowns, not grid points, hence the division by dof */
  ierr = PetscHDF5IntCast(da->xs/da->w,offset + dim++);CHKERRQ(ierr);
  if (da->w > 1 || dim2) offset[dim++] = 0;
#if defined(PETSC_USE_COMPLEX)
  offset[dim++] = 0;
#endif
  dim = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  if (dimension == 3) {ierr = PetscHDF5IntCast(da->ze - da->zs,count + dim++);CHKERRQ(ierr);}
  if (dimension > 1)  {ierr = PetscHDF5IntCast(da->ye - da->ys,count + dim++);CHKERRQ(ierr);}
  ierr = PetscHDF5IntCast((da->xe - da->xs)/da->w,count + dim++);CHKERRQ(ierr);
  if (da->w > 1 || dim2) {ierr = PetscHDF5IntCast(da->w,count + dim++);CHKERRQ(ierr);}
#if defined(PETSC_USE_COMPLEX)
  count[dim++] = 2;
#endif
  PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
  PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
  PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  /* Create property list for collective dataset write */
  PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER));
#if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
  PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE));
#endif
  /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

  ierr = VecGetArrayRead(xin, &x);CHKERRQ(ierr);
  PetscStackCallHDF5(H5Dwrite,(dset_id, memscalartype, memspace, filespace, plist_id, x));
  PetscStackCallHDF5(H5Fflush,(file_id, H5F_SCOPE_GLOBAL));
  ierr = VecRestoreArrayRead(xin, &x);CHKERRQ(ierr);

  /* Close/release resources */
  if (group != file_id) {
    PetscStackCallHDF5(H5Gclose,(group));
  }
  PetscStackCallHDF5(H5Pclose,(plist_id));
  PetscStackCallHDF5(H5Sclose,(filespace));
  PetscStackCallHDF5(H5Sclose,(memspace));
  PetscStackCallHDF5(H5Dclose,(dset_id));
  ierr = PetscInfo1(xin,"Wrote Vec object with name %s\n",vecname);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif

extern PetscErrorCode VecView_MPI_Draw_DA1d(Vec,PetscViewer);

#if defined(PETSC_HAVE_MPIIO)
#undef __FUNCT__
#define __FUNCT__ "DMDAArrayMPIIO"
/*
  Read (write == PETSC_FALSE) or write (write == PETSC_TRUE) the DMDA vector xin
  through the binary viewer using MPI-IO, with each process transferring its own
  subarray of the global (natural-ordering) array.
*/
static PetscErrorCode DMDAArrayMPIIO(DM da,PetscViewer viewer,Vec xin,PetscBool write)
{
  PetscErrorCode    ierr;
  MPI_File          mfdes;
  PetscMPIInt       gsizes[4],lsizes[4],lstarts[4],asiz,dof;
  MPI_Datatype      view;
  const PetscScalar *array;
  MPI_Offset        off;
  MPI_Aint          ub,ul;
  PetscInt          type,rows,vecrows,tr[2];
  DM_DA             *dd = (DM_DA*)da->data;
  PetscBool         skipheader;

  PetscFunctionBegin;
  ierr = VecGetSize(xin,&vecrows);CHKERRQ(ierr);
  ierr = PetscViewerBinaryGetSkipHeader(viewer,&skipheader);CHKERRQ(ierr);
  if (!write) {
    /* Read vector header and validate it against this DMDA vector. */
    if (!skipheader) {
      ierr = PetscViewerBinaryRead(viewer,tr,2,NULL,PETSC_INT);CHKERRQ(ierr);
      type = tr[0];
      rows = tr[1];
      if (type != VEC_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Not vector next in file");
      if (rows != vecrows) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Vector in file not same size as DMDA vector");
    }
  } else {
    /* Write vector header (classid + global size) unless suppressed. */
    tr[0] = VEC_FILE_CLASSID;
    tr[1] = vecrows;
    if (!skipheader) {
      ierr = PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
  }

  /* Describe this process's portion of the global array: dimension 0 is the dof
     interleave, then x, y, z.  xs/xe count unknowns, hence the divisions by dof. */
  ierr = PetscMPIIntCast(dd->w,&dof);CHKERRQ(ierr);
  gsizes[0] = dof;
  ierr = PetscMPIIntCast(dd->M,gsizes+1);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->N,gsizes+2);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->P,gsizes+3);CHKERRQ(ierr);
  lsizes[0] = dof;
  ierr = PetscMPIIntCast((dd->xe-dd->xs)/dof,lsizes+1);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->ye-dd->ys,lsizes+2);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->ze-dd->zs,lsizes+3);CHKERRQ(ierr);
  lstarts[0] = 0;
  ierr = PetscMPIIntCast(dd->xs/dof,lstarts+1);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->ys,lstarts+2);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->zs,lstarts+3);CHKERRQ(ierr);
  /* da->dim+1 dimensions: the spatial dimensions plus the leading dof dimension */
  ierr = MPI_Type_create_subarray(da->dim+1,gsizes,lsizes,lstarts,MPI_ORDER_FORTRAN,MPIU_SCALAR,&view);CHKERRQ(ierr);
  ierr = MPI_Type_commit(&view);CHKERRQ(ierr);

  ierr = PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);CHKERRQ(ierr);
  ierr = PetscViewerBinaryGetMPIIOOffset(viewer,&off);CHKERRQ(ierr);
  ierr = MPI_File_set_view(mfdes,off,MPIU_SCALAR,view,(char*)"native",MPI_INFO_NULL);CHKERRQ(ierr);
  ierr = VecGetArrayRead(xin,&array);CHKERRQ(ierr);
  /* number of local scalars; unused trailing dimensions of lsizes are 0 in <3d, so clamp to 1 */
  asiz = lsizes[1]*(lsizes[2] > 0 ? lsizes[2] : 1)*(lsizes[3] > 0 ? lsizes[3] : 1)*dof;
  if (write) {
    ierr = MPIU_File_write_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE);CHKERRQ(ierr);
  } else {
    ierr = MPIU_File_read_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE);CHKERRQ(ierr);
  }
  /* advance the viewer's file offset by the extent of the full (global) subarray type */
  ierr = MPI_Type_get_extent(view,&ul,&ub);CHKERRQ(ierr);
  ierr = PetscViewerBinaryAddMPIIOOffset(viewer,ub);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(xin,&array);CHKERRQ(ierr);
  ierr = MPI_Type_free(&view);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif

#undef __FUNCT__
#define __FUNCT__ "VecView_MPI_DA"
/*
  VecView() implementation for vectors created from a DMDA: dispatches on the
  viewer type (draw, VTK, HDF5, binary/MPI-IO, or the default natural-ordering path).
*/
PetscErrorCode VecView_MPI_DA(Vec xin,PetscViewer viewer)
{
  DM                da;
  PetscErrorCode    ierr;
  PetscInt          dim;
  Vec               natural;
  PetscBool         isdraw,isvtk;
#if defined(PETSC_HAVE_HDF5)
  PetscBool         ishdf5;
#endif
  const char        *prefix,*name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = VecGetDM(xin,&da);CHKERRQ(ierr);
  if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);CHKERRQ(ierr);
#if defined(PETSC_HAVE_HDF5)
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);CHKERRQ(ierr);
#endif
  if (isdraw) {
    /* graphical contour plots exist only for 1d and 2d DMDAs */
    ierr = DMDAGetInfo(da,&dim,0,0,0,0,0,0,0,0,0,0,0,0);CHKERRQ(ierr);
    if (dim == 1) {
      ierr = VecView_MPI_Draw_DA1d(xin,viewer);CHKERRQ(ierr);
    } else if (dim == 2) {
      ierr = VecView_MPI_Draw_DA2d(xin,viewer);CHKERRQ(ierr);
    } else SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot graphically view vector associated with this dimensional DMDA %D",dim);
  } else if (isvtk) {           /* Duplicate the Vec and hold a reference to the DM */
    Vec Y;
    ierr = PetscObjectReference((PetscObject)da);CHKERRQ(ierr);
    ierr =
VecDuplicate(xin,&Y);CHKERRQ(ierr);
    if (((PetscObject)xin)->name) {
      /* If xin was named, copy the name over to Y. The duplicate names are safe because nobody else will ever see Y. */
      ierr = PetscObjectSetName((PetscObject)Y,((PetscObject)xin)->name);CHKERRQ(ierr);
    }
    ierr = VecCopy(xin,Y);CHKERRQ(ierr);
    /* the VTK viewer takes ownership of Y and of the reference to da taken above;
       the actual file is written later by DMDAVTKWriteAll */
    ierr = PetscViewerVTKAddField(viewer,(PetscObject)da,DMDAVTKWriteAll,PETSC_VTK_POINT_FIELD,(PetscObject)Y);CHKERRQ(ierr);
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    ierr = VecView_MPI_HDF5_DA(xin,viewer);CHKERRQ(ierr);
#endif
  } else {
#if defined(PETSC_HAVE_MPIIO)
    PetscBool isbinary,isMPIIO;

    /* binary viewer with MPI-IO enabled bypasses the natural-ordering scatter entirely */
    ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
    if (isbinary) {
      ierr = PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);CHKERRQ(ierr);
      if (isMPIIO) {
        ierr = DMDAArrayMPIIO(da,viewer,xin,PETSC_TRUE);CHKERRQ(ierr);
        PetscFunctionReturn(0);
      }
    }
#endif

    /* call viewer on natural ordering */
    ierr = PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix);CHKERRQ(ierr);
    ierr = DMDACreateNaturalVector(da,&natural);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)natural,prefix);CHKERRQ(ierr);
    ierr = DMDAGlobalToNaturalBegin(da,xin,INSERT_VALUES,natural);CHKERRQ(ierr);
    ierr = DMDAGlobalToNaturalEnd(da,xin,INSERT_VALUES,natural);CHKERRQ(ierr);
    /* propagate the vector's name so the output object is labelled like xin */
    ierr = PetscObjectGetName((PetscObject)xin,&name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject)natural,name);CHKERRQ(ierr);

    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      /* temporarily remove viewer format so it won't trigger in the VecView() */
      ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_DEFAULT);CHKERRQ(ierr);
    }

    ierr = VecView(natural,viewer);CHKERRQ(ierr);

    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      /* append MATLAB reshape/squeeze commands to the binary viewer's .info file */
      MPI_Comm   comm;
      FILE       *info;
      const char *fieldname;
      char       fieldbuf[256];
      PetscInt   dim,ni,nj,nk,pi,pj,pk,dof,n;

      /* set the viewer format back into the viewer */
      ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
      ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
      ierr = PetscViewerBinaryGetInfoPointer(viewer,&info);CHKERRQ(ierr);
      ierr = DMDAGetInfo(da,&dim,&ni,&nj,&nk,&pi,&pj,&pk,&dof,0,0,0,0,0);CHKERRQ(ierr);
      ierr = PetscFPrintf(comm,info,"#--- begin code written by PetscViewerBinary for MATLAB format ---#\n");CHKERRQ(ierr);
      ierr = PetscFPrintf(comm,info,"#$$ tmp = PetscBinaryRead(fd); \n");CHKERRQ(ierr);
      if (dim == 1) { ierr = PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d);\n",dof,ni);CHKERRQ(ierr); }
      if (dim == 2) { ierr = PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d,%d);\n",dof,ni,nj);CHKERRQ(ierr); }
      if (dim == 3) { ierr = PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d,%d,%d);\n",dof,ni,nj,nk);CHKERRQ(ierr); }

      for (n=0; n<dof; n++) {
        ierr = DMDAGetFieldName(da,n,&fieldname);CHKERRQ(ierr);
        if (!fieldname || !fieldname[0]) {
          /* unnamed fields become field0, field1, ... */
          ierr = PetscSNPrintf(fieldbuf,sizeof fieldbuf,"field%D",n);CHKERRQ(ierr);
          fieldname = fieldbuf;
        }
        if (dim == 1) { ierr = PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%d,:))';\n",name,fieldname,n+1);CHKERRQ(ierr); }
        if (dim == 2) { ierr = PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%d,:,:))';\n",name,fieldname,n+1);CHKERRQ(ierr); }
        if (dim == 3) { ierr = PetscFPrintf(comm,info,"#$$ Set.%s.%s = permute(squeeze(tmp(%d,:,:,:)),[2 1 3]);\n",name,fieldname,n+1);CHKERRQ(ierr);}
      }
      ierr = PetscFPrintf(comm,info,"#$$ clear tmp; \n");CHKERRQ(ierr);
      ierr = PetscFPrintf(comm,info,"#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n");CHKERRQ(ierr);
    }

    ierr = VecDestroy(&natural);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_HDF5)
#undef __FUNCT__
#define __FUNCT__
"VecLoad_HDF5_DA" 767 PetscErrorCode VecLoad_HDF5_DA(Vec xin, PetscViewer viewer) 768 { 769 DM da; 770 PetscErrorCode ierr; 771 hsize_t dim,rdim; 772 hsize_t dims[6]={0},count[6]={0},offset[6]={0}; 773 PetscInt dimension,timestep,dofInd; 774 PetscScalar *x; 775 const char *vecname; 776 hid_t filespace; /* file dataspace identifier */ 777 hid_t plist_id; /* property list identifier */ 778 hid_t dset_id; /* dataset identifier */ 779 hid_t memspace; /* memory dataspace identifier */ 780 hid_t file_id,group; 781 hid_t scalartype; /* scalar type (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */ 782 DM_DA *dd; 783 PetscBool dim2 = PETSC_FALSE; 784 785 PetscFunctionBegin; 786 #if defined(PETSC_USE_REAL_SINGLE) 787 scalartype = H5T_NATIVE_FLOAT; 788 #elif defined(PETSC_USE_REAL___FLOAT128) 789 #error "HDF5 output with 128 bit floats not supported." 790 #else 791 scalartype = H5T_NATIVE_DOUBLE; 792 #endif 793 794 ierr = PetscViewerHDF5OpenGroup(viewer, &file_id, &group);CHKERRQ(ierr); 795 ierr = PetscViewerHDF5GetTimestep(viewer, ×tep);CHKERRQ(ierr); 796 ierr = PetscObjectGetName((PetscObject)xin,&vecname);CHKERRQ(ierr); 797 ierr = VecGetDM(xin,&da);CHKERRQ(ierr); 798 dd = (DM_DA*)da->data; 799 ierr = DMGetDimension(da, &dimension);CHKERRQ(ierr); 800 801 /* Open dataset */ 802 #if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800) 803 PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT)); 804 #else 805 PetscStackCallHDF5Return(dset_id,H5Dopen,(group, vecname)); 806 #endif 807 808 /* Retrieve the dataspace for the dataset */ 809 PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id)); 810 PetscStackCallHDF5Return(rdim,H5Sget_simple_extent_dims,(filespace, dims, NULL)); 811 812 /* Expected dimension for holding the dof's */ 813 #if defined(PETSC_USE_COMPLEX) 814 dofInd = rdim-2; 815 #else 816 dofInd = rdim-1; 817 #endif 818 819 /* The expected number of dimensions, assuming basedimension2 = false */ 820 dim = dimension; 821 if (dd->w > 1) 
++dim; 822 if (timestep >= 0) ++dim; 823 #if defined(PETSC_USE_COMPLEX) 824 ++dim; 825 #endif 826 827 /* In this case the input dataset have one extra, unexpected dimension. */ 828 if (rdim == dim+1) { 829 /* In this case the block size unity */ 830 if (dd->w == 1 && dims[dofInd] == 1) dim2 = PETSC_TRUE; 831 832 /* Special error message for the case where dof does not match the input file */ 833 else if (dd->w != (PetscInt) dims[dofInd]) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Number of dofs in file is %D, not %D as expected",(PetscInt)dims[dofInd],dd->w); 834 835 /* Other cases where rdim != dim cannot be handled currently */ 836 } else if (rdim != dim) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file is %d, not %d as expected with dof = %D",rdim,dim,dd->w); 837 838 /* Set up the hyperslab size */ 839 dim = 0; 840 if (timestep >= 0) { 841 offset[dim] = timestep; 842 count[dim] = 1; 843 ++dim; 844 } 845 if (dimension == 3) { 846 ierr = PetscHDF5IntCast(dd->zs,offset + dim);CHKERRQ(ierr); 847 ierr = PetscHDF5IntCast(dd->ze - dd->zs,count + dim);CHKERRQ(ierr); 848 ++dim; 849 } 850 if (dimension > 1) { 851 ierr = PetscHDF5IntCast(dd->ys,offset + dim);CHKERRQ(ierr); 852 ierr = PetscHDF5IntCast(dd->ye - dd->ys,count + dim);CHKERRQ(ierr); 853 ++dim; 854 } 855 ierr = PetscHDF5IntCast(dd->xs/dd->w,offset + dim);CHKERRQ(ierr); 856 ierr = PetscHDF5IntCast((dd->xe - dd->xs)/dd->w,count + dim);CHKERRQ(ierr); 857 ++dim; 858 if (dd->w > 1 || dim2) { 859 offset[dim] = 0; 860 ierr = PetscHDF5IntCast(dd->w,count + dim);CHKERRQ(ierr); 861 ++dim; 862 } 863 #if defined(PETSC_USE_COMPLEX) 864 offset[dim] = 0; 865 count[dim] = 2; 866 ++dim; 867 #endif 868 869 /* Create the memory and filespace */ 870 PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL)); 871 PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL)); 872 873 /* Create property list for collective dataset write */ 874 
PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER)); 875 #if defined(PETSC_HAVE_H5PSET_FAPL_MPIO) 876 PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE)); 877 #endif 878 /* To read dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */ 879 880 ierr = VecGetArray(xin, &x);CHKERRQ(ierr); 881 PetscStackCallHDF5(H5Dread,(dset_id, scalartype, memspace, filespace, plist_id, x)); 882 ierr = VecRestoreArray(xin, &x);CHKERRQ(ierr); 883 884 /* Close/release resources */ 885 if (group != file_id) { 886 PetscStackCallHDF5(H5Gclose,(group)); 887 } 888 PetscStackCallHDF5(H5Pclose,(plist_id)); 889 PetscStackCallHDF5(H5Sclose,(filespace)); 890 PetscStackCallHDF5(H5Sclose,(memspace)); 891 PetscStackCallHDF5(H5Dclose,(dset_id)); 892 PetscFunctionReturn(0); 893 } 894 #endif 895 896 #undef __FUNCT__ 897 #define __FUNCT__ "VecLoad_Binary_DA" 898 PetscErrorCode VecLoad_Binary_DA(Vec xin, PetscViewer viewer) 899 { 900 DM da; 901 PetscErrorCode ierr; 902 Vec natural; 903 const char *prefix; 904 PetscInt bs; 905 PetscBool flag; 906 DM_DA *dd; 907 #if defined(PETSC_HAVE_MPIIO) 908 PetscBool isMPIIO; 909 #endif 910 911 PetscFunctionBegin; 912 ierr = VecGetDM(xin,&da);CHKERRQ(ierr); 913 dd = (DM_DA*)da->data; 914 #if defined(PETSC_HAVE_MPIIO) 915 ierr = PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);CHKERRQ(ierr); 916 if (isMPIIO) { 917 ierr = DMDAArrayMPIIO(da,viewer,xin,PETSC_FALSE);CHKERRQ(ierr); 918 PetscFunctionReturn(0); 919 } 920 #endif 921 922 ierr = PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix);CHKERRQ(ierr); 923 ierr = DMDACreateNaturalVector(da,&natural);CHKERRQ(ierr); 924 ierr = PetscObjectSetName((PetscObject)natural,((PetscObject)xin)->name);CHKERRQ(ierr); 925 ierr = PetscObjectSetOptionsPrefix((PetscObject)natural,prefix);CHKERRQ(ierr); 926 ierr = VecLoad(natural,viewer);CHKERRQ(ierr); 927 ierr = DMDANaturalToGlobalBegin(da,natural,INSERT_VALUES,xin);CHKERRQ(ierr); 928 ierr = 
DMDANaturalToGlobalEnd(da,natural,INSERT_VALUES,xin);CHKERRQ(ierr); 929 ierr = VecDestroy(&natural);CHKERRQ(ierr); 930 ierr = PetscInfo(xin,"Loading vector from natural ordering into DMDA\n");CHKERRQ(ierr); 931 ierr = PetscOptionsGetInt(NULL,((PetscObject)xin)->prefix,"-vecload_block_size",&bs,&flag);CHKERRQ(ierr); 932 if (flag && bs != dd->w) { 933 ierr = PetscInfo2(xin,"Block size in file %D not equal to DMDA's dof %D\n",bs,dd->w);CHKERRQ(ierr); 934 } 935 PetscFunctionReturn(0); 936 } 937 938 #undef __FUNCT__ 939 #define __FUNCT__ "VecLoad_Default_DA" 940 PetscErrorCode VecLoad_Default_DA(Vec xin, PetscViewer viewer) 941 { 942 PetscErrorCode ierr; 943 DM da; 944 PetscBool isbinary; 945 #if defined(PETSC_HAVE_HDF5) 946 PetscBool ishdf5; 947 #endif 948 949 PetscFunctionBegin; 950 ierr = VecGetDM(xin,&da);CHKERRQ(ierr); 951 if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA"); 952 953 #if defined(PETSC_HAVE_HDF5) 954 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);CHKERRQ(ierr); 955 #endif 956 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 957 958 if (isbinary) { 959 ierr = VecLoad_Binary_DA(xin,viewer);CHKERRQ(ierr); 960 #if defined(PETSC_HAVE_HDF5) 961 } else if (ishdf5) { 962 ierr = VecLoad_HDF5_DA(xin,viewer);CHKERRQ(ierr); 963 #endif 964 } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for vector loading", ((PetscObject)viewer)->type_name); 965 PetscFunctionReturn(0); 966 } 967