/*
     Plots vectors obtained with DMDACreate2d()
*/

#include <petsc/private/dmdaimpl.h>      /*I  "petscdmda.h"   I*/
#include <petscdraw.h>
#include <petscviewerhdf5.h>

/*
        The data that is passed into the graphics callback
*/
typedef struct {
  PetscMPIInt       rank;                        /* MPI rank; only rank 0 draws the axis decorations */
  PetscInt          m,n,dof,k;                   /* ghosted local grid sizes, # of components, component currently drawn */
  PetscReal         xmin,xmax,ymin,ymax,min,max; /* coordinate bounding box and contour value range */
  const PetscScalar *xy,*v;                      /* interleaved (x,y) node coordinates and ghosted field values */
  PetscBool         showaxis,showgrid;           /* decorations toggled by -draw_contour_axis / -draw_contour_grid */
  const char        *name0,*name1;               /* coordinate direction labels (may be NULL) */
} ZoomCtx;

/*
       This does the drawing for one particular field
    in one particular set of coordinates. It is a callback
    called from PetscDrawZoom()
*/
PetscErrorCode VecView_MPI_Draw_DA2d_Zoom(PetscDraw draw,void *ctx)
{
  ZoomCtx           *zctx = (ZoomCtx*)ctx;
  PetscErrorCode    ierr;
  PetscInt          m,n,i,j,k,dof,id,c1,c2,c3,c4;
  PetscReal         min,max,x1,x2,x3,x4,y_1,y2,y3,y4;
  const PetscScalar *xy,*v;

  PetscFunctionBegin;
  m   = zctx->m;
  n   = zctx->n;
  dof = zctx->dof;
  k   = zctx->k;
  xy  = zctx->xy;
  v   = zctx->v;
  min = zctx->min;
  max = zctx->max;

  /* PetscDraw the contour plot patch: each quad of the ghosted local grid is
     rendered as two shaded triangles whose vertex colors encode the field value */
  ierr = PetscDrawCollectiveBegin(draw);CHKERRQ(ierr);
  for (j=0; j<n-1; j++) {
    for (i=0; i<m-1; i++) {
      id  = i+j*m;                 /* lower-left vertex of the quad */
      x1  = PetscRealPart(xy[2*id]);
      y_1 = PetscRealPart(xy[2*id+1]);
      c1  = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      id = i+j*m+1;                /* lower-right vertex */
      x2 = PetscRealPart(xy[2*id]);
      y2 = PetscRealPart(xy[2*id+1]);
      c2 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      id = i+j*m+1+m;              /* upper-right vertex */
      x3 = PetscRealPart(xy[2*id]);
      y3 = PetscRealPart(xy[2*id+1]);
      c3 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      id = i+j*m+m;                /* upper-left vertex */
      x4 = PetscRealPart(xy[2*id]);
      y4 = PetscRealPart(xy[2*id+1]);
      c4 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      ierr = PetscDrawTriangle(draw,x1,y_1,x2,y2,x3,y3,c1,c2,c3);CHKERRQ(ierr);
      ierr = PetscDrawTriangle(draw,x1,y_1,x3,y3,x4,y4,c1,c3,c4);CHKERRQ(ierr);
      if (zctx->showgrid) {
        /* outline the quad in black when -draw_contour_grid was requested */
        ierr = PetscDrawLine(draw,x1,y_1,x2,y2,PETSC_DRAW_BLACK);CHKERRQ(ierr);
        ierr = PetscDrawLine(draw,x2,y2,x3,y3,PETSC_DRAW_BLACK);CHKERRQ(ierr);
        ierr = PetscDrawLine(draw,x3,y3,x4,y4,PETSC_DRAW_BLACK);CHKERRQ(ierr);
        ierr = PetscDrawLine(draw,x4,y4,x1,y_1,PETSC_DRAW_BLACK);CHKERRQ(ierr);
      }
    }
  }
  /* only rank 0 draws the axis labels and the coordinate extremes */
  if (zctx->showaxis && !zctx->rank) {
    if (zctx->name0 || zctx->name1) {
      PetscReal xl,yl,xr,yr,x,y;
      ierr = PetscDrawGetCoordinates(draw,&xl,&yl,&xr,&yr);CHKERRQ(ierr);
      x  = xl + .30*(xr - xl);
      xl = xl + .01*(xr - xl);
      y  = yr - .30*(yr - yl);
      yl = yl + .01*(yr - yl);
      if (zctx->name0) {ierr = PetscDrawString(draw,x,yl,PETSC_DRAW_BLACK,zctx->name0);CHKERRQ(ierr);}
      if (zctx->name1) {ierr = PetscDrawStringVertical(draw,xl,y,PETSC_DRAW_BLACK,zctx->name1);CHKERRQ(ierr);}
    }
    /*
       Ideally we would use the PetscDrawAxis object to manage displaying the coordinate limits
       but that may require some refactoring.
    */
    {
      double    xmin = (double)zctx->xmin, ymin = (double)zctx->ymin;
      double    xmax = (double)zctx->xmax, ymax = (double)zctx->ymax;
      char      value[16];
      size_t    len;
      PetscReal w;
      ierr = PetscSNPrintf(value,16,"%0.2e",xmin);CHKERRQ(ierr);
      ierr = PetscDrawString(draw,xmin,ymin - .05*(ymax - ymin),PETSC_DRAW_BLACK,value);CHKERRQ(ierr);
      ierr = PetscSNPrintf(value,16,"%0.2e",xmax);CHKERRQ(ierr);
      ierr = PetscStrlen(value,&len);CHKERRQ(ierr);
      ierr = PetscDrawStringGetSize(draw,&w,NULL);CHKERRQ(ierr);
      /* right-align the xmax label by backing off its width in string units */
      ierr = PetscDrawString(draw,xmax - len*w,ymin - .05*(ymax - ymin),PETSC_DRAW_BLACK,value);CHKERRQ(ierr);
      ierr = PetscSNPrintf(value,16,"%0.2e",ymin);CHKERRQ(ierr);
      ierr = PetscDrawString(draw,xmin - .05*(xmax - xmin),ymin,PETSC_DRAW_BLACK,value);CHKERRQ(ierr);
      ierr = PetscSNPrintf(value,16,"%0.2e",ymax);CHKERRQ(ierr);
      ierr = PetscDrawString(draw,xmin - .05*(xmax - xmin),ymax,PETSC_DRAW_BLACK,value);CHKERRQ(ierr);
    }
  }
  ierr = PetscDrawCollectiveEnd(draw);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   VecView_MPI_Draw_DA2d - Draws a contour plot of each selected component of a
   vector attached to a 2d DMDA, using the PetscDraw viewer. Collective on xin.
*/
PetscErrorCode VecView_MPI_Draw_DA2d(Vec xin,PetscViewer viewer)
{
  DM                 da,dac,dag;
  PetscErrorCode     ierr;
  PetscInt           N,s,M,w,ncoors = 4;
  const PetscInt     *lx,*ly;
  PetscReal          coors[4];
  PetscDraw          draw,popup;
  PetscBool          isnull,useports = PETSC_FALSE;
  MPI_Comm           comm;
  Vec                xlocal,xcoor,xcoorl;
  DMBoundaryType     bx,by;
  DMDAStencilType    st;
  ZoomCtx            zctx;
  PetscDrawViewPorts *ports = NULL;
  PetscViewerFormat  format;
  PetscInt           *displayfields;
  PetscInt           ndisplayfields,i,nbounds;
  const PetscReal    *bounds;

  PetscFunctionBegin;
  zctx.showgrid = PETSC_FALSE;
  zctx.showaxis = PETSC_TRUE;

  ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
  ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr);
  if (isnull) PetscFunctionReturn(0);

  ierr = PetscViewerDrawGetBounds(viewer,&nbounds,&bounds);CHKERRQ(ierr);

  ierr = VecGetDM(xin,&da);CHKERRQ(ierr);
  if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");

  ierr = PetscObjectGetComm((PetscObject)xin,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&zctx.rank);CHKERRQ(ierr);

  /* zctx.m/zctx.n temporarily hold the processor grid; they are overwritten with
     the ghosted local sizes by DMDAGetGhostCorners() further below */
  ierr = DMDAGetInfo(da,0,&M,&N,0,&zctx.m,&zctx.n,0,&w,&s,&bx,&by,0,&st);CHKERRQ(ierr);
  ierr = DMDAGetOwnershipRanges(da,&lx,&ly,NULL);CHKERRQ(ierr);

  /*
     Obtain a sequential vector that is going to contain the local values plus ONE layer of
     ghosted values to draw the graphics from. We also need its corresponding DMDA (dac) that will
     update the local values plus ONE layer of ghost values.
  */
  ierr = PetscObjectQuery((PetscObject)da,"GraphicsGhosted",(PetscObject*)&xlocal);CHKERRQ(ierr);
  if (!xlocal) {
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      /*
         if original da is not of stencil width one, or periodic or not a box stencil then
         create a special DMDA to handle one level of ghost points for graphics
      */
      ierr = DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,w,1,lx,ly,&dac);CHKERRQ(ierr);
      ierr = DMSetUp(dac);CHKERRQ(ierr);
      /* NOTE(review): "auxilary" typo in the info string; left unchanged because it is runtime output */
      ierr = PetscInfo(da,"Creating auxilary DMDA for managing graphics ghost points\n");CHKERRQ(ierr);
    } else {
      /* otherwise we can use the da we already have */
      dac = da;
    }
    /* create local vector for holding ghosted values used in graphics */
    ierr = DMCreateLocalVector(dac,&xlocal);CHKERRQ(ierr);
    if (dac != da) {
      /* don't keep any public reference of this DMDA, it is only available through xlocal */
      ierr = PetscObjectDereference((PetscObject)dac);CHKERRQ(ierr);
    } else {
      /* remove association between xlocal and da, because below we compose in the opposite
         direction and if we left this connection we'd get a loop, so the objects could
         never be destroyed */
      ierr = PetscObjectRemoveReference((PetscObject)xlocal,"__PETSc_dm");CHKERRQ(ierr);
    }
    ierr = PetscObjectCompose((PetscObject)da,"GraphicsGhosted",(PetscObject)xlocal);CHKERRQ(ierr);
    ierr = PetscObjectDereference((PetscObject)xlocal);CHKERRQ(ierr);
  } else {
    /* cached ghosted vector exists; recover the DMDA that manages it */
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      ierr = VecGetDM(xlocal, &dac);CHKERRQ(ierr);
    } else {
      dac = da;
    }
  }

  /*
     Get local (ghosted) values of vector
  */
  ierr = DMGlobalToLocalBegin(dac,xin,INSERT_VALUES,xlocal);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(dac,xin,INSERT_VALUES,xlocal);CHKERRQ(ierr);
  ierr = VecGetArrayRead(xlocal,&zctx.v);CHKERRQ(ierr);

  /*
     Get coordinates of nodes
  */
  ierr = DMGetCoordinates(da,&xcoor);CHKERRQ(ierr);
  if (!xcoor) {
    ierr = DMDASetUniformCoordinates(da,0.0,1.0,0.0,1.0,0.0,0.0);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da,&xcoor);CHKERRQ(ierr);
  }

  /*
     Determine the min and max coordinates in plot
  */
  ierr = VecStrideMin(xcoor,0,NULL,&zctx.xmin);CHKERRQ(ierr);
  ierr = VecStrideMax(xcoor,0,NULL,&zctx.xmax);CHKERRQ(ierr);
  ierr = VecStrideMin(xcoor,1,NULL,&zctx.ymin);CHKERRQ(ierr);
  ierr = VecStrideMax(xcoor,1,NULL,&zctx.ymax);CHKERRQ(ierr);
  ierr = PetscOptionsGetBool(NULL,NULL,"-draw_contour_axis",&zctx.showaxis,NULL);CHKERRQ(ierr);
  if (zctx.showaxis) {
    /* pad the drawing box by 5% so the axis annotations fit inside the window */
    coors[0] = zctx.xmin - .05*(zctx.xmax - zctx.xmin); coors[1] = zctx.ymin - .05*(zctx.ymax - zctx.ymin);
    coors[2] = zctx.xmax + .05*(zctx.xmax - zctx.xmin); coors[3] = zctx.ymax + .05*(zctx.ymax - zctx.ymin);
  } else {
    coors[0] = zctx.xmin; coors[1] = zctx.ymin; coors[2] = zctx.xmax; coors[3] = zctx.ymax;
  }
  /* user may override the computed box from the command line */
  ierr = PetscOptionsGetRealArray(NULL,NULL,"-draw_coordinates",coors,&ncoors,NULL);CHKERRQ(ierr);
  ierr = PetscInfo4(da,"Preparing DMDA 2d contour plot coordinates %g %g %g %g\n",(double)coors[0],(double)coors[1],(double)coors[2],(double)coors[3]);CHKERRQ(ierr);

  /*
     Get local ghosted version of coordinates
  */
  ierr = PetscObjectQuery((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject*)&xcoorl);CHKERRQ(ierr);
  if (!xcoorl) {
    /* create DMDA to get local version of graphics */
    ierr = DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,2,1,lx,ly,&dag);CHKERRQ(ierr);
    ierr = DMSetUp(dag);CHKERRQ(ierr);
    ierr = PetscInfo(dag,"Creating auxilary DMDA for managing graphics coordinates ghost points\n");CHKERRQ(ierr);
    ierr = DMCreateLocalVector(dag,&xcoorl);CHKERRQ(ierr);
    ierr = PetscObjectCompose((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject)xcoorl);CHKERRQ(ierr);
    ierr = PetscObjectDereference((PetscObject)dag);CHKERRQ(ierr);
    ierr = PetscObjectDereference((PetscObject)xcoorl);CHKERRQ(ierr);
  } else {
    ierr = VecGetDM(xcoorl,&dag);CHKERRQ(ierr);
  }
  ierr = DMGlobalToLocalBegin(dag,xcoor,INSERT_VALUES,xcoorl);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(dag,xcoor,INSERT_VALUES,xcoorl);CHKERRQ(ierr);
  ierr = VecGetArrayRead(xcoorl,&zctx.xy);CHKERRQ(ierr);
  ierr = DMDAGetCoordinateName(da,0,&zctx.name0);CHKERRQ(ierr);
  ierr = DMDAGetCoordinateName(da,1,&zctx.name1);CHKERRQ(ierr);

  /*
     Get information about size of area each processor must do graphics for
  */
  ierr = DMDAGetInfo(dac,NULL,&M,&N,NULL,NULL,NULL,NULL,&zctx.dof,NULL,&bx,&by,NULL,NULL);CHKERRQ(ierr);
  ierr = DMDAGetGhostCorners(dac,NULL,NULL,NULL,&zctx.m,&zctx.n,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetBool(NULL,NULL,"-draw_contour_grid",&zctx.showgrid,NULL);CHKERRQ(ierr);

  ierr = DMDASelectFields(da,&ndisplayfields,&displayfields);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  ierr = PetscOptionsGetBool(NULL,NULL,"-draw_ports",&useports,NULL);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_DRAW_PORTS) useports = PETSC_TRUE;
  if (useports) {
    /* all fields share one window split into viewports */
    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawCheckResizedWindow(draw);CHKERRQ(ierr);
    ierr = PetscDrawClear(draw);CHKERRQ(ierr);
    ierr = PetscDrawViewPortsCreate(draw,ndisplayfields,&ports);CHKERRQ(ierr);
  }

  /*
     Loop over each field; drawing each in a different window
  */
  for (i=0; i<ndisplayfields; i++) {
    zctx.k = displayfields[i];

    /* determine the min and max value in plot */
    ierr = VecStrideMin(xin,zctx.k,NULL,&zctx.min);CHKERRQ(ierr);
    ierr = VecStrideMax(xin,zctx.k,NULL,&zctx.max);CHKERRQ(ierr);
    if (zctx.k < nbounds) {
      /* bounds supplied by the viewer override the computed extremes */
      zctx.min = bounds[2*zctx.k];
      zctx.max = bounds[2*zctx.k+1];
    }
    if (zctx.min == zctx.max) {
      /* avoid a degenerate color scale for constant fields */
      zctx.min -= 1.e-12;
      zctx.max += 1.e-12;
    }
    ierr = PetscInfo2(da,"DMDA 2d contour plot min %g max %g\n",(double)zctx.min,(double)zctx.max);CHKERRQ(ierr);

    if (useports) {
      ierr = PetscDrawViewPortsSet(ports,i);CHKERRQ(ierr);
    } else {
      const char *title;
      ierr = PetscViewerDrawGetDraw(viewer,i,&draw);CHKERRQ(ierr);
      ierr = DMDAGetFieldName(da,zctx.k,&title);CHKERRQ(ierr);
      if (title) {ierr = PetscDrawSetTitle(draw,title);CHKERRQ(ierr);}
    }

    ierr = PetscDrawGetPopup(draw,&popup);CHKERRQ(ierr);
    ierr = PetscDrawScalePopup(popup,zctx.min,zctx.max);CHKERRQ(ierr);
    ierr = PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);CHKERRQ(ierr);
    ierr = PetscDrawZoom(draw,VecView_MPI_Draw_DA2d_Zoom,&zctx);CHKERRQ(ierr);
    if (!useports) {ierr = PetscDrawSave(draw);CHKERRQ(ierr);}
  }
  if (useports) {
    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawSave(draw);CHKERRQ(ierr);
  }

  ierr = PetscDrawViewPortsDestroy(ports);CHKERRQ(ierr);
  ierr = PetscFree(displayfields);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(xcoorl,&zctx.xy);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(xlocal,&zctx.v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_HDF5)
/*
   VecGetHDF5ChunkSize - Chooses HDF5 chunk dimensions for writing a DMDA vector.

   chunkDims is both input (the per-timestep dataset dimensions) and output (the
   possibly reduced chunk dimensions that respect HDF5's limits on chunk count
   and on the 4 GiB maximum chunk size).
*/
static PetscErrorCode VecGetHDF5ChunkSize(DM_DA *da, Vec xin, PetscInt dimension, PetscInt timestep, hsize_t *chunkDims)
{
  PetscMPIInt    comm_size;
  PetscErrorCode ierr;
  hsize_t        chunk_size, target_size, dim;
  hsize_t        vec_size = sizeof(PetscScalar)*da->M*da->N*da->P*da->w;
  hsize_t        avg_local_vec_size,KiB = 1024,MiB = KiB*KiB,GiB = MiB*KiB,min_size = MiB;
  hsize_t        max_chunks = 64*KiB;                                             /* HDF5 internal limitation */
  hsize_t        max_chunk_size = 4*GiB;                                          /* HDF5 internal limitation */
  PetscInt       zslices=da->p, yslices=da->n, xslices=da->m;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)xin), &comm_size);CHKERRQ(ierr);
  avg_local_vec_size = (hsize_t) ceil(vec_size*1.0/comm_size);      /* we will attempt to use this as the chunk size */

  target_size = (hsize_t) ceil(PetscMin(vec_size,PetscMin(max_chunk_size,PetscMax(avg_local_vec_size,PetscMax(ceil(vec_size*1.0/max_chunks),min_size)))));
  /* following line uses sizeof(PetscReal) instead of sizeof(PetscScalar) because the last dimension of chunkDims[] captures the 2* when complex numbers are being used */
  chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(PetscReal);

  /*
   if size/rank > max_chunk_size, we need radical measures: even going down to
   avg_local_vec_size is not enough, so we simply use chunk size of 4 GiB no matter
   what, composed in the most efficient way possible.
   N.B. this minimises the number of chunks, which may or may not be the optimal
   solution. In a BG, for example, the optimal solution is probably to make # chunks = #
   IO nodes involved, but this author has no access to a BG to figure out how to
   reliably find the right number. And even then it may or may not be enough.
   */
  if (avg_local_vec_size > max_chunk_size) {
    /* check if we can just split local z-axis: is that enough? */
    zslices = (PetscInt)ceil(vec_size*1.0/(da->p*max_chunk_size))*zslices;
    if (zslices > da->P) {
      /* lattice is too large in xy-directions, splitting z only is not enough */
      zslices = da->P;
      yslices = (PetscInt)ceil(vec_size*1.0/(zslices*da->n*max_chunk_size))*yslices;
      if (yslices > da->N) {
        /* lattice is too large in x-direction, splitting along z, y is not enough */
        yslices = da->N;
        xslices = (PetscInt)ceil(vec_size*1.0/(zslices*yslices*da->m*max_chunk_size))*xslices;
      }
    }
    dim = 0;
    if (timestep >= 0) {
      ++dim;                     /* skip over the (unlimited) time dimension */
    }
    /* prefer to split z-axis, even down to planar slices */
    if (dimension == 3) {
      chunkDims[dim++] = (hsize_t) da->P/zslices;
      chunkDims[dim++] = (hsize_t) da->N/yslices;
      chunkDims[dim++] = (hsize_t) da->M/xslices;
    } else {
      /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
      chunkDims[dim++] = (hsize_t) da->N/yslices;
      chunkDims[dim++] = (hsize_t) da->M/xslices;
    }
    /* NOTE(review): sizeof(double) here, but sizeof(PetscReal) above -- these differ when PETSc
       is built with single precision; confirm which is intended. The value is not read again
       before returning, so this assignment is currently a dead store. */
    chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(double);
  } else {
    if (target_size < chunk_size) {
      /* only change the defaults if target_size < chunk_size */
      dim = 0;
      if (timestep >= 0) {
        ++dim;
      }
      /* prefer to split z-axis, even down to planar slices */
      if (dimension == 3) {
        /* try splitting the z-axis to core-size bits, i.e. divide chunk size by # comm_size in z-direction */
        if (target_size >= chunk_size/da->p) {
          /* just make chunks the size of <local_z>x<whole_world_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t) ceil(da->P*1.0/da->p);
        } else {
          /* oops, just splitting the z-axis is NOT ENOUGH, need to split more; let's be
             radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t) ceil(da->P*1.0/da->p);
          chunkDims[dim++] = (hsize_t) ceil(da->N*1.0/da->n);
          chunkDims[dim++] = (hsize_t) ceil(da->M*1.0/da->m);
        }
      } else {
        /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
        if (target_size >= chunk_size/da->n) {
          /* just make chunks the size of <local_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t) ceil(da->N*1.0/da->n);
        } else {
          /* oops, just splitting the y-axis is NOT ENOUGH, need to split more; let's be
             radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t) ceil(da->N*1.0/da->n);
          chunkDims[dim++] = (hsize_t) ceil(da->M*1.0/da->m);
        }

      }
      /* NOTE(review): dead store with sizeof(double) -- see the matching note above */
      chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(double);
    } else {
      /* precomputed chunks are fine, we don't need to do anything */
    }
  }
  PetscFunctionReturn(0);
}
#endif

#if defined(PETSC_HAVE_HDF5)
/*
   VecView_MPI_HDF5_DA - Writes a DMDA vector to an HDF5 file as one (possibly
   timestep-extendable, chunked) dataset, using collective parallel I/O.
   Collective on xin.
*/
PetscErrorCode VecView_MPI_HDF5_DA(Vec xin,PetscViewer viewer)
{
  DM                dm;
  DM_DA             *da;
  hid_t             filespace;      /* file dataspace identifier */
  hid_t             chunkspace;     /* chunk dataset property identifier */
  hid_t             plist_id;       /* property list identifier */
  hid_t             dset_id;        /* dataset identifier */
  hid_t             memspace;       /* memory dataspace identifier */
  hid_t             file_id;
  hid_t             group;
  hid_t             memscalartype;  /* scalar type for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hid_t             filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hsize_t           dim;
  hsize_t           maxDims[6]={0}, dims[6]={0}, chunkDims[6]={0}, count[6]={0}, offset[6]={0}; /* we depend on these being sane later on */
  PetscInt          timestep, dimension;
  const PetscScalar *x;
  const char        *vecname;
  PetscErrorCode    ierr;
  PetscBool         dim2;
  PetscBool         spoutput;

  PetscFunctionBegin;
  ierr = PetscViewerHDF5OpenGroup(viewer, &file_id, &group);CHKERRQ(ierr);
  ierr = PetscViewerHDF5GetTimestep(viewer, &timestep);CHKERRQ(ierr);
  ierr = PetscViewerHDF5GetBaseDimension2(viewer,&dim2);CHKERRQ(ierr);
  ierr = PetscViewerHDF5GetSPOutput(viewer,&spoutput);CHKERRQ(ierr);

  ierr = VecGetDM(xin,&dm);CHKERRQ(ierr);
  if (!dm) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");
  da = (DM_DA*)dm->data;
  ierr = DMGetDimension(dm, &dimension);CHKERRQ(ierr);

  /* Create the dataspace for the dataset.
   *
   * dims - holds the current dimensions of the dataset
   *
   * maxDims - holds the maximum dimensions of the dataset (unlimited
   * for the number of time steps with the current dimensions for the
   * other dimensions; so only additional time steps can be added).
   *
   * chunkDims - holds the size of a single time step (required to
   * permit extending dataset).
   */
  dim = 0;
  if (timestep >= 0) {
    dims[dim]      = timestep+1;
    maxDims[dim]   = H5S_UNLIMITED;
    chunkDims[dim] = 1;
    ++dim;
  }
  if (dimension == 3) {
    ierr = PetscHDF5IntCast(da->P,dims+dim);CHKERRQ(ierr);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  if (dimension > 1) {
    ierr = PetscHDF5IntCast(da->N,dims+dim);CHKERRQ(ierr);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  ierr = PetscHDF5IntCast(da->M,dims+dim);CHKERRQ(ierr);
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
  if (da->w > 1 || dim2) {
    /* extra trailing dimension for the dof components */
    ierr = PetscHDF5IntCast(da->w,dims+dim);CHKERRQ(ierr);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  /* trailing dimension of 2 for (real,imag) parts */
  dims[dim]      = 2;
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
#endif

  ierr = VecGetHDF5ChunkSize(da, xin, dimension, timestep, chunkDims); CHKERRQ(ierr);

  PetscStackCallHDF5Return(filespace,H5Screate_simple,(dim, dims, maxDims));

#if defined(PETSC_USE_REAL_SINGLE)
  memscalartype = H5T_NATIVE_FLOAT;
  filescalartype = H5T_NATIVE_FLOAT;
#elif defined(PETSC_USE_REAL___FLOAT128)
#error "HDF5 output with 128 bit floats not supported."
#else
  memscalartype = H5T_NATIVE_DOUBLE;
  if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
  else filescalartype = H5T_NATIVE_DOUBLE;
#endif

  /* Create the dataset with default properties and close filespace */
  ierr = PetscObjectGetName((PetscObject)xin,&vecname);CHKERRQ(ierr);
  if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
    /* Create chunk */
    PetscStackCallHDF5Return(chunkspace,H5Pcreate,(H5P_DATASET_CREATE));
    PetscStackCallHDF5(H5Pset_chunk,(chunkspace, dim, chunkDims));

#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
    PetscStackCallHDF5Return(dset_id,H5Dcreate2,(group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
#else
    PetscStackCallHDF5Return(dset_id,H5Dcreate,(group, vecname, filescalartype, filespace, H5P_DEFAULT));
#endif
  } else {
    /* dataset already exists (earlier timestep): extend it instead of recreating */
    PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
    PetscStackCallHDF5(H5Dset_extent,(dset_id, dims));
  }
  PetscStackCallHDF5(H5Sclose,(filespace));

  /* Each process defines a dataset and writes it to the hyperslab in the file */
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  if (dimension == 3) {ierr = PetscHDF5IntCast(da->zs,offset + dim++);CHKERRQ(ierr);}
  if (dimension > 1)  {ierr = PetscHDF5IntCast(da->ys,offset + dim++);CHKERRQ(ierr);}
  /* da->xs counts unknowns, not grid points; divide by the block size */
  ierr = PetscHDF5IntCast(da->xs/da->w,offset + dim++);CHKERRQ(ierr);
  if (da->w > 1 || dim2) offset[dim++] = 0;
#if defined(PETSC_USE_COMPLEX)
  offset[dim++] = 0;
#endif
  dim = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  if (dimension == 3) {ierr = PetscHDF5IntCast(da->ze - da->zs,count + dim++);CHKERRQ(ierr);}
  if (dimension > 1)  {ierr = PetscHDF5IntCast(da->ye - da->ys,count + dim++);CHKERRQ(ierr);}
  ierr = PetscHDF5IntCast((da->xe - da->xs)/da->w,count + dim++);CHKERRQ(ierr);
  if (da->w > 1 || dim2) {ierr =
PetscHDF5IntCast(da->w,count + dim++);CHKERRQ(ierr);}
#if defined(PETSC_USE_COMPLEX)
  count[dim++] = 2;
#endif
  PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
  PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
  PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  /* Create property list for collective dataset write */
  PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER));
#if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
  PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE));
#endif
  /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

  ierr = VecGetArrayRead(xin, &x);CHKERRQ(ierr);
  PetscStackCallHDF5(H5Dwrite,(dset_id, memscalartype, memspace, filespace, plist_id, x));
  PetscStackCallHDF5(H5Fflush,(file_id, H5F_SCOPE_GLOBAL));
  ierr = VecRestoreArrayRead(xin, &x);CHKERRQ(ierr);

  /* Close/release resources */
  if (group != file_id) {
    PetscStackCallHDF5(H5Gclose,(group));
  }
  PetscStackCallHDF5(H5Pclose,(plist_id));
  PetscStackCallHDF5(H5Sclose,(filespace));
  PetscStackCallHDF5(H5Sclose,(memspace));
  PetscStackCallHDF5(H5Dclose,(dset_id));
  ierr = PetscInfo1(xin,"Wrote Vec object with name %s\n",vecname);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif

extern PetscErrorCode VecView_MPI_Draw_DA1d(Vec,PetscViewer);

#if defined(PETSC_HAVE_MPIIO)
/*
   DMDAArrayMPIIO - Reads (write == PETSC_FALSE) or writes (write == PETSC_TRUE)
   a DMDA vector through MPI-IO using a subarray file view so the data lands in
   natural (global) ordering on disk. Collective on da.
*/
static PetscErrorCode DMDAArrayMPIIO(DM da,PetscViewer viewer,Vec xin,PetscBool write)
{
  PetscErrorCode    ierr;
  MPI_File          mfdes;
  PetscMPIInt       gsizes[4],lsizes[4],lstarts[4],asiz,dof;
  MPI_Datatype      view;
  const PetscScalar *array;
  MPI_Offset        off;
  MPI_Aint          ub,ul;
  PetscInt          type,rows,vecrows,tr[2];
  DM_DA             *dd = (DM_DA*)da->data;
  PetscBool         skipheader;

  PetscFunctionBegin;
  ierr = VecGetSize(xin,&vecrows);CHKERRQ(ierr);
  ierr = PetscViewerBinaryGetSkipHeader(viewer,&skipheader);CHKERRQ(ierr);
  if (!write) {
    /* Read vector header and validate it against this DMDA. */
    if (!skipheader) {
      ierr = PetscViewerBinaryRead(viewer,tr,2,NULL,PETSC_INT);CHKERRQ(ierr);
      type = tr[0];
      rows = tr[1];
      if (type != VEC_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Not vector next in file");
      if (rows != vecrows) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Vector in file not same size as DMDA vector");
    }
  } else {
    /* Write vector header. */
    tr[0] = VEC_FILE_CLASSID;
    tr[1] = vecrows;
    if (!skipheader) {
      ierr = PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
  }

  /* describe the global array and this rank's sub-block; dimension 0 is the dof block */
  ierr = PetscMPIIntCast(dd->w,&dof);CHKERRQ(ierr);
  gsizes[0] = dof;
  ierr = PetscMPIIntCast(dd->M,gsizes+1);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->N,gsizes+2);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->P,gsizes+3);CHKERRQ(ierr);
  lsizes[0] = dof;
  ierr = PetscMPIIntCast((dd->xe-dd->xs)/dof,lsizes+1);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->ye-dd->ys,lsizes+2);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->ze-dd->zs,lsizes+3);CHKERRQ(ierr);
  lstarts[0] = 0;
  ierr = PetscMPIIntCast(dd->xs/dof,lstarts+1);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->ys,lstarts+2);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->zs,lstarts+3);CHKERRQ(ierr);
  ierr = MPI_Type_create_subarray(da->dim+1,gsizes,lsizes,lstarts,MPI_ORDER_FORTRAN,MPIU_SCALAR,&view);CHKERRQ(ierr);
  ierr = MPI_Type_commit(&view);CHKERRQ(ierr);

  ierr = PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);CHKERRQ(ierr);
  ierr = PetscViewerBinaryGetMPIIOOffset(viewer,&off);CHKERRQ(ierr);
  ierr = MPI_File_set_view(mfdes,off,MPIU_SCALAR,view,(char*)"native",MPI_INFO_NULL);CHKERRQ(ierr);
  ierr = VecGetArrayRead(xin,&array);CHKERRQ(ierr);
  /* local element count; y/z sizes are 0 for lower-dimensional DMDAs, so clamp to 1 */
  asiz = lsizes[1]*(lsizes[2] > 0 ? lsizes[2] : 1)*(lsizes[3] > 0 ? lsizes[3] : 1)*dof;
  if (write) {
    ierr = MPIU_File_write_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE);CHKERRQ(ierr);
  } else {
    ierr = MPIU_File_read_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE);CHKERRQ(ierr);
  }
  ierr = MPI_Type_get_extent(view,&ul,&ub);CHKERRQ(ierr);
  ierr = PetscViewerBinaryAddMPIIOOffset(viewer,ub);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(xin,&array);CHKERRQ(ierr);
  ierr = MPI_Type_free(&view);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif

/*
   VecView_MPI_DA - Viewer dispatch for vectors attached to a DMDA: draws 1d/2d
   plots, hands off to VTK/HDF5/MPI-IO writers, or views a natural-ordering copy
   through the generic viewer. Collective on xin.
*/
PetscErrorCode VecView_MPI_DA(Vec xin,PetscViewer viewer)
{
  DM                da;
  PetscErrorCode    ierr;
  PetscInt          dim;
  Vec               natural;
  PetscBool         isdraw,isvtk;
#if defined(PETSC_HAVE_HDF5)
  PetscBool         ishdf5;
#endif
  const char        *prefix,*name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = VecGetDM(xin,&da);CHKERRQ(ierr);
  if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);CHKERRQ(ierr);
#if defined(PETSC_HAVE_HDF5)
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);CHKERRQ(ierr);
#endif
  if (isdraw) {
    ierr = DMDAGetInfo(da,&dim,0,0,0,0,0,0,0,0,0,0,0,0);CHKERRQ(ierr);
    if (dim == 1) {
      ierr = VecView_MPI_Draw_DA1d(xin,viewer);CHKERRQ(ierr);
    } else if (dim == 2) {
      ierr = VecView_MPI_Draw_DA2d(xin,viewer);CHKERRQ(ierr);
    } else SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot graphically view vector associated with this dimensional DMDA %D",dim);
  } else if (isvtk) {           /* Duplicate the Vec */
    Vec Y;
    ierr = VecDuplicate(xin,&Y);CHKERRQ(ierr);
    if (((PetscObject)xin)->name) {
      /* If xin was named, copy the name over to Y. The duplicate names are safe because nobody else will ever see Y. */
      ierr = PetscObjectSetName((PetscObject)Y,((PetscObject)xin)->name);CHKERRQ(ierr);
    }
    ierr = VecCopy(xin,Y);CHKERRQ(ierr);
    /* the VTK viewer takes ownership of Y and writes it out later */
    ierr = PetscViewerVTKAddField(viewer,(PetscObject)da,DMDAVTKWriteAll,PETSC_VTK_POINT_FIELD,(PetscObject)Y);CHKERRQ(ierr);
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    ierr = VecView_MPI_HDF5_DA(xin,viewer);CHKERRQ(ierr);
#endif
  } else {
#if defined(PETSC_HAVE_MPIIO)
    PetscBool isbinary,isMPIIO;

    ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
    if (isbinary) {
      ierr = PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);CHKERRQ(ierr);
      if (isMPIIO) {
        /* MPI-IO writes directly in natural ordering; no scatter needed */
        ierr = DMDAArrayMPIIO(da,viewer,xin,PETSC_TRUE);CHKERRQ(ierr);
        PetscFunctionReturn(0);
      }
    }
#endif

    /* call viewer on natural ordering */
    ierr = PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix);CHKERRQ(ierr);
    ierr = DMDACreateNaturalVector(da,&natural);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)natural,prefix);CHKERRQ(ierr);
    ierr = DMDAGlobalToNaturalBegin(da,xin,INSERT_VALUES,natural);CHKERRQ(ierr);
    ierr = DMDAGlobalToNaturalEnd(da,xin,INSERT_VALUES,natural);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject)xin,&name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject)natural,name);CHKERRQ(ierr);

    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      /* temporarily remove viewer format so it won't trigger in the VecView() */
      ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_DEFAULT);CHKERRQ(ierr);
    }

    ierr = VecView(natural,viewer);CHKERRQ(ierr);

    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      MPI_Comm   comm;
      FILE       *info;
      const char *fieldname;
      char       fieldbuf[256];
      PetscInt   dim,ni,nj,nk,pi,pj,pk,dof,n;

      /* set the viewer format back into the
viewer */
      ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
      ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
      ierr = PetscViewerBinaryGetInfoPointer(viewer,&info);CHKERRQ(ierr);
      ierr = DMDAGetInfo(da,&dim,&ni,&nj,&nk,&pi,&pj,&pk,&dof,0,0,0,0,0);CHKERRQ(ierr);
      /* emit MATLAB reshape commands into the .info file so the binary data can be
         reloaded with the DMDA's natural layout and per-field names */
      ierr = PetscFPrintf(comm,info,"#--- begin code written by PetscViewerBinary for MATLAB format ---#\n");CHKERRQ(ierr);
      ierr = PetscFPrintf(comm,info,"#$$ tmp = PetscBinaryRead(fd); \n");CHKERRQ(ierr);
      if (dim == 1) { ierr = PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d);\n",dof,ni);CHKERRQ(ierr); }
      if (dim == 2) { ierr = PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d,%d);\n",dof,ni,nj);CHKERRQ(ierr); }
      if (dim == 3) { ierr = PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d,%d,%d);\n",dof,ni,nj,nk);CHKERRQ(ierr); }

      for (n=0; n<dof; n++) {
        ierr = DMDAGetFieldName(da,n,&fieldname);CHKERRQ(ierr);
        if (!fieldname || !fieldname[0]) {
          /* unnamed field: fall back to a generated "fieldN" name */
          ierr = PetscSNPrintf(fieldbuf,sizeof fieldbuf,"field%D",n);CHKERRQ(ierr);
          fieldname = fieldbuf;
        }
        if (dim == 1) { ierr = PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%d,:))';\n",name,fieldname,n+1);CHKERRQ(ierr); }
        if (dim == 2) { ierr = PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%d,:,:))';\n",name,fieldname,n+1);CHKERRQ(ierr); }
        if (dim == 3) { ierr = PetscFPrintf(comm,info,"#$$ Set.%s.%s = permute(squeeze(tmp(%d,:,:,:)),[2 1 3]);\n",name,fieldname,n+1);CHKERRQ(ierr);}
      }
      ierr = PetscFPrintf(comm,info,"#$$ clear tmp; \n");CHKERRQ(ierr);
      ierr = PetscFPrintf(comm,info,"#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n");CHKERRQ(ierr);
    }

    ierr = VecDestroy(&natural);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_HDF5)
/*
   VecLoad_HDF5_DA - Loads a DMDA vector from an HDF5 file written by
   VecView_MPI_HDF5_DA. Collective on xin.
*/
PetscErrorCode VecLoad_HDF5_DA(Vec xin, PetscViewer viewer)
{
  DM             da;
  PetscErrorCode ierr;
  hsize_t        dim,rdim;
  hsize_t
dims[6]={0},count[6]={0},offset[6]={0}; 765 PetscInt dimension,timestep,dofInd; 766 PetscScalar *x; 767 const char *vecname; 768 hid_t filespace; /* file dataspace identifier */ 769 hid_t plist_id; /* property list identifier */ 770 hid_t dset_id; /* dataset identifier */ 771 hid_t memspace; /* memory dataspace identifier */ 772 hid_t file_id,group; 773 hid_t scalartype; /* scalar type (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */ 774 DM_DA *dd; 775 PetscBool dim2 = PETSC_FALSE; 776 777 PetscFunctionBegin; 778 #if defined(PETSC_USE_REAL_SINGLE) 779 scalartype = H5T_NATIVE_FLOAT; 780 #elif defined(PETSC_USE_REAL___FLOAT128) 781 #error "HDF5 output with 128 bit floats not supported." 782 #else 783 scalartype = H5T_NATIVE_DOUBLE; 784 #endif 785 786 ierr = PetscViewerHDF5OpenGroup(viewer, &file_id, &group);CHKERRQ(ierr); 787 ierr = PetscViewerHDF5GetTimestep(viewer, ×tep);CHKERRQ(ierr); 788 ierr = PetscObjectGetName((PetscObject)xin,&vecname);CHKERRQ(ierr); 789 ierr = VecGetDM(xin,&da);CHKERRQ(ierr); 790 dd = (DM_DA*)da->data; 791 ierr = DMGetDimension(da, &dimension);CHKERRQ(ierr); 792 793 /* Open dataset */ 794 #if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800) 795 PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT)); 796 #else 797 PetscStackCallHDF5Return(dset_id,H5Dopen,(group, vecname)); 798 #endif 799 800 /* Retrieve the dataspace for the dataset */ 801 PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id)); 802 PetscStackCallHDF5Return(rdim,H5Sget_simple_extent_dims,(filespace, dims, NULL)); 803 804 /* Expected dimension for holding the dof's */ 805 #if defined(PETSC_USE_COMPLEX) 806 dofInd = rdim-2; 807 #else 808 dofInd = rdim-1; 809 #endif 810 811 /* The expected number of dimensions, assuming basedimension2 = false */ 812 dim = dimension; 813 if (dd->w > 1) ++dim; 814 if (timestep >= 0) ++dim; 815 #if defined(PETSC_USE_COMPLEX) 816 ++dim; 817 #endif 818 819 /* In this case the input dataset have one extra, 
unexpected dimension. */ 820 if (rdim == dim+1) { 821 /* In this case the block size unity */ 822 if (dd->w == 1 && dims[dofInd] == 1) dim2 = PETSC_TRUE; 823 824 /* Special error message for the case where dof does not match the input file */ 825 else if (dd->w != (PetscInt) dims[dofInd]) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Number of dofs in file is %D, not %D as expected",(PetscInt)dims[dofInd],dd->w); 826 827 /* Other cases where rdim != dim cannot be handled currently */ 828 } else if (rdim != dim) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file is %d, not %d as expected with dof = %D",rdim,dim,dd->w); 829 830 /* Set up the hyperslab size */ 831 dim = 0; 832 if (timestep >= 0) { 833 offset[dim] = timestep; 834 count[dim] = 1; 835 ++dim; 836 } 837 if (dimension == 3) { 838 ierr = PetscHDF5IntCast(dd->zs,offset + dim);CHKERRQ(ierr); 839 ierr = PetscHDF5IntCast(dd->ze - dd->zs,count + dim);CHKERRQ(ierr); 840 ++dim; 841 } 842 if (dimension > 1) { 843 ierr = PetscHDF5IntCast(dd->ys,offset + dim);CHKERRQ(ierr); 844 ierr = PetscHDF5IntCast(dd->ye - dd->ys,count + dim);CHKERRQ(ierr); 845 ++dim; 846 } 847 ierr = PetscHDF5IntCast(dd->xs/dd->w,offset + dim);CHKERRQ(ierr); 848 ierr = PetscHDF5IntCast((dd->xe - dd->xs)/dd->w,count + dim);CHKERRQ(ierr); 849 ++dim; 850 if (dd->w > 1 || dim2) { 851 offset[dim] = 0; 852 ierr = PetscHDF5IntCast(dd->w,count + dim);CHKERRQ(ierr); 853 ++dim; 854 } 855 #if defined(PETSC_USE_COMPLEX) 856 offset[dim] = 0; 857 count[dim] = 2; 858 ++dim; 859 #endif 860 861 /* Create the memory and filespace */ 862 PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL)); 863 PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL)); 864 865 /* Create property list for collective dataset write */ 866 PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER)); 867 #if defined(PETSC_HAVE_H5PSET_FAPL_MPIO) 868 
PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE)); 869 #endif 870 /* To read dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */ 871 872 ierr = VecGetArray(xin, &x);CHKERRQ(ierr); 873 PetscStackCallHDF5(H5Dread,(dset_id, scalartype, memspace, filespace, plist_id, x)); 874 ierr = VecRestoreArray(xin, &x);CHKERRQ(ierr); 875 876 /* Close/release resources */ 877 if (group != file_id) { 878 PetscStackCallHDF5(H5Gclose,(group)); 879 } 880 PetscStackCallHDF5(H5Pclose,(plist_id)); 881 PetscStackCallHDF5(H5Sclose,(filespace)); 882 PetscStackCallHDF5(H5Sclose,(memspace)); 883 PetscStackCallHDF5(H5Dclose,(dset_id)); 884 PetscFunctionReturn(0); 885 } 886 #endif 887 888 PetscErrorCode VecLoad_Binary_DA(Vec xin, PetscViewer viewer) 889 { 890 DM da; 891 PetscErrorCode ierr; 892 Vec natural; 893 const char *prefix; 894 PetscInt bs; 895 PetscBool flag; 896 DM_DA *dd; 897 #if defined(PETSC_HAVE_MPIIO) 898 PetscBool isMPIIO; 899 #endif 900 901 PetscFunctionBegin; 902 ierr = VecGetDM(xin,&da);CHKERRQ(ierr); 903 dd = (DM_DA*)da->data; 904 #if defined(PETSC_HAVE_MPIIO) 905 ierr = PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);CHKERRQ(ierr); 906 if (isMPIIO) { 907 ierr = DMDAArrayMPIIO(da,viewer,xin,PETSC_FALSE);CHKERRQ(ierr); 908 PetscFunctionReturn(0); 909 } 910 #endif 911 912 ierr = PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix);CHKERRQ(ierr); 913 ierr = DMDACreateNaturalVector(da,&natural);CHKERRQ(ierr); 914 ierr = PetscObjectSetName((PetscObject)natural,((PetscObject)xin)->name);CHKERRQ(ierr); 915 ierr = PetscObjectSetOptionsPrefix((PetscObject)natural,prefix);CHKERRQ(ierr); 916 ierr = VecLoad(natural,viewer);CHKERRQ(ierr); 917 ierr = DMDANaturalToGlobalBegin(da,natural,INSERT_VALUES,xin);CHKERRQ(ierr); 918 ierr = DMDANaturalToGlobalEnd(da,natural,INSERT_VALUES,xin);CHKERRQ(ierr); 919 ierr = VecDestroy(&natural);CHKERRQ(ierr); 920 ierr = PetscInfo(xin,"Loading vector from natural ordering into DMDA\n");CHKERRQ(ierr); 921 ierr = 
PetscOptionsGetInt(NULL,((PetscObject)xin)->prefix,"-vecload_block_size",&bs,&flag);CHKERRQ(ierr); 922 if (flag && bs != dd->w) { 923 ierr = PetscInfo2(xin,"Block size in file %D not equal to DMDA's dof %D\n",bs,dd->w);CHKERRQ(ierr); 924 } 925 PetscFunctionReturn(0); 926 } 927 928 PetscErrorCode VecLoad_Default_DA(Vec xin, PetscViewer viewer) 929 { 930 PetscErrorCode ierr; 931 DM da; 932 PetscBool isbinary; 933 #if defined(PETSC_HAVE_HDF5) 934 PetscBool ishdf5; 935 #endif 936 937 PetscFunctionBegin; 938 ierr = VecGetDM(xin,&da);CHKERRQ(ierr); 939 if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA"); 940 941 #if defined(PETSC_HAVE_HDF5) 942 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);CHKERRQ(ierr); 943 #endif 944 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 945 946 if (isbinary) { 947 ierr = VecLoad_Binary_DA(xin,viewer);CHKERRQ(ierr); 948 #if defined(PETSC_HAVE_HDF5) 949 } else if (ishdf5) { 950 ierr = VecLoad_HDF5_DA(xin,viewer);CHKERRQ(ierr); 951 #endif 952 } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for vector loading", ((PetscObject)viewer)->type_name); 953 PetscFunctionReturn(0); 954 } 955