/*
     Plots vectors obtained with DMDACreate2d()
*/

#include <petsc/private/dmdaimpl.h> /*I "petscdmda.h" I*/
#include <petsc/private/glvisvecimpl.h>
#include <petsc/private/viewerhdf5impl.h>
#include <petscdraw.h>

/*
    The data that is passed into the graphics callback
*/
typedef struct {
  PetscMPIInt       rank;                        /* rank in the vector's communicator; rank 0 draws the axis labels */
  PetscInt          m,n,dof,k;                   /* local (ghosted) grid extents, dof per node, field index being drawn */
  PetscReal         xmin,xmax,ymin,ymax,min,max; /* coordinate bounding box and contour value range */
  const PetscScalar *xy,*v;                      /* interleaved (x,y) node coordinates and ghosted field values */
  PetscBool         showaxis,showgrid;           /* set from -draw_contour_axis / -draw_contour_grid */
  const char        *name0,*name1;               /* optional coordinate (axis) names, may be NULL */
} ZoomCtx;

/*
       This does the drawing for one particular field
    in one particular set of coordinates. It is a callback
    called from PetscDrawZoom()

    Each grid cell (quad) is split into two triangles whose vertex colors
    are interpolated by the draw implementation, producing the contour fill.
    Coordinates are read interleaved from xy (xy[2*id], xy[2*id+1]).
*/
PetscErrorCode VecView_MPI_Draw_DA2d_Zoom(PetscDraw draw,void *ctx)
{
  ZoomCtx           *zctx = (ZoomCtx*)ctx;
  PetscInt          m,n,i,j,k,dof,id,c1,c2,c3,c4;
  PetscReal         min,max,x1,x2,x3,x4,y_1,y2,y3,y4;
  const PetscScalar *xy,*v;

  PetscFunctionBegin;
  m   = zctx->m;
  n   = zctx->n;
  dof = zctx->dof;
  k   = zctx->k;
  xy  = zctx->xy;
  v   = zctx->v;
  min = zctx->min;
  max = zctx->max;

  /* PetscDraw the contour plot patch */
  PetscDrawCollectiveBegin(draw);
  for (j=0; j<n-1; j++) {
    for (i=0; i<m-1; i++) {
      /* lower-left corner of the cell */
      id  = i+j*m;
      x1  = PetscRealPart(xy[2*id]);
      y_1 = PetscRealPart(xy[2*id+1]);
      c1  = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      /* lower-right corner */
      id = i+j*m+1;
      x2 = PetscRealPart(xy[2*id]);
      y2 = PetscRealPart(xy[2*id+1]);
      c2 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      /* upper-right corner */
      id = i+j*m+1+m;
      x3 = PetscRealPart(xy[2*id]);
      y3 = PetscRealPart(xy[2*id+1]);
      c3 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      /* upper-left corner */
      id = i+j*m+m;
      x4 = PetscRealPart(xy[2*id]);
      y4 = PetscRealPart(xy[2*id+1]);
      c4 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      PetscCall(PetscDrawTriangle(draw,x1,y_1,x2,y2,x3,y3,c1,c2,c3));
      PetscCall(PetscDrawTriangle(draw,x1,y_1,x3,y3,x4,y4,c1,c3,c4));
      if (zctx->showgrid) {
        PetscCall(PetscDrawLine(draw,x1,y_1,x2,y2,PETSC_DRAW_BLACK));
        PetscCall(PetscDrawLine(draw,x2,y2,x3,y3,PETSC_DRAW_BLACK));
        PetscCall(PetscDrawLine(draw,x3,y3,x4,y4,PETSC_DRAW_BLACK));
        PetscCall(PetscDrawLine(draw,x4,y4,x1,y_1,PETSC_DRAW_BLACK));
      }
    }
  }
  /* axis names and coordinate limits are drawn by rank 0 only */
  if (zctx->showaxis && !zctx->rank) {
    if (zctx->name0 || zctx->name1) {
      PetscReal xl,yl,xr,yr,x,y;
      PetscCall(PetscDrawGetCoordinates(draw,&xl,&yl,&xr,&yr));
      x  = xl + .30*(xr - xl);
      xl = xl + .01*(xr - xl);
      y  = yr - .30*(yr - yl);
      yl = yl + .01*(yr - yl);
      if (zctx->name0) PetscCall(PetscDrawString(draw,x,yl,PETSC_DRAW_BLACK,zctx->name0));
      if (zctx->name1) PetscCall(PetscDrawStringVertical(draw,xl,y,PETSC_DRAW_BLACK,zctx->name1));
    }
    /*
       Ideally we would use the PetscDrawAxis object to manage displaying the coordinate limits
       but that may require some refactoring.
    */
    {
      double    xmin = (double)zctx->xmin, ymin = (double)zctx->ymin;
      double    xmax = (double)zctx->xmax, ymax = (double)zctx->ymax;
      char      value[16]; size_t len; PetscReal w;
      PetscCall(PetscSNPrintf(value,16,"%0.2e",xmin));
      PetscCall(PetscDrawString(draw,xmin,ymin - .05*(ymax - ymin),PETSC_DRAW_BLACK,value));
      PetscCall(PetscSNPrintf(value,16,"%0.2e",xmax));
      PetscCall(PetscStrlen(value,&len));
      PetscCall(PetscDrawStringGetSize(draw,&w,NULL));
      /* right-align the xmax label so it ends at the right edge */
      PetscCall(PetscDrawString(draw,xmax - len*w,ymin - .05*(ymax - ymin),PETSC_DRAW_BLACK,value));
      PetscCall(PetscSNPrintf(value,16,"%0.2e",ymin));
      PetscCall(PetscDrawString(draw,xmin - .05*(xmax - xmin),ymin,PETSC_DRAW_BLACK,value));
      PetscCall(PetscSNPrintf(value,16,"%0.2e",ymax));
      PetscCall(PetscDrawString(draw,xmin - .05*(xmax - xmin),ymax,PETSC_DRAW_BLACK,value));
    }
  }
  PetscDrawCollectiveEnd(draw);
  PetscFunctionReturn(0);
}

/*
   Draws a contour plot of each selected field of a 2d DMDA vector.

   Options read here: -draw_contour_axis, -draw_contour_grid, -draw_ports,
   -draw_coordinates. The actual rendering is done by the
   VecView_MPI_Draw_DA2d_Zoom() callback above via PetscDrawZoom().
*/
PetscErrorCode VecView_MPI_Draw_DA2d(Vec xin,PetscViewer viewer)
{
  DM                 da,dac,dag;
  PetscInt           N,s,M,w,ncoors = 4;
  const PetscInt     *lx,*ly;
  PetscReal          coors[4];
  PetscDraw          draw,popup;
  PetscBool          isnull,useports = PETSC_FALSE;
  MPI_Comm           comm;
  Vec                xlocal,xcoor,xcoorl;
  DMBoundaryType     bx,by;
  DMDAStencilType    st;
  ZoomCtx            zctx;
  PetscDrawViewPorts *ports = NULL;
  PetscViewerFormat  format;
  PetscInt           *displayfields;
  PetscInt           ndisplayfields,i,nbounds;
  const PetscReal    *bounds;

  PetscFunctionBegin;
  zctx.showgrid = PETSC_FALSE;
  zctx.showaxis = PETSC_TRUE;

  PetscCall(PetscViewerDrawGetDraw(viewer,0,&draw));
  PetscCall(PetscDrawIsNull(draw,&isnull));
  if (isnull) PetscFunctionReturn(0);

  PetscCall(PetscViewerDrawGetBounds(viewer,&nbounds,&bounds));

  PetscCall(VecGetDM(xin,&da));
  PetscCheck(da,PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");

  PetscCall(PetscObjectGetComm((PetscObject)xin,&comm));
  PetscCallMPI(MPI_Comm_rank(comm,&zctx.rank));

  PetscCall(DMDAGetInfo(da,NULL,&M,&N,NULL,&zctx.m,&zctx.n,NULL,&w,&s,&bx,&by,NULL,&st));
  PetscCall(DMDAGetOwnershipRanges(da,&lx,&ly,NULL));

  /*
     Obtain a sequential vector that is going to contain the local values plus ONE layer of
     ghosted values to draw the graphics from. We also need its corresponding DMDA (dac) that will
     update the local values plus ONE layer of ghost values.
  */
  PetscCall(PetscObjectQuery((PetscObject)da,"GraphicsGhosted",(PetscObject*)&xlocal));
  if (!xlocal) {
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      /*
         if original da is not of stencil width one, or periodic or not a box stencil then
         create a special DMDA to handle one level of ghost points for graphics
      */
      PetscCall(DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,w,1,lx,ly,&dac));
      PetscCall(DMSetUp(dac));
      PetscCall(PetscInfo(da,"Creating auxiliary DMDA for managing graphics ghost points\n"));
    } else {
      /* otherwise we can use the da we already have */
      dac = da;
    }
    /* create local vector for holding ghosted values used in graphics */
    PetscCall(DMCreateLocalVector(dac,&xlocal));
    if (dac != da) {
      /* don't keep any public reference of this DMDA, it is only available through xlocal */
      PetscCall(PetscObjectDereference((PetscObject)dac));
    } else {
      /* remove association between xlocal and da, because below we compose in the opposite
         direction and if we left this connection we'd get a loop, so the objects could
         never be destroyed */
      PetscCall(PetscObjectRemoveReference((PetscObject)xlocal,"__PETSc_dm"));
    }
    PetscCall(PetscObjectCompose((PetscObject)da,"GraphicsGhosted",(PetscObject)xlocal));
    PetscCall(PetscObjectDereference((PetscObject)xlocal));
  } else {
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      PetscCall(VecGetDM(xlocal, &dac));
    } else {
      dac = da;
    }
  }

  /*
      Get local (ghosted) values of vector
  */
  PetscCall(DMGlobalToLocalBegin(dac,xin,INSERT_VALUES,xlocal));
  PetscCall(DMGlobalToLocalEnd(dac,xin,INSERT_VALUES,xlocal));
  PetscCall(VecGetArrayRead(xlocal,&zctx.v));

  /*
      Get coordinates of nodes; create default uniform coordinates if none were set
  */
  PetscCall(DMGetCoordinates(da,&xcoor));
  if (!xcoor) {
    PetscCall(DMDASetUniformCoordinates(da,0.0,1.0,0.0,1.0,0.0,0.0));
    PetscCall(DMGetCoordinates(da,&xcoor));
  }

  /*
      Determine the min and max coordinates in plot
  */
  PetscCall(VecStrideMin(xcoor,0,NULL,&zctx.xmin));
  PetscCall(VecStrideMax(xcoor,0,NULL,&zctx.xmax));
  PetscCall(VecStrideMin(xcoor,1,NULL,&zctx.ymin));
  PetscCall(VecStrideMax(xcoor,1,NULL,&zctx.ymax));
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-draw_contour_axis",&zctx.showaxis,NULL));
  if (zctx.showaxis) {
    /* pad the drawing box by 5% so the axis labels fit */
    coors[0] = zctx.xmin - .05*(zctx.xmax - zctx.xmin); coors[1] = zctx.ymin - .05*(zctx.ymax - zctx.ymin);
    coors[2] = zctx.xmax + .05*(zctx.xmax - zctx.xmin); coors[3] = zctx.ymax + .05*(zctx.ymax - zctx.ymin);
  } else {
    coors[0] = zctx.xmin; coors[1] = zctx.ymin; coors[2] = zctx.xmax; coors[3] = zctx.ymax;
  }
  /* user may override the computed box entirely */
  PetscCall(PetscOptionsGetRealArray(NULL,NULL,"-draw_coordinates",coors,&ncoors,NULL));
  PetscCall(PetscInfo(da,"Preparing DMDA 2d contour plot coordinates %g %g %g %g\n",(double)coors[0],(double)coors[1],(double)coors[2],(double)coors[3]));

  /*
      Get local ghosted version of coordinates
  */
  PetscCall(PetscObjectQuery((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject*)&xcoorl));
  if (!xcoorl) {
    /* create DMDA to get local version of graphics coordinates (dof=2 for the interleaved x,y pairs) */
    PetscCall(DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,2,1,lx,ly,&dag));
    PetscCall(DMSetUp(dag));
    PetscCall(PetscInfo(dag,"Creating auxiliary DMDA for managing graphics coordinates ghost points\n"));
    PetscCall(DMCreateLocalVector(dag,&xcoorl));
    PetscCall(PetscObjectCompose((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject)xcoorl));
    PetscCall(PetscObjectDereference((PetscObject)dag));
    PetscCall(PetscObjectDereference((PetscObject)xcoorl));
  } else PetscCall(VecGetDM(xcoorl,&dag));
  PetscCall(DMGlobalToLocalBegin(dag,xcoor,INSERT_VALUES,xcoorl));
  PetscCall(DMGlobalToLocalEnd(dag,xcoor,INSERT_VALUES,xcoorl));
  PetscCall(VecGetArrayRead(xcoorl,&zctx.xy));
  PetscCall(DMDAGetCoordinateName(da,0,&zctx.name0));
  PetscCall(DMDAGetCoordinateName(da,1,&zctx.name1));

  /*
      Get information about size of area each processor must do graphics for
  */
  PetscCall(DMDAGetInfo(dac,NULL,&M,&N,NULL,NULL,NULL,NULL,&zctx.dof,NULL,&bx,&by,NULL,NULL));
  PetscCall(DMDAGetGhostCorners(dac,NULL,NULL,NULL,&zctx.m,&zctx.n,NULL));
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-draw_contour_grid",&zctx.showgrid,NULL));

  PetscCall(DMDASelectFields(da,&ndisplayfields,&displayfields));
  PetscCall(PetscViewerGetFormat(viewer,&format));
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-draw_ports",&useports,NULL));
  if (format == PETSC_VIEWER_DRAW_PORTS) useports = PETSC_TRUE;
  if (useports) {
    PetscCall(PetscViewerDrawGetDraw(viewer,0,&draw));
    PetscCall(PetscDrawCheckResizedWindow(draw));
    PetscCall(PetscDrawClear(draw));
    PetscCall(PetscDrawViewPortsCreate(draw,ndisplayfields,&ports));
  }

  /*
     Loop over each field; drawing each in a different window
  */
  for (i=0; i<ndisplayfields; i++) {
    zctx.k = displayfields[i];

    /* determine the min and max value in plot */
    PetscCall(VecStrideMin(xin,zctx.k,NULL,&zctx.min));
    PetscCall(VecStrideMax(xin,zctx.k,NULL,&zctx.max));
    /* viewer-supplied bounds (if any) override the computed range */
    if (zctx.k < nbounds) {
      zctx.min = bounds[2*zctx.k];
      zctx.max = bounds[2*zctx.k+1];
    }
    /* avoid a degenerate color range for constant fields */
    if (zctx.min == zctx.max) {
      zctx.min -= 1.e-12;
      zctx.max += 1.e-12;
    }
    PetscCall(PetscInfo(da,"DMDA 2d contour plot min %g max %g\n",(double)zctx.min,(double)zctx.max));

    if (useports) {
      PetscCall(PetscDrawViewPortsSet(ports,i));
    } else {
      const char *title;
      PetscCall(PetscViewerDrawGetDraw(viewer,i,&draw));
      PetscCall(DMDAGetFieldName(da,zctx.k,&title));
      if (title) PetscCall(PetscDrawSetTitle(draw,title));
    }

    PetscCall(PetscDrawGetPopup(draw,&popup));
    PetscCall(PetscDrawScalePopup(popup,zctx.min,zctx.max));
    PetscCall(PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]));
    PetscCall(PetscDrawZoom(draw,VecView_MPI_Draw_DA2d_Zoom,&zctx));
    if (!useports) PetscCall(PetscDrawSave(draw));
  }
  if (useports) {
    PetscCall(PetscViewerDrawGetDraw(viewer,0,&draw));
    PetscCall(PetscDrawSave(draw));
  }

  PetscCall(PetscDrawViewPortsDestroy(ports));
  PetscCall(PetscFree(displayfields));
  PetscCall(VecRestoreArrayRead(xcoorl,&zctx.xy));
  PetscCall(VecRestoreArrayRead(xlocal,&zctx.v));
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_HDF5)
/*
   Heuristically adjusts chunkDims[] (in/out, length 6) so the resulting HDF5
   chunk size respects HDF5's internal limits (at most 64K chunks, each at most
   4 GiB) while trying to match the average per-rank data size.
*/
static PetscErrorCode VecGetHDF5ChunkSize(DM_DA *da, Vec xin, PetscInt dimension, PetscInt timestep, hsize_t *chunkDims)
{
  PetscMPIInt comm_size;
  hsize_t     chunk_size, target_size, dim;
  hsize_t     vec_size = sizeof(PetscScalar)*da->M*da->N*da->P*da->w;
  hsize_t     avg_local_vec_size,KiB = 1024,MiB = KiB*KiB,GiB = MiB*KiB,min_size = MiB;
  hsize_t     max_chunks = 64*KiB;                                              /* HDF5 internal limitation */
  hsize_t     max_chunk_size = 4*GiB;                                           /* HDF5 internal limitation */
  PetscInt    zslices=da->p, yslices=da->n, xslices=da->m;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)xin), &comm_size));
  avg_local_vec_size = (hsize_t) PetscCeilInt(vec_size,comm_size);              /* we will attempt to use this as the chunk size */

  target_size = (hsize_t) PetscMin((PetscInt64)vec_size,PetscMin((PetscInt64)max_chunk_size,PetscMax((PetscInt64)avg_local_vec_size,PetscMax(PetscCeilInt64(vec_size,max_chunks),(PetscInt64)min_size))));
  /* following line uses sizeof(PetscReal) instead of sizeof(PetscScalar) because the last dimension of chunkDims[] captures the 2* when complex numbers are being used */
  chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(PetscReal);

  /*
   if size/rank > max_chunk_size, we need radical measures: even going down to
   avg_local_vec_size is not enough, so we simply use chunk size of 4 GiB no matter
   what, composed in the most efficient way possible.
   N.B. this minimises the number of chunks, which may or may not be the optimal
   solution. In a BG, for example, the optimal solution is probably to make # chunks = #
   IO nodes involved, but this author has no access to a BG to figure out how to
   reliably find the right number. And even then it may or may not be enough.
  */
  if (avg_local_vec_size > max_chunk_size) {
    /* check if we can just split local z-axis: is that enough? */
    zslices = PetscCeilInt(vec_size,da->p*max_chunk_size)*zslices;
    if (zslices > da->P) {
      /* lattice is too large in xy-directions, splitting z only is not enough */
      zslices = da->P;
      yslices = PetscCeilInt(vec_size,zslices*da->n*max_chunk_size)*yslices;
      if (yslices > da->N) {
        /* lattice is too large in x-direction, splitting along z, y is not enough */
        yslices = da->N;
        xslices = PetscCeilInt(vec_size,zslices*yslices*da->m*max_chunk_size)*xslices;
      }
    }
    dim = 0;
    if (timestep >= 0) {
      ++dim;
    }
    /* prefer to split z-axis, even down to planar slices */
    if (dimension == 3) {
      chunkDims[dim++] = (hsize_t) da->P/zslices;
      chunkDims[dim++] = (hsize_t) da->N/yslices;
      chunkDims[dim++] = (hsize_t) da->M/xslices;
    } else {
      /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
      chunkDims[dim++] = (hsize_t) da->N/yslices;
      chunkDims[dim++] = (hsize_t) da->M/xslices;
    }
    chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(double);
  } else {
    if (target_size < chunk_size) {
      /* only change the defaults if target_size < chunk_size */
      dim = 0;
      if (timestep >= 0) {
        ++dim;
      }
      /* prefer to split z-axis, even down to planar slices */
      if (dimension == 3) {
        /* try splitting the z-axis to core-size bits, i.e. divide chunk size by # comm_size in z-direction */
        if (target_size >= chunk_size/da->p) {
          /* just make chunks the size of <local_z>x<whole_world_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t) PetscCeilInt(da->P,da->p);
        } else {
          /* oops, just splitting the z-axis is NOT ENOUGH, need to split more; let's be
             radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t) PetscCeilInt(da->P,da->p);
          chunkDims[dim++] = (hsize_t) PetscCeilInt(da->N,da->n);
          chunkDims[dim++] = (hsize_t) PetscCeilInt(da->M,da->m);
        }
      } else {
        /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
        if (target_size >= chunk_size/da->n) {
          /* just make chunks the size of <local_z>x<whole_world_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t) PetscCeilInt(da->N,da->n);
        } else {
          /* oops, just splitting the z-axis is NOT ENOUGH, need to split more; let's be
             radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t) PetscCeilInt(da->N,da->n);
          chunkDims[dim++] = (hsize_t) PetscCeilInt(da->M,da->m);
        }

      }
      chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(double);
    } else {
      /* precomputed chunks are fine, we don't need to do anything */
    }
  }
  PetscFunctionReturn(0);
}
#endif

#if defined(PETSC_HAVE_HDF5)
/*
   Writes a DMDA vector to an HDF5 file as a (possibly extendable) dataset laid
   out as [timestep][z][y][x][dof][complex], omitting the dimensions that do
   not apply for this DMDA/configuration.
*/
PetscErrorCode VecView_MPI_HDF5_DA(Vec xin,PetscViewer viewer)
{
  PetscViewer_HDF5  *hdf5 = (PetscViewer_HDF5*) viewer->data;
  DM                dm;
  DM_DA             *da;
  hid_t             filespace;  /* file dataspace identifier */
  hid_t             chunkspace; /* chunk dataset property identifier */
  hid_t             dset_id;    /* dataset identifier */
  hid_t             memspace;   /* memory dataspace identifier */
  hid_t             file_id;
  hid_t             group;
  hid_t             memscalartype;  /* scalar type for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hid_t             filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hsize_t           dim;
  hsize_t           maxDims[6]={0}, dims[6]={0}, chunkDims[6]={0}, count[6]={0}, offset[6]={0}; /* we depend on these being sane later on */
  PetscBool         timestepping=PETSC_FALSE, dim2, spoutput;
  PetscInt          timestep=PETSC_MIN_INT, dimension;
  const PetscScalar *x;
  const char        *vecname;

  PetscFunctionBegin;
  PetscCall(PetscViewerHDF5OpenGroup(viewer, &file_id, &group));
  PetscCall(PetscViewerHDF5IsTimestepping(viewer, &timestepping));
  if (timestepping) {
    PetscCall(PetscViewerHDF5GetTimestep(viewer, &timestep));
  }
  PetscCall(PetscViewerHDF5GetBaseDimension2(viewer,&dim2));
  PetscCall(PetscViewerHDF5GetSPOutput(viewer,&spoutput));

  PetscCall(VecGetDM(xin,&dm));
  PetscCheck(dm,PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");
  da = (DM_DA*)dm->data;
  PetscCall(DMGetDimension(dm, &dimension));

  /* Create the dataspace for the dataset.
   *
   * dims - holds the current dimensions of the dataset
   *
   * maxDims - holds the maximum dimensions of the dataset (unlimited
   * for the number of time steps with the current dimensions for the
   * other dimensions; so only additional time steps can be added).
   *
   * chunkDims - holds the size of a single time step (required to
   * permit extending dataset).
   */
  dim = 0;
  if (timestep >= 0) {
    dims[dim]      = timestep+1;
    maxDims[dim]   = H5S_UNLIMITED;
    chunkDims[dim] = 1;
    ++dim;
  }
  if (dimension == 3) {
    PetscCall(PetscHDF5IntCast(da->P,dims+dim));
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  if (dimension > 1) {
    PetscCall(PetscHDF5IntCast(da->N,dims+dim));
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  PetscCall(PetscHDF5IntCast(da->M,dims+dim));
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
  if (da->w > 1 || dim2) {
    PetscCall(PetscHDF5IntCast(da->w,dims+dim));
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  /* trailing dimension of 2 stores the real and imaginary parts */
  dims[dim]      = 2;
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
#endif

  /* let the heuristic shrink the chunk dimensions to fit HDF5's limits */
  PetscCall(VecGetHDF5ChunkSize(da, xin, dimension, timestep, chunkDims));

  PetscStackCallHDF5Return(filespace,H5Screate_simple,(dim, dims, maxDims));

#if defined(PETSC_USE_REAL_SINGLE)
  memscalartype  = H5T_NATIVE_FLOAT;
  filescalartype = H5T_NATIVE_FLOAT;
#elif defined(PETSC_USE_REAL___FLOAT128)
#error "HDF5 output with 128 bit floats not supported."
#elif defined(PETSC_USE_REAL___FP16)
#error "HDF5 output with 16 bit floats not supported."
#else
  memscalartype = H5T_NATIVE_DOUBLE;
  if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
  else filescalartype = H5T_NATIVE_DOUBLE;
#endif

  /* Create the dataset with default properties and close filespace */
  PetscCall(PetscObjectGetName((PetscObject)xin,&vecname));
  if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
    /* Create chunk */
    PetscStackCallHDF5Return(chunkspace,H5Pcreate,(H5P_DATASET_CREATE));
    PetscStackCallHDF5(H5Pset_chunk,(chunkspace, dim, chunkDims));

    PetscStackCallHDF5Return(dset_id,H5Dcreate2,(group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
  } else {
    /* dataset already exists (e.g. earlier timestep); extend it to the new dims */
    PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
    PetscStackCallHDF5(H5Dset_extent,(dset_id, dims));
  }
  PetscStackCallHDF5(H5Sclose,(filespace));

  /* Each process defines a dataset and writes it to the hyperslab in the file */
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  if (dimension == 3) PetscCall(PetscHDF5IntCast(da->zs,offset + dim++));
  if (dimension > 1)  PetscCall(PetscHDF5IntCast(da->ys,offset + dim++));
  PetscCall(PetscHDF5IntCast(da->xs/da->w,offset + dim++));
  if (da->w > 1 || dim2) offset[dim++] = 0;
#if defined(PETSC_USE_COMPLEX)
  offset[dim++] = 0;
#endif
  dim = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  if (dimension == 3) PetscCall(PetscHDF5IntCast(da->ze - da->zs,count + dim++));
  if (dimension > 1)  PetscCall(PetscHDF5IntCast(da->ye - da->ys,count + dim++));
  PetscCall(PetscHDF5IntCast((da->xe - da->xs)/da->w,count + dim++));
  if (da->w > 1 || dim2) PetscCall(PetscHDF5IntCast(da->w,count + dim++));
#if defined(PETSC_USE_COMPLEX)
  count[dim++] = 2;
#endif
  PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
  PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
  PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  PetscCall(VecGetArrayRead(xin, &x));
  PetscStackCallHDF5(H5Dwrite,(dset_id, memscalartype, memspace, filespace, hdf5->dxpl_id, x));
  PetscStackCallHDF5(H5Fflush,(file_id, H5F_SCOPE_GLOBAL));
  PetscCall(VecRestoreArrayRead(xin, &x));

#if defined(PETSC_USE_COMPLEX)
  {
    PetscBool tru = PETSC_TRUE;
    PetscCall(PetscViewerHDF5WriteObjectAttribute(viewer,(PetscObject)xin,"complex",PETSC_BOOL,&tru));
  }
#endif
  if (timestepping) {
    PetscCall(PetscViewerHDF5WriteObjectAttribute(viewer,(PetscObject)xin,"timestepping",PETSC_BOOL,&timestepping));
  }

  /* Close/release resources */
  if (group != file_id) {
    PetscStackCallHDF5(H5Gclose,(group));
  }
  PetscStackCallHDF5(H5Sclose,(filespace));
  PetscStackCallHDF5(H5Sclose,(memspace));
  PetscStackCallHDF5(H5Dclose,(dset_id));
  PetscCall(PetscInfo(xin,"Wrote Vec object with name %s\n",vecname));
  PetscFunctionReturn(0);
}
#endif

extern PetscErrorCode VecView_MPI_Draw_DA1d(Vec,PetscViewer);

#if defined(PETSC_HAVE_MPIIO)
/*
   Reads (write == PETSC_FALSE) or writes (write == PETSC_TRUE) the DMDA vector
   xin through MPI-IO in the DMDA's natural (global Fortran-order) layout,
   using an MPI subarray datatype as the file view.
*/
static PetscErrorCode DMDAArrayMPIIO(DM da,PetscViewer viewer,Vec xin,PetscBool write)
{
  MPI_File          mfdes;
  PetscMPIInt       gsizes[4],lsizes[4],lstarts[4],asiz,dof;
  MPI_Datatype      view;
  const PetscScalar *array;
  MPI_Offset        off;
  MPI_Aint          ub,ul;
  PetscInt          type,rows,vecrows,tr[2];
  DM_DA             *dd = (DM_DA*)da->data;
  PetscBool         skipheader;

  PetscFunctionBegin;
  PetscCall(VecGetSize(xin,&vecrows));
  PetscCall(PetscViewerBinaryGetSkipHeader(viewer,&skipheader));
  if (!write) {
    /* Read vector header. */
    if (!skipheader) {
      PetscCall(PetscViewerBinaryRead(viewer,tr,2,NULL,PETSC_INT));
      type = tr[0];
      rows = tr[1];
      PetscCheck(type == VEC_FILE_CLASSID,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Not vector next in file");
      PetscCheck(rows == vecrows,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Vector in file not same size as DMDA vector");
    }
  } else {
    /* Write vector header (classid + global size). */
    tr[0] = VEC_FILE_CLASSID;
    tr[1] = vecrows;
    if (!skipheader) {
      PetscCall(PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT));
    }
  }

  /* build the global/local/start size arrays: [dof, x, y, z]; the x sizes are
     stored in units of dof-blocks, hence the divisions by dof */
  PetscCall(PetscMPIIntCast(dd->w,&dof));
  gsizes[0] = dof;
  PetscCall(PetscMPIIntCast(dd->M,gsizes+1));
  PetscCall(PetscMPIIntCast(dd->N,gsizes+2));
  PetscCall(PetscMPIIntCast(dd->P,gsizes+3));
  lsizes[0] = dof;
  PetscCall(PetscMPIIntCast((dd->xe-dd->xs)/dof,lsizes+1));
  PetscCall(PetscMPIIntCast(dd->ye-dd->ys,lsizes+2));
  PetscCall(PetscMPIIntCast(dd->ze-dd->zs,lsizes+3));
  lstarts[0] = 0;
  PetscCall(PetscMPIIntCast(dd->xs/dof,lstarts+1));
  PetscCall(PetscMPIIntCast(dd->ys,lstarts+2));
  PetscCall(PetscMPIIntCast(dd->zs,lstarts+3));
  PetscCallMPI(MPI_Type_create_subarray(da->dim+1,gsizes,lsizes,lstarts,MPI_ORDER_FORTRAN,MPIU_SCALAR,&view));
  PetscCallMPI(MPI_Type_commit(&view));

  PetscCall(PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes));
  PetscCall(PetscViewerBinaryGetMPIIOOffset(viewer,&off));
  PetscCallMPI(MPI_File_set_view(mfdes,off,MPIU_SCALAR,view,(char*)"native",MPI_INFO_NULL));
  PetscCall(VecGetArrayRead(xin,&array));
  /* guard against zero-extent trailing dimensions for 1d/2d DMDAs */
  asiz = lsizes[1]*(lsizes[2] > 0 ? lsizes[2] : 1)*(lsizes[3] > 0 ? lsizes[3] : 1)*dof;
  if (write) PetscCall(MPIU_File_write_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE));
  else PetscCall(MPIU_File_read_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE));
  PetscCallMPI(MPI_Type_get_extent(view,&ul,&ub));
  PetscCall(PetscViewerBinaryAddMPIIOOffset(viewer,ub));
  PetscCall(VecRestoreArrayRead(xin,&array));
  PetscCallMPI(MPI_Type_free(&view));
  PetscFunctionReturn(0);
}
#endif

/*
   Viewer dispatch for DMDA vectors: draw (1d/2d only), VTK, HDF5, GLVis,
   MPI-IO binary, or the default path that views in natural ordering.
*/
PetscErrorCode VecView_MPI_DA(Vec xin,PetscViewer viewer)
{
  DM                da;
  PetscInt          dim;
  Vec               natural;
  PetscBool         isdraw,isvtk,isglvis;
#if defined(PETSC_HAVE_HDF5)
  PetscBool         ishdf5;
#endif
  const char        *prefix,*name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(VecGetDM(xin,&da));
  PetscCheck(da,PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk));
#if defined(PETSC_HAVE_HDF5)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5));
#endif
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERGLVIS,&isglvis));
  if (isdraw) {
    PetscCall(DMDAGetInfo(da,&dim,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL));
    if (dim == 1) {
      PetscCall(VecView_MPI_Draw_DA1d(xin,viewer));
    } else if (dim == 2) {
      PetscCall(VecView_MPI_Draw_DA2d(xin,viewer));
    } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot graphically view vector associated with this dimensional DMDA %" PetscInt_FMT,dim);
  } else if (isvtk) {           /* Duplicate the Vec */
    Vec Y;
    PetscCall(VecDuplicate(xin,&Y));
    if (((PetscObject)xin)->name) {
      /* If xin was named, copy the name over to Y. The duplicate names are safe because nobody else will ever see Y. */
      PetscCall(PetscObjectSetName((PetscObject)Y,((PetscObject)xin)->name));
    }
    PetscCall(VecCopy(xin,Y));
    {
      PetscObject dmvtk;
      PetscBool   compatible,compatibleSet;
      PetscCall(PetscViewerVTKGetDM(viewer,&dmvtk));
      if (dmvtk) {
        PetscValidHeaderSpecific((DM)dmvtk,DM_CLASSID,2);
        PetscCall(DMGetCompatibility(da,(DM)dmvtk,&compatible,&compatibleSet));
        PetscCheck(compatibleSet && compatible,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_INCOMP,"Cannot confirm compatibility of DMs associated with Vecs viewed in the same VTK file. Check that grids are the same.");
      }
      /* ownership of Y transfers to the viewer, which writes it at flush time */
      PetscCall(PetscViewerVTKAddField(viewer,(PetscObject)da,DMDAVTKWriteAll,PETSC_DEFAULT,PETSC_VTK_POINT_FIELD,PETSC_FALSE,(PetscObject)Y));
    }
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    PetscCall(VecView_MPI_HDF5_DA(xin,viewer));
#endif
  } else if (isglvis) {
    PetscCall(VecView_GLVis(xin,viewer));
  } else {
#if defined(PETSC_HAVE_MPIIO)
    PetscBool isbinary,isMPIIO;

    PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary));
    if (isbinary) {
      PetscCall(PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO));
      if (isMPIIO) {
        PetscCall(DMDAArrayMPIIO(da,viewer,xin,PETSC_TRUE));
        PetscFunctionReturn(0);
      }
    }
#endif

    /* call viewer on natural ordering */
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix));
    PetscCall(DMDACreateNaturalVector(da,&natural));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)natural,prefix));
    PetscCall(DMDAGlobalToNaturalBegin(da,xin,INSERT_VALUES,natural));
    PetscCall(DMDAGlobalToNaturalEnd(da,xin,INSERT_VALUES,natural));
    PetscCall(PetscObjectGetName((PetscObject)xin,&name));
    PetscCall(PetscObjectSetName((PetscObject)natural,name));

    PetscCall(PetscViewerGetFormat(viewer,&format));
    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      /* temporarily remove viewer format so it won't trigger in the VecView() */
      PetscCall(PetscViewerPushFormat(viewer,PETSC_VIEWER_DEFAULT));
    }

    ((PetscObject)natural)->donotPetscObjectPrintClassNamePrefixType = PETSC_TRUE;
    PetscCall(VecView(natural,viewer));
    ((PetscObject)natural)->donotPetscObjectPrintClassNamePrefixType = PETSC_FALSE;

    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      MPI_Comm   comm;
      FILE       *info;
      const char *fieldname;
      char       fieldbuf[256];
      PetscInt   dim,ni,nj,nk,pi,pj,pk,dof,n;

      /* set the viewer format back into the viewer */
      PetscCall(PetscViewerPopFormat(viewer));
      PetscCall(PetscObjectGetComm((PetscObject)viewer,&comm));
      PetscCall(PetscViewerBinaryGetInfoPointer(viewer,&info));
      PetscCall(DMDAGetInfo(da,&dim,&ni,&nj,&nk,&pi,&pj,&pk,&dof,NULL,NULL,NULL,NULL,NULL));
      /* emit MATLAB commands into the .info file so the data can be reshaped on load */
      PetscCall(PetscFPrintf(comm,info,"#--- begin code written by PetscViewerBinary for MATLAB format ---#\n"));
      PetscCall(PetscFPrintf(comm,info,"#$$ tmp = PetscBinaryRead(fd); \n"));
      if (dim == 1) { PetscCall(PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%" PetscInt_FMT ",%" PetscInt_FMT ");\n",dof,ni)); }
      if (dim == 2) { PetscCall(PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ");\n",dof,ni,nj)); }
      if (dim == 3) { PetscCall(PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ");\n",dof,ni,nj,nk)); }

      for (n=0; n<dof; n++) {
        PetscCall(DMDAGetFieldName(da,n,&fieldname));
        if (!fieldname || !fieldname[0]) {
          PetscCall(PetscSNPrintf(fieldbuf,sizeof fieldbuf,"field%" PetscInt_FMT,n));
          fieldname = fieldbuf;
        }
        if (dim == 1) { PetscCall(PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%" PetscInt_FMT ",:))';\n",name,fieldname,n+1)); }
        if (dim == 2) { PetscCall(PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%" PetscInt_FMT ",:,:))';\n",name,fieldname,n+1)); }
        if (dim == 3) { PetscCall(PetscFPrintf(comm,info,"#$$ Set.%s.%s = permute(squeeze(tmp(%" PetscInt_FMT ",:,:,:)),[2 1 3]);\n",name,fieldname,n+1));}
      }
      PetscCall(PetscFPrintf(comm,info,"#$$ clear tmp; \n"));
      PetscCall(PetscFPrintf(comm,info,"#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n"));
    }

    PetscCall(VecDestroy(&natural));
  }
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_HDF5)
PetscErrorCode VecLoad_HDF5_DA(Vec xin, PetscViewer viewer)
{
  PetscViewer_HDF5 *hdf5 = (PetscViewer_HDF5*) viewer->data;
  DM               da;
  int              dim,rdim;
  hsize_t          dims[6]={0},count[6]={0},offset[6]={0};
  PetscBool        dim2=PETSC_FALSE,timestepping=PETSC_FALSE;
  PetscInt         dimension,timestep=PETSC_MIN_INT,dofInd;
  PetscScalar      *x;
  const char       *vecname;
  hid_t            filespace; /* file dataspace identifier */
  hid_t            dset_id;   /* dataset identifier */
  hid_t            memspace;  /* memory dataspace identifier */
  hid_t            file_id,group;
  hid_t            scalartype; /* scalar type (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  DM_DA            *dd;

  PetscFunctionBegin;
#if defined(PETSC_USE_REAL_SINGLE)
  scalartype = H5T_NATIVE_FLOAT;
#elif defined(PETSC_USE_REAL___FLOAT128)
#error "HDF5 output with 128 bit floats not supported."
#elif defined(PETSC_USE_REAL___FP16)
#error "HDF5 output with 16 bit floats not supported."
790 #else 791 scalartype = H5T_NATIVE_DOUBLE; 792 #endif 793 794 PetscCall(PetscViewerHDF5OpenGroup(viewer, &file_id, &group)); 795 PetscCall(PetscObjectGetName((PetscObject)xin,&vecname)); 796 PetscCall(PetscViewerHDF5CheckTimestepping_Internal(viewer, vecname)); 797 PetscCall(PetscViewerHDF5IsTimestepping(viewer, ×tepping)); 798 if (timestepping) { 799 PetscCall(PetscViewerHDF5GetTimestep(viewer, ×tep)); 800 } 801 PetscCall(VecGetDM(xin,&da)); 802 dd = (DM_DA*)da->data; 803 PetscCall(DMGetDimension(da, &dimension)); 804 805 /* Open dataset */ 806 PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT)); 807 808 /* Retrieve the dataspace for the dataset */ 809 PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id)); 810 PetscStackCallHDF5Return(rdim,H5Sget_simple_extent_dims,(filespace, dims, NULL)); 811 812 /* Expected dimension for holding the dof's */ 813 #if defined(PETSC_USE_COMPLEX) 814 dofInd = rdim-2; 815 #else 816 dofInd = rdim-1; 817 #endif 818 819 /* The expected number of dimensions, assuming basedimension2 = false */ 820 dim = dimension; 821 if (dd->w > 1) ++dim; 822 if (timestep >= 0) ++dim; 823 #if defined(PETSC_USE_COMPLEX) 824 ++dim; 825 #endif 826 827 /* In this case the input dataset have one extra, unexpected dimension. 
*/ 828 if (rdim == dim+1) { 829 /* In this case the block size unity */ 830 if (dd->w == 1 && dims[dofInd] == 1) dim2 = PETSC_TRUE; 831 832 /* Special error message for the case where dof does not match the input file */ 833 else PetscCheck(dd->w == (PetscInt) dims[dofInd],PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Number of dofs in file is %" PetscInt_FMT ", not %" PetscInt_FMT " as expected",(PetscInt)dims[dofInd],dd->w); 834 835 /* Other cases where rdim != dim cannot be handled currently */ 836 } else PetscCheck(rdim == dim,PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file is %d, not %d as expected with dof = %" PetscInt_FMT,rdim,dim,dd->w); 837 838 /* Set up the hyperslab size */ 839 dim = 0; 840 if (timestep >= 0) { 841 offset[dim] = timestep; 842 count[dim] = 1; 843 ++dim; 844 } 845 if (dimension == 3) { 846 PetscCall(PetscHDF5IntCast(dd->zs,offset + dim)); 847 PetscCall(PetscHDF5IntCast(dd->ze - dd->zs,count + dim)); 848 ++dim; 849 } 850 if (dimension > 1) { 851 PetscCall(PetscHDF5IntCast(dd->ys,offset + dim)); 852 PetscCall(PetscHDF5IntCast(dd->ye - dd->ys,count + dim)); 853 ++dim; 854 } 855 PetscCall(PetscHDF5IntCast(dd->xs/dd->w,offset + dim)); 856 PetscCall(PetscHDF5IntCast((dd->xe - dd->xs)/dd->w,count + dim)); 857 ++dim; 858 if (dd->w > 1 || dim2) { 859 offset[dim] = 0; 860 PetscCall(PetscHDF5IntCast(dd->w,count + dim)); 861 ++dim; 862 } 863 #if defined(PETSC_USE_COMPLEX) 864 offset[dim] = 0; 865 count[dim] = 2; 866 ++dim; 867 #endif 868 869 /* Create the memory and filespace */ 870 PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL)); 871 PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL)); 872 873 PetscCall(VecGetArray(xin, &x)); 874 PetscStackCallHDF5(H5Dread,(dset_id, scalartype, memspace, filespace, hdf5->dxpl_id, x)); 875 PetscCall(VecRestoreArray(xin, &x)); 876 877 /* Close/release resources */ 878 if (group != file_id) { 879 PetscStackCallHDF5(H5Gclose,(group)); 
  }
  PetscStackCallHDF5(H5Sclose,(filespace));
  PetscStackCallHDF5(H5Sclose,(memspace));
  PetscStackCallHDF5(H5Dclose,(dset_id));
  PetscFunctionReturn(0);
}
#endif

/*
  VecLoad_Binary_DA - Fills a DMDA vector from a binary viewer.

  Unless MPI-IO is in use, the data on disk is in the DMDA's natural
  (application) ordering: it is read into a temporary natural-ordering
  vector and then scattered into the PETSc (global) ordering of xin.
  With MPI-IO enabled on the viewer, DMDAArrayMPIIO() reads in parallel
  directly and the natural-ordering detour is skipped.
*/
PetscErrorCode VecLoad_Binary_DA(Vec xin, PetscViewer viewer)
{
  DM          da;
  Vec         natural;
  const char  *prefix;
  PetscInt    bs;
  PetscBool   flag;
  DM_DA       *dd;
#if defined(PETSC_HAVE_MPIIO)
  PetscBool isMPIIO;
#endif

  PetscFunctionBegin;
  PetscCall(VecGetDM(xin,&da));
  dd = (DM_DA*)da->data;
#if defined(PETSC_HAVE_MPIIO)
  /* MPI-IO fast path: read the on-disk layout directly, no natural vector */
  PetscCall(PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO));
  if (isMPIIO) {
    PetscCall(DMDAArrayMPIIO(da,viewer,xin,PETSC_FALSE));
    PetscFunctionReturn(0);
  }
#endif

  /* The natural vector inherits xin's name and options prefix so that
     VecLoad() resolves the same dataset/options as xin itself would */
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix));
  PetscCall(DMDACreateNaturalVector(da,&natural));
  PetscCall(PetscObjectSetName((PetscObject)natural,((PetscObject)xin)->name));
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)natural,prefix));
  PetscCall(VecLoad(natural,viewer));
  PetscCall(DMDANaturalToGlobalBegin(da,natural,INSERT_VALUES,xin));
  PetscCall(DMDANaturalToGlobalEnd(da,natural,INSERT_VALUES,xin));
  PetscCall(VecDestroy(&natural));
  PetscCall(PetscInfo(xin,"Loading vector from natural ordering into DMDA\n"));
  /* A block-size mismatch against the DMDA's dof is only logged, not an error */
  PetscCall(PetscOptionsGetInt(NULL,((PetscObject)xin)->prefix,"-vecload_block_size",&bs,&flag));
  if (flag && bs != dd->w) {
    PetscCall(PetscInfo(xin,"Block size in file %" PetscInt_FMT " not equal to DMDA's dof %" PetscInt_FMT "\n",bs,dd->w));
  }
  PetscFunctionReturn(0);
}

/*
  VecLoad_Default_DA - VecLoad() dispatch for vectors obtained from a DMDA.

  Routes to VecLoad_Binary_DA() for binary viewers and, when PETSc was
  configured with HDF5, to VecLoad_HDF5_DA() for HDF5 viewers; any other
  viewer type raises PETSC_ERR_SUP.
*/
PetscErrorCode VecLoad_Default_DA(Vec xin, PetscViewer viewer)
{
  DM        da;
  PetscBool isbinary;
#if defined(PETSC_HAVE_HDF5)
  PetscBool ishdf5;
#endif

  PetscFunctionBegin;
  PetscCall(VecGetDM(xin,&da));
  PetscCheck(da,PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");

#if defined(PETSC_HAVE_HDF5)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5));
#endif
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary));

  if (isbinary) {
    PetscCall(VecLoad_Binary_DA(xin,viewer));
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    PetscCall(VecLoad_HDF5_DA(xin,viewer));
#endif
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for vector loading", ((PetscObject)viewer)->type_name);
  PetscFunctionReturn(0);
}