/*
     Plots vectors obtained with DMDACreate2d()
*/

#include <petsc/private/dmdaimpl.h>      /*I  "petscdmda.h"  I*/
#include <petsc/private/glvisvecimpl.h>
#include <petsc/private/viewerhdf5impl.h>
#include <petscdraw.h>

/*
     The data that is passed into the graphics callback.
     m,n    - number of (ghosted) grid nodes in each direction on this rank
     dof,k  - number of components per node, and the component currently drawn
     xy     - interlaced (x,y) node coordinates; indexed as xy[2*id], xy[2*id+1]
     v      - ghosted field values; component k of node id is v[k+dof*id]
     min,max - contour color-scale limits for component k
*/
typedef struct {
  PetscMPIInt       rank;
  PetscInt          m,n,dof,k;
  PetscReal         xmin,xmax,ymin,ymax,min,max;
  const PetscScalar *xy,*v;
  PetscBool         showaxis,showgrid;
  const char        *name0,*name1;
} ZoomCtx;

/*
     This does the drawing for one particular field
     in one particular set of coordinates. It is a callback
     called from PetscDrawZoom().

     Each grid cell (quadrilateral) is rendered as two Gouraud-shaded
     triangles whose corner colors come from mapping the nodal values
     into the color table via PetscDrawRealToColor().
*/
PetscErrorCode VecView_MPI_Draw_DA2d_Zoom(PetscDraw draw,void *ctx)
{
  ZoomCtx           *zctx = (ZoomCtx*)ctx;
  PetscInt          m,n,i,j,k,dof,id,c1,c2,c3,c4;
  PetscReal         min,max,x1,x2,x3,x4,y_1,y2,y3,y4;
  const PetscScalar *xy,*v;

  PetscFunctionBegin;
  /* unpack the context into locals for readability inside the loops */
  m   = zctx->m;
  n   = zctx->n;
  dof = zctx->dof;
  k   = zctx->k;
  xy  = zctx->xy;
  v   = zctx->v;
  min = zctx->min;
  max = zctx->max;

  /* PetscDraw the contour plot patch */
  PetscDrawCollectiveBegin(draw);
  for (j=0; j<n-1; j++) {
    for (i=0; i<m-1; i++) {
      /* corner 1: node (i,j) of the cell */
      id = i+j*m;
      x1 = PetscRealPart(xy[2*id]);
      y_1 = PetscRealPart(xy[2*id+1]);
      c1 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      /* corner 2: node (i+1,j) */
      id = i+j*m+1;
      x2 = PetscRealPart(xy[2*id]);
      y2 = PetscRealPart(xy[2*id+1]);
      c2 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      /* corner 3: node (i+1,j+1) */
      id = i+j*m+1+m;
      x3 = PetscRealPart(xy[2*id]);
      y3 = PetscRealPart(xy[2*id+1]);
      c3 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      /* corner 4: node (i,j+1) */
      id = i+j*m+m;
      x4 = PetscRealPart(xy[2*id]);
      y4 = PetscRealPart(xy[2*id+1]);
      c4 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      /* split the quadrilateral along the (1,3) diagonal into two shaded triangles */
      PetscCall(PetscDrawTriangle(draw,x1,y_1,x2,y2,x3,y3,c1,c2,c3));
      PetscCall(PetscDrawTriangle(draw,x1,y_1,x3,y3,x4,y4,c1,c3,c4));
      if (zctx->showgrid) {
        /* outline the cell edges when -draw_contour_grid was requested */
        PetscCall(PetscDrawLine(draw,x1,y_1,x2,y2,PETSC_DRAW_BLACK));
        PetscCall(PetscDrawLine(draw,x2,y2,x3,y3,PETSC_DRAW_BLACK));
        PetscCall(PetscDrawLine(draw,x3,y3,x4,y4,PETSC_DRAW_BLACK));
        PetscCall(PetscDrawLine(draw,x4,y4,x1,y_1,PETSC_DRAW_BLACK));
      }
    }
  }
  /* axis annotations are drawn only once, by rank 0 */
  if (zctx->showaxis && !zctx->rank) {
    if (zctx->name0 || zctx->name1) {
      PetscReal xl,yl,xr,yr,x,y;
      PetscCall(PetscDrawGetCoordinates(draw,&xl,&yl,&xr,&yr));
      x  = xl + .30*(xr - xl);
      xl = xl + .01*(xr - xl);
      y  = yr - .30*(yr - yl);
      yl = yl + .01*(yr - yl);
      if (zctx->name0) PetscCall(PetscDrawString(draw,x,yl,PETSC_DRAW_BLACK,zctx->name0));
      if (zctx->name1) PetscCall(PetscDrawStringVertical(draw,xl,y,PETSC_DRAW_BLACK,zctx->name1));
    }
    /*
       Ideally we would use the PetscDrawAxis object to manage displaying the coordinate limits
       but that may require some refactoring.
    */
    {
      double xmin = (double)zctx->xmin, ymin = (double)zctx->ymin;
      double xmax = (double)zctx->xmax, ymax = (double)zctx->ymax;
      char   value[16]; size_t len; PetscReal w;
      /* label the four extremes of the coordinate range, offset slightly outside the plot */
      PetscCall(PetscSNPrintf(value,16,"%0.2e",xmin));
      PetscCall(PetscDrawString(draw,xmin,ymin - .05*(ymax - ymin),PETSC_DRAW_BLACK,value));
      PetscCall(PetscSNPrintf(value,16,"%0.2e",xmax));
      PetscCall(PetscStrlen(value,&len));
      PetscCall(PetscDrawStringGetSize(draw,&w,NULL));
      /* right-align the xmax label by backing up its rendered width */
      PetscCall(PetscDrawString(draw,xmax - len*w,ymin - .05*(ymax - ymin),PETSC_DRAW_BLACK,value));
      PetscCall(PetscSNPrintf(value,16,"%0.2e",ymin));
      PetscCall(PetscDrawString(draw,xmin - .05*(xmax - xmin),ymin,PETSC_DRAW_BLACK,value));
      PetscCall(PetscSNPrintf(value,16,"%0.2e",ymax));
      PetscCall(PetscDrawString(draw,xmin - .05*(xmax - xmin),ymax,PETSC_DRAW_BLACK,value));
    }
  }
  PetscDrawCollectiveEnd(draw);
  PetscFunctionReturn(0);
}

/*
   Contour-plots a 2d DMDA vector with a draw viewer: gathers one layer of
   ghost values and ghosted coordinates, then renders each selected field
   via the zoom callback above (one window per field, or viewports with -draw_ports).
*/
PetscErrorCode VecView_MPI_Draw_DA2d(Vec xin,PetscViewer viewer)
{
  DM                 da,dac,dag;
  PetscInt           N,s,M,w,ncoors = 4;
  const PetscInt     *lx,*ly;
  PetscReal          coors[4];
  PetscDraw          draw,popup;
  PetscBool          isnull,useports = PETSC_FALSE;
  MPI_Comm           comm;
  Vec                xlocal,xcoor,xcoorl;
  DMBoundaryType     bx,by;
  DMDAStencilType    st;
  ZoomCtx            zctx;
  PetscDrawViewPorts *ports = NULL;
  PetscViewerFormat  format;
  PetscInt           *displayfields;
  PetscInt           ndisplayfields,i,nbounds;
  const PetscReal    *bounds;

  PetscFunctionBegin;
  zctx.showgrid = PETSC_FALSE;
  zctx.showaxis = PETSC_TRUE;

  PetscCall(PetscViewerDrawGetDraw(viewer,0,&draw));
  PetscCall(PetscDrawIsNull(draw,&isnull));
  if (isnull) PetscFunctionReturn(0);   /* nothing to render on a null draw device */

  /* user-provided per-field color-scale bounds (may cover fewer fields than dof) */
  PetscCall(PetscViewerDrawGetBounds(viewer,&nbounds,&bounds));

  PetscCall(VecGetDM(xin,&da));
  PetscCheck(da,PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");

  PetscCall(PetscObjectGetComm((PetscObject)xin,&comm));
  PetscCallMPI(MPI_Comm_rank(comm,&zctx.rank));

  PetscCall(DMDAGetInfo(da,NULL,&M,&N,NULL,&zctx.m,&zctx.n,NULL,&w,&s,&bx,&by,NULL,&st));
  PetscCall(DMDAGetOwnershipRanges(da,&lx,&ly,NULL));

  /*
     Obtain a sequential vector that is going to contain the local values plus ONE layer of
     ghosted values to draw the graphics from. We also need its corresponding DMDA (dac) that will
     update the local values pluse ONE layer of ghost values.

     The ghosted vector is cached on the DMDA under the key "GraphicsGhosted" so repeated
     views of vectors from the same DMDA reuse it.
  */
  PetscCall(PetscObjectQuery((PetscObject)da,"GraphicsGhosted",(PetscObject*)&xlocal));
  if (!xlocal) {
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      /*
         if original da is not of stencil width one, or periodic or not a box stencil then
         create a special DMDA to handle one level of ghost points for graphics
      */
      PetscCall(DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,w,1,lx,ly,&dac));
      PetscCall(DMSetUp(dac));
      PetscCall(PetscInfo(da,"Creating auxiliary DMDA for managing graphics ghost points\n"));
    } else {
      /* otherwise we can use the da we already have */
      dac = da;
    }
    /* create local vector for holding ghosted values used in graphics */
    PetscCall(DMCreateLocalVector(dac,&xlocal));
    if (dac != da) {
      /* don't keep any public reference of this DMDA, is is only available through xlocal */
      PetscCall(PetscObjectDereference((PetscObject)dac));
    } else {
      /* remove association between xlocal and da, because below we compose in the opposite
         direction and if we left this connect we'd get a loop, so the objects could
         never be destroyed */
      PetscCall(PetscObjectRemoveReference((PetscObject)xlocal,"__PETSc_dm"));
    }
    PetscCall(PetscObjectCompose((PetscObject)da,"GraphicsGhosted",(PetscObject)xlocal));
    PetscCall(PetscObjectDereference((PetscObject)xlocal));
  } else {
    /* cached vector found; recover the DMDA that manages its ghost updates */
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      PetscCall(VecGetDM(xlocal, &dac));
    } else {
      dac = da;
    }
  }

  /*
      Get local (ghosted) values of vector
  */
  PetscCall(DMGlobalToLocalBegin(dac,xin,INSERT_VALUES,xlocal));
  PetscCall(DMGlobalToLocalEnd(dac,xin,INSERT_VALUES,xlocal));
  PetscCall(VecGetArrayRead(xlocal,&zctx.v));

  /*
    Get coordinates of nodes; create uniform coordinates on [0,1]x[0,1] if none were set
  */
  PetscCall(DMGetCoordinates(da,&xcoor));
  if (!xcoor) {
    PetscCall(DMDASetUniformCoordinates(da,0.0,1.0,0.0,1.0,0.0,0.0));
    PetscCall(DMGetCoordinates(da,&xcoor));
  }

  /*
      Determine the min and max coordinates in plot
  */
  PetscCall(VecStrideMin(xcoor,0,NULL,&zctx.xmin));
  PetscCall(VecStrideMax(xcoor,0,NULL,&zctx.xmax));
  PetscCall(VecStrideMin(xcoor,1,NULL,&zctx.ymin));
  PetscCall(VecStrideMax(xcoor,1,NULL,&zctx.ymax));
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-draw_contour_axis",&zctx.showaxis,NULL));
  if (zctx.showaxis) {
    /* pad the window by 5% on every side to leave room for the axis labels */
    coors[0] = zctx.xmin - .05*(zctx.xmax - zctx.xmin); coors[1] = zctx.ymin - .05*(zctx.ymax - zctx.ymin);
    coors[2] = zctx.xmax + .05*(zctx.xmax - zctx.xmin); coors[3] = zctx.ymax + .05*(zctx.ymax - zctx.ymin);
  } else {
    coors[0] = zctx.xmin; coors[1] = zctx.ymin; coors[2] = zctx.xmax; coors[3] = zctx.ymax;
  }
  /* allow the user to override the window coordinates entirely */
  PetscCall(PetscOptionsGetRealArray(NULL,NULL,"-draw_coordinates",coors,&ncoors,NULL));
  PetscCall(PetscInfo(da,"Preparing DMDA 2d contour plot coordinates %g %g %g %g\n",(double)coors[0],(double)coors[1],(double)coors[2],(double)coors[3]));

  /*
     Get local ghosted version of coordinates; cached on the DMDA as
     "GraphicsCoordinateGhosted". The auxiliary DMDA has dof=2 because the
     coordinates are interlaced (x,y) pairs, matching xy[2*id]/xy[2*id+1]
     indexing in the zoom callback.
  */
  PetscCall(PetscObjectQuery((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject*)&xcoorl));
  if (!xcoorl) {
    /* create DMDA to get local version of graphics */
    PetscCall(DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,2,1,lx,ly,&dag));
    PetscCall(DMSetUp(dag));
    PetscCall(PetscInfo(dag,"Creating auxiliary DMDA for managing graphics coordinates ghost points\n"));
    PetscCall(DMCreateLocalVector(dag,&xcoorl));
    PetscCall(PetscObjectCompose((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject)xcoorl));
    PetscCall(PetscObjectDereference((PetscObject)dag));
    PetscCall(PetscObjectDereference((PetscObject)xcoorl));
  } else {
    PetscCall(VecGetDM(xcoorl,&dag));
  }
  PetscCall(DMGlobalToLocalBegin(dag,xcoor,INSERT_VALUES,xcoorl));
  PetscCall(DMGlobalToLocalEnd(dag,xcoor,INSERT_VALUES,xcoorl));
  PetscCall(VecGetArrayRead(xcoorl,&zctx.xy));
  PetscCall(DMDAGetCoordinateName(da,0,&zctx.name0));
  PetscCall(DMDAGetCoordinateName(da,1,&zctx.name1));

  /*
      Get information about size of area each processor must do graphics for;
      note m,n are refreshed here to the GHOSTED local sizes used by the callback
  */
  PetscCall(DMDAGetInfo(dac,NULL,&M,&N,NULL,NULL,NULL,NULL,&zctx.dof,NULL,&bx,&by,NULL,NULL));
  PetscCall(DMDAGetGhostCorners(dac,NULL,NULL,NULL,&zctx.m,&zctx.n,NULL));
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-draw_contour_grid",&zctx.showgrid,NULL));

  PetscCall(DMDASelectFields(da,&ndisplayfields,&displayfields));
  PetscCall(PetscViewerGetFormat(viewer,&format));
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-draw_ports",&useports,NULL));
  if (format == PETSC_VIEWER_DRAW_PORTS) useports = PETSC_TRUE;
  if (useports) {
    /* all fields share one window split into viewports */
    PetscCall(PetscViewerDrawGetDraw(viewer,0,&draw));
    PetscCall(PetscDrawCheckResizedWindow(draw));
    PetscCall(PetscDrawClear(draw));
    PetscCall(PetscDrawViewPortsCreate(draw,ndisplayfields,&ports));
  }

  /*
     Loop over each field; drawing each in a different window
  */
  for (i=0; i<ndisplayfields; i++) {
    zctx.k = displayfields[i];

    /* determine the min and max value in plot */
    PetscCall(VecStrideMin(xin,zctx.k,NULL,&zctx.min));
    PetscCall(VecStrideMax(xin,zctx.k,NULL,&zctx.max));
    if (zctx.k < nbounds) {
      /* viewer-supplied bounds take precedence over the computed range */
      zctx.min = bounds[2*zctx.k];
      zctx.max = bounds[2*zctx.k+1];
    }
    if (zctx.min == zctx.max) {
      /* widen a degenerate range so the color mapping is well defined */
      zctx.min -= 1.e-12;
      zctx.max += 1.e-12;
    }
    PetscCall(PetscInfo(da,"DMDA 2d contour plot min %g max %g\n",(double)zctx.min,(double)zctx.max));

    if (useports) {
      PetscCall(PetscDrawViewPortsSet(ports,i));
    } else {
      const char *title;
      PetscCall(PetscViewerDrawGetDraw(viewer,i,&draw));
      PetscCall(DMDAGetFieldName(da,zctx.k,&title));
      if (title) PetscCall(PetscDrawSetTitle(draw,title));
    }

    PetscCall(PetscDrawGetPopup(draw,&popup));
    PetscCall(PetscDrawScalePopup(popup,zctx.min,zctx.max));
    PetscCall(PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]));
    PetscCall(PetscDrawZoom(draw,VecView_MPI_Draw_DA2d_Zoom,&zctx));
    if (!useports) PetscCall(PetscDrawSave(draw));
  }
  if (useports) {
    PetscCall(PetscViewerDrawGetDraw(viewer,0,&draw));
    PetscCall(PetscDrawSave(draw));
  }

  PetscCall(PetscDrawViewPortsDestroy(ports));
  PetscCall(PetscFree(displayfields));
  PetscCall(VecRestoreArrayRead(xcoorl,&zctx.xy));
  PetscCall(VecRestoreArrayRead(xlocal,&zctx.v));
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_HDF5)
/*
   Chooses HDF5 chunk dimensions for writing a DMDA vector, respecting the
   HDF5 limits noted below (at most 64K chunks, each at most 4 GiB).
   chunkDims[] comes in preloaded with the full dataset dimensions and is
   shrunk in place when those defaults would violate the limits.
*/
static PetscErrorCode VecGetHDF5ChunkSize(DM_DA *da, Vec xin, PetscInt dimension, PetscInt timestep, hsize_t *chunkDims)
{
  PetscMPIInt comm_size;
  hsize_t     chunk_size, target_size, dim;
  hsize_t     vec_size = sizeof(PetscScalar)*da->M*da->N*da->P*da->w;
  hsize_t     avg_local_vec_size,KiB = 1024,MiB = KiB*KiB,GiB = MiB*KiB,min_size = MiB;
  hsize_t     max_chunks = 64*KiB;        /* HDF5 internal limitation */
  hsize_t     max_chunk_size = 4*GiB;     /* HDF5 internal limitation */
  PetscInt    zslices=da->p, yslices=da->n, xslices=da->m;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)xin), &comm_size));
  avg_local_vec_size = (hsize_t) PetscCeilInt(vec_size,comm_size);      /* we will attempt to use this as the chunk size */

  target_size = (hsize_t) PetscMin((PetscInt64)vec_size,PetscMin((PetscInt64)max_chunk_size,PetscMax((PetscInt64)avg_local_vec_size,PetscMax(PetscCeilInt64(vec_size,max_chunks),(PetscInt64)min_size))));
  /* following line uses sizeof(PetscReal) instead of sizeof(PetscScalar) because the last dimension of chunkDims[] captures the 2* when complex numbers are being used */
  chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(PetscReal);

  /*
   if size/rank > max_chunk_size, we need radical measures: even going down to
   avg_local_vec_size is not enough, so we simply use chunk size of 4 GiB no matter
   what, composed in the most efficient way possible.
   N.B. this minimises the number of chunks, which may or may not be the optimal
   solution. In a BG, for example, the optimal solution is probably to make # chunks = #
   IO nodes involved, but this author has no access to a BG to figure out how to
   reliably find the right number. And even then it may or may not be enough.
   */
  if (avg_local_vec_size > max_chunk_size) {
    /* check if we can just split local z-axis: is that enough? */
    zslices = PetscCeilInt(vec_size,da->p*max_chunk_size)*zslices;
    if (zslices > da->P) {
      /* lattice is too large in xy-directions, splitting z only is not enough */
      zslices = da->P;
      yslices = PetscCeilInt(vec_size,zslices*da->n*max_chunk_size)*yslices;
      if (yslices > da->N) {
        /* lattice is too large in x-direction, splitting along z, y is not enough */
        yslices = da->N;
        xslices = PetscCeilInt(vec_size,zslices*yslices*da->m*max_chunk_size)*xslices;
      }
    }
    dim = 0;
    if (timestep >= 0) {
      ++dim;   /* leave the time dimension (chunkDims[0]) untouched */
    }
    /* prefer to split z-axis, even down to planar slices */
    if (dimension == 3) {
      chunkDims[dim++] = (hsize_t) da->P/zslices;
      chunkDims[dim++] = (hsize_t) da->N/yslices;
      chunkDims[dim++] = (hsize_t) da->M/xslices;
    } else {
      /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
      chunkDims[dim++] = (hsize_t) da->N/yslices;
      chunkDims[dim++] = (hsize_t) da->M/xslices;
    }
    /* NOTE(review): sizeof(double) here, but sizeof(PetscReal) in the default
       computation above — confirm the asymmetry is intended for single-precision builds */
    chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(double);
  } else {
    if (target_size < chunk_size) {
      /* only change the defaults if target_size < chunk_size */
      dim = 0;
      if (timestep >= 0) {
        ++dim;
      }
      /* prefer to split z-axis, even down to planar slices */
      if (dimension == 3) {
        /* try splitting the z-axis to core-size bits, i.e. divide chunk size by # comm_size in z-direction */
        if (target_size >= chunk_size/da->p) {
          /* just make chunks the size of <local_z>x<whole_world_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t) PetscCeilInt(da->P,da->p);
        } else {
          /* oops, just splitting the z-axis is NOT ENOUGH, need to split more; let's be
           radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t) PetscCeilInt(da->P,da->p);
          chunkDims[dim++] = (hsize_t) PetscCeilInt(da->N,da->n);
          chunkDims[dim++] = (hsize_t) PetscCeilInt(da->M,da->m);
        }
      } else {
        /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
        if (target_size >= chunk_size/da->n) {
          /* just make chunks the size of <local_z>x<whole_world_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t) PetscCeilInt(da->N,da->n);
        } else {
          /* oops, just splitting the z-axis is NOT ENOUGH, need to split more; let's be
           radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t) PetscCeilInt(da->N,da->n);
          chunkDims[dim++] = (hsize_t) PetscCeilInt(da->M,da->m);
        }

      }
      chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(double);
    } else {
      /* precomputed chunks are fine, we don't need to do anything */
    }
  }
  PetscFunctionReturn(0);
}
#endif

#if defined(PETSC_HAVE_HDF5)
/*
   Writes a DMDA vector to an HDF5 file as a (possibly time-extended)
   multidimensional dataset; each rank writes its own hyperslab.
*/
PetscErrorCode VecView_MPI_HDF5_DA(Vec xin,PetscViewer viewer)
{
  PetscViewer_HDF5  *hdf5 = (PetscViewer_HDF5*) viewer->data;
  DM                dm;
  DM_DA             *da;
  hid_t             filespace;      /* file dataspace identifier */
  hid_t             chunkspace;     /* chunk dataset property identifier */
  hid_t             dset_id;        /* dataset identifier */
  hid_t             memspace;       /* memory dataspace identifier */
  hid_t             file_id;
  hid_t             group;
  hid_t             memscalartype;  /* scalar type for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hid_t             filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hsize_t           dim;
  hsize_t           maxDims[6]={0}, dims[6]={0}, chunkDims[6]={0}, count[6]={0}, offset[6]={0}; /* we depend on these being sane later on */
  PetscBool         timestepping=PETSC_FALSE, dim2, spoutput;
  PetscInt          timestep=PETSC_MIN_INT, dimension;
  const PetscScalar *x;
  const char        *vecname;

  PetscFunctionBegin;
  PetscCall(PetscViewerHDF5OpenGroup(viewer, &file_id, &group));
  PetscCall(PetscViewerHDF5IsTimestepping(viewer, &timestepping));
  if (timestepping) {
    PetscCall(PetscViewerHDF5GetTimestep(viewer, &timestep));
  }
  PetscCall(PetscViewerHDF5GetBaseDimension2(viewer,&dim2));
  PetscCall(PetscViewerHDF5GetSPOutput(viewer,&spoutput));

  PetscCall(VecGetDM(xin,&dm));
  PetscCheck(dm,PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");
  da = (DM_DA*)dm->data;
  PetscCall(DMGetDimension(dm, &dimension));

  /* Create the dataspace for the dataset.
   *
   * dims - holds the current dimensions of the dataset
   *
   * maxDims - holds the maximum dimensions of the dataset (unlimited
   * for the number of time steps with the current dimensions for the
   * other dimensions; so only additional time steps can be added).
   *
   * chunkDims - holds the size of a single time step (required to
   * permit extending dataset).
   */
  dim = 0;
  if (timestep >= 0) {
    dims[dim]      = timestep+1;
    maxDims[dim]   = H5S_UNLIMITED;
    chunkDims[dim] = 1;
    ++dim;
  }
  /* spatial dimensions are laid out slowest-to-fastest: P (z), N (y), M (x) */
  if (dimension == 3) {
    PetscCall(PetscHDF5IntCast(da->P,dims+dim));
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  if (dimension > 1) {
    PetscCall(PetscHDF5IntCast(da->N,dims+dim));
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  PetscCall(PetscHDF5IntCast(da->M,dims+dim));
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
  /* a trailing dof dimension is emitted when dof>1 or the user forced base dimension 2 */
  if (da->w > 1 || dim2) {
    PetscCall(PetscHDF5IntCast(da->w,dims+dim));
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  /* complex scalars are stored as an extra innermost dimension of length 2 */
  dims[dim]      = 2;
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
#endif

  PetscCall(VecGetHDF5ChunkSize(da, xin, dimension, timestep, chunkDims));

  PetscStackCallHDF5Return(filespace,H5Screate_simple,(dim, dims, maxDims));

#if defined(PETSC_USE_REAL_SINGLE)
  memscalartype = H5T_NATIVE_FLOAT;
  filescalartype = H5T_NATIVE_FLOAT;
#elif defined(PETSC_USE_REAL___FLOAT128)
#error "HDF5 output with 128 bit floats not supported."
#elif defined(PETSC_USE_REAL___FP16)
#error "HDF5 output with 16 bit floats not supported."
#else
  memscalartype = H5T_NATIVE_DOUBLE;
  if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
  else filescalartype = H5T_NATIVE_DOUBLE;
#endif

  /* Create the dataset with default properties and close filespace */
  PetscCall(PetscObjectGetName((PetscObject)xin,&vecname));
  if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
    /* Create chunk */
    PetscStackCallHDF5Return(chunkspace,H5Pcreate,(H5P_DATASET_CREATE));
    PetscStackCallHDF5(H5Pset_chunk,(chunkspace, dim, chunkDims));

    PetscStackCallHDF5Return(dset_id,H5Dcreate2,(group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
  } else {
    /* dataset already exists (earlier timestep): reopen and grow it */
    PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
    PetscStackCallHDF5(H5Dset_extent,(dset_id, dims));
  }
  PetscStackCallHDF5(H5Sclose,(filespace));

  /* Each process defines a dataset and writes it to the hyperslab in the file */
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  if (dimension == 3) PetscCall(PetscHDF5IntCast(da->zs,offset + dim++));
  if (dimension > 1)  PetscCall(PetscHDF5IntCast(da->ys,offset + dim++));
  /* da->xs is measured in scalar entries, hence the division by dof */
  PetscCall(PetscHDF5IntCast(da->xs/da->w,offset + dim++));
  if (da->w > 1 || dim2) offset[dim++] = 0;
#if defined(PETSC_USE_COMPLEX)
  offset[dim++] = 0;
#endif
  dim = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  if (dimension == 3) PetscCall(PetscHDF5IntCast(da->ze - da->zs,count + dim++));
  if (dimension > 1)  PetscCall(PetscHDF5IntCast(da->ye - da->ys,count + dim++));
  PetscCall(PetscHDF5IntCast((da->xe - da->xs)/da->w,count + dim++));
  if (da->w > 1 || dim2) PetscCall(PetscHDF5IntCast(da->w,count + dim++));
#if defined(PETSC_USE_COMPLEX)
  count[dim++] = 2;
#endif
  PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
  PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
  PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  PetscCall(VecGetArrayRead(xin, &x));
  PetscStackCallHDF5(H5Dwrite,(dset_id, memscalartype, memspace, filespace, hdf5->dxpl_id, x));
  PetscStackCallHDF5(H5Fflush,(file_id, H5F_SCOPE_GLOBAL));
  PetscCall(VecRestoreArrayRead(xin, &x));

#if defined(PETSC_USE_COMPLEX)
  {
    PetscBool tru = PETSC_TRUE;
    /* mark the dataset so readers know the innermost dimension is (re,im) */
    PetscCall(PetscViewerHDF5WriteObjectAttribute(viewer,(PetscObject)xin,"complex",PETSC_BOOL,&tru));
  }
#endif
  if (timestepping) {
    PetscCall(PetscViewerHDF5WriteObjectAttribute(viewer,(PetscObject)xin,"timestepping",PETSC_BOOL,&timestepping));
  }

  /* Close/release resources */
  if (group != file_id) {
    PetscStackCallHDF5(H5Gclose,(group));
  }
  PetscStackCallHDF5(H5Sclose,(filespace));
  PetscStackCallHDF5(H5Sclose,(memspace));
  PetscStackCallHDF5(H5Dclose,(dset_id));
  PetscCall(PetscInfo(xin,"Wrote Vec object with name %s\n",vecname));
  PetscFunctionReturn(0);
}
#endif

extern PetscErrorCode VecView_MPI_Draw_DA1d(Vec,PetscViewer);

#if defined(PETSC_HAVE_MPIIO)
/*
   Reads (write=PETSC_FALSE) or writes (write=PETSC_TRUE) a DMDA vector with
   collective MPI-IO; the file layout is the DMDA's natural ordering, expressed
   as an MPI subarray type, so every rank transfers its own subdomain directly.
*/
static PetscErrorCode DMDAArrayMPIIO(DM da,PetscViewer viewer,Vec xin,PetscBool write)
{
  MPI_File          mfdes;
  PetscMPIInt       gsizes[4],lsizes[4],lstarts[4],asiz,dof;
  MPI_Datatype      view;
  const PetscScalar *array;
  MPI_Offset        off;
  MPI_Aint          ub,ul;
  PetscInt          type,rows,vecrows,tr[2];
  DM_DA             *dd = (DM_DA*)da->data;
  PetscBool         skipheader;

  PetscFunctionBegin;
  PetscCall(VecGetSize(xin,&vecrows));
  PetscCall(PetscViewerBinaryGetSkipHeader(viewer,&skipheader));
  if (!write) {
    /* Read vector header and validate classid/size against this DMDA vector. */
    if (!skipheader) {
      PetscCall(PetscViewerBinaryRead(viewer,tr,2,NULL,PETSC_INT));
      type = tr[0];
      rows = tr[1];
      PetscCheck(type == VEC_FILE_CLASSID,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Not vector next in file");
      PetscCheck(rows == vecrows,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Vector in file not same size as DMDA vector");
    }
  } else {
    /* Write vector header (classid, global size) unless suppressed. */
    tr[0] = VEC_FILE_CLASSID;
    tr[1] = vecrows;
    if (!skipheader) {
      PetscCall(PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT));
    }
  }

  /* Build the subarray type: index 0 is the dof dimension, then x, y, z.
     MPI_ORDER_FORTRAN makes index 0 vary fastest, matching interlaced storage. */
  PetscCall(PetscMPIIntCast(dd->w,&dof));
  gsizes[0] = dof;
  PetscCall(PetscMPIIntCast(dd->M,gsizes+1));
  PetscCall(PetscMPIIntCast(dd->N,gsizes+2));
  PetscCall(PetscMPIIntCast(dd->P,gsizes+3));
  lsizes[0] = dof;
  PetscCall(PetscMPIIntCast((dd->xe-dd->xs)/dof,lsizes+1));
  PetscCall(PetscMPIIntCast(dd->ye-dd->ys,lsizes+2));
  PetscCall(PetscMPIIntCast(dd->ze-dd->zs,lsizes+3));
  lstarts[0] = 0;
  PetscCall(PetscMPIIntCast(dd->xs/dof,lstarts+1));
  PetscCall(PetscMPIIntCast(dd->ys,lstarts+2));
  PetscCall(PetscMPIIntCast(dd->zs,lstarts+3));
  PetscCallMPI(MPI_Type_create_subarray(da->dim+1,gsizes,lsizes,lstarts,MPI_ORDER_FORTRAN,MPIU_SCALAR,&view));
  PetscCallMPI(MPI_Type_commit(&view));

  PetscCall(PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes));
  PetscCall(PetscViewerBinaryGetMPIIOOffset(viewer,&off));
  PetscCallMPI(MPI_File_set_view(mfdes,off,MPIU_SCALAR,view,(char*)"native",MPI_INFO_NULL));
  PetscCall(VecGetArrayRead(xin,&array));
  /* number of local scalars; unused trailing dimensions have lsizes <= 0 and count as 1 */
  asiz = lsizes[1]*(lsizes[2] > 0 ? lsizes[2] : 1)*(lsizes[3] > 0 ? lsizes[3] : 1)*dof;
  if (write) {
    PetscCall(MPIU_File_write_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE));
  } else {
    PetscCall(MPIU_File_read_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE));
  }
  /* advance the shared viewer offset by the full extent of the filetype */
  PetscCallMPI(MPI_Type_get_extent(view,&ul,&ub));
  PetscCall(PetscViewerBinaryAddMPIIOOffset(viewer,ub));
  PetscCall(VecRestoreArrayRead(xin,&array));
  PetscCallMPI(MPI_Type_free(&view));
  PetscFunctionReturn(0);
}
#endif

/*
   Top-level VecView for DMDA vectors: dispatches on the viewer type
   (draw, VTK, HDF5, GLVis, binary-with-MPIIO) and otherwise falls back to
   viewing a copy permuted into the DMDA's natural ordering.
*/
PetscErrorCode VecView_MPI_DA(Vec xin,PetscViewer viewer)
{
  DM                da;
  PetscInt          dim;
  Vec               natural;
  PetscBool         isdraw,isvtk,isglvis;
#if defined(PETSC_HAVE_HDF5)
  PetscBool         ishdf5;
#endif
  const char        *prefix,*name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(VecGetDM(xin,&da));
  PetscCheck(da,PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk));
#if defined(PETSC_HAVE_HDF5)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5));
#endif
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERGLVIS,&isglvis));
  if (isdraw) {
    PetscCall(DMDAGetInfo(da,&dim,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL));
    if (dim == 1) {
      PetscCall(VecView_MPI_Draw_DA1d(xin,viewer));
    } else if (dim == 2) {
      PetscCall(VecView_MPI_Draw_DA2d(xin,viewer));
    } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot graphically view vector associated with this dimensional DMDA %" PetscInt_FMT,dim);
  } else if (isvtk) {           /* Duplicate the Vec */
    Vec Y;
    PetscCall(VecDuplicate(xin,&Y));
    if (((PetscObject)xin)->name) {
      /* If xin was named, copy the name over to Y. The duplicate names are safe because nobody else will ever see Y. */
      PetscCall(PetscObjectSetName((PetscObject)Y,((PetscObject)xin)->name));
    }
    PetscCall(VecCopy(xin,Y));
    {
      PetscObject dmvtk;
      PetscBool   compatible,compatibleSet;
      PetscCall(PetscViewerVTKGetDM(viewer,&dmvtk));
      if (dmvtk) {
        PetscValidHeaderSpecific((DM)dmvtk,DM_CLASSID,2);
        PetscCall(DMGetCompatibility(da,(DM)dmvtk,&compatible,&compatibleSet));
        PetscCheck(compatibleSet && compatible,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_INCOMP,"Cannot confirm compatibility of DMs associated with Vecs viewed in the same VTK file. Check that grids are the same.");
      }
      /* the VTK viewer takes ownership of Y and writes it out later */
      PetscCall(PetscViewerVTKAddField(viewer,(PetscObject)da,DMDAVTKWriteAll,PETSC_DEFAULT,PETSC_VTK_POINT_FIELD,PETSC_FALSE,(PetscObject)Y));
    }
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    PetscCall(VecView_MPI_HDF5_DA(xin,viewer));
#endif
  } else if (isglvis) {
    PetscCall(VecView_GLVis(xin,viewer));
  } else {
#if defined(PETSC_HAVE_MPIIO)
    PetscBool isbinary,isMPIIO;

    PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary));
    if (isbinary) {
      PetscCall(PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO));
      if (isMPIIO) {
        /* MPI-IO path writes directly from the global vector; no natural copy needed */
        PetscCall(DMDAArrayMPIIO(da,viewer,xin,PETSC_TRUE));
        PetscFunctionReturn(0);
      }
    }
#endif

    /* call viewer on natural ordering */
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix));
    PetscCall(DMDACreateNaturalVector(da,&natural));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)natural,prefix));
    PetscCall(DMDAGlobalToNaturalBegin(da,xin,INSERT_VALUES,natural));
    PetscCall(DMDAGlobalToNaturalEnd(da,xin,INSERT_VALUES,natural));
    PetscCall(PetscObjectGetName((PetscObject)xin,&name));
    PetscCall(PetscObjectSetName((PetscObject)natural,name));

    PetscCall(PetscViewerGetFormat(viewer,&format));
    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      /* temporarily remove viewer format so it won't trigger in the VecView() */
      PetscCall(PetscViewerPushFormat(viewer,PETSC_VIEWER_DEFAULT));
    }

    ((PetscObject)natural)->donotPetscObjectPrintClassNamePrefixType = PETSC_TRUE;
    PetscCall(VecView(natural,viewer));
    ((PetscObject)natural)->donotPetscObjectPrintClassNamePrefixType = PETSC_FALSE;

    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      MPI_Comm   comm;
      FILE       *info;
      const char *fieldname;
      char       fieldbuf[256];
      PetscInt   dim,ni,nj,nk,pi,pj,pk,dof,n;

      /* set the viewer format back into the viewer */
      PetscCall(PetscViewerPopFormat(viewer));
      PetscCall(PetscObjectGetComm((PetscObject)viewer,&comm));
      PetscCall(PetscViewerBinaryGetInfoPointer(viewer,&info));
      PetscCall(DMDAGetInfo(da,&dim,&ni,&nj,&nk,&pi,&pj,&pk,&dof,NULL,NULL,NULL,NULL,NULL));
      /* append MATLAB reshape/squeeze snippets to the .info file so the
         binary data can be loaded with per-field names */
      PetscCall(PetscFPrintf(comm,info,"#--- begin code written by PetscViewerBinary for MATLAB format ---#\n"));
      PetscCall(PetscFPrintf(comm,info,"#$$ tmp = PetscBinaryRead(fd); \n"));
      if (dim == 1) { PetscCall(PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%" PetscInt_FMT ",%" PetscInt_FMT ");\n",dof,ni)); }
      if (dim == 2) { PetscCall(PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ");\n",dof,ni,nj)); }
      if (dim == 3) { PetscCall(PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ");\n",dof,ni,nj,nk)); }

      for (n=0; n<dof; n++) {
        PetscCall(DMDAGetFieldName(da,n,&fieldname));
        if (!fieldname || !fieldname[0]) {
          /* unnamed components get synthetic names field0, field1, ... */
          PetscCall(PetscSNPrintf(fieldbuf,sizeof fieldbuf,"field%" PetscInt_FMT,n));
          fieldname = fieldbuf;
        }
        if (dim == 1) { PetscCall(PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%" PetscInt_FMT ",:))';\n",name,fieldname,n+1)); }
        if (dim == 2) { PetscCall(PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%" PetscInt_FMT ",:,:))';\n",name,fieldname,n+1)); }
        if (dim == 3) { PetscCall(PetscFPrintf(comm,info,"#$$ Set.%s.%s = permute(squeeze(tmp(%" PetscInt_FMT ",:,:,:)),[2 1 3]);\n",name,fieldname,n+1));}
      }
      PetscCall(PetscFPrintf(comm,info,"#$$ clear tmp; \n"));
      PetscCall(PetscFPrintf(comm,info,"#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n"));
    }

    PetscCall(VecDestroy(&natural));
  }
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_HDF5)
/*
   Loads a DMDA vector from an HDF5 file; the inverse of VecView_MPI_HDF5_DA.
*/
PetscErrorCode VecLoad_HDF5_DA(Vec xin, PetscViewer viewer)
{
  PetscViewer_HDF5 *hdf5 = (PetscViewer_HDF5*) viewer->data;
  DM          da;
  int         dim,rdim;
  hsize_t     dims[6]={0},count[6]={0},offset[6]={0};
  PetscBool   dim2=PETSC_FALSE,timestepping=PETSC_FALSE;
  PetscInt    dimension,timestep=PETSC_MIN_INT,dofInd;
  PetscScalar *x;
  const char  *vecname;
  hid_t       filespace;  /* file dataspace identifier */
  hid_t       dset_id;    /* dataset identifier */
  hid_t       memspace;   /* memory dataspace identifier */
  hid_t       file_id,group;
  hid_t       scalartype; /* scalar type (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  DM_DA       *dd;

  PetscFunctionBegin;
#if defined(PETSC_USE_REAL_SINGLE)
  scalartype = H5T_NATIVE_FLOAT;
#elif defined(PETSC_USE_REAL___FLOAT128)
#error "HDF5 output with 128 bit floats not supported."
#elif defined(PETSC_USE_REAL___FP16)
#error "HDF5 output with 16 bit floats not supported."
795 #else 796 scalartype = H5T_NATIVE_DOUBLE; 797 #endif 798 799 PetscCall(PetscViewerHDF5OpenGroup(viewer, &file_id, &group)); 800 PetscCall(PetscObjectGetName((PetscObject)xin,&vecname)); 801 PetscCall(PetscViewerHDF5CheckTimestepping_Internal(viewer, vecname)); 802 PetscCall(PetscViewerHDF5IsTimestepping(viewer, ×tepping)); 803 if (timestepping) { 804 PetscCall(PetscViewerHDF5GetTimestep(viewer, ×tep)); 805 } 806 PetscCall(VecGetDM(xin,&da)); 807 dd = (DM_DA*)da->data; 808 PetscCall(DMGetDimension(da, &dimension)); 809 810 /* Open dataset */ 811 PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT)); 812 813 /* Retrieve the dataspace for the dataset */ 814 PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id)); 815 PetscStackCallHDF5Return(rdim,H5Sget_simple_extent_dims,(filespace, dims, NULL)); 816 817 /* Expected dimension for holding the dof's */ 818 #if defined(PETSC_USE_COMPLEX) 819 dofInd = rdim-2; 820 #else 821 dofInd = rdim-1; 822 #endif 823 824 /* The expected number of dimensions, assuming basedimension2 = false */ 825 dim = dimension; 826 if (dd->w > 1) ++dim; 827 if (timestep >= 0) ++dim; 828 #if defined(PETSC_USE_COMPLEX) 829 ++dim; 830 #endif 831 832 /* In this case the input dataset have one extra, unexpected dimension. 
*/ 833 if (rdim == dim+1) { 834 /* In this case the block size unity */ 835 if (dd->w == 1 && dims[dofInd] == 1) dim2 = PETSC_TRUE; 836 837 /* Special error message for the case where dof does not match the input file */ 838 else PetscCheck(dd->w == (PetscInt) dims[dofInd],PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Number of dofs in file is %" PetscInt_FMT ", not %" PetscInt_FMT " as expected",(PetscInt)dims[dofInd],dd->w); 839 840 /* Other cases where rdim != dim cannot be handled currently */ 841 } else PetscCheck(rdim == dim,PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file is %d, not %d as expected with dof = %" PetscInt_FMT,rdim,dim,dd->w); 842 843 /* Set up the hyperslab size */ 844 dim = 0; 845 if (timestep >= 0) { 846 offset[dim] = timestep; 847 count[dim] = 1; 848 ++dim; 849 } 850 if (dimension == 3) { 851 PetscCall(PetscHDF5IntCast(dd->zs,offset + dim)); 852 PetscCall(PetscHDF5IntCast(dd->ze - dd->zs,count + dim)); 853 ++dim; 854 } 855 if (dimension > 1) { 856 PetscCall(PetscHDF5IntCast(dd->ys,offset + dim)); 857 PetscCall(PetscHDF5IntCast(dd->ye - dd->ys,count + dim)); 858 ++dim; 859 } 860 PetscCall(PetscHDF5IntCast(dd->xs/dd->w,offset + dim)); 861 PetscCall(PetscHDF5IntCast((dd->xe - dd->xs)/dd->w,count + dim)); 862 ++dim; 863 if (dd->w > 1 || dim2) { 864 offset[dim] = 0; 865 PetscCall(PetscHDF5IntCast(dd->w,count + dim)); 866 ++dim; 867 } 868 #if defined(PETSC_USE_COMPLEX) 869 offset[dim] = 0; 870 count[dim] = 2; 871 ++dim; 872 #endif 873 874 /* Create the memory and filespace */ 875 PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL)); 876 PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL)); 877 878 PetscCall(VecGetArray(xin, &x)); 879 PetscStackCallHDF5(H5Dread,(dset_id, scalartype, memspace, filespace, hdf5->dxpl_id, x)); 880 PetscCall(VecRestoreArray(xin, &x)); 881 882 /* Close/release resources */ 883 if (group != file_id) { 884 PetscStackCallHDF5(H5Gclose,(group)); 
885 } 886 PetscStackCallHDF5(H5Sclose,(filespace)); 887 PetscStackCallHDF5(H5Sclose,(memspace)); 888 PetscStackCallHDF5(H5Dclose,(dset_id)); 889 PetscFunctionReturn(0); 890 } 891 #endif 892 893 PetscErrorCode VecLoad_Binary_DA(Vec xin, PetscViewer viewer) 894 { 895 DM da; 896 Vec natural; 897 const char *prefix; 898 PetscInt bs; 899 PetscBool flag; 900 DM_DA *dd; 901 #if defined(PETSC_HAVE_MPIIO) 902 PetscBool isMPIIO; 903 #endif 904 905 PetscFunctionBegin; 906 PetscCall(VecGetDM(xin,&da)); 907 dd = (DM_DA*)da->data; 908 #if defined(PETSC_HAVE_MPIIO) 909 PetscCall(PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO)); 910 if (isMPIIO) { 911 PetscCall(DMDAArrayMPIIO(da,viewer,xin,PETSC_FALSE)); 912 PetscFunctionReturn(0); 913 } 914 #endif 915 916 PetscCall(PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix)); 917 PetscCall(DMDACreateNaturalVector(da,&natural)); 918 PetscCall(PetscObjectSetName((PetscObject)natural,((PetscObject)xin)->name)); 919 PetscCall(PetscObjectSetOptionsPrefix((PetscObject)natural,prefix)); 920 PetscCall(VecLoad(natural,viewer)); 921 PetscCall(DMDANaturalToGlobalBegin(da,natural,INSERT_VALUES,xin)); 922 PetscCall(DMDANaturalToGlobalEnd(da,natural,INSERT_VALUES,xin)); 923 PetscCall(VecDestroy(&natural)); 924 PetscCall(PetscInfo(xin,"Loading vector from natural ordering into DMDA\n")); 925 PetscCall(PetscOptionsGetInt(NULL,((PetscObject)xin)->prefix,"-vecload_block_size",&bs,&flag)); 926 if (flag && bs != dd->w) { 927 PetscCall(PetscInfo(xin,"Block size in file %" PetscInt_FMT " not equal to DMDA's dof %" PetscInt_FMT "\n",bs,dd->w)); 928 } 929 PetscFunctionReturn(0); 930 } 931 932 PetscErrorCode VecLoad_Default_DA(Vec xin, PetscViewer viewer) 933 { 934 DM da; 935 PetscBool isbinary; 936 #if defined(PETSC_HAVE_HDF5) 937 PetscBool ishdf5; 938 #endif 939 940 PetscFunctionBegin; 941 PetscCall(VecGetDM(xin,&da)); 942 PetscCheck(da,PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA"); 943 944 #if 
defined(PETSC_HAVE_HDF5) 945 PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5)); 946 #endif 947 PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary)); 948 949 if (isbinary) { 950 PetscCall(VecLoad_Binary_DA(xin,viewer)); 951 #if defined(PETSC_HAVE_HDF5) 952 } else if (ishdf5) { 953 PetscCall(VecLoad_HDF5_DA(xin,viewer)); 954 #endif 955 } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for vector loading", ((PetscObject)viewer)->type_name); 956 PetscFunctionReturn(0); 957 } 958