/*
  Plots vectors obtained with DMDACreate2d()
*/

#include <petsc/private/dmdaimpl.h> /*I "petscdmda.h" I*/
#include <petsc/private/vecimpl.h>
#include <petscdraw.h>
#include <petscviewerhdf5.h>

/*
  The data that is passed into the graphics callback
*/
typedef struct {
  PetscMPIInt       rank;                           /* rank of this process in the vector's communicator */
  PetscInt          m,n,dof,k;                      /* ghosted local grid sizes, # components, component being drawn */
  PetscReal         xmin,xmax,ymin,ymax,min,max;    /* coordinate extent of the plot and value range of the field */
  const PetscScalar *xy,*v;                         /* ghosted coordinates (interlaced x,y) and ghosted field values */
  PetscBool         showgrid;                       /* also draw the grid lines on top of the contour patches? */
  const char        *name0,*name1;                  /* optional labels for the x and y coordinate directions */
} ZoomCtx;

/*
  This does the drawing for one particular field
  in one particular set of coordinates. It is a callback
  called from PetscDrawZoom()
*/
#undef __FUNCT__
#define __FUNCT__ "VecView_MPI_Draw_DA2d_Zoom"
PetscErrorCode VecView_MPI_Draw_DA2d_Zoom(PetscDraw draw,void *ctx)
{
  ZoomCtx           *zctx = (ZoomCtx*)ctx;
  PetscErrorCode    ierr;
  PetscInt          m,n,i,j,k,dof,id,c1,c2,c3,c4;
  PetscReal         min,max,x1,x2,x3,x4,y_1,y2,y3,y4;
  const PetscScalar *xy,*v;

  PetscFunctionBegin;
  m   = zctx->m;
  n   = zctx->n;
  dof = zctx->dof;
  k   = zctx->k;
  xy  = zctx->xy;
  v   = zctx->v;
  min = zctx->min;
  max = zctx->max;

  /* PetscDraw the contour plot patch: each quadrilateral cell is rendered
     as two triangles whose vertex colors come from the field values */
  ierr = PetscDrawCollectiveBegin(draw);CHKERRQ(ierr);
  for (j=0; j<n-1; j++) {
    for (i=0; i<m-1; i++) {
      /* corner 1: (i,j) */
      id  = i+j*m;
      x1  = PetscRealPart(xy[2*id]);
      y_1 = PetscRealPart(xy[2*id+1]);
      c1  = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      /* corner 2: (i+1,j) */
      id = i+j*m+1;
      x2 = PetscRealPart(xy[2*id]);
      y2 = PetscRealPart(xy[2*id+1]);
      c2 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      /* corner 3: (i+1,j+1) */
      id = i+j*m+1+m;
      x3 = PetscRealPart(xy[2*id]);
      y3 = PetscRealPart(xy[2*id+1]);
      c3 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      /* corner 4: (i,j+1) */
      id = i+j*m+m;
      x4 = PetscRealPart(xy[2*id]);
      y4 = PetscRealPart(xy[2*id+1]);
      c4 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      ierr = PetscDrawTriangle(draw,x1,y_1,x2,y2,x3,y3,c1,c2,c3);CHKERRQ(ierr);
      ierr = PetscDrawTriangle(draw,x1,y_1,x3,y3,x4,y4,c1,c3,c4);CHKERRQ(ierr);
      if (zctx->showgrid) {
        ierr = PetscDrawLine(draw,x1,y_1,x2,y2,PETSC_DRAW_BLACK);CHKERRQ(ierr);
        ierr = PetscDrawLine(draw,x2,y2,x3,y3,PETSC_DRAW_BLACK);CHKERRQ(ierr);
        ierr = PetscDrawLine(draw,x3,y3,x4,y4,PETSC_DRAW_BLACK);CHKERRQ(ierr);
        ierr = PetscDrawLine(draw,x4,y4,x1,y_1,PETSC_DRAW_BLACK);CHKERRQ(ierr);
      }
    }
  }
  /* only rank 0 draws the axis labels and the coordinate limits */
  if (!zctx->rank) {
    if (zctx->name0 || zctx->name1) {
      PetscReal xl,yl,xr,yr,x,y;
      ierr = PetscDrawGetCoordinates(draw,&xl,&yl,&xr,&yr);CHKERRQ(ierr);
      x  = xl + .30*(xr - xl);
      xl = xl + .01*(xr - xl);
      y  = yr - .30*(yr - yl);
      yl = yl + .01*(yr - yl);
      if (zctx->name0) {ierr = PetscDrawString(draw,x,yl,PETSC_DRAW_BLACK,zctx->name0);CHKERRQ(ierr);}
      if (zctx->name1) {ierr = PetscDrawStringVertical(draw,xl,y,PETSC_DRAW_BLACK,zctx->name1);CHKERRQ(ierr);}
    }
    /*
       Ideally we would use the PetscDrawAxis object to manage displaying the coordinate limits
       but that may require some refactoring.
    */
    {
      PetscReal xmin = zctx->xmin, ymin = zctx->ymin;
      PetscReal xmax = zctx->xmax, ymax = zctx->ymax;
      char      value[16]; size_t len; PetscReal w;
      ierr = PetscSNPrintf(value,16,"%f",xmin);CHKERRQ(ierr);
      ierr = PetscDrawString(draw,xmin,ymin - .05*(ymax - ymin),PETSC_DRAW_BLACK,value);CHKERRQ(ierr);
      ierr = PetscSNPrintf(value,16,"%f",xmax);CHKERRQ(ierr);
      ierr = PetscStrlen(value,&len);CHKERRQ(ierr);
      ierr = PetscDrawStringGetSize(draw,&w,NULL);CHKERRQ(ierr);
      /* right-justify the xmax label so it ends at the right edge */
      ierr = PetscDrawString(draw,xmax - len*w,ymin - .05*(ymax - ymin),PETSC_DRAW_BLACK,value);CHKERRQ(ierr);
      ierr = PetscSNPrintf(value,16,"%f",ymin);CHKERRQ(ierr);
      ierr = PetscDrawString(draw,xmin - .05*(xmax - xmin),ymin,PETSC_DRAW_BLACK,value);CHKERRQ(ierr);
      ierr = PetscSNPrintf(value,16,"%f",ymax);CHKERRQ(ierr);
      ierr = PetscDrawString(draw,xmin - .05*(xmax - xmin),ymax,PETSC_DRAW_BLACK,value);CHKERRQ(ierr);
    }
  }
  ierr = PetscDrawCollectiveEnd(draw);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecView_MPI_Draw_DA2d"
/* Contour-plot a Vec attached to a 2d DMDA; one window (or viewport) per selected field */
PetscErrorCode VecView_MPI_Draw_DA2d(Vec xin,PetscViewer viewer)
{
  DM                 da,dac,dag;
  PetscErrorCode     ierr;
  PetscInt           N,s,M,w,ncoors = 4;
  const PetscInt     *lx,*ly;
  PetscReal          coors[4];
  PetscDraw          draw,popup;
  PetscBool          isnull,useports = PETSC_FALSE;
  MPI_Comm           comm;
  Vec                xlocal,xcoor,xcoorl;
  DMBoundaryType     bx,by;
  DMDAStencilType    st;
  ZoomCtx            zctx;
  PetscDrawViewPorts *ports = NULL;
  PetscViewerFormat  format;
  PetscInt           *displayfields;
  PetscInt           ndisplayfields,i,nbounds;
  const PetscReal    *bounds;

  PetscFunctionBegin;
  zctx.showgrid = PETSC_FALSE;

  ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
  ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr);
  if (isnull) PetscFunctionReturn(0);

  ierr = PetscViewerDrawGetBounds(viewer,&nbounds,&bounds);CHKERRQ(ierr);

  ierr =
VecGetDM(xin,&da);CHKERRQ(ierr); 149 if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA"); 150 151 ierr = PetscObjectGetComm((PetscObject)xin,&comm);CHKERRQ(ierr); 152 ierr = MPI_Comm_rank(comm,&zctx.rank);CHKERRQ(ierr); 153 154 ierr = DMDAGetInfo(da,0,&M,&N,0,&zctx.m,&zctx.n,0,&w,&s,&bx,&by,0,&st);CHKERRQ(ierr); 155 ierr = DMDAGetOwnershipRanges(da,&lx,&ly,NULL);CHKERRQ(ierr); 156 157 /* 158 Obtain a sequential vector that is going to contain the local values plus ONE layer of 159 ghosted values to draw the graphics from. We also need its corresponding DMDA (dac) that will 160 update the local values pluse ONE layer of ghost values. 161 */ 162 ierr = PetscObjectQuery((PetscObject)da,"GraphicsGhosted",(PetscObject*)&xlocal);CHKERRQ(ierr); 163 if (!xlocal) { 164 if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) { 165 /* 166 if original da is not of stencil width one, or periodic or not a box stencil then 167 create a special DMDA to handle one level of ghost points for graphics 168 */ 169 ierr = DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,w,1,lx,ly,&dac);CHKERRQ(ierr); 170 ierr = PetscInfo(da,"Creating auxilary DMDA for managing graphics ghost points\n");CHKERRQ(ierr); 171 } else { 172 /* otherwise we can use the da we already have */ 173 dac = da; 174 } 175 /* create local vector for holding ghosted values used in graphics */ 176 ierr = DMCreateLocalVector(dac,&xlocal);CHKERRQ(ierr); 177 if (dac != da) { 178 /* don't keep any public reference of this DMDA, is is only available through xlocal */ 179 ierr = PetscObjectDereference((PetscObject)dac);CHKERRQ(ierr); 180 } else { 181 /* remove association between xlocal and da, because below we compose in the opposite 182 direction and if we left this connect we'd get a loop, so the objects could 183 never be destroyed */ 184 ierr = 
PetscObjectRemoveReference((PetscObject)xlocal,"__PETSc_dm");CHKERRQ(ierr); 185 } 186 ierr = PetscObjectCompose((PetscObject)da,"GraphicsGhosted",(PetscObject)xlocal);CHKERRQ(ierr); 187 ierr = PetscObjectDereference((PetscObject)xlocal);CHKERRQ(ierr); 188 } else { 189 if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) { 190 ierr = VecGetDM(xlocal, &dac);CHKERRQ(ierr); 191 } else { 192 dac = da; 193 } 194 } 195 196 /* 197 Get local (ghosted) values of vector 198 */ 199 ierr = DMGlobalToLocalBegin(dac,xin,INSERT_VALUES,xlocal);CHKERRQ(ierr); 200 ierr = DMGlobalToLocalEnd(dac,xin,INSERT_VALUES,xlocal);CHKERRQ(ierr); 201 ierr = VecGetArrayRead(xlocal,&zctx.v);CHKERRQ(ierr); 202 203 /* get coordinates of nodes */ 204 ierr = DMGetCoordinates(da,&xcoor);CHKERRQ(ierr); 205 if (!xcoor) { 206 ierr = DMDASetUniformCoordinates(da,0.0,1.0,0.0,1.0,0.0,0.0);CHKERRQ(ierr); 207 ierr = DMGetCoordinates(da,&xcoor);CHKERRQ(ierr); 208 } 209 210 /* 211 Determine the min and max coordinates in plot 212 */ 213 ierr = VecStrideMin(xcoor,0,NULL,&zctx.xmin);CHKERRQ(ierr); 214 ierr = VecStrideMax(xcoor,0,NULL,&zctx.xmax);CHKERRQ(ierr); 215 ierr = VecStrideMin(xcoor,1,NULL,&zctx.ymin);CHKERRQ(ierr); 216 ierr = VecStrideMax(xcoor,1,NULL,&zctx.ymax);CHKERRQ(ierr); 217 coors[0] = zctx.xmin - .05*(zctx.xmax- zctx.xmin); coors[2] = zctx.xmax + .05*(zctx.xmax - zctx.xmin); 218 coors[1] = zctx.ymin - .05*(zctx.ymax- zctx.ymin); coors[3] = zctx.ymax + .05*(zctx.ymax - zctx.ymin); 219 ierr = PetscOptionsGetRealArray(NULL,NULL,"-draw_coordinates",coors,&ncoors,NULL);CHKERRQ(ierr); 220 ierr = PetscInfo4(da,"Preparing DMDA 2d contour plot coordinates %g %g %g %g\n",(double)coors[0],(double)coors[1],(double)coors[2],(double)coors[3]);CHKERRQ(ierr); 221 222 /* 223 get local ghosted version of coordinates 224 */ 225 ierr = PetscObjectQuery((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject*)&xcoorl);CHKERRQ(ierr); 226 if (!xcoorl) { 227 /* create DMDA to get local 
version of graphics */ 228 ierr = DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,2,1,lx,ly,&dag);CHKERRQ(ierr); 229 ierr = PetscInfo(dag,"Creating auxilary DMDA for managing graphics coordinates ghost points\n");CHKERRQ(ierr); 230 ierr = DMCreateLocalVector(dag,&xcoorl);CHKERRQ(ierr); 231 ierr = PetscObjectCompose((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject)xcoorl);CHKERRQ(ierr); 232 ierr = PetscObjectDereference((PetscObject)dag);CHKERRQ(ierr); 233 ierr = PetscObjectDereference((PetscObject)xcoorl);CHKERRQ(ierr); 234 } else { 235 ierr = VecGetDM(xcoorl,&dag);CHKERRQ(ierr); 236 } 237 ierr = DMGlobalToLocalBegin(dag,xcoor,INSERT_VALUES,xcoorl);CHKERRQ(ierr); 238 ierr = DMGlobalToLocalEnd(dag,xcoor,INSERT_VALUES,xcoorl);CHKERRQ(ierr); 239 ierr = VecGetArrayRead(xcoorl,&zctx.xy);CHKERRQ(ierr); 240 241 /* 242 Get information about size of area each processor must do graphics for 243 */ 244 ierr = DMDAGetInfo(dac,NULL,&M,&N,NULL,NULL,NULL,NULL,&zctx.dof,NULL,&bx,&by,NULL,NULL);CHKERRQ(ierr); 245 ierr = DMDAGetGhostCorners(dac,NULL,NULL,NULL,&zctx.m,&zctx.n,NULL);CHKERRQ(ierr); 246 ierr = PetscOptionsGetBool(NULL,NULL,"-draw_contour_grid",&zctx.showgrid,NULL);CHKERRQ(ierr); 247 248 ierr = DMDASelectFields(da,&ndisplayfields,&displayfields);CHKERRQ(ierr); 249 ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 250 ierr = PetscOptionsGetBool(NULL,NULL,"-draw_ports",&useports,NULL);CHKERRQ(ierr); 251 if (useports || format == PETSC_VIEWER_DRAW_PORTS) { 252 ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr); 253 ierr = PetscDrawCheckResizedWindow(draw);CHKERRQ(ierr); 254 ierr = PetscDrawClear(draw);CHKERRQ(ierr); 255 ierr = PetscDrawViewPortsCreate(draw,ndisplayfields,&ports);CHKERRQ(ierr); 256 } 257 ierr = DMDAGetCoordinateName(da,0,&zctx.name0);CHKERRQ(ierr); 258 ierr = DMDAGetCoordinateName(da,1,&zctx.name1);CHKERRQ(ierr); 259 260 /* loop over each field; drawing each in a different window */ 261 for 
(i=0; i<ndisplayfields; i++) { 262 zctx.k = displayfields[i]; 263 if (useports || format == PETSC_VIEWER_DRAW_PORTS) { 264 ierr = PetscDrawViewPortsSet(ports,i);CHKERRQ(ierr); 265 } else { 266 const char *title; 267 ierr = PetscViewerDrawGetDraw(viewer,i,&draw);CHKERRQ(ierr); 268 ierr = DMDAGetFieldName(da,zctx.k,&title);CHKERRQ(ierr); 269 if (title) {ierr = PetscDrawSetTitle(draw,title);CHKERRQ(ierr);} 270 } 271 272 /* determine the min and max value in plot */ 273 ierr = VecStrideMin(xin,zctx.k,NULL,&zctx.min);CHKERRQ(ierr); 274 ierr = VecStrideMax(xin,zctx.k,NULL,&zctx.max);CHKERRQ(ierr); 275 if (zctx.k < nbounds) { 276 zctx.min = bounds[2*zctx.k]; 277 zctx.max = bounds[2*zctx.k+1]; 278 } 279 if (zctx.min == zctx.max) { 280 zctx.min -= 1.e-12; 281 zctx.max += 1.e-12; 282 } 283 ierr = PetscInfo2(da,"DMDA 2d contour plot min %g max %g\n",(double)zctx.min,(double)zctx.max);CHKERRQ(ierr); 284 285 ierr = PetscDrawGetPopup(draw,&popup);CHKERRQ(ierr); 286 if (popup) {ierr = PetscDrawScalePopup(popup,zctx.min,zctx.max);CHKERRQ(ierr);} 287 ierr = PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);CHKERRQ(ierr); 288 ierr = PetscDrawZoom(draw,VecView_MPI_Draw_DA2d_Zoom,&zctx);CHKERRQ(ierr); 289 } 290 ierr = PetscDrawViewPortsDestroy(ports);CHKERRQ(ierr); 291 ierr = PetscFree(displayfields);CHKERRQ(ierr); 292 293 ierr = VecRestoreArrayRead(xcoorl,&zctx.xy);CHKERRQ(ierr); 294 ierr = VecRestoreArrayRead(xlocal,&zctx.v);CHKERRQ(ierr); 295 PetscFunctionReturn(0); 296 } 297 298 #if defined(PETSC_HAVE_HDF5) 299 #undef __FUNCT__ 300 #define __FUNCT__ "VecGetHDF5ChunkSize" 301 static PetscErrorCode VecGetHDF5ChunkSize(DM_DA *da, Vec xin, PetscInt dimension, PetscInt timestep, hsize_t *chunkDims) 302 { 303 PetscMPIInt comm_size; 304 PetscErrorCode ierr; 305 hsize_t chunk_size, target_size, dim; 306 hsize_t vec_size = sizeof(PetscScalar)*da->M*da->N*da->P*da->w; 307 hsize_t avg_local_vec_size,KiB = 1024,MiB = KiB*KiB,GiB = MiB*KiB,min_size = MiB; 308 hsize_t max_chunks 
= 64*KiB; /* HDF5 internal limitation */ 309 hsize_t max_chunk_size = 4*GiB; /* HDF5 internal limitation */ 310 PetscInt zslices=da->p, yslices=da->n, xslices=da->m; 311 312 PetscFunctionBegin; 313 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)xin), &comm_size);CHKERRQ(ierr); 314 avg_local_vec_size = (hsize_t) ceil(vec_size*1.0/comm_size); /* we will attempt to use this as the chunk size */ 315 316 target_size = (hsize_t) ceil(PetscMin(vec_size,PetscMin(max_chunk_size,PetscMax(avg_local_vec_size,PetscMax(ceil(vec_size*1.0/max_chunks),min_size))))); 317 /* following line uses sizeof(PetscReal) instead of sizeof(PetscScalar) because the last dimension of chunkDims[] captures the 2* when complex numbers are being used */ 318 chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(PetscReal); 319 320 /* 321 if size/rank > max_chunk_size, we need radical measures: even going down to 322 avg_local_vec_size is not enough, so we simply use chunk size of 4 GiB no matter 323 what, composed in the most efficient way possible. 324 N.B. this minimises the number of chunks, which may or may not be the optimal 325 solution. In a BG, for example, the optimal solution is probably to make # chunks = # 326 IO nodes involved, but this author has no access to a BG to figure out how to 327 reliably find the right number. And even then it may or may not be enough. 328 */ 329 if (avg_local_vec_size > max_chunk_size) { 330 /* check if we can just split local z-axis: is that enough? 
*/ 331 zslices = (PetscInt)ceil(vec_size*1.0/(da->p*max_chunk_size))*zslices; 332 if (zslices > da->P) { 333 /* lattice is too large in xy-directions, splitting z only is not enough */ 334 zslices = da->P; 335 yslices= (PetscInt)ceil(vec_size*1.0/(zslices*da->n*max_chunk_size))*yslices; 336 if (yslices > da->N) { 337 /* lattice is too large in x-direction, splitting along z, y is not enough */ 338 yslices = da->N; 339 xslices= (PetscInt)ceil(vec_size*1.0/(zslices*yslices*da->m*max_chunk_size))*xslices; 340 } 341 } 342 dim = 0; 343 if (timestep >= 0) { 344 ++dim; 345 } 346 /* prefer to split z-axis, even down to planar slices */ 347 if (dimension == 3) { 348 chunkDims[dim++] = (hsize_t) da->P/zslices; 349 chunkDims[dim++] = (hsize_t) da->N/yslices; 350 chunkDims[dim++] = (hsize_t) da->M/xslices; 351 } else { 352 /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */ 353 chunkDims[dim++] = (hsize_t) da->N/yslices; 354 chunkDims[dim++] = (hsize_t) da->M/xslices; 355 } 356 chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(double); 357 } else { 358 if (target_size < chunk_size) { 359 /* only change the defaults if target_size < chunk_size */ 360 dim = 0; 361 if (timestep >= 0) { 362 ++dim; 363 } 364 /* prefer to split z-axis, even down to planar slices */ 365 if (dimension == 3) { 366 /* try splitting the z-axis to core-size bits, i.e. 
divide chunk size by # comm_size in z-direction */ 367 if (target_size >= chunk_size/da->p) { 368 /* just make chunks the size of <local_z>x<whole_world_y>x<whole_world_x>x<dof> */ 369 chunkDims[dim] = (hsize_t) ceil(da->P*1.0/da->p); 370 } else { 371 /* oops, just splitting the z-axis is NOT ENOUGH, need to split more; let's be 372 radical and let everyone write all they've got */ 373 chunkDims[dim++] = (hsize_t) ceil(da->P*1.0/da->p); 374 chunkDims[dim++] = (hsize_t) ceil(da->N*1.0/da->n); 375 chunkDims[dim++] = (hsize_t) ceil(da->M*1.0/da->m); 376 } 377 } else { 378 /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */ 379 if (target_size >= chunk_size/da->n) { 380 /* just make chunks the size of <local_z>x<whole_world_y>x<whole_world_x>x<dof> */ 381 chunkDims[dim] = (hsize_t) ceil(da->N*1.0/da->n); 382 } else { 383 /* oops, just splitting the z-axis is NOT ENOUGH, need to split more; let's be 384 radical and let everyone write all they've got */ 385 chunkDims[dim++] = (hsize_t) ceil(da->N*1.0/da->n); 386 chunkDims[dim++] = (hsize_t) ceil(da->M*1.0/da->m); 387 } 388 389 } 390 chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(double); 391 } else { 392 /* precomputed chunks are fine, we don't need to do anything */ 393 } 394 } 395 PetscFunctionReturn(0); 396 } 397 #endif 398 399 #if defined(PETSC_HAVE_HDF5) 400 #undef __FUNCT__ 401 #define __FUNCT__ "VecView_MPI_HDF5_DA" 402 PetscErrorCode VecView_MPI_HDF5_DA(Vec xin,PetscViewer viewer) 403 { 404 DM dm; 405 DM_DA *da; 406 hid_t filespace; /* file dataspace identifier */ 407 hid_t chunkspace; /* chunk dataset property identifier */ 408 hid_t plist_id; /* property list identifier */ 409 hid_t dset_id; /* dataset identifier */ 410 hid_t memspace; /* memory dataspace identifier */ 411 hid_t file_id; 412 hid_t group; 413 hid_t memscalartype; /* scalar type 
for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hid_t             filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hsize_t           dim;
  hsize_t           maxDims[6]={0}, dims[6]={0}, chunkDims[6]={0}, count[6]={0}, offset[6]={0}; /* we depend on these being sane later on */
  PetscInt          timestep, dimension;
  const PetscScalar *x;
  const char        *vecname;
  PetscErrorCode    ierr;
  PetscBool         dim2;
  PetscBool         spoutput;

  PetscFunctionBegin;
  ierr = PetscViewerHDF5OpenGroup(viewer, &file_id, &group);CHKERRQ(ierr);
  ierr = PetscViewerHDF5GetTimestep(viewer, &timestep);CHKERRQ(ierr);
  ierr = PetscViewerHDF5GetBaseDimension2(viewer,&dim2);CHKERRQ(ierr);
  ierr = PetscViewerHDF5GetSPOutput(viewer,&spoutput);CHKERRQ(ierr);

  ierr = VecGetDM(xin,&dm);CHKERRQ(ierr);
  if (!dm) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");
  da = (DM_DA*)dm->data;
  ierr = DMGetDimension(dm, &dimension);CHKERRQ(ierr);

  /* Create the dataspace for the dataset.
   *
   * dims - holds the current dimensions of the dataset
   *
   * maxDims - holds the maximum dimensions of the dataset (unlimited
   * for the number of time steps with the current dimensions for the
   * other dimensions; so only additional time steps can be added).
   *
   * chunkDims - holds the size of a single time step (required to
   * permit extending dataset).
   */
  dim = 0;
  if (timestep >= 0) {
    /* leading dimension indexes the time steps and may grow without bound */
    dims[dim]      = timestep+1;
    maxDims[dim]   = H5S_UNLIMITED;
    chunkDims[dim] = 1;
    ++dim;
  }
  if (dimension == 3) {
    ierr = PetscHDF5IntCast(da->P,dims+dim);CHKERRQ(ierr);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  if (dimension > 1) {
    ierr = PetscHDF5IntCast(da->N,dims+dim);CHKERRQ(ierr);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  ierr = PetscHDF5IntCast(da->M,dims+dim);CHKERRQ(ierr);
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
  if (da->w > 1 || dim2) {
    /* trailing dimension for the dof components (kept even for dof==1 if base-dimension-2 requested) */
    ierr = PetscHDF5IntCast(da->w,dims+dim);CHKERRQ(ierr);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  /* extra dimension of size 2 holds the real and imaginary parts */
  dims[dim]      = 2;
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
#endif

  ierr = VecGetHDF5ChunkSize(da, xin, dimension, timestep, chunkDims); CHKERRQ(ierr);

  PetscStackCallHDF5Return(filespace,H5Screate_simple,(dim, dims, maxDims));

#if defined(PETSC_USE_REAL_SINGLE)
  memscalartype = H5T_NATIVE_FLOAT;
  filescalartype = H5T_NATIVE_FLOAT;
#elif defined(PETSC_USE_REAL___FLOAT128)
#error "HDF5 output with 128 bit floats not supported."
#else
  memscalartype = H5T_NATIVE_DOUBLE;
  if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
  else filescalartype = H5T_NATIVE_DOUBLE;
#endif

  /* Create the dataset with default properties and close filespace */
  ierr = PetscObjectGetName((PetscObject)xin,&vecname);CHKERRQ(ierr);
  if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
    /* Create chunk */
    PetscStackCallHDF5Return(chunkspace,H5Pcreate,(H5P_DATASET_CREATE));
    PetscStackCallHDF5(H5Pset_chunk,(chunkspace, dim, chunkDims));

#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
    PetscStackCallHDF5Return(dset_id,H5Dcreate2,(group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
#else
    PetscStackCallHDF5Return(dset_id,H5Dcreate,(group, vecname, filescalartype, filespace, H5P_DEFAULT));
#endif
  } else {
    /* dataset already exists (earlier time step): reopen and extend it */
    PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
    PetscStackCallHDF5(H5Dset_extent,(dset_id, dims));
  }
  PetscStackCallHDF5(H5Sclose,(filespace));

  /* Each process defines a dataset and writes it to the hyperslab in the file */
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  if (dimension == 3) {ierr = PetscHDF5IntCast(da->zs,offset + dim++);CHKERRQ(ierr);}
  if (dimension > 1)  {ierr = PetscHDF5IntCast(da->ys,offset + dim++);CHKERRQ(ierr);}
  /* da->xs counts scalar entries, so divide by dof to get the grid offset */
  ierr = PetscHDF5IntCast(da->xs/da->w,offset + dim++);CHKERRQ(ierr);
  if (da->w > 1 || dim2) offset[dim++] = 0;
#if defined(PETSC_USE_COMPLEX)
  offset[dim++] = 0;
#endif
  dim = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  if (dimension == 3) {ierr = PetscHDF5IntCast(da->ze - da->zs,count + dim++);CHKERRQ(ierr);}
  if (dimension > 1)  {ierr = PetscHDF5IntCast(da->ye - da->ys,count + dim++);CHKERRQ(ierr);}
  ierr = PetscHDF5IntCast((da->xe - da->xs)/da->w,count + dim++);CHKERRQ(ierr);
  if (da->w > 1 || dim2) {ierr = PetscHDF5IntCast(da->w,count + dim++);CHKERRQ(ierr);}
#if defined(PETSC_USE_COMPLEX)
  count[dim++] = 2;
#endif
  PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
  PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
  PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  /* Create property list for collective dataset write */
  PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER));
#if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
  PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE));
#endif
  /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

  ierr = VecGetArrayRead(xin, &x);CHKERRQ(ierr);
  PetscStackCallHDF5(H5Dwrite,(dset_id, memscalartype, memspace, filespace, plist_id, x));
  PetscStackCallHDF5(H5Fflush,(file_id, H5F_SCOPE_GLOBAL));
  ierr = VecRestoreArrayRead(xin, &x);CHKERRQ(ierr);

  /* Close/release resources */
  if (group != file_id) {
    PetscStackCallHDF5(H5Gclose,(group));
  }
  PetscStackCallHDF5(H5Pclose,(plist_id));
  PetscStackCallHDF5(H5Sclose,(filespace));
  PetscStackCallHDF5(H5Sclose,(memspace));
  PetscStackCallHDF5(H5Dclose,(dset_id));
  ierr = PetscInfo1(xin,"Wrote Vec object with name %s\n",vecname);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif

extern PetscErrorCode VecView_MPI_Draw_DA1d(Vec,PetscViewer);

#if defined(PETSC_HAVE_MPIIO)
#undef __FUNCT__
#define __FUNCT__ "DMDAArrayMPIIO"
/* Read or write (depending on 'write') a DMDA vector with MPI-IO through a binary viewer */
static PetscErrorCode DMDAArrayMPIIO(DM da,PetscViewer viewer,Vec xin,PetscBool write)
{
  PetscErrorCode    ierr;
  MPI_File          mfdes;
  PetscMPIInt       gsizes[4],lsizes[4],lstarts[4],asiz,dof;
  MPI_Datatype      view;
  const PetscScalar *array;
  MPI_Offset        off;
  MPI_Aint          ub,ul;
  PetscInt          type,rows,vecrows,tr[2];
  DM_DA             *dd = (DM_DA*)da->data;
  PetscBool         skipheader;

  PetscFunctionBegin;
ierr = VecGetSize(xin,&vecrows);CHKERRQ(ierr);
  ierr = PetscViewerBinaryGetSkipHeader(viewer,&skipheader);CHKERRQ(ierr);
  if (!write) {
    /* Read vector header and validate it against this DMDA's vector */
    if (!skipheader) {
      ierr = PetscViewerBinaryRead(viewer,tr,2,NULL,PETSC_INT);CHKERRQ(ierr);
      type = tr[0];
      rows = tr[1];
      if (type != VEC_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Not vector next in file");
      if (rows != vecrows) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Vector in file not same size as DMDA vector");
    }
  } else {
    /* Write vector header (classid + global size) unless suppressed */
    tr[0] = VEC_FILE_CLASSID;
    tr[1] = vecrows;
    if (!skipheader) {
      ierr = PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
  }

  /* global sizes, local sizes and local starts of the (dof,x,y,z) subarray;
     x extents are stored in scalar entries, hence the division by dof */
  ierr = PetscMPIIntCast(dd->w,&dof);CHKERRQ(ierr);
  gsizes[0] = dof;
  ierr = PetscMPIIntCast(dd->M,gsizes+1);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->N,gsizes+2);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->P,gsizes+3);CHKERRQ(ierr);
  lsizes[0] = dof;
  ierr = PetscMPIIntCast((dd->xe-dd->xs)/dof,lsizes+1);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->ye-dd->ys,lsizes+2);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->ze-dd->zs,lsizes+3);CHKERRQ(ierr);
  lstarts[0] = 0;
  ierr = PetscMPIIntCast(dd->xs/dof,lstarts+1);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->ys,lstarts+2);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->zs,lstarts+3);CHKERRQ(ierr);
  ierr = MPI_Type_create_subarray(da->dim+1,gsizes,lsizes,lstarts,MPI_ORDER_FORTRAN,MPIU_SCALAR,&view);CHKERRQ(ierr);
  ierr = MPI_Type_commit(&view);CHKERRQ(ierr);

  /* set the file view to this process's subarray and perform the collective transfer */
  ierr = PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);CHKERRQ(ierr);
  ierr = PetscViewerBinaryGetMPIIOOffset(viewer,&off);CHKERRQ(ierr);
  ierr = MPI_File_set_view(mfdes,off,MPIU_SCALAR,view,(char*)"native",MPI_INFO_NULL);CHKERRQ(ierr);
  ierr = VecGetArrayRead(xin,&array);CHKERRQ(ierr);
  /* number of local scalars; unused trailing dimensions have size 0, treat them as 1 */
  asiz = lsizes[1]*(lsizes[2] > 0 ? lsizes[2] : 1)*(lsizes[3] > 0 ? lsizes[3] : 1)*dof;
  if (write) {
    ierr = MPIU_File_write_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE);CHKERRQ(ierr);
  } else {
    ierr = MPIU_File_read_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE);CHKERRQ(ierr);
  }
  /* advance the viewer's MPI-IO offset by the extent of the full subarray type */
  ierr = MPI_Type_get_extent(view,&ul,&ub);CHKERRQ(ierr);
  ierr = PetscViewerBinaryAddMPIIOOffset(viewer,ub);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(xin,&array);CHKERRQ(ierr);
  ierr = MPI_Type_free(&view);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif

#undef __FUNCT__
#define __FUNCT__ "VecView_MPI_DA"
/* Dispatch viewing of a DMDA-attached Vec to the draw/VTK/HDF5/binary back ends */
PetscErrorCode VecView_MPI_DA(Vec xin,PetscViewer viewer)
{
  DM                da;
  PetscErrorCode    ierr;
  PetscInt          dim;
  Vec               natural;
  PetscBool         isdraw,isvtk;
#if defined(PETSC_HAVE_HDF5)
  PetscBool         ishdf5;
#endif
  const char        *prefix,*name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = VecGetDM(xin,&da);CHKERRQ(ierr);
  if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);CHKERRQ(ierr);
#if defined(PETSC_HAVE_HDF5)
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);CHKERRQ(ierr);
#endif
  if (isdraw) {
    ierr = DMDAGetInfo(da,&dim,0,0,0,0,0,0,0,0,0,0,0,0);CHKERRQ(ierr);
    if (dim == 1) {
      ierr = VecView_MPI_Draw_DA1d(xin,viewer);CHKERRQ(ierr);
    } else if (dim == 2) {
      ierr = VecView_MPI_Draw_DA2d(xin,viewer);CHKERRQ(ierr);
    } else SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot graphically view vector associated with this dimensional DMDA %D",dim);
  } else if (isvtk) {           /* Duplicate the Vec and hold a reference to the DM */
    Vec Y;
    ierr = PetscObjectReference((PetscObject)da);CHKERRQ(ierr);
    ierr =
VecDuplicate(xin,&Y);CHKERRQ(ierr);
    if (((PetscObject)xin)->name) {
      /* If xin was named, copy the name over to Y. The duplicate names are safe because nobody else will ever see Y. */
      ierr = PetscObjectSetName((PetscObject)Y,((PetscObject)xin)->name);CHKERRQ(ierr);
    }
    ierr = VecCopy(xin,Y);CHKERRQ(ierr);
    /* the VTK viewer owns Y and writes it out when the viewer is flushed/destroyed */
    ierr = PetscViewerVTKAddField(viewer,(PetscObject)da,DMDAVTKWriteAll,PETSC_VTK_POINT_FIELD,(PetscObject)Y);CHKERRQ(ierr);
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    ierr = VecView_MPI_HDF5_DA(xin,viewer);CHKERRQ(ierr);
#endif
  } else {
#if defined(PETSC_HAVE_MPIIO)
    PetscBool isbinary,isMPIIO;

    /* binary viewers configured for MPI-IO bypass the natural-ordering path entirely */
    ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
    if (isbinary) {
      ierr = PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);CHKERRQ(ierr);
      if (isMPIIO) {
        ierr = DMDAArrayMPIIO(da,viewer,xin,PETSC_TRUE);CHKERRQ(ierr);
        PetscFunctionReturn(0);
      }
    }
#endif

    /* call viewer on natural ordering */
    ierr = PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix);CHKERRQ(ierr);
    ierr = DMDACreateNaturalVector(da,&natural);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)natural,prefix);CHKERRQ(ierr);
    ierr = DMDAGlobalToNaturalBegin(da,xin,INSERT_VALUES,natural);CHKERRQ(ierr);
    ierr = DMDAGlobalToNaturalEnd(da,xin,INSERT_VALUES,natural);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject)xin,&name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject)natural,name);CHKERRQ(ierr);

    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      /* temporarily remove viewer format so it won't trigger in the VecView() */
      ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_DEFAULT);CHKERRQ(ierr);
    }

    ierr = VecView(natural,viewer);CHKERRQ(ierr);

    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      /* append MATLAB reshape commands to the binary viewer's .info file */
      MPI_Comm   comm;
      FILE       *info;
      const char *fieldname;
      char       fieldbuf[256];
      PetscInt   dim,ni,nj,nk,pi,pj,pk,dof,n;

      /* set the viewer format back into the viewer */
      ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
      ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
      ierr = PetscViewerBinaryGetInfoPointer(viewer,&info);CHKERRQ(ierr);
      ierr = DMDAGetInfo(da,&dim,&ni,&nj,&nk,&pi,&pj,&pk,&dof,0,0,0,0,0);CHKERRQ(ierr);
      ierr = PetscFPrintf(comm,info,"#--- begin code written by PetscViewerBinary for MATLAB format ---#\n");CHKERRQ(ierr);
      ierr = PetscFPrintf(comm,info,"#$$ tmp = PetscBinaryRead(fd); \n");CHKERRQ(ierr);
      if (dim == 1) { ierr = PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d);\n",dof,ni);CHKERRQ(ierr); }
      if (dim == 2) { ierr = PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d,%d);\n",dof,ni,nj);CHKERRQ(ierr); }
      if (dim == 3) { ierr = PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d,%d,%d);\n",dof,ni,nj,nk);CHKERRQ(ierr); }

      for (n=0; n<dof; n++) {
        ierr = DMDAGetFieldName(da,n,&fieldname);CHKERRQ(ierr);
        if (!fieldname || !fieldname[0]) {
          /* unnamed fields get a generated name so the MATLAB struct member is valid */
          ierr = PetscSNPrintf(fieldbuf,sizeof fieldbuf,"field%D",n);CHKERRQ(ierr);
          fieldname = fieldbuf;
        }
        if (dim == 1) { ierr = PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%d,:))';\n",name,fieldname,n+1);CHKERRQ(ierr); }
        if (dim == 2) { ierr = PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%d,:,:))';\n",name,fieldname,n+1);CHKERRQ(ierr); }
        if (dim == 3) { ierr = PetscFPrintf(comm,info,"#$$ Set.%s.%s = permute(squeeze(tmp(%d,:,:,:)),[2 1 3]);\n",name,fieldname,n+1);CHKERRQ(ierr);}
      }
      ierr = PetscFPrintf(comm,info,"#$$ clear tmp; \n");CHKERRQ(ierr);
      ierr = PetscFPrintf(comm,info,"#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n");CHKERRQ(ierr);
    }

    ierr = VecDestroy(&natural);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_HDF5)
#undef __FUNCT__
#define __FUNCT__
"VecLoad_HDF5_DA" 756 PetscErrorCode VecLoad_HDF5_DA(Vec xin, PetscViewer viewer) 757 { 758 DM da; 759 PetscErrorCode ierr; 760 hsize_t dim,rdim; 761 hsize_t dims[6]={0},count[6]={0},offset[6]={0}; 762 PetscInt dimension,timestep,dofInd; 763 PetscScalar *x; 764 const char *vecname; 765 hid_t filespace; /* file dataspace identifier */ 766 hid_t plist_id; /* property list identifier */ 767 hid_t dset_id; /* dataset identifier */ 768 hid_t memspace; /* memory dataspace identifier */ 769 hid_t file_id,group; 770 hid_t scalartype; /* scalar type (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */ 771 DM_DA *dd; 772 PetscBool dim2 = PETSC_FALSE; 773 774 PetscFunctionBegin; 775 #if defined(PETSC_USE_REAL_SINGLE) 776 scalartype = H5T_NATIVE_FLOAT; 777 #elif defined(PETSC_USE_REAL___FLOAT128) 778 #error "HDF5 output with 128 bit floats not supported." 779 #else 780 scalartype = H5T_NATIVE_DOUBLE; 781 #endif 782 783 ierr = PetscViewerHDF5OpenGroup(viewer, &file_id, &group);CHKERRQ(ierr); 784 ierr = PetscViewerHDF5GetTimestep(viewer, ×tep);CHKERRQ(ierr); 785 ierr = PetscObjectGetName((PetscObject)xin,&vecname);CHKERRQ(ierr); 786 ierr = VecGetDM(xin,&da);CHKERRQ(ierr); 787 dd = (DM_DA*)da->data; 788 ierr = DMGetDimension(da, &dimension);CHKERRQ(ierr); 789 790 /* Open dataset */ 791 #if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800) 792 PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT)); 793 #else 794 PetscStackCallHDF5Return(dset_id,H5Dopen,(group, vecname)); 795 #endif 796 797 /* Retrieve the dataspace for the dataset */ 798 PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id)); 799 PetscStackCallHDF5Return(rdim,H5Sget_simple_extent_dims,(filespace, dims, NULL)); 800 801 /* Expected dimension for holding the dof's */ 802 #if defined(PETSC_USE_COMPLEX) 803 dofInd = rdim-2; 804 #else 805 dofInd = rdim-1; 806 #endif 807 808 /* The expected number of dimensions, assuming basedimension2 = false */ 809 dim = dimension; 810 if (dd->w > 1) 
++dim; 811 if (timestep >= 0) ++dim; 812 #if defined(PETSC_USE_COMPLEX) 813 ++dim; 814 #endif 815 816 /* In this case the input dataset have one extra, unexpected dimension. */ 817 if (rdim == dim+1) { 818 /* In this case the block size unity */ 819 if (dd->w == 1 && dims[dofInd] == 1) dim2 = PETSC_TRUE; 820 821 /* Special error message for the case where dof does not match the input file */ 822 else if (dd->w != (PetscInt) dims[dofInd]) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Number of dofs in file is %D, not %D as expected",(PetscInt)dims[dofInd],dd->w); 823 824 /* Other cases where rdim != dim cannot be handled currently */ 825 } else if (rdim != dim) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file is %d, not %d as expected with dof = %D",rdim,dim,dd->w); 826 827 /* Set up the hyperslab size */ 828 dim = 0; 829 if (timestep >= 0) { 830 offset[dim] = timestep; 831 count[dim] = 1; 832 ++dim; 833 } 834 if (dimension == 3) { 835 ierr = PetscHDF5IntCast(dd->zs,offset + dim);CHKERRQ(ierr); 836 ierr = PetscHDF5IntCast(dd->ze - dd->zs,count + dim);CHKERRQ(ierr); 837 ++dim; 838 } 839 if (dimension > 1) { 840 ierr = PetscHDF5IntCast(dd->ys,offset + dim);CHKERRQ(ierr); 841 ierr = PetscHDF5IntCast(dd->ye - dd->ys,count + dim);CHKERRQ(ierr); 842 ++dim; 843 } 844 ierr = PetscHDF5IntCast(dd->xs/dd->w,offset + dim);CHKERRQ(ierr); 845 ierr = PetscHDF5IntCast((dd->xe - dd->xs)/dd->w,count + dim);CHKERRQ(ierr); 846 ++dim; 847 if (dd->w > 1 || dim2) { 848 offset[dim] = 0; 849 ierr = PetscHDF5IntCast(dd->w,count + dim);CHKERRQ(ierr); 850 ++dim; 851 } 852 #if defined(PETSC_USE_COMPLEX) 853 offset[dim] = 0; 854 count[dim] = 2; 855 ++dim; 856 #endif 857 858 /* Create the memory and filespace */ 859 PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL)); 860 PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL)); 861 862 /* Create property list for collective dataset write */ 863 
PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER)); 864 #if defined(PETSC_HAVE_H5PSET_FAPL_MPIO) 865 PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE)); 866 #endif 867 /* To read dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */ 868 869 ierr = VecGetArray(xin, &x);CHKERRQ(ierr); 870 PetscStackCallHDF5(H5Dread,(dset_id, scalartype, memspace, filespace, plist_id, x)); 871 ierr = VecRestoreArray(xin, &x);CHKERRQ(ierr); 872 873 /* Close/release resources */ 874 if (group != file_id) { 875 PetscStackCallHDF5(H5Gclose,(group)); 876 } 877 PetscStackCallHDF5(H5Pclose,(plist_id)); 878 PetscStackCallHDF5(H5Sclose,(filespace)); 879 PetscStackCallHDF5(H5Sclose,(memspace)); 880 PetscStackCallHDF5(H5Dclose,(dset_id)); 881 PetscFunctionReturn(0); 882 } 883 #endif 884 885 #undef __FUNCT__ 886 #define __FUNCT__ "VecLoad_Binary_DA" 887 PetscErrorCode VecLoad_Binary_DA(Vec xin, PetscViewer viewer) 888 { 889 DM da; 890 PetscErrorCode ierr; 891 Vec natural; 892 const char *prefix; 893 PetscInt bs; 894 PetscBool flag; 895 DM_DA *dd; 896 #if defined(PETSC_HAVE_MPIIO) 897 PetscBool isMPIIO; 898 #endif 899 900 PetscFunctionBegin; 901 ierr = VecGetDM(xin,&da);CHKERRQ(ierr); 902 dd = (DM_DA*)da->data; 903 #if defined(PETSC_HAVE_MPIIO) 904 ierr = PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);CHKERRQ(ierr); 905 if (isMPIIO) { 906 ierr = DMDAArrayMPIIO(da,viewer,xin,PETSC_FALSE);CHKERRQ(ierr); 907 PetscFunctionReturn(0); 908 } 909 #endif 910 911 ierr = PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix);CHKERRQ(ierr); 912 ierr = DMDACreateNaturalVector(da,&natural);CHKERRQ(ierr); 913 ierr = PetscObjectSetName((PetscObject)natural,((PetscObject)xin)->name);CHKERRQ(ierr); 914 ierr = PetscObjectSetOptionsPrefix((PetscObject)natural,prefix);CHKERRQ(ierr); 915 ierr = VecLoad(natural,viewer);CHKERRQ(ierr); 916 ierr = DMDANaturalToGlobalBegin(da,natural,INSERT_VALUES,xin);CHKERRQ(ierr); 917 ierr = 
DMDANaturalToGlobalEnd(da,natural,INSERT_VALUES,xin);CHKERRQ(ierr); 918 ierr = VecDestroy(&natural);CHKERRQ(ierr); 919 ierr = PetscInfo(xin,"Loading vector from natural ordering into DMDA\n");CHKERRQ(ierr); 920 ierr = PetscOptionsGetInt(NULL,((PetscObject)xin)->prefix,"-vecload_block_size",&bs,&flag);CHKERRQ(ierr); 921 if (flag && bs != dd->w) { 922 ierr = PetscInfo2(xin,"Block size in file %D not equal to DMDA's dof %D\n",bs,dd->w);CHKERRQ(ierr); 923 } 924 PetscFunctionReturn(0); 925 } 926 927 #undef __FUNCT__ 928 #define __FUNCT__ "VecLoad_Default_DA" 929 PetscErrorCode VecLoad_Default_DA(Vec xin, PetscViewer viewer) 930 { 931 PetscErrorCode ierr; 932 DM da; 933 PetscBool isbinary; 934 #if defined(PETSC_HAVE_HDF5) 935 PetscBool ishdf5; 936 #endif 937 938 PetscFunctionBegin; 939 ierr = VecGetDM(xin,&da);CHKERRQ(ierr); 940 if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA"); 941 942 #if defined(PETSC_HAVE_HDF5) 943 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);CHKERRQ(ierr); 944 #endif 945 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 946 947 if (isbinary) { 948 ierr = VecLoad_Binary_DA(xin,viewer);CHKERRQ(ierr); 949 #if defined(PETSC_HAVE_HDF5) 950 } else if (ishdf5) { 951 ierr = VecLoad_HDF5_DA(xin,viewer);CHKERRQ(ierr); 952 #endif 953 } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for vector loading", ((PetscObject)viewer)->type_name); 954 PetscFunctionReturn(0); 955 } 956