/*
     Plots vectors obtained with DMDACreate2d()
*/

#include <petsc/private/dmdaimpl.h>      /*I  "petscdmda.h"   I*/
#include <petscdraw.h>
#include <petscviewerhdf5.h>

/*
        The data that is passed into the graphics callback
*/
typedef struct {
  PetscMPIInt       rank;                      /* rank in the viewer's communicator */
  PetscInt          m,n,dof,k;                 /* ghosted local grid size, dof count, field being drawn */
  PetscReal         xmin,xmax,ymin,ymax,min,max; /* coordinate and value ranges of the plot */
  const PetscScalar *xy,*v;                    /* xy: interleaved (x,y) node coordinates; v: dof-interleaved field values */
  PetscBool         showaxis,showgrid;
  const char        *name0,*name1;             /* optional coordinate axis labels */
} ZoomCtx;

/*
       This does the drawing for one particular field
    in one particular set of coordinates. It is a callback
    called from PetscDrawZoom()
*/
PetscErrorCode VecView_MPI_Draw_DA2d_Zoom(PetscDraw draw,void *ctx)
{
  ZoomCtx           *zctx = (ZoomCtx*)ctx;
  PetscErrorCode    ierr;
  PetscInt          m,n,i,j,k,dof,id,c1,c2,c3,c4;
  PetscReal         min,max,x1,x2,x3,x4,y_1,y2,y3,y4;
  const PetscScalar *xy,*v;

  PetscFunctionBegin;
  m   = zctx->m;
  n   = zctx->n;
  dof = zctx->dof;
  k   = zctx->k;
  xy  = zctx->xy;
  v   = zctx->v;
  min = zctx->min;
  max = zctx->max;

  /* PetscDraw the contour plot patch: each grid cell is split into two
     Gouraud-shaded triangles; colors come from the field value at each corner */
  ierr = PetscDrawCollectiveBegin(draw);CHKERRQ(ierr);
  for (j=0; j<n-1; j++) {
    for (i=0; i<m-1; i++) {
      /* corner 1: (i,j); xy holds 2 reals per node, v holds dof values per node */
      id  = i+j*m;
      x1  = PetscRealPart(xy[2*id]);
      y_1 = PetscRealPart(xy[2*id+1]);
      c1  = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      /* corner 2: (i+1,j) */
      id = i+j*m+1;
      x2 = PetscRealPart(xy[2*id]);
      y2 = PetscRealPart(xy[2*id+1]);
      c2 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      /* corner 3: (i+1,j+1) */
      id = i+j*m+1+m;
      x3 = PetscRealPart(xy[2*id]);
      y3 = PetscRealPart(xy[2*id+1]);
      c3 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      /* corner 4: (i,j+1) */
      id = i+j*m+m;
      x4 = PetscRealPart(xy[2*id]);
      y4 = PetscRealPart(xy[2*id+1]);
      c4 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      ierr = PetscDrawTriangle(draw,x1,y_1,x2,y2,x3,y3,c1,c2,c3);CHKERRQ(ierr);
      ierr = PetscDrawTriangle(draw,x1,y_1,x3,y3,x4,y4,c1,c3,c4);CHKERRQ(ierr);
      if (zctx->showgrid) {
        ierr = PetscDrawLine(draw,x1,y_1,x2,y2,PETSC_DRAW_BLACK);CHKERRQ(ierr);
        ierr = PetscDrawLine(draw,x2,y2,x3,y3,PETSC_DRAW_BLACK);CHKERRQ(ierr);
        ierr = PetscDrawLine(draw,x3,y3,x4,y4,PETSC_DRAW_BLACK);CHKERRQ(ierr);
        ierr = PetscDrawLine(draw,x4,y4,x1,y_1,PETSC_DRAW_BLACK);CHKERRQ(ierr);
      }
    }
  }
  /* only rank 0 draws the axis labels and coordinate limits */
  if (zctx->showaxis && !zctx->rank) {
    if (zctx->name0 || zctx->name1) {
      PetscReal xl,yl,xr,yr,x,y;
      ierr = PetscDrawGetCoordinates(draw,&xl,&yl,&xr,&yr);CHKERRQ(ierr);
      x  = xl + .30*(xr - xl);
      xl = xl + .01*(xr - xl);
      y  = yr - .30*(yr - yl);
      yl = yl + .01*(yr - yl);
      if (zctx->name0) {ierr = PetscDrawString(draw,x,yl,PETSC_DRAW_BLACK,zctx->name0);CHKERRQ(ierr);}
      if (zctx->name1) {ierr = PetscDrawStringVertical(draw,xl,y,PETSC_DRAW_BLACK,zctx->name1);CHKERRQ(ierr);}
    }
    /*
       Ideally we would use the PetscDrawAxis object to manage displaying the coordinate limits
       but that may require some refactoring.
    */
    {
      double    xmin = (double)zctx->xmin, ymin = (double)zctx->ymin;
      double    xmax = (double)zctx->xmax, ymax = (double)zctx->ymax;
      char      value[16]; size_t len; PetscReal w;
      ierr = PetscSNPrintf(value,16,"%0.2e",xmin);CHKERRQ(ierr);
      ierr = PetscDrawString(draw,xmin,ymin - .05*(ymax - ymin),PETSC_DRAW_BLACK,value);CHKERRQ(ierr);
      ierr = PetscSNPrintf(value,16,"%0.2e",xmax);CHKERRQ(ierr);
      ierr = PetscStrlen(value,&len);CHKERRQ(ierr);
      ierr = PetscDrawStringGetSize(draw,&w,NULL);CHKERRQ(ierr);
      /* right-align the xmax label by backing up by its rendered width */
      ierr = PetscDrawString(draw,xmax - len*w,ymin - .05*(ymax - ymin),PETSC_DRAW_BLACK,value);CHKERRQ(ierr);
      ierr = PetscSNPrintf(value,16,"%0.2e",ymin);CHKERRQ(ierr);
      ierr = PetscDrawString(draw,xmin - .05*(xmax - xmin),ymin,PETSC_DRAW_BLACK,value);CHKERRQ(ierr);
      ierr = PetscSNPrintf(value,16,"%0.2e",ymax);CHKERRQ(ierr);
      ierr = PetscDrawString(draw,xmin - .05*(xmax - xmin),ymax,PETSC_DRAW_BLACK,value);CHKERRQ(ierr);
    }
  }
  ierr = PetscDrawCollectiveEnd(draw);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   VecView_MPI_Draw_DA2d - contour-plots a vector that lives on a 2d DMDA
   using the draw viewer; one window (or viewport) per selected field.
*/
PetscErrorCode VecView_MPI_Draw_DA2d(Vec xin,PetscViewer viewer)
{
  DM                 da,dac,dag;
  PetscErrorCode     ierr;
  PetscInt           N,s,M,w,ncoors = 4;
  const PetscInt     *lx,*ly;
  PetscReal          coors[4];
  PetscDraw          draw,popup;
  PetscBool          isnull,useports = PETSC_FALSE;
  MPI_Comm           comm;
  Vec                xlocal,xcoor,xcoorl;
  DMBoundaryType     bx,by;
  DMDAStencilType    st;
  ZoomCtx            zctx;
  PetscDrawViewPorts *ports = NULL;
  PetscViewerFormat  format;
  PetscInt           *displayfields;
  PetscInt           ndisplayfields,i,nbounds;
  const PetscReal    *bounds;

  PetscFunctionBegin;
  zctx.showgrid = PETSC_FALSE;
  zctx.showaxis = PETSC_TRUE;

  ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
  ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr);
  if (isnull) PetscFunctionReturn(0);

  ierr = PetscViewerDrawGetBounds(viewer,&nbounds,&bounds);CHKERRQ(ierr);

  ierr = VecGetDM(xin,&da);CHKERRQ(ierr);
  if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");

  ierr = PetscObjectGetComm((PetscObject)xin,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&zctx.rank);CHKERRQ(ierr);

  ierr = DMDAGetInfo(da,0,&M,&N,0,&zctx.m,&zctx.n,0,&w,&s,&bx,&by,0,&st);CHKERRQ(ierr);
  ierr = DMDAGetOwnershipRanges(da,&lx,&ly,NULL);CHKERRQ(ierr);

  /*
     Obtain a sequential vector that is going to contain the local values plus ONE layer of
     ghosted values to draw the graphics from. We also need its corresponding DMDA (dac) that will
     update the local values plus ONE layer of ghost values.
  */
  ierr = PetscObjectQuery((PetscObject)da,"GraphicsGhosted",(PetscObject*)&xlocal);CHKERRQ(ierr);
  if (!xlocal) {
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      /*
         if original da is not of stencil width one, or periodic or not a box stencil then
         create a special DMDA to handle one level of ghost points for graphics
      */
      ierr = DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,w,1,lx,ly,&dac);CHKERRQ(ierr);
      ierr = DMSetUp(dac);CHKERRQ(ierr);
      ierr = PetscInfo(da,"Creating auxilary DMDA for managing graphics ghost points\n");CHKERRQ(ierr);
    } else {
      /* otherwise we can use the da we already have */
      dac = da;
    }
    /* create local vector for holding ghosted values used in graphics */
    ierr = DMCreateLocalVector(dac,&xlocal);CHKERRQ(ierr);
    if (dac != da) {
      /* don't keep any public reference of this DMDA, it is only available through xlocal */
      ierr = PetscObjectDereference((PetscObject)dac);CHKERRQ(ierr);
    } else {
      /* remove association between xlocal and da, because below we compose in the opposite
         direction and if we left this connection we'd get a reference loop, so the objects could
         never be destroyed */
      ierr = PetscObjectRemoveReference((PetscObject)xlocal,"__PETSc_dm");CHKERRQ(ierr);
    }
    /* cache xlocal on the da so subsequent views reuse it */
    ierr = PetscObjectCompose((PetscObject)da,"GraphicsGhosted",(PetscObject)xlocal);CHKERRQ(ierr);
    ierr = PetscObjectDereference((PetscObject)xlocal);CHKERRQ(ierr);
  } else {
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      ierr = VecGetDM(xlocal, &dac);CHKERRQ(ierr);
    } else {
      dac = da;
    }
  }

  /*
      Get local (ghosted) values of vector
  */
  ierr = DMGlobalToLocalBegin(dac,xin,INSERT_VALUES,xlocal);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(dac,xin,INSERT_VALUES,xlocal);CHKERRQ(ierr);
  ierr =
VecGetArrayRead(xlocal,&zctx.v);CHKERRQ(ierr);

  /*
      Get coordinates of nodes; create uniform coordinates on [0,1]x[0,1] if none were set
  */
  ierr = DMGetCoordinates(da,&xcoor);CHKERRQ(ierr);
  if (!xcoor) {
    ierr = DMDASetUniformCoordinates(da,0.0,1.0,0.0,1.0,0.0,0.0);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da,&xcoor);CHKERRQ(ierr);
  }

  /*
      Determine the min and max coordinates in plot
  */
  ierr = VecStrideMin(xcoor,0,NULL,&zctx.xmin);CHKERRQ(ierr);
  ierr = VecStrideMax(xcoor,0,NULL,&zctx.xmax);CHKERRQ(ierr);
  ierr = VecStrideMin(xcoor,1,NULL,&zctx.ymin);CHKERRQ(ierr);
  ierr = VecStrideMax(xcoor,1,NULL,&zctx.ymax);CHKERRQ(ierr);
  ierr = PetscOptionsGetBool(NULL,NULL,"-draw_contour_axis",&zctx.showaxis,NULL);CHKERRQ(ierr);
  if (zctx.showaxis) {
    /* pad the drawing region 5% on each side to leave room for the axis labels */
    coors[0] = zctx.xmin - .05*(zctx.xmax - zctx.xmin); coors[1] = zctx.ymin - .05*(zctx.ymax - zctx.ymin);
    coors[2] = zctx.xmax + .05*(zctx.xmax - zctx.xmin); coors[3] = zctx.ymax + .05*(zctx.ymax - zctx.ymin);
  } else {
    coors[0] = zctx.xmin; coors[1] = zctx.ymin; coors[2] = zctx.xmax; coors[3] = zctx.ymax;
  }
  /* user may override the computed drawing region */
  ierr = PetscOptionsGetRealArray(NULL,NULL,"-draw_coordinates",coors,&ncoors,NULL);CHKERRQ(ierr);
  ierr = PetscInfo4(da,"Preparing DMDA 2d contour plot coordinates %g %g %g %g\n",(double)coors[0],(double)coors[1],(double)coors[2],(double)coors[3]);CHKERRQ(ierr);

  /*
     Get local ghosted version of coordinates
  */
  ierr = PetscObjectQuery((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject*)&xcoorl);CHKERRQ(ierr);
  if (!xcoorl) {
    /* create DMDA to get local version of graphics coordinates (2 dof: x and y per node) */
    ierr = DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,2,1,lx,ly,&dag);CHKERRQ(ierr);
    ierr = DMSetUp(dag);CHKERRQ(ierr);
    ierr = PetscInfo(dag,"Creating auxilary DMDA for managing graphics coordinates ghost points\n");CHKERRQ(ierr);
    ierr = DMCreateLocalVector(dag,&xcoorl);CHKERRQ(ierr);
    ierr = PetscObjectCompose((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject)xcoorl);CHKERRQ(ierr);
    ierr = PetscObjectDereference((PetscObject)dag);CHKERRQ(ierr);
    ierr = PetscObjectDereference((PetscObject)xcoorl);CHKERRQ(ierr);
  } else {
    ierr = VecGetDM(xcoorl,&dag);CHKERRQ(ierr);
  }
  ierr = DMGlobalToLocalBegin(dag,xcoor,INSERT_VALUES,xcoorl);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(dag,xcoor,INSERT_VALUES,xcoorl);CHKERRQ(ierr);
  ierr = VecGetArrayRead(xcoorl,&zctx.xy);CHKERRQ(ierr);
  ierr = DMDAGetCoordinateName(da,0,&zctx.name0);CHKERRQ(ierr);
  ierr = DMDAGetCoordinateName(da,1,&zctx.name1);CHKERRQ(ierr);

  /*
      Get information about size of area each processor must do graphics for
  */
  ierr = DMDAGetInfo(dac,NULL,&M,&N,NULL,NULL,NULL,NULL,&zctx.dof,NULL,&bx,&by,NULL,NULL);CHKERRQ(ierr);
  ierr = DMDAGetGhostCorners(dac,NULL,NULL,NULL,&zctx.m,&zctx.n,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetBool(NULL,NULL,"-draw_contour_grid",&zctx.showgrid,NULL);CHKERRQ(ierr);

  ierr = DMDASelectFields(da,&ndisplayfields,&displayfields);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  ierr = PetscOptionsGetBool(NULL,NULL,"-draw_ports",&useports,NULL);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_DRAW_PORTS) useports = PETSC_TRUE;
  if (useports) {
    /* all fields share one window, each in its own viewport */
    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawCheckResizedWindow(draw);CHKERRQ(ierr);
    ierr = PetscDrawClear(draw);CHKERRQ(ierr);
    ierr = PetscDrawViewPortsCreate(draw,ndisplayfields,&ports);CHKERRQ(ierr);
  }

  /*
     Loop over each field; drawing each in a different window
  */
  for (i=0; i<ndisplayfields; i++) {
    zctx.k = displayfields[i];

    /* determine the min and max value in plot */
    ierr = VecStrideMin(xin,zctx.k,NULL,&zctx.min);CHKERRQ(ierr);
    ierr = VecStrideMax(xin,zctx.k,NULL,&zctx.max);CHKERRQ(ierr);
    /* viewer-supplied bounds (pairs of min,max per field) take precedence */
    if (zctx.k < nbounds) {
      zctx.min =
bounds[2*zctx.k];
      zctx.max = bounds[2*zctx.k+1];
    }
    /* avoid a degenerate color scale when the field is constant */
    if (zctx.min == zctx.max) {
      zctx.min -= 1.e-12;
      zctx.max += 1.e-12;
    }
    ierr = PetscInfo2(da,"DMDA 2d contour plot min %g max %g\n",(double)zctx.min,(double)zctx.max);CHKERRQ(ierr);

    if (useports) {
      ierr = PetscDrawViewPortsSet(ports,i);CHKERRQ(ierr);
    } else {
      const char *title;
      ierr = PetscViewerDrawGetDraw(viewer,i,&draw);CHKERRQ(ierr);
      ierr = DMDAGetFieldName(da,zctx.k,&title);CHKERRQ(ierr);
      if (title) {ierr = PetscDrawSetTitle(draw,title);CHKERRQ(ierr);}
    }

    ierr = PetscDrawGetPopup(draw,&popup);CHKERRQ(ierr);
    ierr = PetscDrawScalePopup(popup,zctx.min,zctx.max);CHKERRQ(ierr);
    ierr = PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);CHKERRQ(ierr);
    ierr = PetscDrawZoom(draw,VecView_MPI_Draw_DA2d_Zoom,&zctx);CHKERRQ(ierr);
    if (!useports) {ierr = PetscDrawSave(draw);CHKERRQ(ierr);}
  }
  if (useports) {
    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawSave(draw);CHKERRQ(ierr);
  }

  ierr = PetscDrawViewPortsDestroy(ports);CHKERRQ(ierr);
  ierr = PetscFree(displayfields);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(xcoorl,&zctx.xy);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(xlocal,&zctx.v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_HDF5)
/*
   VecGetHDF5ChunkSize - picks HDF5 chunk dimensions for writing a DMDA vector,
   respecting HDF5's limits on chunk count and chunk byte size.
   chunkDims[] comes in preset to whole-dataset extents and is adjusted in place.
*/
static PetscErrorCode VecGetHDF5ChunkSize(DM_DA *da, Vec xin, PetscInt dimension, PetscInt timestep, hsize_t *chunkDims)
{
  PetscMPIInt    comm_size;
  PetscErrorCode ierr;
  hsize_t        chunk_size, target_size, dim;
  hsize_t        vec_size = sizeof(PetscScalar)*da->M*da->N*da->P*da->w;
  hsize_t        avg_local_vec_size,KiB = 1024,MiB = KiB*KiB,GiB = MiB*KiB,min_size = MiB;
  hsize_t        max_chunks = 64*KiB;                              /* HDF5 internal limitation */
  hsize_t        max_chunk_size = 4*GiB;                           /* HDF5 internal limitation */
  PetscInt       zslices=da->p, yslices=da->n, xslices=da->m;

  PetscFunctionBegin;
ierr = MPI_Comm_size(PetscObjectComm((PetscObject)xin), &comm_size);CHKERRQ(ierr); 326 avg_local_vec_size = (hsize_t) ceil(vec_size*1.0/comm_size); /* we will attempt to use this as the chunk size */ 327 328 target_size = (hsize_t) ceil(PetscMin(vec_size,PetscMin(max_chunk_size,PetscMax(avg_local_vec_size,PetscMax(ceil(vec_size*1.0/max_chunks),min_size))))); 329 /* following line uses sizeof(PetscReal) instead of sizeof(PetscScalar) because the last dimension of chunkDims[] captures the 2* when complex numbers are being used */ 330 chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(PetscReal); 331 332 /* 333 if size/rank > max_chunk_size, we need radical measures: even going down to 334 avg_local_vec_size is not enough, so we simply use chunk size of 4 GiB no matter 335 what, composed in the most efficient way possible. 336 N.B. this minimises the number of chunks, which may or may not be the optimal 337 solution. In a BG, for example, the optimal solution is probably to make # chunks = # 338 IO nodes involved, but this author has no access to a BG to figure out how to 339 reliably find the right number. And even then it may or may not be enough. 340 */ 341 if (avg_local_vec_size > max_chunk_size) { 342 /* check if we can just split local z-axis: is that enough? 
*/ 343 zslices = (PetscInt)ceil(vec_size*1.0/(da->p*max_chunk_size))*zslices; 344 if (zslices > da->P) { 345 /* lattice is too large in xy-directions, splitting z only is not enough */ 346 zslices = da->P; 347 yslices= (PetscInt)ceil(vec_size*1.0/(zslices*da->n*max_chunk_size))*yslices; 348 if (yslices > da->N) { 349 /* lattice is too large in x-direction, splitting along z, y is not enough */ 350 yslices = da->N; 351 xslices= (PetscInt)ceil(vec_size*1.0/(zslices*yslices*da->m*max_chunk_size))*xslices; 352 } 353 } 354 dim = 0; 355 if (timestep >= 0) { 356 ++dim; 357 } 358 /* prefer to split z-axis, even down to planar slices */ 359 if (dimension == 3) { 360 chunkDims[dim++] = (hsize_t) da->P/zslices; 361 chunkDims[dim++] = (hsize_t) da->N/yslices; 362 chunkDims[dim++] = (hsize_t) da->M/xslices; 363 } else { 364 /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */ 365 chunkDims[dim++] = (hsize_t) da->N/yslices; 366 chunkDims[dim++] = (hsize_t) da->M/xslices; 367 } 368 chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(double); 369 } else { 370 if (target_size < chunk_size) { 371 /* only change the defaults if target_size < chunk_size */ 372 dim = 0; 373 if (timestep >= 0) { 374 ++dim; 375 } 376 /* prefer to split z-axis, even down to planar slices */ 377 if (dimension == 3) { 378 /* try splitting the z-axis to core-size bits, i.e. 
divide chunk size by # comm_size in z-direction */ 379 if (target_size >= chunk_size/da->p) { 380 /* just make chunks the size of <local_z>x<whole_world_y>x<whole_world_x>x<dof> */ 381 chunkDims[dim] = (hsize_t) ceil(da->P*1.0/da->p); 382 } else { 383 /* oops, just splitting the z-axis is NOT ENOUGH, need to split more; let's be 384 radical and let everyone write all they've got */ 385 chunkDims[dim++] = (hsize_t) ceil(da->P*1.0/da->p); 386 chunkDims[dim++] = (hsize_t) ceil(da->N*1.0/da->n); 387 chunkDims[dim++] = (hsize_t) ceil(da->M*1.0/da->m); 388 } 389 } else { 390 /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */ 391 if (target_size >= chunk_size/da->n) { 392 /* just make chunks the size of <local_z>x<whole_world_y>x<whole_world_x>x<dof> */ 393 chunkDims[dim] = (hsize_t) ceil(da->N*1.0/da->n); 394 } else { 395 /* oops, just splitting the z-axis is NOT ENOUGH, need to split more; let's be 396 radical and let everyone write all they've got */ 397 chunkDims[dim++] = (hsize_t) ceil(da->N*1.0/da->n); 398 chunkDims[dim++] = (hsize_t) ceil(da->M*1.0/da->m); 399 } 400 401 } 402 chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(double); 403 } else { 404 /* precomputed chunks are fine, we don't need to do anything */ 405 } 406 } 407 PetscFunctionReturn(0); 408 } 409 #endif 410 411 #if defined(PETSC_HAVE_HDF5) 412 PetscErrorCode VecView_MPI_HDF5_DA(Vec xin,PetscViewer viewer) 413 { 414 DM dm; 415 DM_DA *da; 416 hid_t filespace; /* file dataspace identifier */ 417 hid_t chunkspace; /* chunk dataset property identifier */ 418 hid_t plist_id; /* property list identifier */ 419 hid_t dset_id; /* dataset identifier */ 420 hid_t memspace; /* memory dataspace identifier */ 421 hid_t file_id; 422 hid_t group; 423 hid_t memscalartype; /* scalar type for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */ 424 hid_t 
filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */ 425 hsize_t dim; 426 hsize_t maxDims[6]={0}, dims[6]={0}, chunkDims[6]={0}, count[6]={0}, offset[6]={0}; /* we depend on these being sane later on */ 427 PetscInt timestep, dimension; 428 const PetscScalar *x; 429 const char *vecname; 430 PetscErrorCode ierr; 431 PetscBool dim2; 432 PetscBool spoutput; 433 434 PetscFunctionBegin; 435 ierr = PetscViewerHDF5OpenGroup(viewer, &file_id, &group);CHKERRQ(ierr); 436 ierr = PetscViewerHDF5GetTimestep(viewer, ×tep);CHKERRQ(ierr); 437 ierr = PetscViewerHDF5GetBaseDimension2(viewer,&dim2);CHKERRQ(ierr); 438 ierr = PetscViewerHDF5GetSPOutput(viewer,&spoutput);CHKERRQ(ierr); 439 440 ierr = VecGetDM(xin,&dm);CHKERRQ(ierr); 441 if (!dm) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA"); 442 da = (DM_DA*)dm->data; 443 ierr = DMGetDimension(dm, &dimension);CHKERRQ(ierr); 444 445 /* Create the dataspace for the dataset. 446 * 447 * dims - holds the current dimensions of the dataset 448 * 449 * maxDims - holds the maximum dimensions of the dataset (unlimited 450 * for the number of time steps with the current dimensions for the 451 * other dimensions; so only additional time steps can be added). 452 * 453 * chunkDims - holds the size of a single time step (required to 454 * permit extending dataset). 
455 */ 456 dim = 0; 457 if (timestep >= 0) { 458 dims[dim] = timestep+1; 459 maxDims[dim] = H5S_UNLIMITED; 460 chunkDims[dim] = 1; 461 ++dim; 462 } 463 if (dimension == 3) { 464 ierr = PetscHDF5IntCast(da->P,dims+dim);CHKERRQ(ierr); 465 maxDims[dim] = dims[dim]; 466 chunkDims[dim] = dims[dim]; 467 ++dim; 468 } 469 if (dimension > 1) { 470 ierr = PetscHDF5IntCast(da->N,dims+dim);CHKERRQ(ierr); 471 maxDims[dim] = dims[dim]; 472 chunkDims[dim] = dims[dim]; 473 ++dim; 474 } 475 ierr = PetscHDF5IntCast(da->M,dims+dim);CHKERRQ(ierr); 476 maxDims[dim] = dims[dim]; 477 chunkDims[dim] = dims[dim]; 478 ++dim; 479 if (da->w > 1 || dim2) { 480 ierr = PetscHDF5IntCast(da->w,dims+dim);CHKERRQ(ierr); 481 maxDims[dim] = dims[dim]; 482 chunkDims[dim] = dims[dim]; 483 ++dim; 484 } 485 #if defined(PETSC_USE_COMPLEX) 486 dims[dim] = 2; 487 maxDims[dim] = dims[dim]; 488 chunkDims[dim] = dims[dim]; 489 ++dim; 490 #endif 491 492 ierr = VecGetHDF5ChunkSize(da, xin, dimension, timestep, chunkDims); CHKERRQ(ierr); 493 494 PetscStackCallHDF5Return(filespace,H5Screate_simple,(dim, dims, maxDims)); 495 496 #if defined(PETSC_USE_REAL_SINGLE) 497 memscalartype = H5T_NATIVE_FLOAT; 498 filescalartype = H5T_NATIVE_FLOAT; 499 #elif defined(PETSC_USE_REAL___FLOAT128) 500 #error "HDF5 output with 128 bit floats not supported." 501 #elif defined(PETSC_USE_REAL___FP16) 502 #error "HDF5 output with 16 bit floats not supported." 
503 #else 504 memscalartype = H5T_NATIVE_DOUBLE; 505 if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT; 506 else filescalartype = H5T_NATIVE_DOUBLE; 507 #endif 508 509 /* Create the dataset with default properties and close filespace */ 510 ierr = PetscObjectGetName((PetscObject)xin,&vecname);CHKERRQ(ierr); 511 if (!H5Lexists(group, vecname, H5P_DEFAULT)) { 512 /* Create chunk */ 513 PetscStackCallHDF5Return(chunkspace,H5Pcreate,(H5P_DATASET_CREATE)); 514 PetscStackCallHDF5(H5Pset_chunk,(chunkspace, dim, chunkDims)); 515 516 #if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800) 517 PetscStackCallHDF5Return(dset_id,H5Dcreate2,(group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT)); 518 #else 519 PetscStackCallHDF5Return(dset_id,H5Dcreate,(group, vecname, filescalartype, filespace, H5P_DEFAULT)); 520 #endif 521 } else { 522 PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT)); 523 PetscStackCallHDF5(H5Dset_extent,(dset_id, dims)); 524 } 525 PetscStackCallHDF5(H5Sclose,(filespace)); 526 527 /* Each process defines a dataset and writes it to the hyperslab in the file */ 528 dim = 0; 529 if (timestep >= 0) { 530 offset[dim] = timestep; 531 ++dim; 532 } 533 if (dimension == 3) {ierr = PetscHDF5IntCast(da->zs,offset + dim++);CHKERRQ(ierr);} 534 if (dimension > 1) {ierr = PetscHDF5IntCast(da->ys,offset + dim++);CHKERRQ(ierr);} 535 ierr = PetscHDF5IntCast(da->xs/da->w,offset + dim++);CHKERRQ(ierr); 536 if (da->w > 1 || dim2) offset[dim++] = 0; 537 #if defined(PETSC_USE_COMPLEX) 538 offset[dim++] = 0; 539 #endif 540 dim = 0; 541 if (timestep >= 0) { 542 count[dim] = 1; 543 ++dim; 544 } 545 if (dimension == 3) {ierr = PetscHDF5IntCast(da->ze - da->zs,count + dim++);CHKERRQ(ierr);} 546 if (dimension > 1) {ierr = PetscHDF5IntCast(da->ye - da->ys,count + dim++);CHKERRQ(ierr);} 547 ierr = PetscHDF5IntCast((da->xe - da->xs)/da->w,count + dim++);CHKERRQ(ierr); 548 if (da->w > 1 || dim2) {ierr = 
PetscHDF5IntCast(da->w,count + dim++);CHKERRQ(ierr);}
#if defined(PETSC_USE_COMPLEX)
  count[dim++] = 2;
#endif
  PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
  PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
  PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  /* Create property list for collective dataset write */
  PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER));
#if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
  PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE));
#endif
  /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

  ierr = VecGetArrayRead(xin, &x);CHKERRQ(ierr);
  PetscStackCallHDF5(H5Dwrite,(dset_id, memscalartype, memspace, filespace, plist_id, x));
  PetscStackCallHDF5(H5Fflush,(file_id, H5F_SCOPE_GLOBAL));
  ierr = VecRestoreArrayRead(xin, &x);CHKERRQ(ierr);

  /* Close/release resources */
  if (group != file_id) {
    PetscStackCallHDF5(H5Gclose,(group));
  }
  PetscStackCallHDF5(H5Pclose,(plist_id));
  PetscStackCallHDF5(H5Sclose,(filespace));
  PetscStackCallHDF5(H5Sclose,(memspace));
  PetscStackCallHDF5(H5Dclose,(dset_id));
  ierr = PetscInfo1(xin,"Wrote Vec object with name %s\n",vecname);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif

extern PetscErrorCode VecView_MPI_Draw_DA1d(Vec,PetscViewer);

#if defined(PETSC_HAVE_MPIIO)
/*
   DMDAArrayMPIIO - reads or writes (write == PETSC_TRUE) a DMDA vector with
   MPI-IO, using a Fortran-ordered subarray type so the file holds the natural
   (global) ordering regardless of the parallel decomposition.
*/
static PetscErrorCode DMDAArrayMPIIO(DM da,PetscViewer viewer,Vec xin,PetscBool write)
{
  PetscErrorCode    ierr;
  MPI_File          mfdes;
  PetscMPIInt       gsizes[4],lsizes[4],lstarts[4],asiz,dof;
  MPI_Datatype      view;
  const PetscScalar *array;
  MPI_Offset        off;
  MPI_Aint          ub,ul;
  PetscInt          type,rows,vecrows,tr[2];
  DM_DA             *dd = (DM_DA*)da->data;
  PetscBool         skipheader;

  PetscFunctionBegin;
  ierr = VecGetSize(xin,&vecrows);CHKERRQ(ierr);
  ierr = PetscViewerBinaryGetSkipHeader(viewer,&skipheader);CHKERRQ(ierr);
  if (!write) {
    /* Read vector header. */
    if (!skipheader) {
      ierr = PetscViewerBinaryRead(viewer,tr,2,NULL,PETSC_INT);CHKERRQ(ierr);
      type = tr[0];
      rows = tr[1];
      if (type != VEC_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Not vector next in file");
      if (rows != vecrows) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Vector in file not same size as DMDA vector");
    }
  } else {
    /* Write vector header: classid followed by global length */
    tr[0] = VEC_FILE_CLASSID;
    tr[1] = vecrows;
    if (!skipheader) {
      ierr = PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
  }

  /* build the global/local/start size triples; dimension 0 is the dof block,
     dimensions 1..3 are x,y,z (the x extents are stored scaled by dof) */
  ierr = PetscMPIIntCast(dd->w,&dof);CHKERRQ(ierr);
  gsizes[0] = dof;
  ierr = PetscMPIIntCast(dd->M,gsizes+1);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->N,gsizes+2);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->P,gsizes+3);CHKERRQ(ierr);
  lsizes[0] = dof;
  ierr = PetscMPIIntCast((dd->xe-dd->xs)/dof,lsizes+1);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->ye-dd->ys,lsizes+2);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->ze-dd->zs,lsizes+3);CHKERRQ(ierr);
  lstarts[0] = 0;
  ierr = PetscMPIIntCast(dd->xs/dof,lstarts+1);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->ys,lstarts+2);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(dd->zs,lstarts+3);CHKERRQ(ierr);
  ierr = MPI_Type_create_subarray(da->dim+1,gsizes,lsizes,lstarts,MPI_ORDER_FORTRAN,MPIU_SCALAR,&view);CHKERRQ(ierr);
  ierr = MPI_Type_commit(&view);CHKERRQ(ierr);

  ierr = PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);CHKERRQ(ierr);
  ierr = PetscViewerBinaryGetMPIIOOffset(viewer,&off);CHKERRQ(ierr);
  ierr = MPI_File_set_view(mfdes,off,MPIU_SCALAR,view,(char*)"native",MPI_INFO_NULL);CHKERRQ(ierr);
  ierr = VecGetArrayRead(xin,&array);CHKERRQ(ierr);
  /* number of local scalars; unused trailing dimensions have lsizes <= 0 */
  asiz = lsizes[1]*(lsizes[2] > 0 ?
lsizes[3] : 1)*dof;
  if (write) {
    ierr = MPIU_File_write_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE);CHKERRQ(ierr);
  } else {
    ierr = MPIU_File_read_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE);CHKERRQ(ierr);
  }
  /* NOTE(review): per MPI_Type_get_extent, ul receives the lower bound and ub the
     extent; the viewer offset is advanced by the full extent of the subarray type */
  ierr = MPI_Type_get_extent(view,&ul,&ub);CHKERRQ(ierr);
  ierr = PetscViewerBinaryAddMPIIOOffset(viewer,ub);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(xin,&array);CHKERRQ(ierr);
  ierr = MPI_Type_free(&view);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif

/*
   VecView_MPI_DA - dispatches viewing of a DMDA vector to the appropriate
   backend: draw (1d/2d graphics), VTK, HDF5, MPI-IO binary, or the default
   viewer applied to a natural-ordering copy of the vector.
*/
PetscErrorCode VecView_MPI_DA(Vec xin,PetscViewer viewer)
{
  DM                da;
  PetscErrorCode    ierr;
  PetscInt          dim;
  Vec               natural;
  PetscBool         isdraw,isvtk;
#if defined(PETSC_HAVE_HDF5)
  PetscBool         ishdf5;
#endif
  const char        *prefix,*name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = VecGetDM(xin,&da);CHKERRQ(ierr);
  if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);CHKERRQ(ierr);
#if defined(PETSC_HAVE_HDF5)
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);CHKERRQ(ierr);
#endif
  if (isdraw) {
    ierr = DMDAGetInfo(da,&dim,0,0,0,0,0,0,0,0,0,0,0,0);CHKERRQ(ierr);
    if (dim == 1) {
      ierr = VecView_MPI_Draw_DA1d(xin,viewer);CHKERRQ(ierr);
    } else if (dim == 2) {
      ierr = VecView_MPI_Draw_DA2d(xin,viewer);CHKERRQ(ierr);
    } else SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot graphically view vector associated with this dimensional DMDA %D",dim);
  } else if (isvtk) {           /* Duplicate the Vec because the VTK viewer writes asynchronously later */
    Vec Y;
    ierr = VecDuplicate(xin,&Y);CHKERRQ(ierr);
    if (((PetscObject)xin)->name) {
      /* If xin was named, copy the name over to Y. The duplicate names are safe because nobody else will ever see Y. */
      ierr = PetscObjectSetName((PetscObject)Y,((PetscObject)xin)->name);CHKERRQ(ierr);
    }
    ierr = VecCopy(xin,Y);CHKERRQ(ierr);
    ierr = PetscViewerVTKAddField(viewer,(PetscObject)da,DMDAVTKWriteAll,PETSC_VTK_POINT_FIELD,(PetscObject)Y);CHKERRQ(ierr);
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    ierr = VecView_MPI_HDF5_DA(xin,viewer);CHKERRQ(ierr);
#endif
  } else {
#if defined(PETSC_HAVE_MPIIO)
    PetscBool isbinary,isMPIIO;

    ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
    if (isbinary) {
      ierr = PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);CHKERRQ(ierr);
      if (isMPIIO) {
        ierr = DMDAArrayMPIIO(da,viewer,xin,PETSC_TRUE);CHKERRQ(ierr);
        PetscFunctionReturn(0);
      }
    }
#endif

    /* call viewer on natural ordering */
    ierr = PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix);CHKERRQ(ierr);
    ierr = DMDACreateNaturalVector(da,&natural);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)natural,prefix);CHKERRQ(ierr);
    ierr = DMDAGlobalToNaturalBegin(da,xin,INSERT_VALUES,natural);CHKERRQ(ierr);
    ierr = DMDAGlobalToNaturalEnd(da,xin,INSERT_VALUES,natural);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject)xin,&name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject)natural,name);CHKERRQ(ierr);

    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      /* temporarily remove viewer format so it won't trigger in the VecView() */
      ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_DEFAULT);CHKERRQ(ierr);
    }

    ierr = VecView(natural,viewer);CHKERRQ(ierr);

    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      MPI_Comm   comm;
      FILE       *info;
      const char *fieldname;
      char       fieldbuf[256];
      PetscInt   dim,ni,nj,nk,pi,pj,pk,dof,n;

      /* set the viewer format back into the viewer */
      ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
      ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
      ierr = PetscViewerBinaryGetInfoPointer(viewer,&info);CHKERRQ(ierr);
      ierr = DMDAGetInfo(da,&dim,&ni,&nj,&nk,&pi,&pj,&pk,&dof,0,0,0,0,0);CHKERRQ(ierr);
      /* emit MATLAB code into the .info file that reshapes the flat binary data */
      ierr = PetscFPrintf(comm,info,"#--- begin code written by PetscViewerBinary for MATLAB format ---#\n");CHKERRQ(ierr);
      ierr = PetscFPrintf(comm,info,"#$$ tmp = PetscBinaryRead(fd); \n");CHKERRQ(ierr);
      if (dim == 1) { ierr = PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d);\n",dof,ni);CHKERRQ(ierr); }
      if (dim == 2) { ierr = PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d,%d);\n",dof,ni,nj);CHKERRQ(ierr); }
      if (dim == 3) { ierr = PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d,%d,%d);\n",dof,ni,nj,nk);CHKERRQ(ierr); }

      for (n=0; n<dof; n++) {
        ierr = DMDAGetFieldName(da,n,&fieldname);CHKERRQ(ierr);
        if (!fieldname || !fieldname[0]) {
          ierr = PetscSNPrintf(fieldbuf,sizeof fieldbuf,"field%D",n);CHKERRQ(ierr);
          fieldname = fieldbuf;
        }
        if (dim == 1) { ierr = PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%d,:))';\n",name,fieldname,n+1);CHKERRQ(ierr); }
        if (dim == 2) { ierr = PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%d,:,:))';\n",name,fieldname,n+1);CHKERRQ(ierr); }
        if (dim == 3) { ierr = PetscFPrintf(comm,info,"#$$ Set.%s.%s = permute(squeeze(tmp(%d,:,:,:)),[2 1 3]);\n",name,fieldname,n+1);CHKERRQ(ierr);}
      }
      ierr = PetscFPrintf(comm,info,"#$$ clear tmp; \n");CHKERRQ(ierr);
      ierr = PetscFPrintf(comm,info,"#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n");CHKERRQ(ierr);
    }

    ierr = VecDestroy(&natural);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_HDF5)
PetscErrorCode VecLoad_HDF5_DA(Vec xin, PetscViewer viewer)
{
  DM             da;
  PetscErrorCode ierr;
  hsize_t        dim,rdim;
  hsize_t
dims[6]={0},count[6]={0},offset[6]={0}; 767 PetscInt dimension,timestep,dofInd; 768 PetscScalar *x; 769 const char *vecname; 770 hid_t filespace; /* file dataspace identifier */ 771 hid_t plist_id; /* property list identifier */ 772 hid_t dset_id; /* dataset identifier */ 773 hid_t memspace; /* memory dataspace identifier */ 774 hid_t file_id,group; 775 hid_t scalartype; /* scalar type (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */ 776 DM_DA *dd; 777 PetscBool dim2 = PETSC_FALSE; 778 779 PetscFunctionBegin; 780 #if defined(PETSC_USE_REAL_SINGLE) 781 scalartype = H5T_NATIVE_FLOAT; 782 #elif defined(PETSC_USE_REAL___FLOAT128) 783 #error "HDF5 output with 128 bit floats not supported." 784 #elif defined(PETSC_USE_REAL___FP16) 785 #error "HDF5 output with 16 bit floats not supported." 786 #else 787 scalartype = H5T_NATIVE_DOUBLE; 788 #endif 789 790 ierr = PetscViewerHDF5OpenGroup(viewer, &file_id, &group);CHKERRQ(ierr); 791 ierr = PetscViewerHDF5GetTimestep(viewer, ×tep);CHKERRQ(ierr); 792 ierr = PetscObjectGetName((PetscObject)xin,&vecname);CHKERRQ(ierr); 793 ierr = VecGetDM(xin,&da);CHKERRQ(ierr); 794 dd = (DM_DA*)da->data; 795 ierr = DMGetDimension(da, &dimension);CHKERRQ(ierr); 796 797 /* Open dataset */ 798 #if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800) 799 PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT)); 800 #else 801 PetscStackCallHDF5Return(dset_id,H5Dopen,(group, vecname)); 802 #endif 803 804 /* Retrieve the dataspace for the dataset */ 805 PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id)); 806 PetscStackCallHDF5Return(rdim,H5Sget_simple_extent_dims,(filespace, dims, NULL)); 807 808 /* Expected dimension for holding the dof's */ 809 #if defined(PETSC_USE_COMPLEX) 810 dofInd = rdim-2; 811 #else 812 dofInd = rdim-1; 813 #endif 814 815 /* The expected number of dimensions, assuming basedimension2 = false */ 816 dim = dimension; 817 if (dd->w > 1) ++dim; 818 if (timestep >= 0) ++dim; 819 #if 
defined(PETSC_USE_COMPLEX) 820 ++dim; 821 #endif 822 823 /* In this case the input dataset have one extra, unexpected dimension. */ 824 if (rdim == dim+1) { 825 /* In this case the block size unity */ 826 if (dd->w == 1 && dims[dofInd] == 1) dim2 = PETSC_TRUE; 827 828 /* Special error message for the case where dof does not match the input file */ 829 else if (dd->w != (PetscInt) dims[dofInd]) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Number of dofs in file is %D, not %D as expected",(PetscInt)dims[dofInd],dd->w); 830 831 /* Other cases where rdim != dim cannot be handled currently */ 832 } else if (rdim != dim) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file is %d, not %d as expected with dof = %D",rdim,dim,dd->w); 833 834 /* Set up the hyperslab size */ 835 dim = 0; 836 if (timestep >= 0) { 837 offset[dim] = timestep; 838 count[dim] = 1; 839 ++dim; 840 } 841 if (dimension == 3) { 842 ierr = PetscHDF5IntCast(dd->zs,offset + dim);CHKERRQ(ierr); 843 ierr = PetscHDF5IntCast(dd->ze - dd->zs,count + dim);CHKERRQ(ierr); 844 ++dim; 845 } 846 if (dimension > 1) { 847 ierr = PetscHDF5IntCast(dd->ys,offset + dim);CHKERRQ(ierr); 848 ierr = PetscHDF5IntCast(dd->ye - dd->ys,count + dim);CHKERRQ(ierr); 849 ++dim; 850 } 851 ierr = PetscHDF5IntCast(dd->xs/dd->w,offset + dim);CHKERRQ(ierr); 852 ierr = PetscHDF5IntCast((dd->xe - dd->xs)/dd->w,count + dim);CHKERRQ(ierr); 853 ++dim; 854 if (dd->w > 1 || dim2) { 855 offset[dim] = 0; 856 ierr = PetscHDF5IntCast(dd->w,count + dim);CHKERRQ(ierr); 857 ++dim; 858 } 859 #if defined(PETSC_USE_COMPLEX) 860 offset[dim] = 0; 861 count[dim] = 2; 862 ++dim; 863 #endif 864 865 /* Create the memory and filespace */ 866 PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL)); 867 PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL)); 868 869 /* Create property list for collective dataset write */ 870 
PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER)); 871 #if defined(PETSC_HAVE_H5PSET_FAPL_MPIO) 872 PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE)); 873 #endif 874 /* To read dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */ 875 876 ierr = VecGetArray(xin, &x);CHKERRQ(ierr); 877 PetscStackCallHDF5(H5Dread,(dset_id, scalartype, memspace, filespace, plist_id, x)); 878 ierr = VecRestoreArray(xin, &x);CHKERRQ(ierr); 879 880 /* Close/release resources */ 881 if (group != file_id) { 882 PetscStackCallHDF5(H5Gclose,(group)); 883 } 884 PetscStackCallHDF5(H5Pclose,(plist_id)); 885 PetscStackCallHDF5(H5Sclose,(filespace)); 886 PetscStackCallHDF5(H5Sclose,(memspace)); 887 PetscStackCallHDF5(H5Dclose,(dset_id)); 888 PetscFunctionReturn(0); 889 } 890 #endif 891 892 PetscErrorCode VecLoad_Binary_DA(Vec xin, PetscViewer viewer) 893 { 894 DM da; 895 PetscErrorCode ierr; 896 Vec natural; 897 const char *prefix; 898 PetscInt bs; 899 PetscBool flag; 900 DM_DA *dd; 901 #if defined(PETSC_HAVE_MPIIO) 902 PetscBool isMPIIO; 903 #endif 904 905 PetscFunctionBegin; 906 ierr = VecGetDM(xin,&da);CHKERRQ(ierr); 907 dd = (DM_DA*)da->data; 908 #if defined(PETSC_HAVE_MPIIO) 909 ierr = PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);CHKERRQ(ierr); 910 if (isMPIIO) { 911 ierr = DMDAArrayMPIIO(da,viewer,xin,PETSC_FALSE);CHKERRQ(ierr); 912 PetscFunctionReturn(0); 913 } 914 #endif 915 916 ierr = PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix);CHKERRQ(ierr); 917 ierr = DMDACreateNaturalVector(da,&natural);CHKERRQ(ierr); 918 ierr = PetscObjectSetName((PetscObject)natural,((PetscObject)xin)->name);CHKERRQ(ierr); 919 ierr = PetscObjectSetOptionsPrefix((PetscObject)natural,prefix);CHKERRQ(ierr); 920 ierr = VecLoad(natural,viewer);CHKERRQ(ierr); 921 ierr = DMDANaturalToGlobalBegin(da,natural,INSERT_VALUES,xin);CHKERRQ(ierr); 922 ierr = DMDANaturalToGlobalEnd(da,natural,INSERT_VALUES,xin);CHKERRQ(ierr); 923 ierr = 
VecDestroy(&natural);CHKERRQ(ierr); 924 ierr = PetscInfo(xin,"Loading vector from natural ordering into DMDA\n");CHKERRQ(ierr); 925 ierr = PetscOptionsGetInt(NULL,((PetscObject)xin)->prefix,"-vecload_block_size",&bs,&flag);CHKERRQ(ierr); 926 if (flag && bs != dd->w) { 927 ierr = PetscInfo2(xin,"Block size in file %D not equal to DMDA's dof %D\n",bs,dd->w);CHKERRQ(ierr); 928 } 929 PetscFunctionReturn(0); 930 } 931 932 PetscErrorCode VecLoad_Default_DA(Vec xin, PetscViewer viewer) 933 { 934 PetscErrorCode ierr; 935 DM da; 936 PetscBool isbinary; 937 #if defined(PETSC_HAVE_HDF5) 938 PetscBool ishdf5; 939 #endif 940 941 PetscFunctionBegin; 942 ierr = VecGetDM(xin,&da);CHKERRQ(ierr); 943 if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA"); 944 945 #if defined(PETSC_HAVE_HDF5) 946 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);CHKERRQ(ierr); 947 #endif 948 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 949 950 if (isbinary) { 951 ierr = VecLoad_Binary_DA(xin,viewer);CHKERRQ(ierr); 952 #if defined(PETSC_HAVE_HDF5) 953 } else if (ishdf5) { 954 ierr = VecLoad_HDF5_DA(xin,viewer);CHKERRQ(ierr); 955 #endif 956 } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for vector loading", ((PetscObject)viewer)->type_name); 957 PetscFunctionReturn(0); 958 } 959